Compare commits

..

3 Commits

Author   SHA1         Message                               Date
platane  7b5258d549   .                                     2023-01-09 15:51:09 +01:00
platane  f3820e8edc   .                                     2023-01-09 08:13:40 +01:00
platane  d9d2fa1b52   add scripts to output usage stats 📈  2023-01-09 08:07:43 +01:00
28 changed files with 2017 additions and 1131 deletions

View File

@@ -7,21 +7,21 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
cache: yarn
node-version: 16
- run: yarn install --frozen-lockfile
- run: npm run type
- run: npm run lint
- run: npm run test --ci
- run: yarn type
- run: yarn lint
- run: yarn test --ci
test-action:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: update action.yml to use image from local Dockerfile
run: |
@@ -63,7 +63,7 @@ jobs:
- name: build svg-only action
run: |
npm run build:action
yarn build:action
rm -r svg-only/dist
mv packages/action/dist svg-only/dist
@@ -92,18 +92,18 @@ jobs:
deploy-ghpages:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
cache: yarn
node-version: 16
- run: yarn install --frozen-lockfile
- run: npm run build:demo
- run: yarn build:demo
env:
GITHUB_USER_CONTRIBUTION_API_ENDPOINT: https://snk-one.vercel.app/api/github-user-contribution/
- uses: crazy-max/ghaction-github-pages@v3.1.0
- uses: crazy-max/ghaction-github-pages@v2.6.0
if: success() && github.ref == 'refs/heads/main'
with:
target_branch: gh-pages

View File

@@ -21,19 +21,19 @@ jobs:
permissions:
contents: write
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-qemu-action@v1
- uses: docker/setup-buildx-action@v2
- uses: docker/setup-buildx-action@v1
- uses: docker/login-action@v2
- uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: build and publish the docker image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v2
id: docker-build
with:
push: true
@@ -45,7 +45,7 @@ jobs:
run: |
sed -i "s/image: .*/image: docker:\/\/platane\/snk@${{ steps.docker-build.outputs.digest }}/" action.yml
- uses: actions/setup-node@v3
- uses: actions/setup-node@v2
with:
cache: yarn
node-version: 16
@@ -53,7 +53,7 @@ jobs:
- name: build svg-only action
run: |
yarn install --frozen-lockfile
npm run build:action
yarn build:action
rm -r svg-only/dist
mv packages/action/dist svg-only/dist
@@ -81,7 +81,7 @@ jobs:
echo "prerelease=true" >> $GITHUB_OUTPUT
fi
- uses: ncipollo/release-action@v1.12.0
- uses: ncipollo/release-action@v1.11.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

View File

@@ -8,20 +8,7 @@
Generates a snake game from a github user contributions graph
<picture>
<source
media="(prefers-color-scheme: dark)"
srcset="https://raw.githubusercontent.com/platane/snk/output/github-contribution-grid-snake-dark.svg"
/>
<source
media="(prefers-color-scheme: light)"
srcset="https://raw.githubusercontent.com/platane/snk/output/github-contribution-grid-snake.svg"
/>
<img
alt="github contribution grid snake animation"
src="https://raw.githubusercontent.com/platane/snk/output/github-contribution-grid-snake.svg"
/>
</picture>
![](https://github.com/Platane/snk/raw/output/github-contribution-grid-snake.svg)
Pull a github user's contribution graph.
Make it a snake Game, generate a snake path where the cells get eaten in an orderly fashion.
@@ -62,14 +49,11 @@ If you are only interested in generating a svg, consider using this faster actio
**dark mode**
For **dark mode** support on github, use this [special syntax](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#specifying-the-theme-an-image-is-shown-to) in your readme.
For **dark mode** support on github, use this [special syntax](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#specifying-the-theme-an-image-is-shown-to=) in your readme.
```html
<picture>
<source media="(prefers-color-scheme: dark)" srcset="github-snake-dark.svg" />
<source media="(prefers-color-scheme: light)" srcset="github-snake.svg" />
<img alt="github-snake" src="github-snake.svg" />
</picture>
```md
![GitHub Snake Light](github-snake.svg#gh-light-mode-only)
![GitHub Snake dark](github-snake-dark.svg#gh-dark-mode-only)
```
**interactive demo**

View File

@@ -4,7 +4,7 @@ author: "platane"
runs:
using: docker
image: docker://platane/snk@sha256:2115ffeb538e355aa155630e6e32b6d77ea2345fa8584645c41ace7f5ad667fc
image: docker://platane/snk@sha256:dcb351bdad223f2a2161fa5d6e3c9102e6ebe9fbde99a10fa3bf443d69f61a0f
inputs:
github_user_name:

View File

@@ -1,17 +1,17 @@
{
"name": "snk",
"description": "Generates a snake game from a github user contributions grid",
"version": "2.3.0",
"version": "2.2.0",
"private": true,
"repository": "github:platane/snk",
"devDependencies": {
"@sucrase/jest-plugin": "3.0.0",
"@types/jest": "29.5.3",
"@types/node": "16.18.38",
"jest": "29.6.1",
"prettier": "2.8.8",
"sucrase": "3.33.0",
"typescript": "5.1.6"
"@types/jest": "29.2.1",
"@types/node": "16.11.7",
"jest": "29.2.2",
"prettier": "2.7.1",
"sucrase": "3.28.0",
"typescript": "4.8.4"
},
"workspaces": [
"packages/**"
@@ -27,10 +27,10 @@
},
"scripts": {
"type": "tsc --noEmit",
"lint": "prettier -c '**/*.{ts,js,json,md,yml,yaml}' '!packages/*/dist/**' '!svg-only/dist/**'",
"test": "jest --verbose --no-cache",
"dev:demo": "( cd packages/demo ; npm run dev )",
"build:demo": "( cd packages/demo ; npm run build )",
"build:action": "( cd packages/action ; npm run build )"
"lint": "yarn prettier -c '**/*.{ts,js,json,md,yml,yaml}' '!packages/*/dist/**' '!svg-only/dist/**'",
"test": "jest --verbose --passWithNoTests --no-cache",
"dev:demo": "( cd packages/demo ; yarn dev )",
"build:demo": "( cd packages/demo ; yarn build )",
"build:action": "( cd packages/action ; yarn build )"
}
}

View File

@@ -1,5 +1,81 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`should parse /out.svg {"color_snake":"yellow"} 1`] = `
{
"animationOptions": {
"frameDuration": 100,
"step": 1,
},
"drawOptions": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#ebedf0",
"#9be9a8",
"#40c463",
"#30a14e",
"#216e39",
],
"colorEmpty": "#ebedf0",
"colorSnake": "yellow",
"dark": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#161b22",
"#01311f",
"#034525",
"#0f6d31",
"#00c647",
],
"colorEmpty": "#161b22",
"colorSnake": "purple",
},
"sizeCell": 16,
"sizeDot": 12,
"sizeDotBorderRadius": 2,
},
"filename": "/out.svg",
"format": "svg",
}
`;
exports[`should parse /out.svg?.gif.svg?color_snake=orange 1`] = `
{
"animationOptions": {
"frameDuration": 100,
"step": 1,
},
"drawOptions": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#ebedf0",
"#9be9a8",
"#40c463",
"#30a14e",
"#216e39",
],
"colorEmpty": "#ebedf0",
"colorSnake": "orange",
"dark": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#161b22",
"#01311f",
"#034525",
"#0f6d31",
"#00c647",
],
"colorEmpty": "#161b22",
"colorSnake": "purple",
},
"sizeCell": 16,
"sizeDot": 12,
"sizeDotBorderRadius": 2,
},
"filename": "/out.svg?.gif.svg",
"format": "svg",
}
`;
exports[`should parse /out.svg?{"color_snake":"yellow","color_dots":["#000","#111","#222","#333","#444"]} 1`] = `
{
"animationOptions": {
@@ -72,7 +148,6 @@ exports[`should parse /out.svg?color_snake=orange&color_dots=#000,#111,#222,#333
"colorEmpty": "#000",
"colorSnake": "orange",
"dark": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#a00",
"#a11",
@@ -81,7 +156,6 @@ exports[`should parse /out.svg?color_snake=orange&color_dots=#000,#111,#222,#333
"#a44",
],
"colorEmpty": "#a00",
"colorSnake": "orange",
},
"sizeCell": 16,
"sizeDot": 12,
@@ -109,7 +183,18 @@ exports[`should parse path/to/out.gif 1`] = `
],
"colorEmpty": "#ebedf0",
"colorSnake": "purple",
"dark": undefined,
"dark": {
"colorDotBorder": "#1b1f230a",
"colorDots": [
"#161b22",
"#01311f",
"#034525",
"#0f6d31",
"#00c647",
],
"colorEmpty": "#161b22",
"colorSnake": "purple",
},
"sizeCell": 16,
"sizeDot": 12,
"sizeDotBorderRadius": 2,

View File

@@ -1,58 +1,17 @@
import { parseEntry } from "../outputsOptions";
it("should parse options as json", () => {
expect(
parseEntry(`/out.svg {"color_snake":"yellow"}`)?.drawOptions
).toHaveProperty("colorSnake", "yellow");
expect(
parseEntry(`/out.svg?{"color_snake":"yellow"}`)?.drawOptions
).toHaveProperty("colorSnake", "yellow");
expect(
parseEntry(`/out.svg?{"color_dots":["#000","#111","#222","#333","#444"]}`)
?.drawOptions.colorDots
).toEqual(["#000", "#111", "#222", "#333", "#444"]);
});
it("should parse options as searchparams", () => {
expect(parseEntry(`/out.svg?color_snake=yellow`)?.drawOptions).toHaveProperty(
"colorSnake",
"yellow"
);
expect(
parseEntry(`/out.svg?color_dots=#000,#111,#222,#333,#444`)?.drawOptions
.colorDots
).toEqual(["#000", "#111", "#222", "#333", "#444"]);
});
it("should parse filename", () => {
expect(parseEntry(`/a/b/c.svg?{"color_snake":"yellow"}`)).toHaveProperty(
"filename",
"/a/b/c.svg"
);
expect(
parseEntry(`/a/b/out.svg?.gif.svg?{"color_snake":"yellow"}`)
).toHaveProperty("filename", "/a/b/out.svg?.gif.svg");
expect(
parseEntry(`/a/b/{[-1].svg?.gif.svg?{"color_snake":"yellow"}`)
).toHaveProperty("filename", "/a/b/{[-1].svg?.gif.svg");
});
[
// default
"path/to/out.gif",
// overwrite colors (search params)
"/out.svg?color_snake=orange&color_dots=#000,#111,#222,#333,#444",
// overwrite colors (json)
`/out.svg?{"color_snake":"yellow","color_dots":["#000","#111","#222","#333","#444"]}`,
// overwrite dark colors
`/out.svg {"color_snake":"yellow"}`,
"/out.svg?color_snake=orange&color_dots=#000,#111,#222,#333,#444&dark_color_dots=#a00,#a11,#a22,#a33,#a44",
"/out.svg?.gif.svg?color_snake=orange",
].forEach((entry) =>
it(`should parse ${entry}`, () => {
expect(parseEntry(entry)).toMatchSnapshot();

View File

@@ -32,7 +32,6 @@ export const parseEntry = (entry: string) => {
sizeCell: 16,
sizeDot: 12,
...palettes["default"],
dark: palettes["default"].dark && { ...palettes["default"].dark },
};
const animationOptions: AnimationOptions = { step: 1, frameDuration: 100 };
@@ -44,14 +43,6 @@ export const parseEntry = (entry: string) => {
}
}
{
const dark_palette = palettes[sp.get("dark_palette")!];
if (dark_palette) {
const clone = { ...dark_palette, dark: undefined };
drawOptions.dark = clone;
}
}
if (sp.has("color_snake")) drawOptions.colorSnake = sp.get("color_snake")!;
if (sp.has("color_dots")) {
const colors = sp.get("color_dots")!.split(/[,;]/);
@@ -65,8 +56,6 @@ export const parseEntry = (entry: string) => {
if (sp.has("dark_color_dots")) {
const colors = sp.get("dark_color_dots")!.split(/[,;]/);
drawOptions.dark = {
colorDotBorder: drawOptions.colorDotBorder,
colorSnake: drawOptions.colorSnake,
...drawOptions.dark,
colorDots: colors,
colorEmpty: colors[0],

View File

@@ -10,7 +10,7 @@
"@snk/types": "1.0.0"
},
"devDependencies": {
"@vercel/ncc": "0.36.1"
"@vercel/ncc": "0.34.0"
},
"scripts": {
"build": "ncc build --external canvas --external gifsicle --out dist ./index.ts",

View File

@@ -23,5 +23,8 @@ export const basePalettes: Record<
// aliases
export const palettes = { ...basePalettes };
palettes["github"] = palettes["github-light"];
palettes["github"] = {
...palettes["github-light"],
dark: { ...palettes["github-dark"] },
};
palettes["default"] = palettes["github"];

View File

@@ -10,14 +10,14 @@
"@snk/types": "1.0.0"
},
"devDependencies": {
"@types/dat.gui": "0.7.10",
"@types/dat.gui": "0.7.7",
"dat.gui": "0.7.9",
"html-webpack-plugin": "5.5.3",
"ts-loader": "9.4.4",
"html-webpack-plugin": "5.5.0",
"ts-loader": "9.4.1",
"ts-node": "10.9.1",
"webpack": "5.88.1",
"webpack-cli": "5.1.4",
"webpack-dev-server": "4.15.1"
"webpack": "5.74.0",
"webpack-cli": "4.10.0",
"webpack-dev-server": "4.11.1"
},
"scripts": {
"build": "webpack",

View File

@@ -12,7 +12,7 @@
"devDependencies": {
"@types/gifsicle": "5.2.0",
"@types/tmp": "0.2.3",
"@vercel/ncc": "0.36.1"
"@vercel/ncc": "0.34.0"
},
"scripts": {
"benchmark": "ncc run __tests__/benchmark.ts --quiet"

View File

@@ -3,6 +3,6 @@
"version": "1.0.0",
"dependencies": {
"@snk/github-user-contribution": "1.0.0",
"@vercel/node": "2.15.5"
"@vercel/node": "2.6.1"
}
}

View File

@@ -39,36 +39,40 @@ export const getGithubUserContribution = async (
};
const parseUserPage = (content: string) => {
// take roughly the table block
// take roughly the svg block
const block = content
.split(`aria-describedby="contribution-graph-description"`)[1]
.split("<tbody>")[1]
.split("</tbody>")[0];
.split(`class="js-calendar-graph-svg"`)[1]
.split("</svg>")[0];
  const cells = block.split("</tr>").flatMap((inside, y) =>
    inside.split("</td>").flatMap((m) => {
      const date = m.match(/data-date="([^"]+)"/)?.[1];
      const literalLevel = m.match(/data-level="([^"]+)"/)?.[1];
      const literalX = m.match(/data-ix="([^"]+)"/)?.[1];
      const literalCount = m.match(/(No|\d+) contributions? on/)?.[1];
      if (date && literalLevel && literalX && literalCount)
        return [
          {
            x: +literalX,
            y,
            date,
            count: +literalCount,
            level: +literalLevel,
          },
        ];
      return [];
    })
  );
  let x = 0;
  let lastYAttribute = 0;
  const rects = Array.from(block.matchAll(/<rect[^>]*>[^<]*<\/rect>/g)).map(
    ([m]) => {
      const date = m.match(/data-date="([^"]+)"/)![1];
      const level = +m.match(/data-level="([^"]+)"/)![1];
      const yAttribute = +m.match(/y="([^"]+)"/)![1];
      const literalCount = m.match(/(No|\d+) contributions? on/)![1];
      const count = literalCount === "No" ? 0 : +literalCount;
      if (lastYAttribute > yAttribute) x++;
      lastYAttribute = yAttribute;
      return { date, count, level, x, yAttribute };
    }
  );
  const yAttributes = Array.from(
    new Set(rects.map((c) => c.yAttribute)).keys()
  ).sort();
  const cells = rects.map(({ yAttribute, ...c }) => ({
    y: yAttributes.indexOf(yAttribute),
    ...c,
  }));
return cells;
};
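
To make the row computation in the rect-based parser easier to follow: each `<rect>` carries a pixel `y` attribute, the distinct values are sorted, and a cell's row index is the position of its `y` attribute in that sorted list, while `x` increments whenever the `y` attribute wraps back to a smaller value (a new week column). A small sketch with made-up attribute values:

```ts
// Illustrative only: the yAttribute values below are made up, not real GitHub markup.
// x is shown already computed (it increments when yAttribute wraps to a smaller value).
const rects = [
  { date: "2023-01-01", count: 0, level: 0, x: 0, yAttribute: 0 },
  { date: "2023-01-02", count: 3, level: 1, x: 0, yAttribute: 13 },
  { date: "2023-01-03", count: 7, level: 2, x: 0, yAttribute: 26 },
  { date: "2023-01-08", count: 1, level: 1, x: 1, yAttribute: 0 },
];

// Same mapping as in the parser above: the distinct y attributes define the rows.
const yAttributes = Array.from(
  new Set(rects.map((c) => c.yAttribute)).keys()
).sort();
const cells = rects.map(({ yAttribute, ...c }) => ({
  y: yAttributes.indexOf(yAttribute),
  ...c,
}));

console.log(cells.map((c) => [c.x, c.y])); // [ [0,0], [0,1], [0,2], [1,0] ]
```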

View File

@@ -2,9 +2,10 @@
"name": "@snk/github-user-contribution",
"version": "1.0.0",
"dependencies": {
"node-fetch": "2.6.12"
"cheerio": "1.0.0-rc.10",
"node-fetch": "2.6.7"
},
"devDependencies": {
"@types/node-fetch": "2.6.2"
"@types/node-fetch": "2.6.1"
}
}

packages/usage-stats/.gitignore (new file, 2 lines)
View File

@@ -0,0 +1,2 @@
.env
cache

View File

@@ -0,0 +1,53 @@
import { Octokit } from "octokit";
import { httpGet } from "./httpGet";
require("dotenv").config();
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
export const getLastRunInfo = async (repo_: string) => {
const [owner, repo] = repo_.split("/");
try {
const {
data: { workflow_runs },
} = await octokit.request(
"GET /repos/{owner}/{repo}/actions/runs{?actor,branch,event,status,per_page,page,created,exclude_pull_requests,check_suite_id,head_sha}",
{ owner, repo }
);
for (const r of workflow_runs) {
const {
run_started_at: date,
head_sha,
path,
conclusion,
} = r as {
run_started_at: string;
head_sha: string;
path: string;
conclusion: "failure" | "success";
};
const workflow_url = `https://raw.githubusercontent.com/${owner}/${repo}/${head_sha}/${path}`;
const workflow_code = await httpGet(workflow_url);
const [_, dependency] =
workflow_code.match(/uses\s*:\s*(Platane\/snk(\/svg-only)?@\w*)/) ?? [];
const cronMatch = workflow_code.match(/cron\s*:([^\n]*)/);
if (dependency)
return {
dependency,
success: conclusion === "success",
date,
cron: cronMatch?.[1].replace(/["|']/g, "").trim(),
workflow_code,
};
}
} catch (err) {
console.error(err);
}
};
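
A hedged usage sketch for the helper above: the repository slug is a placeholder, and a `GITHUB_TOKEN` is expected in `.env` (loaded by dotenv inside the module).

```ts
// Hypothetical caller; "some-user/some-repo" is a placeholder slug.
import { getLastRunInfo } from "./getLastRunInfo";

(async () => {
  const info = await getLastRunInfo("some-user/some-repo");
  if (info) {
    // dependency looks like "Platane/snk@vN" or "Platane/snk/svg-only@vN"
    console.log(info.dependency, info.success ? "ok" : "failed", info.cron);
  }
})();
```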

View File

@@ -0,0 +1,56 @@
import { load as CheerioLoad } from "cheerio";
import { httpGet } from "./httpGet";
export const getDependentInfo = async (repo: string) => {
const pageText = await httpGet(`https://github.com/${repo}/actions`).catch(
() => null
);
if (!pageText) return;
const $ = CheerioLoad(pageText);
const runs = $("#partial-actions-workflow-runs [data-url]")
.toArray()
.map((el) => {
const success =
$(el).find('[aria-label="completed successfully"]').toArray().length ===
1;
const workflow_file_href = $(el)
.find("a")
.toArray()
.map((el) => $(el).attr("href")!)
.find((href) => href.match(/\/actions\/runs\/\d+\/workflow/))!;
const workflow_file_url = workflow_file_href
? new URL(workflow_file_href, "https://github.com").toString()
: null;
const date = $(el).find("relative-time").attr("datetime");
return { success, workflow_file_url, date };
});
for (const { workflow_file_url, success, date } of runs) {
if (!workflow_file_url) continue;
const $ = CheerioLoad(await httpGet(workflow_file_url));
const workflow_code = $("table[data-hpc]").text();
const [_, dependency] =
workflow_code.match(/uses\s*:\s*(Platane\/snk(\/svg-only)?@\w*)/) ?? [];
const cronMatch = workflow_code.match(/cron\s*:([^\n]*)/);
if (dependency)
return {
dependency,
success,
date,
cron: cronMatch?.[1].replace(/["|']/g, "").trim(),
workflow_code,
};
}
};
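
The dependency and cron extraction above relies on two regular expressions applied to the workflow source; here is a standalone illustration on a sample workflow snippet (the snippet itself is invented):

```ts
// Sample workflow text, invented for illustration.
const workflow_code = [
  "on:",
  "  schedule:",
  '    - cron: "0 */12 * * *"',
  "jobs:",
  "  generate:",
  "    steps:",
  "      - uses: Platane/snk/svg-only@v3",
].join("\n");

// Same patterns as in getDependentInfo above.
const [, dependency] =
  workflow_code.match(/uses\s*:\s*(Platane\/snk(\/svg-only)?@\w*)/) ?? [];
const cron = workflow_code
  .match(/cron\s*:([^\n]*)/)?.[1]
  .replace(/["|']/g, "")
  .trim();

console.log(dependency); // Platane/snk/svg-only@v3
console.log(cron); // 0 */12 * * *
```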

View File

@@ -0,0 +1,67 @@
import { load as CheerioLoad } from "cheerio";
import { httpGet } from "./httpGet";
const getPackages = async (repo: string) => {
const pageText = await httpGet(
`https://github.com/${repo}/network/dependents`
);
const $ = CheerioLoad(pageText);
return $("#dependents .select-menu-list a")
.toArray()
.map((el) => {
const name = $(el).text().trim();
const href = $(el).attr("href");
const u = new URL(href!, "http://example.com");
return { name, id: u.searchParams.get("package_id")! };
});
};
const getDependentByPackage = async (repo: string, packageId: string) => {
const repos = [] as string[];
const pages = [];
let url:
| string
| null = `https://github.com/${repo}/network/dependents?package_id=${packageId}`;
while (url) {
const $ = CheerioLoad(await httpGet(url));
console.log(repos.length);
const reposOnPage = $(`#dependents [data-hovercard-type="repository"]`)
.toArray()
.map((el) => $(el).attr("href")!.slice(1));
repos.push(...reposOnPage);
const nextButton = $(`#dependents a`)
.filter((_, el) => $(el).text().trim().toLowerCase() === "next")
.eq(0);
const href = nextButton ? nextButton.attr("href") : null;
pages.push({ url, reposOnPage, next: href });
url = href ? new URL(href, "https://github.com").toString() : null;
}
return { repos, pages };
};
export const getDependents = async (repo: string) => {
const packages = await getPackages(repo);
const ps: (typeof packages[number] & { dependents: string[] })[] = [];
for (const p of packages)
ps.push({
...p,
dependents: (await getDependentByPackage(repo, p.id)).repos,
});
return ps;
};
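
A possible way to drive the scraper above (network access to github.com is required; the output shape follows the return type in the code):

```ts
// Hypothetical caller for getDependents.
import { getDependents } from "./getDependents";

(async () => {
  const packages = await getDependents("Platane/snk");
  for (const p of packages)
    console.log(
      `${p.name} (package_id=${p.id}): ${p.dependents.length} dependent repos`
    );
})();
```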

View File

@@ -0,0 +1,125 @@
import * as fs from "fs";
import fetch from "node-fetch";
import { Octokit } from "octokit";
require("dotenv").config();
// @ts-ignore
import packages from "./out.json";
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const getLastRunInfo = async (repo_: string) => {
const [owner, repo] = repo_.split("/");
try {
const {
data: { workflow_runs },
} = await octokit.request(
"GET /repos/{owner}/{repo}/actions/runs{?actor,branch,event,status,per_page,page,created,exclude_pull_requests,check_suite_id,head_sha}",
{ owner, repo }
);
for (const r of workflow_runs) {
const { run_started_at, head_sha, path, conclusion } = r as {
run_started_at: string;
head_sha: string;
path: string;
conclusion: "failure" | "success";
};
const workflow_url = `https://raw.githubusercontent.com/${owner}/${repo}/${head_sha}/${path}`;
const workflow_file = await fetch(workflow_url).then((res) => res.text());
const [_, dependency, __, version] =
workflow_file.match(/uses\s*:\s*(Platane\/snk(\/svg-only)?@(\w*))/) ??
[];
const cronMatch = workflow_file.match(/cron\s*:([^\n]*)/);
if (dependency)
return {
dependency,
version,
run_started_at,
conclusion,
cron: cronMatch?.[1].replace(/["|']/g, "").trim(),
workflow_file,
workflow_url,
};
}
} catch (err) {
console.error(err);
}
};
const wait = (delay = 0) => new Promise((r) => setTimeout(r, delay));
const getRepos = () => {
try {
return JSON.parse(fs.readFileSync(__dirname + "/cache/out.json").toString())
.map((p: any) => p.dependents)
.flat() as string[];
} catch (err) {
return [];
}
};
const getReposInfo = () => {
try {
return JSON.parse(
fs.readFileSync(__dirname + "/cache/stats.json").toString()
) as any[];
} catch (err) {
return [];
}
};
const saveRepoInfo = (rr: any[]) => {
fs.writeFileSync(__dirname + "/cache/stats.json", JSON.stringify(rr));
};
(async () => {
const repos = getRepos();
const total = repos.length;
const reposInfo = getReposInfo().slice(0, -20);
for (const { repo } of reposInfo) {
const i = repos.indexOf(repo);
if (i >= 0) repos.splice(i, 1);
}
while (repos.length) {
const {
data: { rate },
} = await octokit.request("GET /rate_limit", {});
console.log(rate);
if (rate.remaining < 100) {
const delay = rate.reset - Math.floor(Date.now() / 1000);
console.log(
`waiting ${delay} second (${(delay / 60).toFixed(
1
)} minutes) for reset `
);
await wait(Math.max(0, delay) * 1000);
}
const rs = repos.splice(0, 20);
await Promise.all(
rs.map(async (repo) => {
reposInfo.push({ repo, ...(await getLastRunInfo(repo)) });
saveRepoInfo(reposInfo);
console.log(
reposInfo.length.toString().padStart(5, " "),
"/",
total,
repo
);
})
);
}
})();
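
The loop above throttles itself against the GitHub rate limit before each batch of 20 repositories. Extracted as a standalone helper it would look roughly like this (function name is illustrative, not part of the repository):

```ts
// Sketch of the rate-limit back-off used in the script above.
import { Octokit } from "octokit";

const wait = (delay = 0) => new Promise((r) => setTimeout(r, delay));

export const waitForRateLimit = async (octokit: Octokit, minRemaining = 100) => {
  const {
    data: { rate },
  } = await octokit.request("GET /rate_limit", {});

  if (rate.remaining < minRemaining) {
    // rate.reset is a unix timestamp in seconds
    const delay = Math.max(0, rate.reset - Math.floor(Date.now() / 1000));
    console.log(`waiting ${delay} seconds for the rate limit to reset`);
    await wait(delay * 1000);
  }
};
```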

View File

@@ -0,0 +1,84 @@
import fetch from "node-fetch";
import * as path from "path";
import * as fs from "fs";
const CACHE_DIR = path.join(__dirname, "cache", "http");
fs.mkdirSync(CACHE_DIR, { recursive: true });
const createMutex = () => {
let locked = false;
const q: any[] = [];
const update = () => {
if (locked) return;
if (q[0]) {
locked = true;
q.shift()(() => {
locked = false;
update();
});
}
};
const request = () =>
new Promise<() => void>((resolve) => {
q.push(resolve);
update();
});
return request;
};
const mutex = createMutex();
export const httpGet = async (url: string | URL): Promise<string> => {
const cacheKey = url
.toString()
.replace(/https?:\/\//, "")
.replace(/[^\w=&\?\.]/g, "_");
const cacheFilename = path.join(CACHE_DIR, cacheKey);
if (fs.existsSync(cacheFilename))
return new Promise((resolve, reject) =>
fs.readFile(cacheFilename, (err, data) =>
err ? reject(err) : resolve(data.toString())
)
);
const release = await mutex();
try {
const res = await fetch(url);
if (!res.ok) {
if (res.status === 429 || res.statusText === "Too Many Requests") {
const delay = +(res.headers.get("retry-after") ?? 300) * 1000;
console.log("Too Many Requests", delay);
await wait(delay);
console.log("waited long enough");
return httpGet(url);
}
console.error(url, res.status, res.statusText);
throw new Error("res not ok");
}
const text = await res.text();
fs.writeFileSync(cacheFilename, text);
// await wait(Math.random() * 200 + 100);
return text;
} finally {
release();
}
};
const wait = (delay = 0) => new Promise((r) => setTimeout(r, delay));
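
A usage sketch for the cached `httpGet` helper above: the URL is only an example; repeated calls are served from the on-disk cache and concurrent calls are serialized by the mutex.

```ts
// Hypothetical caller; the URL is an example.
import { httpGet } from "./httpGet";

(async () => {
  const html = await httpGet(
    "https://github.com/Platane/snk/network/dependents"
  );
  console.log(html.length, "characters (cached under cache/http for the next run)");
})();
```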

View File

@@ -0,0 +1,51 @@
import { getDependentInfo } from "./getDependentInfo";
import { getDependents } from "./getDependents";
import ParkMiller from "park-miller";
const toChunk = <T>(arr: T[], n = 1) =>
Array.from({ length: Math.ceil(arr.length / n) }, (_, i) =>
arr.slice(i * n, (i + 1) * n)
);
const random = new ParkMiller(10);
const shuffle = <T>(array: T[]) => {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(random.float() * (i + 1));
const temp = array[i];
array[i] = array[j];
array[j] = temp;
}
};
(async () => {
const packages = await getDependents("Platane/snk");
const repos = packages.map((p) => p.dependents).flat();
shuffle(repos);
repos.splice(0, repos.length - 5000);
console.log(repos);
const infos: any[] = [];
// for (const chunk of toChunk(repos, 10))
// await Promise.all(
// chunk.map(async (repo) => {
// console.log(
// infos.length.toString().padStart(5, " "),
// "/",
// repos.length
// );
// infos.push({ repo, ...(await getDependentInfo(repo)) });
// })
// );
for (const repo of repos) {
console.log(infos.length.toString().padStart(5, " "), "/", repos.length);
infos.push({ repo, ...(await getDependentInfo(repo)) });
}
})();

View File

@@ -0,0 +1,16 @@
{
"name": "@snk/usage-stats",
"version": "1.0.0",
"dependencies": {},
"devDependencies": {
"sucrase": "3.29.0",
"cheerio": "1.0.0-rc.12",
"node-fetch": "2.6.7",
"octokit": "2.0.11",
"dotenv": "16.0.3",
"park-miller": "1.1.0"
},
"scripts": {
"start": "sucrase-node index.ts"
}
}

View File

@@ -0,0 +1,62 @@
import * as fs from "fs";
type R = { repo: string } & Partial<{
dependency: string;
version: string;
run_started_at: string;
conclusion: "failure" | "success";
cron?: string;
workflow_file: string;
}>;
(async () => {
const repos: R[] = JSON.parse(
fs.readFileSync(__dirname + "/cache/stats.json").toString()
);
const total = repos.length;
const recent_repos = repos.filter(
(r) =>
new Date(r.run_started_at!).getTime() >
Date.now() - 7 * 24 * 60 * 60 * 1000
);
const recent_successful_repos = recent_repos.filter(
(r) => r?.conclusion === "success"
);
const versions = new Map();
for (const { dependency } of recent_successful_repos) {
versions.set(dependency, (versions.get(dependency) ?? 0) + 1);
}
console.log(`total ${total}`);
console.log(
`recent_repos ${recent_repos.length} (${(
(recent_repos.length / total) *
100
).toFixed(2)}%)`
);
console.log(
`recent_successful_repos ${recent_successful_repos.length} (${(
(recent_successful_repos.length / total) *
100
).toFixed(2)}%)`
);
console.log("versions");
for (const [name, count] of Array.from(versions.entries()).sort(
([, a], [, b]) => b - a
))
console.log(
`${(name as string).split("Platane/")[1].padEnd(20, " ")} ${(
(count / recent_successful_repos.length) *
100
)
.toFixed(2)
.padStart(6, " ")}% ${count} `
);
const gif_repos = repos.filter((r) => r.workflow_file?.includes(".gif"));
console.log("repo with git ouput", gif_repos.length);
})();

View File

@@ -1425,20 +1425,6 @@ const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original)
return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};
/**
* isSameProtocol reports whether the two provided URLs use the same protocol.
*
* Both domains must already be in canonical form.
* @param {string|URL} original
* @param {string|URL} destination
*/
const isSameProtocol = function isSameProtocol(destination, original) {
const orig = new URL$1(original).protocol;
const dest = new URL$1(destination).protocol;
return orig === dest;
};
/**
* Fetch function
*
@@ -1470,7 +1456,7 @@ function fetch(url, opts) {
let error = new AbortError('The user aborted a request.');
reject(error);
if (request.body && request.body instanceof Stream.Readable) {
destroyStream(request.body, error);
request.body.destroy(error);
}
if (!response || !response.body) return;
response.body.emit('error', error);
@@ -1511,43 +1497,9 @@ function fetch(url, opts) {
req.on('error', function (err) {
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
if (response && response.body) {
destroyStream(response.body, err);
}
finalize();
});
fixResponseChunkedTransferBadEnding(req, function (err) {
if (signal && signal.aborted) {
return;
}
if (response && response.body) {
destroyStream(response.body, err);
}
});
/* c8 ignore next 18 */
if (parseInt(process.version.substring(1)) < 14) {
// Before Node.js 14, pipeline() does not fully support async iterators and does not always
// properly handle when the socket close/end events are out of order.
req.on('socket', function (s) {
s.addListener('close', function (hadError) {
// if a data listener is still present we didn't end cleanly
const hasDataListener = s.listenerCount('data') > 0;
// if end happened before close but the socket didn't emit an error, do it now
if (response && hasDataListener && !hadError && !(signal && signal.aborted)) {
const err = new Error('Premature close');
err.code = 'ERR_STREAM_PREMATURE_CLOSE';
response.body.emit('error', err);
}
});
});
}
req.on('response', function (res) {
clearTimeout(reqTimeout);
@@ -1619,7 +1571,7 @@ function fetch(url, opts) {
size: request.size
};
if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {
if (!isDomainOrSubdomain(request.url, locationURL)) {
for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
requestOpts.headers.delete(name);
}
@@ -1712,13 +1664,6 @@ function fetch(url, opts) {
response = new Response(body, response_options);
resolve(response);
});
raw.on('end', function () {
// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.
if (!response) {
response = new Response(body, response_options);
resolve(response);
}
});
return;
}
@@ -1738,44 +1683,6 @@ function fetch(url, opts) {
writeToStream(req, request);
});
}
function fixResponseChunkedTransferBadEnding(request, errorCallback) {
let socket;
request.on('socket', function (s) {
socket = s;
});
request.on('response', function (response) {
const headers = response.headers;
if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {
response.once('close', function (hadError) {
// tests for socket presence, as in some situations the
// the 'socket' event is not triggered for the request
// (happens in deno), avoids `TypeError`
// if a data listener is still present we didn't end cleanly
const hasDataListener = socket && socket.listenerCount('data') > 0;
if (hasDataListener && !hadError) {
const err = new Error('Premature close');
err.code = 'ERR_STREAM_PREMATURE_CLOSE';
errorCallback(err);
}
});
}
});
}
function destroyStream(stream, err) {
if (stream.destroy) {
stream.destroy(err);
} else {
// node < 8
stream.emit('error', err);
stream.end();
}
}
/**
* Redirect code matching
*

View File

@@ -78,27 +78,27 @@ const getGithubUserContribution = async (userName, options = {}) => {
return parseUserPage(resText);
};
const parseUserPage = (content) => {
// take roughly the table block
// take roughly the svg block
const block = content
.split(`aria-describedby="contribution-graph-description"`)[1]
.split("<tbody>")[1]
.split("</tbody>")[0];
const cells = block.split("</tr>").flatMap((inside, y) => inside.split("</td>").flatMap((m) => {
const date = m.match(/data-date="([^"]+)"/)?.[1];
const literalLevel = m.match(/data-level="([^"]+)"/)?.[1];
const literalX = m.match(/data-ix="([^"]+)"/)?.[1];
const literalCount = m.match(/(No|\d+) contributions? on/)?.[1];
if (date && literalLevel && literalX && literalCount)
return [
{
x: +literalX,
y,
date,
count: +literalCount,
level: +literalLevel,
},
];
return [];
.split(`class="js-calendar-graph-svg"`)[1]
.split("</svg>")[0];
let x = 0;
let lastYAttribute = 0;
const rects = Array.from(block.matchAll(/<rect[^>]*>[^<]*<\/rect>/g)).map(([m]) => {
const date = m.match(/data-date="([^"]+)"/)[1];
const level = +m.match(/data-level="([^"]+)"/)[1];
const yAttribute = +m.match(/y="([^"]+)"/)[1];
const literalCount = m.match(/(No|\d+) contributions? on/)[1];
const count = literalCount === "No" ? 0 : +literalCount;
if (lastYAttribute > yAttribute)
x++;
lastYAttribute = yAttribute;
return { date, count, level, x, yAttribute };
});
const yAttributes = Array.from(new Set(rects.map((c) => c.yAttribute)).keys()).sort();
const cells = rects.map(({ yAttribute, ...c }) => ({
y: yAttributes.indexOf(yAttribute),
...c,
}));
return cells;
};
@@ -689,14 +689,14 @@ const generateContributionSnake = async (userName, outputs) => {
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "Dy": () => (/* binding */ setColorEmpty),
/* harmony export */ "HJ": () => (/* binding */ isInsideLarge),
/* harmony export */ "Lq": () => (/* binding */ getColor),
/* harmony export */ "V0": () => (/* binding */ isInside),
/* harmony export */ "HJ": () => (/* binding */ isInsideLarge),
/* harmony export */ "VJ": () => (/* binding */ copyGrid),
/* harmony export */ "u1": () => (/* binding */ createEmptyGrid),
/* harmony export */ "Lq": () => (/* binding */ getColor),
/* harmony export */ "xb": () => (/* binding */ isEmpty),
/* harmony export */ "vk": () => (/* binding */ setColor),
/* harmony export */ "xb": () => (/* binding */ isEmpty)
/* harmony export */ "Dy": () => (/* binding */ setColorEmpty),
/* harmony export */ "u1": () => (/* binding */ createEmptyGrid)
/* harmony export */ });
/* unused harmony exports isGridEmpty, gridEquals */
const isInside = (grid, x, y) => x >= 0 && y >= 0 && x < grid.width && y < grid.height;
@@ -733,13 +733,13 @@ const createEmptyGrid = (width, height) => ({
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "IP": () => (/* binding */ getHeadY),
/* harmony export */ "If": () => (/* binding */ getHeadX),
/* harmony export */ "IP": () => (/* binding */ getHeadY),
/* harmony export */ "JJ": () => (/* binding */ getSnakeLength),
/* harmony export */ "Ks": () => (/* binding */ snakeToCells),
/* harmony export */ "kE": () => (/* binding */ snakeEquals),
/* harmony export */ "kv": () => (/* binding */ nextSnake),
/* harmony export */ "nJ": () => (/* binding */ snakeWillSelfCollide),
/* harmony export */ "Ks": () => (/* binding */ snakeToCells),
/* harmony export */ "xG": () => (/* binding */ createSnakeFromCells)
/* harmony export */ });
/* unused harmony export copySnake */

View File

@@ -2991,7 +2991,7 @@ var external_path_ = __nccwpck_require__(1017);
// EXTERNAL MODULE: ../../node_modules/@actions/core/lib/core.js
var core = __nccwpck_require__(7117);
;// CONCATENATED MODULE: ./palettes.ts
const basePalettes = {
const palettes = {
"github-light": {
colorDotBorder: "#1b1f230a",
colorDots: ["#ebedf0", "#9be9a8", "#40c463", "#30a14e", "#216e39"],
@@ -3006,8 +3006,10 @@ const basePalettes = {
},
};
// aliases
const palettes = { ...basePalettes };
palettes["github"] = palettes["github-light"];
palettes["github"] = {
...palettes["github-light"],
dark: { ...palettes["github-dark"] },
};
palettes["default"] = palettes["github"];
;// CONCATENATED MODULE: ./outputsOptions.ts
@@ -3037,7 +3039,6 @@ const parseEntry = (entry) => {
sizeCell: 16,
sizeDot: 12,
...palettes["default"],
dark: palettes["default"].dark && { ...palettes["default"].dark },
};
const animationOptions = { step: 1, frameDuration: 100 };
{
@@ -3047,13 +3048,6 @@ const parseEntry = (entry) => {
drawOptions.dark = palette.dark && { ...palette.dark };
}
}
{
const dark_palette = palettes[sp.get("dark_palette")];
if (dark_palette) {
const clone = { ...dark_palette, dark: undefined };
drawOptions.dark = clone;
}
}
if (sp.has("color_snake"))
drawOptions.colorSnake = sp.get("color_snake");
if (sp.has("color_dots")) {
@@ -3067,8 +3061,6 @@ const parseEntry = (entry) => {
if (sp.has("dark_color_dots")) {
const colors = sp.get("dark_color_dots").split(/[,;]/);
drawOptions.dark = {
colorDotBorder: drawOptions.colorDotBorder,
colorSnake: drawOptions.colorSnake,
...drawOptions.dark,
colorDots: colors,
colorEmpty: colors[0],

yarn.lock (2146 changed lines)

File diff suppressed because it is too large