Skip to content

Commit

Permalink
Merge branch 'SukkaW:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
FYLSen authored Jan 20, 2025
2 parents e3037e2 + a0ccd43 commit eea2091
Show file tree
Hide file tree
Showing 40 changed files with 680 additions and 722 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/check-source-domain.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ on:
jobs:
check:
name: Check
runs-on: ubuntu-latest
runs-on: ubuntu-24.04-arm

steps:
# - name: Tune GitHub-hosted runner network
Expand Down
49 changes: 40 additions & 9 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@ concurrency:
jobs:
build:
name: Build
runs-on: ubuntu-latest
runs-on: ubuntu-24.04-arm

steps:
- run: df -h
# - name: Tune GitHub-hosted runner network
# # https://github.com/actions/runner-images/issues/1187
# uses: smorimoto/tune-github-hosted-runner-network@v1
Expand All @@ -29,6 +30,33 @@ jobs:
with:
node-version-file: ".node-version"
cache: "pnpm"
- name: Create RAM Disk for building
id: ramdisk
run: |
BUILD_DIR=$(mktemp -d -p /dev/shm/ -t sukka-surge-public.XXXXXXXXXX)
echo "Build dir created at $BUILD_DIR"
echo "build_dir=$BUILD_DIR" >> $GITHUB_OUTPUT
- name: Download Previous Build
uses: actions/checkout@v4
with:
repository: SukkaLab/ruleset.skk.moe
persist-credentials: false
path: previous-build-${{ github.run_id }}-${{ github.run_number }}
- run: mv previous-build-${{ github.run_id }}-${{ github.run_number }}/{.,}* ${{ steps.ramdisk.outputs.build_dir }}/
- name: build folder check
# If the public directory doesn't exist, the build should fail.
# If the public directory is empty, the build should fail.
run: |
if [ ! -d ${{ steps.ramdisk.outputs.build_dir }}/.git ]; then
echo ".git not found"
exit 1
fi
if [ ! -d ${{ steps.ramdisk.outputs.build_dir }}/List ]; then
echo "List not found"
exit 1
fi
echo "public directory is ready: ${{ steps.ramdisk.outputs.build_dir }}"
- run: rm -rf "${{ steps.ramdisk.outputs.build_dir }}/.git"
- name: Get current date
id: date
run: |
Expand Down Expand Up @@ -56,26 +84,29 @@ jobs:
${{ runner.os }}-v3-
- run: pnpm install
- run: pnpm run build
env:
PUBLIC_DIR: ${{ steps.ramdisk.outputs.build_dir }}
- name: Pre-deploy check
# If the public directory doesn't exist, the build should fail.
# If the public directory is empty, the build should fail.
run: |
if [ ! -d public ]; then
if [ ! -d ${{ steps.ramdisk.outputs.build_dir }} ]; then
echo "public directory not found"
exit 1
fi
if [ ! "$(ls -A public)" ]; then
if [ ! "$(ls -A ${{ steps.ramdisk.outputs.build_dir }})" ]; then
echo "public directory is empty"
exit 1
fi
if [ ! -f .BUILD_FINISHED ]; then
echo ".BUILD_FINISHED not found"
exit 1
fi
echo "public directory is ready: ${{ steps.ramdisk.outputs.build_dir }}"
- uses: actions/upload-artifact@v4
with:
name: build-artifact-${{ github.ref_name }}
path: public
path: ${{ steps.ramdisk.outputs.build_dir }}
if-no-files-found: error
retention-days: 1
compression-level: 4
Expand All @@ -92,7 +123,7 @@ jobs:
- build
name: Deploy to Cloudflare Pages
if: github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
runs-on: ubuntu-24.04-arm
steps:
- name: Get NPM cache directory path
id: npm_cache_path
Expand All @@ -104,7 +135,7 @@ jobs:
path: |
${{ steps.npm_cache_path.outputs.dir }}
node_modules
key: ${{ runner.os }}-deploy-to-cloudflare-npm
key: ${{ runner.os }}-${{ runner.arch }}-deploy-to-cloudflare-npm
- uses: actions/download-artifact@v4
with:
name: build-artifact-${{ github.ref_name }}
Expand All @@ -121,7 +152,7 @@ jobs:
- build
name: Deploy to GitHub and GitLab
if: github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
runs-on: ubuntu-24.04-arm
steps:
- uses: actions/download-artifact@v4
with:
Expand All @@ -130,7 +161,7 @@ jobs:
- name: Upload Dist to GitLab
continue-on-error: true
run: |
git clone --filter=tree:0 --no-tags --prune https://${GITLAB_TOKEN_NAME}:${GITLAB_TOKEN}@gitlab.com/SukkaW/ruleset.skk.moe.git ./deploy-git
git clone --filter=tree:0 --no-tags https://${GITLAB_TOKEN_NAME}:${GITLAB_TOKEN}@gitlab.com/SukkaW/ruleset.skk.moe.git ./deploy-git
cd ./deploy-git
git config --global push.default matching
git config --global user.email "${GITLAB_EMAIL}"
Expand All @@ -150,7 +181,7 @@ jobs:
- name: Upload Dist to GitHub
continue-on-error: true
run: |
git clone --filter=tree:0 --no-tags --prune https://${GH_USER}:${GH_TOKEN}@github.com/SukkaLab/ruleset.skk.moe.git ./deploy-git
git clone --filter=tree:0 --no-tags https://${GH_USER}:${GH_TOKEN}@github.com/SukkaLab/ruleset.skk.moe.git ./deploy-git
cd ./deploy-git
git config --global push.default matching
git config --global user.email "${GH_EMAIL}"
Expand Down
76 changes: 58 additions & 18 deletions Build/build-domestic-direct-lan-ruleset-dns-mapping-module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import { SHARED_DESCRIPTION } from './constants/description';
import { createMemoizedPromise } from './lib/memo-promise';
import * as yaml from 'yaml';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { OUTPUT_INTERNAL_DIR, OUTPUT_MODULES_DIR, SOURCE_DIR } from './constants/dir';
import { OUTPUT_INTERNAL_DIR, OUTPUT_MODULES_DIR, OUTPUT_MODULES_RULES_DIR, SOURCE_DIR } from './constants/dir';
import { RulesetOutput } from './lib/create-file';

export function createGetDnsMappingRule(allowWildcard: boolean) {
Expand Down Expand Up @@ -78,7 +78,7 @@ export const getDomesticAndDirectDomainsRulesetPromise = createMemoizedPromise(a
export const buildDomesticRuleset = task(require.main === module, __filename)(async (span) => {
const [domestics, directs, lans] = await getDomesticAndDirectDomainsRulesetPromise();

const dataset: DNSMapping[] = ([DOH_BOOTSTRAP, DOMESTICS, DIRECTS] as const).flatMap(Object.values);
const dataset: Array<[name: string, DNSMapping]> = ([DOH_BOOTSTRAP, DOMESTICS, DIRECTS, LAN] as const).flatMap(Object.entries);

return Promise.all([
new RulesetOutput(span, 'domestic', 'non_ip')
Expand Down Expand Up @@ -108,6 +108,41 @@ export const buildDomesticRuleset = task(require.main === module, __filename)(as
])
.addFromRuleset(lans)
.write(),

...dataset.map(([name, { ruleset, domains }]) => {
if (!ruleset) {
return;
}

const output = new RulesetOutput(span, name.toLowerCase(), 'sukka_local_dns_mapping').withTitle(`Sukka's Ruleset - Local DNS Mapping (${name})`).withDescription([
...SHARED_DESCRIPTION,
'',
'This is an internal rule that is only referenced by sukka_local_dns_mapping.sgmodule',
'Do not use this file in your Rule section, all rules are included in non_ip/domestic.conf already.'
]);

domains.forEach((domain) => {
switch (domain[0]) {
case '$':
output.addDomain(domain.slice(1));
break;
case '+':
output.addDomainSuffix(domain.slice(1));
break;
default:
output.addDomainSuffix(domain);
break;
}
});

return output.write({
surge: true,
clash: false,
singbox: false,
surgeDir: OUTPUT_MODULES_RULES_DIR
});
}),

compareAndWriteFile(
span,
[
Expand All @@ -119,26 +154,31 @@ export const buildDomesticRuleset = task(require.main === module, __filename)(as
// I use an object to deduplicate the domains
// Otherwise I could just construct an array directly
dataset.reduce<Record<string, string>>((acc, cur) => {
const { domains, dns, hosts } = cur;
const ruleset_name = cur[0].toLowerCase();
const { domains, dns, hosts, ruleset } = cur[1];

Object.entries(hosts).forEach(([dns, ips]) => {
acc[dns] ||= ips.join(', ');
});

domains.forEach((domain) => {
switch (domain[0]) {
case '$':
acc[domain.slice(1)] ||= `server:${dns}`;
break;
case '+':
acc[`*.${domain.slice(1)}`] ||= `server:${dns}`;
break;
default:
acc[domain] ||= `server:${dns}`;
acc[`*.${domain}`] ||= `server:${dns}`;
break;
}
});
if (ruleset) {
acc[`RULE-SET:https://ruleset.skk.moe/Modules/Rules/sukka_local_dns_mapping/${ruleset_name}.conf`] ||= `server:${dns}`;
} else {
domains.forEach((domain) => {
switch (domain[0]) {
case '$':
acc[domain.slice(1)] ||= `server:${dns}`;
break;
case '+':
acc[`*.${domain.slice(1)}`] ||= `server:${dns}`;
break;
default:
acc[domain] ||= `server:${dns}`;
acc[`*.${domain}`] ||= `server:${dns}`;
break;
}
});
}

return acc;
}, {})
Expand All @@ -153,7 +193,7 @@ export const buildDomesticRuleset = task(require.main === module, __filename)(as
dns: { 'nameserver-policy': Record<string, string | string[]> },
hosts: Record<string, string>
}>((acc, cur) => {
const { domains, dns, ...rest } = cur;
const { domains, dns, ...rest } = cur[1];
domains.forEach((domain) => {
let domainWildcard = domain;
if (domain[0] === '$') {
Expand Down
5 changes: 3 additions & 2 deletions Build/build-public.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import { task } from './trace';
import { treeDir, TreeFileType } from './lib/tree-dir';
import type { TreeType, TreeTypeArray } from './lib/tree-dir';

import { OUTPUT_MOCK_DIR, OUTPUT_MODULES_DIR, PUBLIC_DIR, ROOT_DIR } from './constants/dir';
import { OUTPUT_MOCK_DIR, OUTPUT_MODULES_DIR, OUTPUT_MODULES_RULES_DIR, PUBLIC_DIR, ROOT_DIR } from './constants/dir';
import { fastStringCompare, mkdirp, writeFile } from './lib/misc';
import picocolors from 'picocolors';
import { tagged as html } from 'foxts/tagged';
Expand Down Expand Up @@ -34,7 +34,8 @@ async function copyDirContents(srcDir: string, destDir: string) {
export const buildPublic = task(require.main === module, __filename)(async (span) => {
await span.traceChildAsync('copy rest of the files', async () => {
await Promise.all([
mkdirp(OUTPUT_MODULES_DIR),
// mkdirp(OUTPUT_MODULES_DIR),
mkdirp(OUTPUT_MODULES_RULES_DIR),
mkdirp(OUTPUT_MOCK_DIR)
]);

Expand Down
75 changes: 33 additions & 42 deletions Build/build-reject-domainset.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@
import path from 'node:path';
import process from 'node:process';

import { processHosts } from './lib/parse-filter/hosts';
import { processDomainLists } from './lib/parse-filter/domainlists';
import { processFilterRules } from './lib/parse-filter/filters';
import { processHostsWithPreload } from './lib/parse-filter/hosts';
import { processDomainListsWithPreload } from './lib/parse-filter/domainlists';
import { processFilterRulesWithPreload } from './lib/parse-filter/filters';

import { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, DOMAIN_LISTS, HOSTS_EXTRA, DOMAIN_LISTS_EXTRA, ADGUARD_FILTERS_EXTRA, PHISHING_DOMAIN_LISTS_EXTRA, ADGUARD_FILTERS_WHITELIST } from './constants/reject-data-source';
import { compareAndWriteFile } from './lib/create-file';
Expand All @@ -29,6 +29,14 @@ const readLocalRejectDropRulesetPromise = readFileIntoProcessedArray(path.join(S
const readLocalRejectNoDropRulesetPromise = readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/reject-no-drop.conf'));
const readLocalMyRejectRulesetPromise = readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/my_reject.conf'));

const hostsDownloads = HOSTS.map(entry => processHostsWithPreload(...entry));
const hostsExtraDownloads = HOSTS_EXTRA.map(entry => processHostsWithPreload(...entry));
const domainListsDownloads = DOMAIN_LISTS.map(entry => processDomainListsWithPreload(...entry));
const domainListsExtraDownloads = DOMAIN_LISTS_EXTRA.map(entry => processDomainListsWithPreload(...entry));
const adguardFiltersDownloads = ADGUARD_FILTERS.map(entry => processFilterRulesWithPreload(...entry));
const adguardFiltersExtraDownloads = ADGUARD_FILTERS_EXTRA.map(entry => processFilterRulesWithPreload(...entry));
const adguardFiltersWhitelistsDownloads = ADGUARD_FILTERS_WHITELIST.map(entry => processFilterRulesWithPreload(...entry));

export const buildRejectDomainSet = task(require.main === module, __filename)(async (span) => {
const rejectBaseDescription = [
...SHARED_DESCRIPTION,
Expand Down Expand Up @@ -70,30 +78,30 @@ export const buildRejectDomainSet = task(require.main === module, __filename)(as
.traceChild('download and process hosts / adblock filter rules')
.traceAsyncFn((childSpan) => Promise.all([
// Parse from remote hosts & domain lists
HOSTS.map(entry => processHosts(childSpan, ...entry).then(appendArrayToRejectOutput)),
HOSTS_EXTRA.map(entry => processHosts(childSpan, ...entry).then(appendArrayToRejectExtraOutput)),

DOMAIN_LISTS.map(entry => processDomainLists(childSpan, ...entry).then(appendArrayToRejectOutput)),
DOMAIN_LISTS_EXTRA.map(entry => processDomainLists(childSpan, ...entry).then(appendArrayToRejectExtraOutput)),

ADGUARD_FILTERS.map(
entry => processFilterRules(childSpan, ...entry)
.then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
appendArrayToRejectOutput(black);
})
hostsDownloads.map(task => task(childSpan).then(appendArrayToRejectOutput)),
hostsExtraDownloads.map(task => task(childSpan).then(appendArrayToRejectExtraOutput)),

domainListsDownloads.map(task => task(childSpan).then(appendArrayToRejectOutput)),
domainListsExtraDownloads.map(task => task(childSpan).then(appendArrayToRejectExtraOutput)),

adguardFiltersDownloads.map(
task => task(childSpan).then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
appendArrayToRejectOutput(black);
})
),
adguardFiltersExtraDownloads.map(
task => task(childSpan).then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
appendArrayToRejectExtraOutput(black);
})
),
ADGUARD_FILTERS_EXTRA.map(
entry => processFilterRules(childSpan, ...entry)
.then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
appendArrayToRejectExtraOutput(black);
})
adguardFiltersWhitelistsDownloads.map(
task => task(childSpan).then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
addArrayElementsToSet(filterRuleWhitelistDomainSets, black);
})
),
ADGUARD_FILTERS_WHITELIST.map(entry => processFilterRules(childSpan, ...entry).then(({ white, black }) => {
addArrayElementsToSet(filterRuleWhitelistDomainSets, white);
addArrayElementsToSet(filterRuleWhitelistDomainSets, black);
})),
getPhishingDomains(childSpan).then(appendArrayToRejectExtraOutput),
readLocalRejectDomainsetPromise.then(appendArrayToRejectOutput),
readLocalRejectDomainsetPromise.then(appendArrayToRejectExtraOutput),
Expand Down Expand Up @@ -129,26 +137,9 @@ export const buildRejectDomainSet = task(require.main === module, __filename)(as
}
});

// Create reject stats
const rejectDomainsStats: string[] = span
.traceChild('create reject stats')
.traceSyncFn(() => {
const results = [];
results.push('=== base ===');
appendArrayInPlace(results, rejectOutput.getStatMap());
results.push('=== extra ===');
appendArrayInPlace(results, rejectExtraOutput.getStatMap());
return results;
});

return Promise.all([
rejectOutput.write(),
rejectExtraOutput.write(),
compareAndWriteFile(
span,
rejectDomainsStats,
path.join(OUTPUT_INTERNAL_DIR, 'reject-stats.txt')
),
compareAndWriteFile(
span,
appendArrayInPlace(
Expand Down
4 changes: 2 additions & 2 deletions Build/build-reject-ip-list.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import { processLine } from './lib/process-line';
import { RulesetOutput } from './lib/create-file';
import { SOURCE_DIR } from './constants/dir';
import { $$fetch } from './lib/fetch-retry';
import { fetchAssetsWithout304 } from './lib/fetch-assets';
import { fetchAssets } from './lib/fetch-assets';

const BOGUS_NXDOMAIN_URL = 'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf';
const getBogusNxDomainIPsPromise: Promise<[ipv4: string[], ipv6: string[]]> = $$fetch(BOGUS_NXDOMAIN_URL).then(async (resp) => {
Expand Down Expand Up @@ -37,7 +37,7 @@ const BOTNET_FILTER_MIRROR_URL = [
// https://curbengh.github.io/malware-filter/botnet-filter-dnscrypt-blocked-ips.txt
];

const getBotNetFilterIPsPromise: Promise<[ipv4: string[], ipv6: string[]]> = fetchAssetsWithout304(BOTNET_FILTER_URL, BOTNET_FILTER_MIRROR_URL).then(text => text.split('\n').reduce<[ipv4: string[], ipv6: string[]]>((acc, cur) => {
const getBotNetFilterIPsPromise: Promise<[ipv4: string[], ipv6: string[]]> = fetchAssets(BOTNET_FILTER_URL, BOTNET_FILTER_MIRROR_URL).then(text => text.split('\n').reduce<[ipv4: string[], ipv6: string[]]>((acc, cur) => {
const ip = processLine(cur);
if (ip) {
if (isProbablyIpv4(ip)) {
Expand Down
Loading

0 comments on commit eea2091

Please sign in to comment.