mirror of
https://github.com/TecharoHQ/anubis.git
synced 2026-04-11 11:08:48 +00:00
Compare commits
2 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
51bd058f2d | ||
|
|
1614504922 |
8
.github/workflows/docker-pr.yml
vendored
8
.github/workflows/docker-pr.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
with:
|
with:
|
||||||
fetch-tags: true
|
fetch-tags: true
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -25,7 +25,7 @@ jobs:
|
|||||||
uses: Homebrew/actions/setup-homebrew@master
|
uses: Homebrew/actions/setup-homebrew@master
|
||||||
|
|
||||||
- name: Setup Homebrew cellar cache
|
- name: Setup Homebrew cellar cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
/home/linuxbrew/.linuxbrew/Cellar
|
/home/linuxbrew/.linuxbrew/Cellar
|
||||||
@@ -45,11 +45,9 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
brew bundle
|
brew bundle
|
||||||
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||||
with:
|
with:
|
||||||
images: ghcr.io/techarohq/anubis
|
images: ghcr.io/techarohq/anubis
|
||||||
|
|
||||||
|
|||||||
14
.github/workflows/docker.yml
vendored
14
.github/workflows/docker.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
with:
|
with:
|
||||||
fetch-tags: true
|
fetch-tags: true
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -31,7 +31,7 @@ jobs:
|
|||||||
uses: Homebrew/actions/setup-homebrew@master
|
uses: Homebrew/actions/setup-homebrew@master
|
||||||
|
|
||||||
- name: Setup Homebrew cellar cache
|
- name: Setup Homebrew cellar cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
/home/linuxbrew/.linuxbrew/Cellar
|
/home/linuxbrew/.linuxbrew/Cellar
|
||||||
@@ -50,11 +50,9 @@ jobs:
|
|||||||
- name: Install Brew dependencies
|
- name: Install Brew dependencies
|
||||||
run: |
|
run: |
|
||||||
brew bundle
|
brew bundle
|
||||||
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
|
|
||||||
- name: Log into registry
|
- name: Log into registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: techarohq
|
username: techarohq
|
||||||
@@ -62,7 +60,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||||
with:
|
with:
|
||||||
images: ghcr.io/techarohq/anubis
|
images: ghcr.io/techarohq/anubis
|
||||||
|
|
||||||
@@ -76,8 +74,8 @@ jobs:
|
|||||||
SLOG_LEVEL: debug
|
SLOG_LEVEL: debug
|
||||||
|
|
||||||
- name: Generate artifact attestation
|
- name: Generate artifact attestation
|
||||||
uses: actions/attest-build-provenance@v2
|
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
|
||||||
with:
|
with:
|
||||||
subject-name: ghcr.io/techarohq/anubis
|
subject-name: ghcr.io/techarohq/anubis
|
||||||
subject-digest: ${{ steps.build.outputs.digest }}
|
subject-digest: ${{ steps.build.outputs.digest }}
|
||||||
push-to-registry: true
|
push-to-registry: true
|
||||||
|
|||||||
14
.github/workflows/docs-deploy.yml
vendored
14
.github/workflows/docs-deploy.yml
vendored
@@ -16,15 +16,15 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||||
|
|
||||||
- name: Log into registry
|
- name: Log into registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: techarohq
|
username: techarohq
|
||||||
@@ -32,13 +32,13 @@ jobs:
|
|||||||
|
|
||||||
- name: Docker meta
|
- name: Docker meta
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||||
with:
|
with:
|
||||||
images: ghcr.io/techarohq/anubis/docs
|
images: ghcr.io/techarohq/anubis/docs
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
id: build
|
id: build
|
||||||
uses: docker/build-push-action@v6
|
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||||
with:
|
with:
|
||||||
context: ./docs
|
context: ./docs
|
||||||
cache-to: type=gha
|
cache-to: type=gha
|
||||||
@@ -49,14 +49,14 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
|
|
||||||
- name: Apply k8s manifests to aeacus
|
- name: Apply k8s manifests to aeacus
|
||||||
uses: actions-hub/kubectl@master
|
uses: actions-hub/kubectl@9270913c29699788b51bc04becd0ebdf048ffb49 # v1.32.3
|
||||||
env:
|
env:
|
||||||
KUBE_CONFIG: ${{ secrets.AEACUS_KUBECONFIG }}
|
KUBE_CONFIG: ${{ secrets.AEACUS_KUBECONFIG }}
|
||||||
with:
|
with:
|
||||||
args: apply -k docs/manifest
|
args: apply -k docs/manifest
|
||||||
|
|
||||||
- name: Apply k8s manifests to aeacus
|
- name: Apply k8s manifests to aeacus
|
||||||
uses: actions-hub/kubectl@master
|
uses: actions-hub/kubectl@9270913c29699788b51bc04becd0ebdf048ffb49 # v1.32.3
|
||||||
env:
|
env:
|
||||||
KUBE_CONFIG: ${{ secrets.AEACUS_KUBECONFIG }}
|
KUBE_CONFIG: ${{ secrets.AEACUS_KUBECONFIG }}
|
||||||
with:
|
with:
|
||||||
|
|||||||
12
.github/workflows/go.yml
vendored
12
.github/workflows/go.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
|||||||
#runs-on: alrest-techarohq
|
#runs-on: alrest-techarohq
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ jobs:
|
|||||||
uses: Homebrew/actions/setup-homebrew@master
|
uses: Homebrew/actions/setup-homebrew@master
|
||||||
|
|
||||||
- name: Setup Homebrew cellar cache
|
- name: Setup Homebrew cellar cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
/home/linuxbrew/.linuxbrew/Cellar
|
/home/linuxbrew/.linuxbrew/Cellar
|
||||||
@@ -48,10 +48,8 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
brew bundle
|
brew bundle
|
||||||
|
|
||||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
|
|
||||||
- name: Setup Golang caches
|
- name: Setup Golang caches
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/.cache/go-build
|
~/.cache/go-build
|
||||||
@@ -61,7 +59,7 @@ jobs:
|
|||||||
${{ runner.os }}-golang-
|
${{ runner.os }}-golang-
|
||||||
|
|
||||||
- name: Cache playwright binaries
|
- name: Cache playwright binaries
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||||
id: playwright-cache
|
id: playwright-cache
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
@@ -84,6 +82,6 @@ jobs:
|
|||||||
- name: Test
|
- name: Test
|
||||||
run: npm run test
|
run: npm run test
|
||||||
|
|
||||||
- uses: dominikh/staticcheck-action@v1
|
- uses: dominikh/staticcheck-action@fe1dd0c3658873b46f8c9bb3291096a617310ca6 # v1.3.1
|
||||||
with:
|
with:
|
||||||
version: "latest"
|
version: "latest"
|
||||||
|
|||||||
6
.github/workflows/zizmor.yml
vendored
6
.github/workflows/zizmor.yml
vendored
@@ -16,12 +16,12 @@ jobs:
|
|||||||
security-events: write
|
security-events: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
with:
|
with:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Install the latest version of uv
|
- name: Install the latest version of uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
|
||||||
|
|
||||||
- name: Run zizmor 🌈
|
- name: Run zizmor 🌈
|
||||||
run: uvx zizmor --format sarif . > results.sarif
|
run: uvx zizmor --format sarif . > results.sarif
|
||||||
@@ -29,7 +29,7 @@ jobs:
|
|||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Upload SARIF file
|
- name: Upload SARIF file
|
||||||
uses: github/codeql-action/upload-sarif@v3
|
uses: github/codeql-action/upload-sarif@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13
|
||||||
with:
|
with:
|
||||||
sarif_file: results.sarif
|
sarif_file: results.sarif
|
||||||
category: zizmor
|
category: zizmor
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -20,7 +20,3 @@ node_modules
|
|||||||
|
|
||||||
# how does this get here
|
# how does this get here
|
||||||
doc/VERSION
|
doc/VERSION
|
||||||
|
|
||||||
*.wasm
|
|
||||||
|
|
||||||
target
|
|
||||||
482
Cargo.lock
generated
482
Cargo.lock
generated
@@ -1,482 +0,0 @@
|
|||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
version = 4
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "anubis"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"wee_alloc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "argon2"
|
|
||||||
version = "0.5.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072"
|
|
||||||
dependencies = [
|
|
||||||
"base64ct",
|
|
||||||
"blake2",
|
|
||||||
"cpufeatures",
|
|
||||||
"password-hash",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "argon2id"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"anubis",
|
|
||||||
"argon2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "arrayvec"
|
|
||||||
version = "0.7.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "autocfg"
|
|
||||||
version = "1.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "base64ct"
|
|
||||||
version = "1.7.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bitflags"
|
|
||||||
version = "2.9.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "blake2"
|
|
||||||
version = "0.10.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
|
|
||||||
dependencies = [
|
|
||||||
"digest 0.10.7",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "block-buffer"
|
|
||||||
version = "0.10.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
|
|
||||||
dependencies = [
|
|
||||||
"generic-array",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
<<<<<<< HEAD
|
|
||||||
name = "block-buffer"
|
|
||||||
version = "0.11.0-rc.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a229bfd78e4827c91b9b95784f69492c1b77c1ab75a45a8a037b139215086f94"
|
|
||||||
dependencies = [
|
|
||||||
"hybrid-array",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfg-if"
|
|
||||||
version = "0.1.10"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
|
||||||
=======
|
|
||||||
name = "byteorder"
|
|
||||||
version = "1.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
|
||||||
>>>>>>> 8793853 (feat(wasm): broken equi-x solver)
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfg-if"
|
|
||||||
version = "1.0.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "const-oid"
|
|
||||||
version = "0.10.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0dabb6555f92fb9ee4140454eb5dcd14c7960e1225c6d1a6cc361f032947713e"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cpufeatures"
|
|
||||||
version = "0.2.17"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "crypto-common"
|
|
||||||
version = "0.1.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
|
|
||||||
dependencies = [
|
|
||||||
"generic-array",
|
|
||||||
"typenum",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "crypto-common"
|
|
||||||
version = "0.2.0-rc.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "170d71b5b14dec99db7739f6fc7d6ec2db80b78c3acb77db48392ccc3d8a9ea0"
|
|
||||||
dependencies = [
|
|
||||||
"hybrid-array",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "digest"
|
|
||||||
version = "0.10.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
|
||||||
dependencies = [
|
|
||||||
"block-buffer 0.10.4",
|
|
||||||
"crypto-common 0.1.6",
|
|
||||||
"subtle",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
<<<<<<< HEAD
|
|
||||||
name = "digest"
|
|
||||||
version = "0.11.0-pre.10"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6c478574b20020306f98d61c8ca3322d762e1ff08117422ac6106438605ea516"
|
|
||||||
dependencies = [
|
|
||||||
"block-buffer 0.11.0-rc.4",
|
|
||||||
"const-oid",
|
|
||||||
"crypto-common 0.2.0-rc.2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
=======
|
|
||||||
name = "dynasm"
|
|
||||||
version = "3.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "f0cecff24995c8a5a3c3169cff4c733fe7d91aedf5d8cc96238738bfe53186b8"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
"byteorder",
|
|
||||||
"lazy_static",
|
|
||||||
"proc-macro-error2",
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "dynasmrt"
|
|
||||||
version = "3.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7f5eab96b8688bcbf1d2354bcfe0261005ac1dd0616747152ada34948d4e9582"
|
|
||||||
dependencies = [
|
|
||||||
"byteorder",
|
|
||||||
"dynasm",
|
|
||||||
"fnv",
|
|
||||||
"memmap2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "equix"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"anubis",
|
|
||||||
"equix 0.2.3",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "equix"
|
|
||||||
version = "0.2.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "194df1f219a987430956f20faaf702fd4d434b1b2f7300014119854184107ac7"
|
|
||||||
dependencies = [
|
|
||||||
"arrayvec",
|
|
||||||
"hashx",
|
|
||||||
"num-traits",
|
|
||||||
"thiserror",
|
|
||||||
"visibility",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fixed-capacity-vec"
|
|
||||||
version = "1.0.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6b31a14f5ee08ed1a40e1252b35af18bed062e3f39b69aab34decde36bc43e40"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fnv"
|
|
||||||
version = "1.0.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
>>>>>>> 8793853 (feat(wasm): broken equi-x solver)
|
|
||||||
name = "generic-array"
|
|
||||||
version = "0.14.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
|
|
||||||
dependencies = [
|
|
||||||
"typenum",
|
|
||||||
"version_check",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
<<<<<<< HEAD
|
|
||||||
name = "hybrid-array"
|
|
||||||
version = "0.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4dab50e193aebe510fe0e40230145820e02f48dae0cf339ea4204e6e708ff7bd"
|
|
||||||
dependencies = [
|
|
||||||
"typenum",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
=======
|
|
||||||
name = "hashx"
|
|
||||||
version = "0.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "572a61c460658ae7db71878dd2caa163f47ffe041cb40aeee1483d1ffbf5e84b"
|
|
||||||
dependencies = [
|
|
||||||
"arrayvec",
|
|
||||||
"blake2",
|
|
||||||
"dynasmrt",
|
|
||||||
"fixed-capacity-vec",
|
|
||||||
"hex",
|
|
||||||
"rand_core 0.9.3",
|
|
||||||
"thiserror",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "hex"
|
|
||||||
version = "0.4.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lazy_static"
|
|
||||||
version = "1.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
>>>>>>> 8793853 (feat(wasm): broken equi-x solver)
|
|
||||||
name = "libc"
|
|
||||||
version = "0.2.171"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
<<<<<<< HEAD
|
|
||||||
name = "memory_units"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3"
|
|
||||||
=======
|
|
||||||
name = "memmap2"
|
|
||||||
version = "0.9.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "num-traits"
|
|
||||||
version = "0.2.19"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg",
|
|
||||||
]
|
|
||||||
>>>>>>> 8793853 (feat(wasm): broken equi-x solver)
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "password-hash"
|
|
||||||
version = "0.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166"
|
|
||||||
dependencies = [
|
|
||||||
"base64ct",
|
|
||||||
"rand_core 0.6.4",
|
|
||||||
"subtle",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro-error-attr2"
|
|
||||||
version = "2.0.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro-error2"
|
|
||||||
version = "2.0.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro-error-attr2",
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proc-macro2"
|
|
||||||
version = "1.0.94"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
|
|
||||||
dependencies = [
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quote"
|
|
||||||
version = "1.0.40"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.6.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.9.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "sha2"
|
|
||||||
version = "0.11.0-pre.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "19b4241d1a56954dce82cecda5c8e9c794eef6f53abe5e5216bac0a0ea71ffa7"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if 1.0.0",
|
|
||||||
"cpufeatures",
|
|
||||||
"digest 0.11.0-pre.10",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "sha256"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"anubis",
|
|
||||||
"sha2",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "subtle"
|
|
||||||
version = "2.6.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "syn"
|
|
||||||
version = "2.0.100"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"unicode-ident",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "thiserror"
|
|
||||||
version = "2.0.12"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
|
|
||||||
dependencies = [
|
|
||||||
"thiserror-impl",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "thiserror-impl"
|
|
||||||
version = "2.0.12"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "typenum"
|
|
||||||
version = "1.18.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "unicode-ident"
|
|
||||||
version = "1.0.18"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "version_check"
|
|
||||||
version = "0.9.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
<<<<<<< HEAD
|
|
||||||
name = "wee_alloc"
|
|
||||||
version = "0.4.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "dbb3b5a6b2bb17cb6ad44a2e68a43e8d2722c997da10e928665c72ec6c0a0b8e"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if 0.1.10",
|
|
||||||
"libc",
|
|
||||||
"memory_units",
|
|
||||||
"winapi",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi"
|
|
||||||
version = "0.3.9"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
|
||||||
dependencies = [
|
|
||||||
"winapi-i686-pc-windows-gnu",
|
|
||||||
"winapi-x86_64-pc-windows-gnu",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-i686-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-x86_64-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
|
||||||
=======
|
|
||||||
name = "visibility"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
>>>>>>> 8793853 (feat(wasm): broken equi-x solver)
|
|
||||||
10
Cargo.toml
10
Cargo.toml
@@ -1,10 +0,0 @@
|
|||||||
[workspace]
|
|
||||||
resolver = "2"
|
|
||||||
members = ["wasm/anubis", "wasm/pow/*"]
|
|
||||||
|
|
||||||
[profile.release]
|
|
||||||
#strip = true
|
|
||||||
opt-level = "s"
|
|
||||||
lto = "thin"
|
|
||||||
codegen-units = 1
|
|
||||||
panic = "abort"
|
|
||||||
10
Makefile
10
Makefile
@@ -1,9 +1,7 @@
|
|||||||
NODE_MODULES = node_modules
|
NODE_MODULES = node_modules
|
||||||
VERSION := $(shell cat ./VERSION)
|
VERSION := $(shell cat ./VERSION)
|
||||||
|
|
||||||
export RUSTFLAGS=-Ctarget-feature=+simd128
|
.PHONY: build assets deps lint prebaked-build test
|
||||||
|
|
||||||
.PHONY: build assets deps lint prebaked-build test wasm
|
|
||||||
|
|
||||||
assets:
|
assets:
|
||||||
npm run assets
|
npm run assets
|
||||||
@@ -26,8 +24,4 @@ prebaked-build:
|
|||||||
go build -o ./var/anubis -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/anubis
|
go build -o ./var/anubis -ldflags "-X 'github.com/TecharoHQ/anubis.Version=$(VERSION)'" ./cmd/anubis
|
||||||
|
|
||||||
test:
|
test:
|
||||||
npm run test
|
npm run test
|
||||||
|
|
||||||
wasm:
|
|
||||||
cargo build --release --target wasm32-unknown-unknown
|
|
||||||
cp -vf ./target/wasm32-unknown-unknown/release/*.wasm ./web/static/wasm
|
|
||||||
@@ -33,11 +33,3 @@ For live chat, please join the [Patreon](https://patreon.com/cadey) and ask in t
|
|||||||
## Packaging Status
|
## Packaging Status
|
||||||
|
|
||||||
[](https://repology.org/project/anubis-anti-crawler/versions)
|
[](https://repology.org/project/anubis-anti-crawler/versions)
|
||||||
|
|
||||||
## Contributors
|
|
||||||
|
|
||||||
<a href="https://github.com/TecharoHQ/anubis/graphs/contributors">
|
|
||||||
<img src="https://contrib.rocks/image?repo=TecharoHQ/anubis" />
|
|
||||||
</a>
|
|
||||||
|
|
||||||
Made with [contrib.rocks](https://contrib.rocks).
|
|
||||||
|
|||||||
@@ -16,4 +16,4 @@ const StaticPath = "/.within.website/x/cmd/anubis/"
|
|||||||
|
|
||||||
// DefaultDifficulty is the default "difficulty" (number of leading zeroes)
|
// DefaultDifficulty is the default "difficulty" (number of leading zeroes)
|
||||||
// that must be met by the client in order to pass the challenge.
|
// that must be met by the client in order to pass the challenge.
|
||||||
const DefaultDifficulty uint32 = 4
|
const DefaultDifficulty = 4
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ import (
|
|||||||
var (
|
var (
|
||||||
bind = flag.String("bind", ":8923", "network address to bind HTTP to")
|
bind = flag.String("bind", ":8923", "network address to bind HTTP to")
|
||||||
bindNetwork = flag.String("bind-network", "tcp", "network family to bind HTTP to, e.g. unix, tcp")
|
bindNetwork = flag.String("bind-network", "tcp", "network family to bind HTTP to, e.g. unix, tcp")
|
||||||
challengeDifficulty = flag.Int("difficulty", int(anubis.DefaultDifficulty), "difficulty of the challenge")
|
challengeDifficulty = flag.Int("difficulty", anubis.DefaultDifficulty, "difficulty of the challenge")
|
||||||
cookieDomain = flag.String("cookie-domain", "", "if set, the top-level domain that the Anubis cookie will be valid for")
|
cookieDomain = flag.String("cookie-domain", "", "if set, the top-level domain that the Anubis cookie will be valid for")
|
||||||
cookiePartitioned = flag.Bool("cookie-partitioned", false, "if true, sets the partitioned flag on Anubis cookies, enabling CHIPS support")
|
cookiePartitioned = flag.Bool("cookie-partitioned", false, "if true, sets the partitioned flag on Anubis cookies, enabling CHIPS support")
|
||||||
ed25519PrivateKeyHex = flag.String("ed25519-private-key-hex", "", "private key used to sign JWTs, if not set a random one will be assigned")
|
ed25519PrivateKeyHex = flag.String("ed25519-private-key-hex", "", "private key used to sign JWTs, if not set a random one will be assigned")
|
||||||
@@ -58,7 +58,7 @@ var (
|
|||||||
ogPassthrough = flag.Bool("og-passthrough", false, "enable Open Graph tag passthrough")
|
ogPassthrough = flag.Bool("og-passthrough", false, "enable Open Graph tag passthrough")
|
||||||
ogTimeToLive = flag.Duration("og-expiry-time", 24*time.Hour, "Open Graph tag cache expiration time")
|
ogTimeToLive = flag.Duration("og-expiry-time", 24*time.Hour, "Open Graph tag cache expiration time")
|
||||||
extractResources = flag.String("extract-resources", "", "if set, extract the static resources to the specified folder")
|
extractResources = flag.String("extract-resources", "", "if set, extract the static resources to the specified folder")
|
||||||
webmasterEmail = flag.String("webmaster-email", "", "if set, displays webmaster's email on the reject page for appeals")
|
webmasterEmail = flag.String("webmaster-email", "", "if set, displays webmaster's email on the reject page for appeals")
|
||||||
)
|
)
|
||||||
|
|
||||||
func keyFromHex(value string) (ed25519.PrivateKey, error) {
|
func keyFromHex(value string) (ed25519.PrivateKey, error) {
|
||||||
@@ -194,7 +194,7 @@ func main() {
|
|||||||
log.Fatalf("can't make reverse proxy: %v", err)
|
log.Fatalf("can't make reverse proxy: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
policy, err := libanubis.LoadPoliciesOrDefault(*policyFname, uint32(*challengeDifficulty))
|
policy, err := libanubis.LoadPoliciesOrDefault(*policyFname, *challengeDifficulty)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("can't parse policy file: %v", err)
|
log.Fatalf("can't parse policy file: %v", err)
|
||||||
}
|
}
|
||||||
@@ -261,7 +261,7 @@ func main() {
|
|||||||
OGPassthrough: *ogPassthrough,
|
OGPassthrough: *ogPassthrough,
|
||||||
OGTimeToLive: *ogTimeToLive,
|
OGTimeToLive: *ogTimeToLive,
|
||||||
Target: *target,
|
Target: *target,
|
||||||
WebmasterEmail: *webmasterEmail,
|
WebmasterEmail: *webmasterEmail,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("can't construct libanubis.Server: %v", err)
|
log.Fatalf("can't construct libanubis.Server: %v", err)
|
||||||
|
|||||||
@@ -11,9 +11,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
|
|
||||||
## [Unreleased]
|
## [Unreleased]
|
||||||
|
|
||||||
|
## v1.16.0
|
||||||
|
|
||||||
|
Fordola rem Lupis
|
||||||
|
|
||||||
|
> I want to make them pay! All of them! Everyone who ever mocked or looked down on me -- I want the power to make them pay!
|
||||||
|
|
||||||
|
The following features are the "big ticket" items:
|
||||||
|
|
||||||
- Added support for native Debian, Red Hat, and tarball packaging strategies including installation and use directions.
|
- Added support for native Debian, Red Hat, and tarball packaging strategies including installation and use directions.
|
||||||
- A prebaked tarball has been added, allowing distros to build Anubis like they could in v1.15.x.
|
- A prebaked tarball has been added, allowing distros to build Anubis like they could in v1.15.x.
|
||||||
- The placeholder Anubis mascot has been replaced with a design by [CELPHASE](https://bsky.app/profile/celphase.bsky.social).
|
- The placeholder Anubis mascot has been replaced with a design by [CELPHASE](https://bsky.app/profile/celphase.bsky.social).
|
||||||
|
- Verification page now shows hash rate and a progress bar for completion probability.
|
||||||
|
- Added support for [OpenGraph tags](https://ogp.me/) when rendering the challenge page. This allows for social previews to be generated when sharing the challenge page on social media platforms ([#195](https://github.com/TecharoHQ/anubis/pull/195))
|
||||||
|
- Added support for passing the ed25519 signing key in a file with `-ed25519-private-key-hex-file` or `ED25519_PRIVATE_KEY_HEX_FILE`.
|
||||||
|
|
||||||
|
The other small fixes have been made:
|
||||||
|
|
||||||
- Added a periodic cleanup routine for the decaymap that removes expired entries, ensuring stale data is properly pruned.
|
- Added a periodic cleanup routine for the decaymap that removes expired entries, ensuring stale data is properly pruned.
|
||||||
- Added a no-store Cache-Control header to the challenge page
|
- Added a no-store Cache-Control header to the challenge page
|
||||||
- Hide the directory listings for Anubis' internal static content
|
- Hide the directory listings for Anubis' internal static content
|
||||||
@@ -23,7 +37,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
- The Dockerfile has been removed as it is no longer in use
|
- The Dockerfile has been removed as it is no longer in use
|
||||||
- Developer documentation has been added to the docs site
|
- Developer documentation has been added to the docs site
|
||||||
- Show more errors when some predictable challenge page errors happen ([#150](https://github.com/TecharoHQ/anubis/issues/150))
|
- Show more errors when some predictable challenge page errors happen ([#150](https://github.com/TecharoHQ/anubis/issues/150))
|
||||||
- Verification page now shows hash rate and a progress bar for completion probability.
|
|
||||||
- Added the `--debug-benchmark-js` flag for testing proof-of-work performance during development.
|
- Added the `--debug-benchmark-js` flag for testing proof-of-work performance during development.
|
||||||
- Use `TrimSuffix` instead of `TrimRight` on containerbuild
|
- Use `TrimSuffix` instead of `TrimRight` on containerbuild
|
||||||
- Fix the startup logs to correctly show the address and port the server is listening on
|
- Fix the startup logs to correctly show the address and port the server is listening on
|
||||||
@@ -31,17 +44,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
- Added a wait with button continue + 30 second auto continue after 30s if you click "Why am I seeing this?"
|
- Added a wait with button continue + 30 second auto continue after 30s if you click "Why am I seeing this?"
|
||||||
- Fixed a typo in the challenge page title.
|
- Fixed a typo in the challenge page title.
|
||||||
- Disabled running integration tests on Windows hosts due to it's reliance on posix features (see [#133](https://github.com/TecharoHQ/anubis/pull/133#issuecomment-2764732309)).
|
- Disabled running integration tests on Windows hosts due to it's reliance on posix features (see [#133](https://github.com/TecharoHQ/anubis/pull/133#issuecomment-2764732309)).
|
||||||
- Added support for passing the ed25519 signing key in a file with `-ed25519-private-key-hex-file` or `ED25519_PRIVATE_KEY_HEX_FILE`.
|
|
||||||
- Fixed minor typos
|
- Fixed minor typos
|
||||||
- Added a Makefile to enable comfortable workflows for downstream packagers.
|
- Added a Makefile to enable comfortable workflows for downstream packagers.
|
||||||
- Added `zizmor` for GitHub Actions static analysis
|
- Added `zizmor` for GitHub Actions static analysis
|
||||||
- Fixed most `zizmor` findings
|
- Fixed most `zizmor` findings
|
||||||
- Enabled Dependabot
|
- Enabled Dependabot
|
||||||
- Added an air config for autoreload support in development ([#195](https://github.com/TecharoHQ/anubis/pull/195))
|
- Added an air config for autoreload support in development ([#195](https://github.com/TecharoHQ/anubis/pull/195))
|
||||||
- Added support for [OpenGraph tags](https://ogp.me/) when rendering the challenge page. This allows for social previews to be generated when sharing the challenge page on social media platforms ([#195](https://github.com/TecharoHQ/anubis/pull/195))
|
|
||||||
- Added an `--extract-resources` flag to extract static resources to a local folder.
|
- Added an `--extract-resources` flag to extract static resources to a local folder.
|
||||||
- Add noindex flag to all Anubis pages ([#227](https://github.com/TecharoHQ/anubis/issues/227)).
|
- Add noindex flag to all Anubis pages ([#227](https://github.com/TecharoHQ/anubis/issues/227)).
|
||||||
- Added `WEBMASTER_EMAIL` variable, if it is present then display that email address on error pages ([#235](https://github.com/TecharoHQ/anubis/pull/235), [#115](https://github.com/TecharoHQ/anubis/issues/115))
|
- Added `WEBMASTER_EMAIL` variable, if it is present then display that email address on error pages ([#235](https://github.com/TecharoHQ/anubis/pull/235), [#115](https://github.com/TecharoHQ/anubis/issues/115))
|
||||||
|
- Hash pinned all GitHub Actions
|
||||||
|
|
||||||
## v1.15.1
|
## v1.15.1
|
||||||
|
|
||||||
|
|||||||
@@ -12,6 +12,12 @@ These instructions may work, but for right now they are informative for downstre
|
|||||||
|
|
||||||
If you are doing a build entirely from source, here's what you need to do:
|
If you are doing a build entirely from source, here's what you need to do:
|
||||||
|
|
||||||
|
:::info
|
||||||
|
|
||||||
|
If you maintain a package for Anubis v1.15.x or older, you will need to update your package build. You may want to use one of the half-baked tarballs if your distro/environment of choice makes it difficult to use npm.
|
||||||
|
|
||||||
|
:::
|
||||||
|
|
||||||
### Tools needed
|
### Tools needed
|
||||||
|
|
||||||
In order to build a production-ready binary of Anubis, you need the following packages in your environment:
|
In order to build a production-ready binary of Anubis, you need the following packages in your environment:
|
||||||
|
|||||||
12
go.mod
12
go.mod
@@ -9,15 +9,13 @@ require (
|
|||||||
github.com/playwright-community/playwright-go v0.5001.0
|
github.com/playwright-community/playwright-go v0.5001.0
|
||||||
github.com/prometheus/client_golang v1.21.1
|
github.com/prometheus/client_golang v1.21.1
|
||||||
github.com/sebest/xff v0.0.0-20210106013422-671bd2870b3a
|
github.com/sebest/xff v0.0.0-20210106013422-671bd2870b3a
|
||||||
github.com/tetratelabs/wazero v1.9.0
|
|
||||||
github.com/yl2chen/cidranger v1.0.2
|
github.com/yl2chen/cidranger v1.0.2
|
||||||
golang.org/x/net v0.39.0
|
golang.org/x/net v0.38.0
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect
|
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect
|
||||||
github.com/a-h/parse v0.0.0-20250122154542-74294addb73e // indirect
|
github.com/a-h/parse v0.0.0-20250122154542-74294addb73e // indirect
|
||||||
github.com/aclements/go-moremath v0.0.0-20210112150236-f10218a38794 // indirect
|
|
||||||
github.com/andybalholm/brotli v1.1.0 // indirect
|
github.com/andybalholm/brotli v1.1.0 // indirect
|
||||||
github.com/beorn7/perks v1.0.1 // indirect
|
github.com/beorn7/perks v1.0.1 // indirect
|
||||||
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||||
@@ -43,17 +41,15 @@ require (
|
|||||||
github.com/prometheus/procfs v0.15.1 // indirect
|
github.com/prometheus/procfs v0.15.1 // indirect
|
||||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect
|
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect
|
||||||
golang.org/x/mod v0.24.0 // indirect
|
golang.org/x/mod v0.24.0 // indirect
|
||||||
golang.org/x/perf v0.0.0-20250408013232-71ba5bc8ccce // indirect
|
golang.org/x/sync v0.12.0 // indirect
|
||||||
golang.org/x/sync v0.13.0 // indirect
|
golang.org/x/sys v0.31.0 // indirect
|
||||||
golang.org/x/sys v0.32.0 // indirect
|
golang.org/x/tools v0.31.0 // indirect
|
||||||
golang.org/x/tools v0.32.0 // indirect
|
|
||||||
google.golang.org/protobuf v1.36.4 // indirect
|
google.golang.org/protobuf v1.36.4 // indirect
|
||||||
honnef.co/go/tools v0.6.1 // indirect
|
honnef.co/go/tools v0.6.1 // indirect
|
||||||
)
|
)
|
||||||
|
|
||||||
tool (
|
tool (
|
||||||
github.com/a-h/templ/cmd/templ
|
github.com/a-h/templ/cmd/templ
|
||||||
golang.org/x/perf/cmd/benchstat
|
|
||||||
golang.org/x/tools/cmd/stringer
|
golang.org/x/tools/cmd/stringer
|
||||||
honnef.co/go/tools/cmd/staticcheck
|
honnef.co/go/tools/cmd/staticcheck
|
||||||
)
|
)
|
||||||
|
|||||||
14
go.sum
14
go.sum
@@ -4,8 +4,6 @@ github.com/a-h/parse v0.0.0-20250122154542-74294addb73e h1:HjVbSQHy+dnlS6C3XajZ6
|
|||||||
github.com/a-h/parse v0.0.0-20250122154542-74294addb73e/go.mod h1:3mnrkvGpurZ4ZrTDbYU84xhwXW2TjTKShSwjRi2ihfQ=
|
github.com/a-h/parse v0.0.0-20250122154542-74294addb73e/go.mod h1:3mnrkvGpurZ4ZrTDbYU84xhwXW2TjTKShSwjRi2ihfQ=
|
||||||
github.com/a-h/templ v0.3.857 h1:6EqcJuGZW4OL+2iZ3MD+NnIcG7nGkaQeF2Zq5kf9ZGg=
|
github.com/a-h/templ v0.3.857 h1:6EqcJuGZW4OL+2iZ3MD+NnIcG7nGkaQeF2Zq5kf9ZGg=
|
||||||
github.com/a-h/templ v0.3.857/go.mod h1:qhrhAkRFubE7khxLZHsBFHfX+gWwVNKbzKeF9GlPV4M=
|
github.com/a-h/templ v0.3.857/go.mod h1:qhrhAkRFubE7khxLZHsBFHfX+gWwVNKbzKeF9GlPV4M=
|
||||||
github.com/aclements/go-moremath v0.0.0-20210112150236-f10218a38794 h1:xlwdaKcTNVW4PtpQb8aKA4Pjy0CdJHEqvFbAnvR5m2g=
|
|
||||||
github.com/aclements/go-moremath v0.0.0-20210112150236-f10218a38794/go.mod h1:7e+I0LQFUI9AXWxOfsQROs9xPhoJtbsyWcjJqDd4KPY=
|
|
||||||
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
|
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
|
||||||
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
|
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
|
||||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||||
@@ -77,8 +75,6 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
|
|||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
|
|
||||||
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
|
|
||||||
github.com/yl2chen/cidranger v1.0.2 h1:lbOWZVCG1tCRX4u24kuM1Tb4nHqWkDxwLdoS+SevawU=
|
github.com/yl2chen/cidranger v1.0.2 h1:lbOWZVCG1tCRX4u24kuM1Tb4nHqWkDxwLdoS+SevawU=
|
||||||
github.com/yl2chen/cidranger v1.0.2/go.mod h1:9U1yz7WPYDwf0vpNWFaeRh0bjwz5RVgRy/9UEQfHl0g=
|
github.com/yl2chen/cidranger v1.0.2/go.mod h1:9U1yz7WPYDwf0vpNWFaeRh0bjwz5RVgRy/9UEQfHl0g=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
@@ -98,17 +94,11 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
|||||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
|
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
|
||||||
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||||
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
|
||||||
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
|
||||||
golang.org/x/perf v0.0.0-20250408013232-71ba5bc8ccce h1:KAIyikguO7lID+oSo3Dnut9RawUS+RWK8Ejj9KPvwU4=
|
|
||||||
golang.org/x/perf v0.0.0-20250408013232-71ba5bc8ccce/go.mod h1:tAdCL3nMN92yGFHY2TrzbGPP0q+LaOFewlib1WPJdpA=
|
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
|
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
|
||||||
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||||
golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
|
|
||||||
golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
@@ -121,8 +111,6 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|||||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
|
||||||
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
@@ -140,8 +128,6 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
|||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
|
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
|
||||||
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
|
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
|
||||||
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
|
|
||||||
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
|
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
|
google.golang.org/protobuf v1.36.4 h1:6A3ZDJHn/eNqc1i+IdefRzy/9PokBTPvcqMySR7NNIM=
|
||||||
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
google.golang.org/protobuf v1.36.4/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -379,7 +378,7 @@ func saveScreenshot(t *testing.T, page playwright.Page) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
f, err := os.CreateTemp("./var", "anubis-test-fail-"+strings.ReplaceAll(t.Name(), "/", "--")+"-*.png")
|
f, err := os.CreateTemp("", "anubis-test-fail-*.png")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Logf("could not create temporary file: %v", err)
|
t.Logf("could not create temporary file: %v", err)
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -1,23 +1,19 @@
|
|||||||
package lib
|
package lib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
|
||||||
"crypto/ed25519"
|
"crypto/ed25519"
|
||||||
"crypto/rand"
|
"crypto/rand"
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"crypto/subtle"
|
"crypto/subtle"
|
||||||
"encoding/hex"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"io/fs"
|
|
||||||
"log"
|
"log"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"math"
|
"math"
|
||||||
"net"
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
@@ -35,7 +31,6 @@ import (
|
|||||||
"github.com/TecharoHQ/anubis/internal/ogtags"
|
"github.com/TecharoHQ/anubis/internal/ogtags"
|
||||||
"github.com/TecharoHQ/anubis/lib/policy"
|
"github.com/TecharoHQ/anubis/lib/policy"
|
||||||
"github.com/TecharoHQ/anubis/lib/policy/config"
|
"github.com/TecharoHQ/anubis/lib/policy/config"
|
||||||
"github.com/TecharoHQ/anubis/wasm"
|
|
||||||
"github.com/TecharoHQ/anubis/web"
|
"github.com/TecharoHQ/anubis/web"
|
||||||
"github.com/TecharoHQ/anubis/xess"
|
"github.com/TecharoHQ/anubis/xess"
|
||||||
)
|
)
|
||||||
@@ -85,7 +80,7 @@ type Options struct {
|
|||||||
WebmasterEmail string
|
WebmasterEmail string
|
||||||
}
|
}
|
||||||
|
|
||||||
func LoadPoliciesOrDefault(fname string, defaultDifficulty uint32) (*policy.ParsedConfig, error) {
|
func LoadPoliciesOrDefault(fname string, defaultDifficulty int) (*policy.ParsedConfig, error) {
|
||||||
var fin io.ReadCloser
|
var fin io.ReadCloser
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
@@ -127,36 +122,6 @@ func New(opts Options) (*Server, error) {
|
|||||||
opts: opts,
|
opts: opts,
|
||||||
DNSBLCache: decaymap.New[string, dnsbl.DroneBLResponse](),
|
DNSBLCache: decaymap.New[string, dnsbl.DroneBLResponse](),
|
||||||
OGTags: ogtags.NewOGTagCache(opts.Target, opts.OGPassthrough, opts.OGTimeToLive),
|
OGTags: ogtags.NewOGTagCache(opts.Target, opts.OGPassthrough, opts.OGTimeToLive),
|
||||||
validators: map[string]Verifier{
|
|
||||||
"fast": VerifierFunc(BasicSHA256Verify),
|
|
||||||
"slow": VerifierFunc(BasicSHA256Verify),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
finfos, err := fs.ReadDir(web.Static, "static/wasm")
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("[unexpected] can't read any webassembly files in the static folder: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, finfo := range finfos {
|
|
||||||
fin, err := web.Static.Open("static/wasm/" + finfo.Name())
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("[unexpected] can't read static/wasm/%s: %w", finfo.Name(), err)
|
|
||||||
}
|
|
||||||
defer fin.Close()
|
|
||||||
|
|
||||||
name := strings.TrimSuffix(finfo.Name(), filepath.Ext(finfo.Name()))
|
|
||||||
|
|
||||||
runner, err := wasm.NewRunner(context.Background(), finfo.Name(), fin)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("can't load static/wasm/%s: %w", finfo.Name(), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var concurrentLimit int64 = 4
|
|
||||||
|
|
||||||
cv := NewConcurrentVerifier(runner, concurrentLimit)
|
|
||||||
|
|
||||||
result.validators[name] = cv
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mux := http.NewServeMux()
|
mux := http.NewServeMux()
|
||||||
@@ -196,15 +161,13 @@ type Server struct {
|
|||||||
opts Options
|
opts Options
|
||||||
DNSBLCache *decaymap.Impl[string, dnsbl.DroneBLResponse]
|
DNSBLCache *decaymap.Impl[string, dnsbl.DroneBLResponse]
|
||||||
OGTags *ogtags.OGTagCache
|
OGTags *ogtags.OGTagCache
|
||||||
|
|
||||||
validators map[string]Verifier
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
s.mux.ServeHTTP(w, r)
|
s.mux.ServeHTTP(w, r)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Server) challengeFor(r *http.Request, difficulty uint32) string {
|
func (s *Server) challengeFor(r *http.Request, difficulty int) string {
|
||||||
fp := sha256.Sum256(s.priv.Seed())
|
fp := sha256.Sum256(s.priv.Seed())
|
||||||
|
|
||||||
challengeData := fmt.Sprintf(
|
challengeData := fmt.Sprintf(
|
||||||
@@ -441,7 +404,7 @@ func (s *Server) PassChallenge(w http.ResponseWriter, r *http.Request) {
|
|||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("Internal Server Error: administrator has misconfigured Anubis. Please contact the administrator and ask them to look for the logs around \"passChallenge\".", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
templ.Handler(web.Base("Oh noes!", web.ErrorPage("Internal Server Error: administrator has misconfigured Anubis. Please contact the administrator and ask them to look for the logs around \"passChallenge\".", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
lg = lg.With("check_result", cr, "algorithm", rule.Challenge.Algorithm)
|
lg = lg.With("check_result", cr)
|
||||||
|
|
||||||
nonceStr := r.FormValue("nonce")
|
nonceStr := r.FormValue("nonce")
|
||||||
if nonceStr == "" {
|
if nonceStr == "" {
|
||||||
@@ -473,52 +436,33 @@ func (s *Server) PassChallenge(w http.ResponseWriter, r *http.Request) {
|
|||||||
response := r.FormValue("response")
|
response := r.FormValue("response")
|
||||||
redir := r.FormValue("redir")
|
redir := r.FormValue("redir")
|
||||||
|
|
||||||
responseBytes, err := hex.DecodeString(response)
|
|
||||||
if err != nil {
|
|
||||||
s.ClearCookie(w)
|
|
||||||
lg.Debug("response doesn't parse", "err", err)
|
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid response format", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
challenge := s.challengeFor(r, rule.Challenge.Difficulty)
|
challenge := s.challengeFor(r, rule.Challenge.Difficulty)
|
||||||
challengeBytes, err := hex.DecodeString(challenge)
|
|
||||||
if err != nil {
|
|
||||||
s.ClearCookie(w)
|
|
||||||
lg.Debug("challenge doesn't parse", "err", err)
|
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid internal challenge format", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
nonceRaw, err := strconv.ParseUint(nonceStr, 10, 32)
|
nonce, err := strconv.Atoi(nonceStr)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.ClearCookie(w)
|
s.ClearCookie(w)
|
||||||
lg.Debug("nonce doesn't parse", "err", err)
|
lg.Debug("nonce doesn't parse", "err", err)
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid nonce", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid nonce", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
nonce := uint32(nonceRaw)
|
|
||||||
|
|
||||||
validator, ok := s.validators[string(rule.Challenge.Algorithm)]
|
calcString := fmt.Sprintf("%s%d", challenge, nonce)
|
||||||
if !ok {
|
calculated := internal.SHA256sum(calcString)
|
||||||
|
|
||||||
|
if subtle.ConstantTimeCompare([]byte(response), []byte(calculated)) != 1 {
|
||||||
s.ClearCookie(w)
|
s.ClearCookie(w)
|
||||||
lg.Debug("no validator found for algorithm", "algorithm", rule.Challenge.Algorithm)
|
lg.Debug("hash does not match", "got", response, "want", calculated)
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage(fmt.Sprintf("Internal anubis error has been detected and you cannot proceed. Tried to look up a validator for algorithm %s but wasn't able to find one. Please contact the administrator of this instance of anubis", rule.Challenge.Algorithm), s.opts.WebmasterEmail)), templ.WithStatus(http.StatusInternalServerError)).ServeHTTP(w, r)
|
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid response", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusForbidden)).ServeHTTP(w, r)
|
||||||
|
failedValidations.Inc()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
ok, err = validator.Verify(r.Context(), challengeBytes, responseBytes, nonce, rule.Challenge.Difficulty)
|
// compare the leading zeroes
|
||||||
if err != nil {
|
if !strings.HasPrefix(response, strings.Repeat("0", rule.Challenge.Difficulty)) {
|
||||||
s.ClearCookie(w)
|
s.ClearCookie(w)
|
||||||
lg.Debug("verification error", "err", err)
|
lg.Debug("difficulty check failed", "response", response, "difficulty", rule.Challenge.Difficulty)
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("Your challenge failed validation. Please go back and try your challenge again", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusBadRequest)).ServeHTTP(w, r)
|
templ.Handler(web.Base("Oh noes!", web.ErrorPage("invalid response", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusForbidden)).ServeHTTP(w, r)
|
||||||
return
|
failedValidations.Inc()
|
||||||
}
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
s.ClearCookie(w)
|
|
||||||
lg.Debug("response invalid")
|
|
||||||
templ.Handler(web.Base("Oh noes!", web.ErrorPage("Your challenge failed validation. Please go back and try your challenge again", s.opts.WebmasterEmail)), templ.WithStatus(http.StatusBadRequest)).ServeHTTP(w, r)
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -197,7 +197,7 @@ func TestCheckDefaultDifficultyMatchesPolicy(t *testing.T) {
|
|||||||
fmt.Fprintln(w, "OK")
|
fmt.Fprintln(w, "OK")
|
||||||
})
|
})
|
||||||
|
|
||||||
for i := uint32(1); i < 10; i++ {
|
for i := 1; i < 10; i++ {
|
||||||
t.Run(fmt.Sprint(i), func(t *testing.T) {
|
t.Run(fmt.Sprint(i), func(t *testing.T) {
|
||||||
anubisPolicy, err := LoadPoliciesOrDefault("", i)
|
anubisPolicy, err := LoadPoliciesOrDefault("", i)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -31,11 +31,9 @@ const (
|
|||||||
type Algorithm string
|
type Algorithm string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
AlgorithmUnknown Algorithm = ""
|
AlgorithmUnknown Algorithm = ""
|
||||||
AlgorithmFast Algorithm = "fast"
|
AlgorithmFast Algorithm = "fast"
|
||||||
AlgorithmSlow Algorithm = "slow"
|
AlgorithmSlow Algorithm = "slow"
|
||||||
AlgorithmArgon2ID Algorithm = "argon2id"
|
|
||||||
AlgorithmSHA256 Algorithm = "sha256"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type BotConfig struct {
|
type BotConfig struct {
|
||||||
@@ -103,8 +101,8 @@ func (b BotConfig) Valid() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ChallengeRules struct {
|
type ChallengeRules struct {
|
||||||
Difficulty uint32 `json:"difficulty"`
|
Difficulty int `json:"difficulty"`
|
||||||
ReportAs uint32 `json:"report_as"`
|
ReportAs int `json:"report_as"`
|
||||||
Algorithm Algorithm `json:"algorithm"`
|
Algorithm Algorithm `json:"algorithm"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -126,7 +124,7 @@ func (cr ChallengeRules) Valid() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch cr.Algorithm {
|
switch cr.Algorithm {
|
||||||
case AlgorithmFast, AlgorithmSlow, AlgorithmArgon2ID, AlgorithmSHA256, AlgorithmUnknown:
|
case AlgorithmFast, AlgorithmSlow, AlgorithmUnknown:
|
||||||
// do nothing, it's all good
|
// do nothing, it's all good
|
||||||
default:
|
default:
|
||||||
errs = append(errs, fmt.Errorf("%w: %q", ErrChallengeRuleHasWrongAlgorithm, cr.Algorithm))
|
errs = append(errs, fmt.Errorf("%w: %q", ErrChallengeRuleHasWrongAlgorithm, cr.Algorithm))
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ type ParsedConfig struct {
|
|||||||
|
|
||||||
Bots []Bot
|
Bots []Bot
|
||||||
DNSBL bool
|
DNSBL bool
|
||||||
DefaultDifficulty uint32
|
DefaultDifficulty int
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewParsedConfig(orig config.Config) *ParsedConfig {
|
func NewParsedConfig(orig config.Config) *ParsedConfig {
|
||||||
@@ -36,7 +36,7 @@ func NewParsedConfig(orig config.Config) *ParsedConfig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func ParseConfig(fin io.Reader, fname string, defaultDifficulty uint32) (*ParsedConfig, error) {
|
func ParseConfig(fin io.Reader, fname string, defaultDifficulty int) (*ParsedConfig, error) {
|
||||||
var c config.Config
|
var c config.Config
|
||||||
if err := json.NewDecoder(fin).Decode(&c); err != nil {
|
if err := json.NewDecoder(fin).Decode(&c); err != nil {
|
||||||
return nil, fmt.Errorf("can't parse policy config JSON %s: %w", fname, err)
|
return nil, fmt.Errorf("can't parse policy config JSON %s: %w", fname, err)
|
||||||
@@ -99,12 +99,12 @@ func ParseConfig(fin io.Reader, fname string, defaultDifficulty uint32) (*Parsed
|
|||||||
parsedBot.Challenge = &config.ChallengeRules{
|
parsedBot.Challenge = &config.ChallengeRules{
|
||||||
Difficulty: defaultDifficulty,
|
Difficulty: defaultDifficulty,
|
||||||
ReportAs: defaultDifficulty,
|
ReportAs: defaultDifficulty,
|
||||||
Algorithm: config.AlgorithmArgon2ID,
|
Algorithm: config.AlgorithmFast,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
parsedBot.Challenge = b.Challenge
|
parsedBot.Challenge = b.Challenge
|
||||||
if parsedBot.Challenge.Algorithm == config.AlgorithmUnknown {
|
if parsedBot.Challenge.Algorithm == config.AlgorithmUnknown {
|
||||||
parsedBot.Challenge.Algorithm = config.AlgorithmArgon2ID
|
parsedBot.Challenge.Algorithm = config.AlgorithmFast
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
package lib
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"crypto/sha256"
|
|
||||||
"crypto/subtle"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"golang.org/x/sync/semaphore"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
ErrChallengeFailed = errors.New("libanubis: challenge failed, hash does not match what the server calculated")
|
|
||||||
ErrWrongChallengeDifficulty = errors.New("libanubis: wrong challenge difficulty")
|
|
||||||
)
|
|
||||||
|
|
||||||
type Verifier interface {
|
|
||||||
Verify(ctx context.Context, challenge, verify []byte, nonce, difficulty uint32) (bool, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
type VerifierFunc func(ctx context.Context, challenge, verify []byte, nonce, difficulty uint32) (bool, error)
|
|
||||||
|
|
||||||
func (vf VerifierFunc) Verify(ctx context.Context, challenge, verify []byte, nonce, difficulty uint32) (bool, error) {
|
|
||||||
return vf(ctx, challenge, verify, nonce, difficulty)
|
|
||||||
}
|
|
||||||
|
|
||||||
func BasicSHA256Verify(ctx context.Context, challenge, verify []byte, nonce, difficulty uint32) (bool, error) {
|
|
||||||
h := sha256.New()
|
|
||||||
fmt.Fprintf(h, "%x%d", challenge, nonce)
|
|
||||||
data := h.Sum(nil)
|
|
||||||
|
|
||||||
if subtle.ConstantTimeCompare(data, verify) != 1 {
|
|
||||||
return false, fmt.Errorf("%w: wanted %x, got: %x", ErrChallengeFailed, verify, data)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !hasLeadingZeroNibbles(data, difficulty) {
|
|
||||||
return false, fmt.Errorf("%w: wanted %d leading zeroes in calculated data %x, but did not get it", ErrWrongChallengeDifficulty, difficulty, data)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !hasLeadingZeroNibbles(verify, difficulty) {
|
|
||||||
return false, fmt.Errorf("%w: wanted %d leading zeroes in verification data %x, but did not get it", ErrWrongChallengeDifficulty, verify, difficulty)
|
|
||||||
}
|
|
||||||
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// hasLeadingZeroNibbles checks if the first `n` nibbles (in order) are zero.
|
|
||||||
// Nibbles are read from high to low for each byte (e.g., 0x12 -> nibbles [0x1, 0x2]).
|
|
||||||
func hasLeadingZeroNibbles(data []byte, n uint32) bool {
|
|
||||||
count := uint32(0)
|
|
||||||
for _, b := range data {
|
|
||||||
// Check high nibble (first 4 bits)
|
|
||||||
if (b >> 4) != 0 {
|
|
||||||
break // Non-zero found in leading nibbles
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
if count >= n {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check low nibble (last 4 bits)
|
|
||||||
if (b & 0x0F) != 0 {
|
|
||||||
break // Non-zero found in leading nibbles
|
|
||||||
}
|
|
||||||
count++
|
|
||||||
if count >= n {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return count >= n
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConcurrentVerifier struct {
|
|
||||||
Verifier
|
|
||||||
sem *semaphore.Weighted
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewConcurrentVerifier(v Verifier, maxConcurrent int64) *ConcurrentVerifier {
|
|
||||||
return &ConcurrentVerifier{
|
|
||||||
Verifier: v,
|
|
||||||
sem: semaphore.NewWeighted(maxConcurrent),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (cv *ConcurrentVerifier) Verify(ctx context.Context, challenge, verify []byte, nonce, difficulty uint32) (bool, error) {
|
|
||||||
if err := cv.sem.Acquire(ctx, 1); err != nil {
|
|
||||||
return false, fmt.Errorf("can't verify solution: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return cv.Verifier.Verify(ctx, challenge, verify, nonce, difficulty)
|
|
||||||
}
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
package lib
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/hex"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
// echo -n "hi2" | sha256sum
|
|
||||||
const hi2SHA256 = "0251f1ec2880f67631b8d0b3a62cf71a17dfa31858a323e7fc38068fcfaeded0"
|
|
||||||
const nonce uint32 = 5
|
|
||||||
const expectedVerifyString = "0543cbd94db5da055e82263cb775ac16f59fbbc1900645458baa197f9036ae9d"
|
|
||||||
|
|
||||||
func TestBasicSHA256Verify(t *testing.T) {
|
|
||||||
ctx := context.Background()
|
|
||||||
|
|
||||||
challenge, err := hex.DecodeString(hi2SHA256)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("[unexpected] %s does not decode as hex", hi2SHA256)
|
|
||||||
}
|
|
||||||
|
|
||||||
expectedVerify, err := hex.DecodeString(expectedVerifyString)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("[unexpected] %s does not decode as hex", expectedVerifyString)
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("got nonce: %d", nonce)
|
|
||||||
t.Logf("got hash: %x", expectedVerify)
|
|
||||||
|
|
||||||
invalidVerify := make([]byte, len(expectedVerify))
|
|
||||||
copy(invalidVerify, expectedVerify)
|
|
||||||
invalidVerify[len(invalidVerify)-1] ^= 0xFF // Flip the last byte
|
|
||||||
|
|
||||||
testCases := []struct {
|
|
||||||
name string
|
|
||||||
challenge []byte
|
|
||||||
verify []byte
|
|
||||||
nonce uint32
|
|
||||||
difficulty uint32
|
|
||||||
want bool
|
|
||||||
expectError error
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "valid verification",
|
|
||||||
challenge: challenge,
|
|
||||||
verify: expectedVerify,
|
|
||||||
nonce: nonce,
|
|
||||||
difficulty: 1,
|
|
||||||
want: true,
|
|
||||||
expectError: nil,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "invalid verify data",
|
|
||||||
challenge: challenge,
|
|
||||||
verify: invalidVerify,
|
|
||||||
nonce: nonce,
|
|
||||||
difficulty: 1,
|
|
||||||
want: false,
|
|
||||||
expectError: ErrChallengeFailed,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "insufficient computed data difficulty",
|
|
||||||
challenge: challenge,
|
|
||||||
verify: expectedVerify,
|
|
||||||
nonce: nonce,
|
|
||||||
difficulty: 5,
|
|
||||||
want: false,
|
|
||||||
expectError: ErrWrongChallengeDifficulty,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "zero difficulty",
|
|
||||||
challenge: challenge,
|
|
||||||
verify: expectedVerify,
|
|
||||||
nonce: nonce,
|
|
||||||
difficulty: 0,
|
|
||||||
want: true,
|
|
||||||
expectError: nil,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tc := range testCases {
|
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
|
||||||
got, err := BasicSHA256Verify(ctx, tc.challenge, tc.verify, tc.nonce, tc.difficulty)
|
|
||||||
if !errors.Is(err, tc.expectError) {
|
|
||||||
t.Errorf("BasicSHA256Verify() error = %v, expectError %v", err, tc.expectError)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if got != tc.want {
|
|
||||||
t.Errorf("BasicSHA256Verify() got = %v, want %v", got, tc.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHasLeadingZeroNibbles(t *testing.T) {
|
|
||||||
for _, cs := range []struct {
|
|
||||||
data []byte
|
|
||||||
difficulty uint32
|
|
||||||
valid bool
|
|
||||||
}{
|
|
||||||
{[]byte{0x10, 0x00}, 1, false},
|
|
||||||
{[]byte{0x00, 0x00}, 4, true},
|
|
||||||
{[]byte{0x01, 0x00}, 4, false},
|
|
||||||
} {
|
|
||||||
t.Run(fmt.Sprintf("%x-%d-%v", cs.data, cs.difficulty, cs.valid), func(t *testing.T) {
|
|
||||||
result := hasLeadingZeroNibbles(cs.data, cs.difficulty)
|
|
||||||
if result != cs.valid {
|
|
||||||
t.Errorf("wanted %v, but got: %v", cs.valid, result)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,11 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@techaro/anubis",
|
"name": "@xeserv/xess",
|
||||||
"version": "1.0.0-see-VERSION-file",
|
"version": "1.0.0-see-VERSION-file",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "@techaro/anubis",
|
"name": "@xeserv/xess",
|
||||||
"version": "1.0.0-see-VERSION-file",
|
"version": "1.0.0-see-VERSION-file",
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
@@ -6,9 +6,7 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "npm run assets && go test ./...",
|
"test": "npm run assets && go test ./...",
|
||||||
"test:integration": "npm run assets && go test -v ./internal/test",
|
"test:integration": "npm run assets && go test -v ./internal/test",
|
||||||
"assets:frontend": "go generate ./... && ./web/build.sh && ./xess/build.sh",
|
"assets": "go generate ./... && ./web/build.sh && ./xess/build.sh",
|
||||||
"assets:wasm": "RUSTFLAGS='-C target-feature=+simd128' cargo build --release --target wasm32-unknown-unknown && sh -c 'cp -vf ./target/wasm32-unknown-unknown/release/*.wasm ./web/static/wasm'",
|
|
||||||
"assets": "npm run assets:frontend && npm run assets:wasm",
|
|
||||||
"build": "npm run assets && go build -o ./var/anubis ./cmd/anubis",
|
"build": "npm run assets && go build -o ./var/anubis ./cmd/anubis",
|
||||||
"dev": "npm run assets && go run ./cmd/anubis --use-remote-address",
|
"dev": "npm run assets && go run ./cmd/anubis --use-remote-address",
|
||||||
"container": "npm run assets && go run ./cmd/containerbuild",
|
"container": "npm run assets && go run ./cmd/containerbuild",
|
||||||
|
|||||||
@@ -1,4 +0,0 @@
|
|||||||
[toolchain]
|
|
||||||
channel = "stable"
|
|
||||||
targets = ["wasm32-unknown-unknown"]
|
|
||||||
profile = "minimal"
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "anubis"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2024"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
wee_alloc = "0.4"
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
use std::sync::{LazyLock, Mutex};
|
|
||||||
|
|
||||||
extern crate wee_alloc;
|
|
||||||
|
|
||||||
#[global_allocator]
|
|
||||||
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
|
|
||||||
|
|
||||||
#[cfg(target_arch = "wasm32")]
|
|
||||||
mod hostimport {
|
|
||||||
use crate::{DATA_BUFFER, DATA_LENGTH};
|
|
||||||
|
|
||||||
#[link(wasm_import_module = "anubis")]
|
|
||||||
unsafe extern "C" {
|
|
||||||
/// The runtime expects this function to be defined. It is called whenever the Anubis check
|
|
||||||
/// worker processes about 1024 hashes. This can be a no-op if you want.
|
|
||||||
fn anubis_update_nonce(nonce: u32);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Safe wrapper to `anubis_update_nonce`.
|
|
||||||
pub fn update_nonce(nonce: u32) {
|
|
||||||
unsafe {
|
|
||||||
anubis_update_nonce(nonce);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn data_ptr() -> *const u8 {
|
|
||||||
let challenge = &DATA_BUFFER;
|
|
||||||
challenge.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn set_data_length(len: u32) {
|
|
||||||
let mut data_length = DATA_LENGTH.lock().unwrap();
|
|
||||||
*data_length = len as usize;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
|
||||||
mod hostimport {
|
|
||||||
pub fn update_nonce(_nonce: u32) {
|
|
||||||
// This is intentionally blank
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The data buffer is a bit weird in that it doesn't have an explicit length as it can
|
|
||||||
/// and will change depending on the challenge input that was sent by the server.
|
|
||||||
/// However, it can only fit 4096 bytes of data (one amd64 machine page). This is
|
|
||||||
/// slightly overkill for the purposes of an Anubis check, but it's fine to assume
|
|
||||||
/// that the browser can afford this much ram usage.
|
|
||||||
///
|
|
||||||
/// Callers should fetch the base data pointer, write up to 4096 bytes, and then
|
|
||||||
/// `set_data_length` the number of bytes they have written
|
|
||||||
///
|
|
||||||
/// This is also functionally a write-only buffer, so it doesn't really matter that
|
|
||||||
/// the length of this buffer isn't exposed.
|
|
||||||
pub static DATA_BUFFER: LazyLock<[u8; 4096]> = LazyLock::new(|| [0; 4096]);
|
|
||||||
pub static DATA_LENGTH: LazyLock<Mutex<usize>> = LazyLock::new(|| Mutex::new(0));
|
|
||||||
|
|
||||||
pub use hostimport::update_nonce;
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "argon2id"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2024"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
crate-type = ["cdylib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
argon2 = "0.5"
|
|
||||||
|
|
||||||
anubis = { path = "../../anubis" }
|
|
||||||
|
|
||||||
[lints.clippy]
|
|
||||||
nursery = { level = "warn", priority = -1 }
|
|
||||||
pedantic = { level = "warn", priority = -1 }
|
|
||||||
unwrap_used = "warn"
|
|
||||||
uninlined_format_args = "allow"
|
|
||||||
missing_panics_doc = "allow"
|
|
||||||
missing_errors_doc = "allow"
|
|
||||||
cognitive_complexity = "allow"
|
|
||||||
@@ -1,176 +0,0 @@
|
|||||||
use anubis::{DATA_BUFFER, DATA_LENGTH, update_nonce};
|
|
||||||
use argon2::Argon2;
|
|
||||||
use std::boxed::Box;
|
|
||||||
use std::sync::{LazyLock, Mutex};
|
|
||||||
|
|
||||||
/// SHA-256 hashes are 32 bytes (256 bits). These are stored in static buffers due to the
|
|
||||||
/// fact that you cannot easily pass data from host space to WebAssembly space.
|
|
||||||
pub static RESULT_HASH: LazyLock<Mutex<[u8; 32]>> = LazyLock::new(|| Mutex::new([0; 32]));
|
|
||||||
|
|
||||||
pub static VERIFICATION_HASH: LazyLock<Box<Mutex<[u8; 32]>>> =
|
|
||||||
LazyLock::new(|| Box::new(Mutex::new([0; 32])));
|
|
||||||
|
|
||||||
/// Core validation function. Compare each bit in the hash by progressively masking bits until
|
|
||||||
/// some are found to not be matching.
|
|
||||||
///
|
|
||||||
/// There are probably more clever ways to do this, likely involving lookup tables or something
|
|
||||||
/// really fun like that. However in my testing this lets us get up to 200 kilohashes per second
|
|
||||||
/// on my Ryzen 7950x3D, up from about 50 kilohashes per second in JavaScript.
|
|
||||||
fn validate(hash: &[u8], difficulty: u32) -> bool {
|
|
||||||
let mut remaining = difficulty;
|
|
||||||
for &byte in hash {
|
|
||||||
// If we're out of bits to check, exit. This is all good.
|
|
||||||
if remaining == 0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there are more than 8 bits remaining, the entire byte should be a
|
|
||||||
// zero. This fast-path compares the byte to 0 and if it matches, subtract
|
|
||||||
// 8 bits.
|
|
||||||
if remaining >= 8 {
|
|
||||||
if byte != 0 {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
remaining -= 8;
|
|
||||||
} else {
|
|
||||||
// Otherwise mask off individual bits and check against them.
|
|
||||||
let mask = 0xFF << (8 - remaining);
|
|
||||||
if (byte & mask) != 0 {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
remaining = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Computes hash for given nonce.
|
|
||||||
///
|
|
||||||
/// This differs from the JavaScript implementations by constructing the hash differently. In
|
|
||||||
/// JavaScript implementations, the SHA-256 input is the result of appending the nonce as an
|
|
||||||
/// integer to the hex-formatted challenge, eg:
|
|
||||||
///
|
|
||||||
/// sha256(`${challenge}${nonce}`);
|
|
||||||
///
|
|
||||||
/// This **does work**, however I think that this can be done a bit better by operating on the
|
|
||||||
/// challenge bytes _directly_ and treating the nonce as a salt.
|
|
||||||
///
|
|
||||||
/// The nonce is also randomly encoded in either big or little endian depending on the last
|
|
||||||
/// byte of the data buffer in an effort to make it more annoying to automate with GPUs.
|
|
||||||
fn compute_hash(nonce: u32) -> [u8; 32] {
|
|
||||||
let data = &DATA_BUFFER;
|
|
||||||
let data_len = *DATA_LENGTH.lock().unwrap();
|
|
||||||
let use_le = data[data_len - 1] >= 128;
|
|
||||||
let mut result = [0u8; 32];
|
|
||||||
|
|
||||||
let nonce = nonce as u64;
|
|
||||||
|
|
||||||
let data_slice = &data[..data_len];
|
|
||||||
|
|
||||||
let nonce = if use_le {
|
|
||||||
nonce.to_le_bytes()
|
|
||||||
} else {
|
|
||||||
nonce.to_be_bytes()
|
|
||||||
};
|
|
||||||
|
|
||||||
let argon2 = Argon2::default();
|
|
||||||
argon2
|
|
||||||
.hash_password_into(&data_slice, &nonce, &mut result)
|
|
||||||
.unwrap();
|
|
||||||
result
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This function is the main entrypoint for the Anubis proof of work implementation.
|
|
||||||
///
|
|
||||||
/// This expects `DATA_BUFFER` to be pre-populated with the challenge value as "raw bytes".
|
|
||||||
/// The definition of what goes in the data buffer is an exercise for the implementor, but
|
|
||||||
/// for SHA-256 we store the hash as "raw bytes". The data buffer is intentionally oversized
|
|
||||||
/// so that the challenge value can be expanded in the future.
|
|
||||||
///
|
|
||||||
/// `difficulty` is the number of leading bits that must match `0` in order for the
|
|
||||||
/// challenge to be successfully passed. This will be validated by the server.
|
|
||||||
///
|
|
||||||
/// `initial_nonce` is the initial value of the nonce (number used once). This nonce will be
|
|
||||||
/// appended to the challenge value in order to find a hash matching the specified
|
|
||||||
/// difficulty.
|
|
||||||
///
|
|
||||||
/// `iterand` (noun form of iterate) is the amount that the nonce should be increased by
|
|
||||||
/// every iteration of the proof of work loop. This will vary by how many threads are
|
|
||||||
/// running the proof-of-work check, and also functions as a thread ID. This prevents
|
|
||||||
/// wasting CPU time retrying a hash+nonce pair that likely won't work.
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_work(difficulty: u32, initial_nonce: u32, iterand: u32) -> u32 {
|
|
||||||
let mut nonce = initial_nonce;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let hash = compute_hash(nonce);
|
|
||||||
|
|
||||||
if validate(&hash, difficulty) {
|
|
||||||
// If the challenge worked, copy the bytes into `RESULT_HASH` so the runtime
|
|
||||||
// can pick it up.
|
|
||||||
let mut challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.copy_from_slice(&hash);
|
|
||||||
return nonce;
|
|
||||||
}
|
|
||||||
|
|
||||||
let old_nonce = nonce;
|
|
||||||
nonce = nonce.wrapping_add(iterand);
|
|
||||||
|
|
||||||
// send a progress update every 1024 iterations. since each thread checks
|
|
||||||
// separate values, one simple way to do this is by bit masking the
|
|
||||||
// nonce for multiples of 1024. unfortunately, if the number of threads
|
|
||||||
// is not prime, only some of the threads will be sending the status
|
|
||||||
// update and they will get behind the others. this is slightly more
|
|
||||||
// complicated but ensures an even distribution between threads.
|
|
||||||
if nonce > old_nonce + 1023 && (nonce >> 10) % iterand == initial_nonce {
|
|
||||||
update_nonce(nonce);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This function is called by the server in order to validate a proof-of-work challenge.
|
|
||||||
/// This expects `DATA_BUFFER` to be set to the challenge value and `VERIFICATION_HASH` to
|
|
||||||
/// be set to the "raw bytes" of the SHA-256 hash that the client calculated.
|
|
||||||
///
|
|
||||||
/// If everything is good, it returns true. Otherwise, it returns false.
|
|
||||||
///
|
|
||||||
/// XXX(Xe): this could probably return an error code for what step fails, but this is fine
|
|
||||||
/// for now.
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_validate(nonce: u32, difficulty: u32) -> bool {
|
|
||||||
let computed = compute_hash(nonce);
|
|
||||||
let valid = validate(&computed, difficulty);
|
|
||||||
if !valid {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let verification = VERIFICATION_HASH.lock().unwrap();
|
|
||||||
computed == *verification
|
|
||||||
}
|
|
||||||
|
|
||||||
// These functions exist to give pointers and lengths to the runtime around the Anubis
|
|
||||||
// checks, this allows JavaScript and Go to safely manipulate the memory layout that Rust
|
|
||||||
// has statically allocated at compile time without having to assume how the Rust compiler
|
|
||||||
// is going to lay it out.
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_ptr() -> *const u8 {
|
|
||||||
let challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_size() -> usize {
|
|
||||||
RESULT_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_ptr() -> *const u8 {
|
|
||||||
let verification = VERIFICATION_HASH.lock().unwrap();
|
|
||||||
verification.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_size() -> usize {
|
|
||||||
VERIFICATION_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "equix"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2024"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
crate-type = ["cdylib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
equix = "0.2"
|
|
||||||
|
|
||||||
anubis = { path = "../../anubis" }
|
|
||||||
|
|
||||||
[lints.clippy]
|
|
||||||
nursery = { level = "warn", priority = -1 }
|
|
||||||
pedantic = { level = "warn", priority = -1 }
|
|
||||||
unwrap_used = "warn"
|
|
||||||
uninlined_format_args = "allow"
|
|
||||||
missing_panics_doc = "allow"
|
|
||||||
missing_errors_doc = "allow"
|
|
||||||
cognitive_complexity = "allow"
|
|
||||||
@@ -1,75 +0,0 @@
|
|||||||
use anubis::{DATA_BUFFER, DATA_LENGTH, update_nonce};
|
|
||||||
use std::boxed::Box;
|
|
||||||
use std::mem::size_of;
|
|
||||||
use std::sync::{LazyLock, Mutex};
|
|
||||||
|
|
||||||
pub static RESULT_HASH: LazyLock<Mutex<[u8; 16]>> = LazyLock::new(|| Mutex::new([0; 16]));
|
|
||||||
|
|
||||||
pub static VERIFICATION_HASH: LazyLock<Box<Mutex<[u8; 16]>>> =
|
|
||||||
LazyLock::new(|| Box::new(Mutex::new([0; 16])));
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_work(_difficulty: u32, initial_nonce: u32, iterand: u32) -> u32 {
|
|
||||||
let data = &mut DATA_BUFFER.clone();
|
|
||||||
let mut data_len = DATA_LENGTH.lock().unwrap();
|
|
||||||
|
|
||||||
// Ensure there's enough space in the buffer for the nonce (4 bytes)
|
|
||||||
if *data_len + size_of::<u32>() > data.len() {
|
|
||||||
#[cfg(target_arch = "wasm32")]
|
|
||||||
unreachable!();
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
|
||||||
panic!("Not enough space in DATA_BUFFER to write nonce");
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut nonce = initial_nonce;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let nonce_bytes = nonce.to_le_bytes();
|
|
||||||
let start = *data_len;
|
|
||||||
let end = start + size_of::<u32>();
|
|
||||||
data[start..end].copy_from_slice(&nonce_bytes);
|
|
||||||
|
|
||||||
// Update the data length
|
|
||||||
*data_len += size_of::<u32>();
|
|
||||||
let data_slice = &data[..*data_len];
|
|
||||||
|
|
||||||
let result = equix::solve(data_slice).unwrap();
|
|
||||||
|
|
||||||
if result.len() == 0 {
|
|
||||||
nonce += iterand;
|
|
||||||
update_nonce(nonce);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.copy_from_slice(&result[0].to_bytes());
|
|
||||||
return nonce;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_validate(nonce: u32, difficulty: u32) -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_ptr() -> *const u8 {
|
|
||||||
let challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_size() -> usize {
|
|
||||||
RESULT_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_ptr() -> *const u8 {
|
|
||||||
let verification = VERIFICATION_HASH.lock().unwrap();
|
|
||||||
verification.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_size() -> usize {
|
|
||||||
VERIFICATION_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "sha256"
|
|
||||||
version = "0.1.0"
|
|
||||||
edition = "2024"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
crate-type = ["cdylib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
sha2 = "0.11.0-pre.5"
|
|
||||||
|
|
||||||
anubis = { path = "../../anubis" }
|
|
||||||
|
|
||||||
[lints.clippy]
|
|
||||||
nursery = { level = "warn", priority = -1 }
|
|
||||||
pedantic = { level = "warn", priority = -1 }
|
|
||||||
unwrap_used = "warn"
|
|
||||||
uninlined_format_args = "allow"
|
|
||||||
missing_panics_doc = "allow"
|
|
||||||
missing_errors_doc = "allow"
|
|
||||||
cognitive_complexity = "allow"
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
<script src="run.js" type="module"></script>
|
|
||||||
@@ -1,105 +0,0 @@
|
|||||||
// Load and instantiate the .wasm file
|
|
||||||
const response = await fetch("sha256.wasm");
|
|
||||||
|
|
||||||
const importObject = {
|
|
||||||
anubis: {
|
|
||||||
anubis_update_nonce: (nonce) => {
|
|
||||||
console.log(`Received nonce update: ${nonce}`);
|
|
||||||
// Your logic here
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const module = await WebAssembly.compileStreaming(response);
|
|
||||||
const instance = await WebAssembly.instantiate(module, importObject);
|
|
||||||
|
|
||||||
// Get exports
|
|
||||||
const {
|
|
||||||
anubis_work,
|
|
||||||
anubis_validate,
|
|
||||||
data_ptr,
|
|
||||||
result_hash_ptr,
|
|
||||||
result_hash_size,
|
|
||||||
verification_hash_ptr,
|
|
||||||
verification_hash_size,
|
|
||||||
set_data_length,
|
|
||||||
memory
|
|
||||||
} = instance.exports;
|
|
||||||
|
|
||||||
console.log(instance.exports);
|
|
||||||
|
|
||||||
function uint8ArrayToHex(arr) {
|
|
||||||
return Array.from(arr)
|
|
||||||
.map((c) => c.toString(16).padStart(2, "0"))
|
|
||||||
.join("");
|
|
||||||
}
|
|
||||||
|
|
||||||
function hexToUint8Array(hexString) {
|
|
||||||
// Remove whitespace and optional '0x' prefix
|
|
||||||
hexString = hexString.replace(/\s+/g, '').replace(/^0x/, '');
|
|
||||||
|
|
||||||
// Check for valid length
|
|
||||||
if (hexString.length % 2 !== 0) {
|
|
||||||
throw new Error('Invalid hex string length');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for valid characters
|
|
||||||
if (!/^[0-9a-fA-F]+$/.test(hexString)) {
|
|
||||||
throw new Error('Invalid hex characters');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to Uint8Array
|
|
||||||
const byteArray = new Uint8Array(hexString.length / 2);
|
|
||||||
for (let i = 0; i < byteArray.length; i++) {
|
|
||||||
const byteValue = parseInt(hexString.substr(i * 2, 2), 16);
|
|
||||||
byteArray[i] = byteValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
return byteArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write data to buffer
|
|
||||||
function writeToBuffer(data) {
|
|
||||||
if (data.length > 1024) throw new Error("Data exceeds buffer size");
|
|
||||||
|
|
||||||
// Get pointer and create view
|
|
||||||
const offset = data_ptr();
|
|
||||||
const buffer = new Uint8Array(memory.buffer, offset, data.length);
|
|
||||||
|
|
||||||
// Copy data
|
|
||||||
buffer.set(data);
|
|
||||||
|
|
||||||
// Set data length
|
|
||||||
set_data_length(data.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
function readFromChallenge() {
|
|
||||||
const offset = result_hash_ptr();
|
|
||||||
const buffer = new Uint8Array(memory.buffer, offset, result_hash_size());
|
|
||||||
|
|
||||||
return buffer;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Example usage:
|
|
||||||
const data = hexToUint8Array("98ea6e4f216f2fb4b69fff9b3a44842c38686ca685f3f55dc48c5d3fb1107be4");
|
|
||||||
writeToBuffer(data);
|
|
||||||
|
|
||||||
// Call work function
|
|
||||||
const t0 = Date.now();
|
|
||||||
const nonce = anubis_work(16, 0, 1);
|
|
||||||
const t1 = Date.now();
|
|
||||||
|
|
||||||
console.log(`Done! Took ${t1 - t0}ms, ${nonce} iterations`);
|
|
||||||
|
|
||||||
const challengeBuffer = readFromChallenge();
|
|
||||||
|
|
||||||
{
|
|
||||||
const buffer = new Uint8Array(memory.buffer, verification_hash_ptr(), verification_hash_size());
|
|
||||||
buffer.set(challengeBuffer);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate
|
|
||||||
const isValid = anubis_validate(nonce, 10) === 1;
|
|
||||||
console.log(isValid);
|
|
||||||
|
|
||||||
console.log(uint8ArrayToHex(readFromChallenge()));
|
|
||||||
@@ -1,171 +0,0 @@
|
|||||||
use anubis::{DATA_BUFFER, DATA_LENGTH, update_nonce};
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use std::boxed::Box;
|
|
||||||
use std::sync::{LazyLock, Mutex};
|
|
||||||
|
|
||||||
/// SHA-256 hashes are 32 bytes (256 bits). These are stored in static buffers due to the
|
|
||||||
/// fact that you cannot easily pass data from host space to WebAssembly space.
|
|
||||||
pub static RESULT_HASH: LazyLock<Box<Mutex<[u8; 32]>>> =
|
|
||||||
LazyLock::new(|| Box::new(Mutex::new([0; 32])));
|
|
||||||
|
|
||||||
pub static VERIFICATION_HASH: LazyLock<Box<Mutex<[u8; 32]>>> =
|
|
||||||
LazyLock::new(|| Box::new(Mutex::new([0; 32])));
|
|
||||||
|
|
||||||
/// Core validation function. Compare each bit in the hash by progressively masking bits until
|
|
||||||
/// some are found to not be matching.
|
|
||||||
///
|
|
||||||
/// There are probably more clever ways to do this, likely involving lookup tables or something
|
|
||||||
/// really fun like that. However in my testing this lets us get up to 200 kilohashes per second
|
|
||||||
/// on my Ryzen 7950x3D, up from about 50 kilohashes per second in JavaScript.
|
|
||||||
fn validate(hash: &[u8], difficulty: u32) -> bool {
|
|
||||||
let mut remaining = difficulty;
|
|
||||||
for &byte in hash {
|
|
||||||
// If we're out of bits to check, exit. This is all good.
|
|
||||||
if remaining == 0 {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there are more than 8 bits remaining, the entire byte should be a
|
|
||||||
// zero. This fast-path compares the byte to 0 and if it matches, subtract
|
|
||||||
// 8 bits.
|
|
||||||
if remaining >= 8 {
|
|
||||||
if byte != 0 {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
remaining -= 8;
|
|
||||||
} else {
|
|
||||||
// Otherwise mask off individual bits and check against them.
|
|
||||||
let mask = 0xFF << (8 - remaining);
|
|
||||||
if (byte & mask) != 0 {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
remaining = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Computes hash for given nonce.
|
|
||||||
///
|
|
||||||
/// This differs from the JavaScript implementations by constructing the hash differently. In
|
|
||||||
/// JavaScript implementations, the SHA-256 input is the result of appending the nonce as an
|
|
||||||
/// integer to the hex-formatted challenge, eg:
|
|
||||||
///
|
|
||||||
/// sha256(`${challenge}${nonce}`);
|
|
||||||
///
|
|
||||||
/// This **does work**, however I think that this can be done a bit better by operating on the
|
|
||||||
/// challenge bytes _directly_ and treating the nonce as a salt.
|
|
||||||
///
|
|
||||||
/// The nonce is also randomly encoded in either big or little endian depending on the last
|
|
||||||
/// byte of the data buffer in an effort to make it more annoying to automate with GPUs.
|
|
||||||
fn compute_hash(nonce: u32) -> [u8; 32] {
|
|
||||||
let data = &DATA_BUFFER;
|
|
||||||
let data_len = *DATA_LENGTH.lock().unwrap();
|
|
||||||
let use_le = data[data_len - 1] >= 128;
|
|
||||||
|
|
||||||
let data_slice = &data[..data_len];
|
|
||||||
|
|
||||||
let mut hasher = Sha256::new();
|
|
||||||
hasher.update(data_slice);
|
|
||||||
hasher.update(if use_le {
|
|
||||||
nonce.to_le_bytes()
|
|
||||||
} else {
|
|
||||||
nonce.to_be_bytes()
|
|
||||||
});
|
|
||||||
hasher.finalize().into()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This function is the main entrypoint for the Anubis proof of work implementation.
|
|
||||||
///
|
|
||||||
/// This expects `DATA_BUFFER` to be pre-populated with the challenge value as "raw bytes".
|
|
||||||
/// The definition of what goes in the data buffer is an exercise for the implementor, but
|
|
||||||
/// for SHA-256 we store the hash as "raw bytes". The data buffer is intentionally oversized
|
|
||||||
/// so that the challenge value can be expanded in the future.
|
|
||||||
///
|
|
||||||
/// `difficulty` is the number of leading bits that must match `0` in order for the
|
|
||||||
/// challenge to be successfully passed. This will be validated by the server.
|
|
||||||
///
|
|
||||||
/// `initial_nonce` is the initial value of the nonce (number used once). This nonce will be
|
|
||||||
/// appended to the challenge value in order to find a hash matching the specified
|
|
||||||
/// difficulty.
|
|
||||||
///
|
|
||||||
/// `iterand` (noun form of iterate) is the amount that the nonce should be increased by
|
|
||||||
/// every iteration of the proof of work loop. This will vary by how many threads are
|
|
||||||
/// running the proof-of-work check, and also functions as a thread ID. This prevents
|
|
||||||
/// wasting CPU time retrying a hash+nonce pair that likely won't work.
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_work(difficulty: u32, initial_nonce: u32, iterand: u32) -> u32 {
|
|
||||||
let mut nonce = initial_nonce;
|
|
||||||
|
|
||||||
loop {
|
|
||||||
let hash = compute_hash(nonce);
|
|
||||||
|
|
||||||
if validate(&hash, difficulty) {
|
|
||||||
// If the challenge worked, copy the bytes into `RESULT_HASH` so the runtime
|
|
||||||
// can pick it up.
|
|
||||||
let mut challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.copy_from_slice(&hash);
|
|
||||||
return nonce;
|
|
||||||
}
|
|
||||||
|
|
||||||
let old_nonce = nonce;
|
|
||||||
nonce = nonce.wrapping_add(iterand);
|
|
||||||
|
|
||||||
// send a progress update every 1024 iterations. since each thread checks
|
|
||||||
// separate values, one simple way to do this is by bit masking the
|
|
||||||
// nonce for multiples of 1024. unfortunately, if the number of threads
|
|
||||||
// is not prime, only some of the threads will be sending the status
|
|
||||||
// update and they will get behind the others. this is slightly more
|
|
||||||
// complicated but ensures an even distribution between threads.
|
|
||||||
if nonce > old_nonce | 1023 && (nonce >> 10) % iterand == initial_nonce {
|
|
||||||
update_nonce(nonce);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// This function is called by the server in order to validate a proof-of-work challenge.
|
|
||||||
/// This expects `DATA_BUFFER` to be set to the challenge value and `VERIFICATION_HASH` to
|
|
||||||
/// be set to the "raw bytes" of the SHA-256 hash that the client calculated.
|
|
||||||
///
|
|
||||||
/// If everything is good, it returns true. Otherwise, it returns false.
|
|
||||||
///
|
|
||||||
/// XXX(Xe): this could probably return an error code for what step fails, but this is fine
|
|
||||||
/// for now.
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn anubis_validate(nonce: u32, difficulty: u32) -> bool {
|
|
||||||
let computed = compute_hash(nonce);
|
|
||||||
let valid = validate(&computed, difficulty);
|
|
||||||
if !valid {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let verification = VERIFICATION_HASH.lock().unwrap();
|
|
||||||
computed == *verification
|
|
||||||
}
|
|
||||||
|
|
||||||
// These functions exist to give pointers and lengths to the runtime around the Anubis
|
|
||||||
// checks, this allows JavaScript and Go to safely manipulate the memory layout that Rust
|
|
||||||
// has statically allocated at compile time without having to assume how the Rust compiler
|
|
||||||
// is going to lay it out.
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_ptr() -> *const u8 {
|
|
||||||
let challenge = RESULT_HASH.lock().unwrap();
|
|
||||||
challenge.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn result_hash_size() -> usize {
|
|
||||||
RESULT_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_ptr() -> *const u8 {
|
|
||||||
let verification = VERIFICATION_HASH.lock().unwrap();
|
|
||||||
verification.as_ptr()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[unsafe(no_mangle)]
|
|
||||||
pub extern "C" fn verification_hash_size() -> usize {
|
|
||||||
VERIFICATION_HASH.lock().unwrap().len()
|
|
||||||
}
|
|
||||||
299
wasm/wasm.go
299
wasm/wasm.go
@@ -1,299 +0,0 @@
|
|||||||
package wasm
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"math"
|
|
||||||
"os"
|
|
||||||
"strconv"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
|
||||||
"github.com/prometheus/client_golang/prometheus/promauto"
|
|
||||||
"github.com/tetratelabs/wazero"
|
|
||||||
"github.com/tetratelabs/wazero/api"
|
|
||||||
)
|
|
||||||
|
|
||||||
func UpdateNonce(uint32) {}
|
|
||||||
|
|
||||||
var (
|
|
||||||
validationTime = promauto.NewHistogramVec(prometheus.HistogramOpts{
|
|
||||||
Name: "anubis_wasm_validation_time",
|
|
||||||
Help: "The time taken for the validation function to run per checker (nanoseconds)",
|
|
||||||
Buckets: prometheus.ExponentialBucketsRange(1, math.Pow(2, 31), 32),
|
|
||||||
}, []string{"fname"})
|
|
||||||
|
|
||||||
validationCount = promauto.NewCounterVec(prometheus.CounterOpts{
|
|
||||||
Name: "anubis_wasm_validation",
|
|
||||||
Help: "The number of times the validation logic has been run and its success rate",
|
|
||||||
}, []string{"fname", "success"})
|
|
||||||
)
|
|
||||||
|
|
||||||
type Runner struct {
|
|
||||||
r wazero.Runtime
|
|
||||||
code wazero.CompiledModule
|
|
||||||
fname string
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewRunner(ctx context.Context, fname string, fin io.ReadCloser) (*Runner, error) {
|
|
||||||
data, err := io.ReadAll(fin)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("wasm: can't read from fin: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
r := wazero.NewRuntime(ctx)
|
|
||||||
|
|
||||||
_, err = r.NewHostModuleBuilder("anubis").
|
|
||||||
NewFunctionBuilder().
|
|
||||||
WithFunc(func(context.Context, uint32) {}).
|
|
||||||
Export("anubis_update_nonce").
|
|
||||||
Instantiate(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("wasm: can't export anubis_update_nonce: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
code, err := r.CompileModule(ctx, data)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("wasm: can't compile module: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
result := &Runner{
|
|
||||||
r: r,
|
|
||||||
code: code,
|
|
||||||
fname: fname,
|
|
||||||
}
|
|
||||||
|
|
||||||
return result, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) checkExports(module api.Module) error {
|
|
||||||
funcs := []string{
|
|
||||||
"anubis_work",
|
|
||||||
"anubis_validate",
|
|
||||||
"data_ptr",
|
|
||||||
"set_data_length",
|
|
||||||
"result_hash_ptr",
|
|
||||||
"result_hash_size",
|
|
||||||
"verification_hash_ptr",
|
|
||||||
"verification_hash_size",
|
|
||||||
}
|
|
||||||
|
|
||||||
var errs []error
|
|
||||||
|
|
||||||
for _, fun := range funcs {
|
|
||||||
if module.ExportedFunction(fun) == nil {
|
|
||||||
errs = append(errs, fmt.Errorf("function %s is not defined", fun))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(errs) != 0 {
|
|
||||||
return errors.Join(errs...)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) anubisWork(ctx context.Context, module api.Module, difficulty, initialNonce, iterand uint32) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("anubis_work").Call(ctx, uint64(difficulty), uint64(initialNonce), uint64(iterand))
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) anubisValidate(ctx context.Context, module api.Module, nonce, difficulty uint32) (bool, error) {
|
|
||||||
results, err := module.ExportedFunction("anubis_validate").Call(ctx, uint64(nonce), uint64(difficulty))
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rust booleans are 1 if true
|
|
||||||
return results[0] == 1, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) dataPtr(ctx context.Context, module api.Module) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("data_ptr").Call(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) setDataLength(ctx context.Context, module api.Module, length uint32) error {
|
|
||||||
_, err := module.ExportedFunction("set_data_length").Call(ctx, uint64(length))
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) resultHashPtr(ctx context.Context, module api.Module) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("result_hash_ptr").Call(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) resultHashSize(ctx context.Context, module api.Module) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("result_hash_size").Call(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) verificationHashPtr(ctx context.Context, module api.Module) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("verification_hash_ptr").Call(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) verificationHashSize(ctx context.Context, module api.Module) (uint32, error) {
|
|
||||||
results, err := module.ExportedFunction("verification_hash_size").Call(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return uint32(results[0]), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) writeData(ctx context.Context, module api.Module, data []byte) error {
|
|
||||||
if len(data) > 4096 {
|
|
||||||
return os.ErrInvalid
|
|
||||||
}
|
|
||||||
|
|
||||||
length := uint32(len(data))
|
|
||||||
|
|
||||||
dataPtr, err := r.dataPtr(ctx, module)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("can't read data pointer: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !module.Memory().Write(dataPtr, data) {
|
|
||||||
return fmt.Errorf("[unexpected] can't write memory, is data out of range??")
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.setDataLength(ctx, module, length); err != nil {
|
|
||||||
return fmt.Errorf("can't set data length: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) readResult(ctx context.Context, module api.Module) ([]byte, error) {
|
|
||||||
length, err := r.resultHashSize(ctx, module)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("can't get result hash size: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ptr, err := r.resultHashPtr(ctx, module)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("can't get result hash pointer: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
buf, ok := module.Memory().Read(ptr, length)
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("[unexpected] can't read from memory, is something out of range??")
|
|
||||||
}
|
|
||||||
|
|
||||||
return buf, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) run(ctx context.Context, data []byte, difficulty, initialNonce, iterand uint32) (uint32, []byte, api.Module, error) {
|
|
||||||
mod, err := r.r.InstantiateModule(ctx, r.code, wazero.NewModuleConfig().WithName(r.fname))
|
|
||||||
if err != nil {
|
|
||||||
return 0, nil, nil, fmt.Errorf("can't instantiate module: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.checkExports(mod); err != nil {
|
|
||||||
return 0, nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.writeData(ctx, mod, data); err != nil {
|
|
||||||
return 0, nil, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
nonce, err := r.anubisWork(ctx, mod, difficulty, initialNonce, iterand)
|
|
||||||
if err != nil {
|
|
||||||
return 0, nil, nil, fmt.Errorf("can't run work function: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
hash, err := r.readResult(ctx, mod)
|
|
||||||
if err != nil {
|
|
||||||
return 0, nil, nil, fmt.Errorf("can't read result: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nonce, hash, mod, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) Run(ctx context.Context, data []byte, difficulty, initialNonce, iterand uint32) (uint32, []byte, error) {
|
|
||||||
nonce, hash, _, err := r.run(ctx, data, difficulty, initialNonce, iterand)
|
|
||||||
if err != nil {
|
|
||||||
return 0, nil, fmt.Errorf("can't run %s: %w", r.fname, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nonce, hash, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) verify(ctx context.Context, data, verify []byte, nonce, difficulty uint32) (bool, api.Module, error) {
|
|
||||||
mod, err := r.r.InstantiateModule(ctx, r.code, wazero.NewModuleConfig().WithName(r.fname))
|
|
||||||
if err != nil {
|
|
||||||
return false, nil, fmt.Errorf("can't instantiate module: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.checkExports(mod); err != nil {
|
|
||||||
return false, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.writeData(ctx, mod, data); err != nil {
|
|
||||||
return false, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := r.writeVerification(ctx, mod, verify); err != nil {
|
|
||||||
return false, nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
ok, err := r.anubisValidate(ctx, mod, nonce, difficulty)
|
|
||||||
if err != nil {
|
|
||||||
return false, nil, fmt.Errorf("can't validate hash %x from challenge %x, nonce %d and difficulty %d: %w", verify, data, nonce, difficulty, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ok, mod, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) Verify(ctx context.Context, data, verify []byte, nonce, difficulty uint32) (bool, error) {
|
|
||||||
t0 := time.Now()
|
|
||||||
ok, _, err := r.verify(ctx, data, verify, nonce, difficulty)
|
|
||||||
validationTime.WithLabelValues(r.fname).Observe(float64(time.Since(t0)))
|
|
||||||
validationCount.WithLabelValues(r.fname, strconv.FormatBool(ok))
|
|
||||||
return ok, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *Runner) writeVerification(ctx context.Context, module api.Module, data []byte) error {
|
|
||||||
length, err := r.verificationHashSize(ctx, module)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("can't get verification hash size: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if length != uint32(len(data)) {
|
|
||||||
return fmt.Errorf("data is too big, want %d bytes, got: %d", length, len(data))
|
|
||||||
}
|
|
||||||
|
|
||||||
ptr, err := r.verificationHashPtr(ctx, module)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("can't get verification hash pointer: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !module.Memory().Write(ptr, data) {
|
|
||||||
return fmt.Errorf("[unexpected] can't write memory, is data out of range??")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
@@ -1,164 +0,0 @@
|
|||||||
package wasm
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"crypto/sha256"
|
|
||||||
"fmt"
|
|
||||||
"io/fs"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/TecharoHQ/anubis/web"
|
|
||||||
)
|
|
||||||
|
|
||||||
func abiTest(t testing.TB, fname string, difficulty uint32) {
|
|
||||||
fin, err := web.Static.Open("static/wasm/" + fname)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
defer fin.Close()
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
|
|
||||||
t.Cleanup(cancel)
|
|
||||||
|
|
||||||
runner, err := NewRunner(ctx, fname, fin)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
h := sha256.New()
|
|
||||||
fmt.Fprint(h, t.Name())
|
|
||||||
data := h.Sum(nil)
|
|
||||||
|
|
||||||
nonce, hash, mod, err := runner.run(ctx, data, difficulty, 0, 1)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := runner.writeVerification(ctx, mod, hash); err != nil {
|
|
||||||
t.Fatalf("can't write verification: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ok, err := runner.anubisValidate(ctx, mod, nonce, difficulty)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("can't run validation: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !ok {
|
|
||||||
t.Error("validation failed")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Logf("used %d pages of wasm memory (%d bytes)", mod.Memory().Size()/63356, mod.Memory().Size())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAlgos(t *testing.T) {
|
|
||||||
fnames, err := fs.ReadDir(web.Static, "static/wasm")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, fname := range fnames {
|
|
||||||
fname := fname
|
|
||||||
t.Run(fname.Name(), func(t *testing.T) {
|
|
||||||
abiTest(t, fname.Name(), 4)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func bench(b *testing.B, fname string, difficulties []uint32) {
|
|
||||||
b.Helper()
|
|
||||||
|
|
||||||
fin, err := web.Static.Open("static/wasm/" + fname)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
defer fin.Close()
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
|
|
||||||
b.Cleanup(cancel)
|
|
||||||
|
|
||||||
runner, err := NewRunner(ctx, fname, fin)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
h := sha256.New()
|
|
||||||
fmt.Fprint(h, "This is an example value that exists only to test the system.")
|
|
||||||
data := h.Sum(nil)
|
|
||||||
|
|
||||||
_, _, mod, err := runner.run(ctx, data, 0, 0, 1)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, difficulty := range difficulties {
|
|
||||||
b.Run(fmt.Sprintf("difficulty/%d", difficulty), func(b *testing.B) {
|
|
||||||
for b.Loop() {
|
|
||||||
difficulty := difficulty
|
|
||||||
_, err := runner.anubisWork(ctx, mod, difficulty, 0, 1)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatalf("can't do test work run: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkSHA256(b *testing.B) {
|
|
||||||
bench(b, "sha256.wasm", []uint32{4, 6, 8, 10, 12, 14, 16})
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkArgon2ID(b *testing.B) {
|
|
||||||
bench(b, "argon2id.wasm", []uint32{4, 6, 8})
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkValidate(b *testing.B) {
|
|
||||||
fnames, err := fs.ReadDir(web.Static, "static/wasm")
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
h := sha256.New()
|
|
||||||
fmt.Fprint(h, "This is an example value that exists only to test the system.")
|
|
||||||
data := h.Sum(nil)
|
|
||||||
|
|
||||||
for _, fname := range fnames {
|
|
||||||
fname := fname.Name()
|
|
||||||
|
|
||||||
difficulty := uint32(1)
|
|
||||||
|
|
||||||
switch fname {
|
|
||||||
case "sha256.wasm":
|
|
||||||
difficulty = 16
|
|
||||||
}
|
|
||||||
|
|
||||||
b.Run(fname, func(b *testing.B) {
|
|
||||||
fin, err := web.Static.Open("static/wasm/" + fname)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
defer fin.Close()
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
|
|
||||||
b.Cleanup(cancel)
|
|
||||||
|
|
||||||
runner, err := NewRunner(ctx, fname, fin)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
nonce, hash, mod, err := runner.run(ctx, data, difficulty, 0, 1)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := runner.writeVerification(ctx, mod, hash); err != nil {
|
|
||||||
b.Fatalf("can't write verification: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for b.Loop() {
|
|
||||||
_, err := runner.anubisValidate(ctx, mod, nonce, difficulty)
|
|
||||||
if err != nil {
|
|
||||||
b.Fatalf("can't run validation: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import { u } from "../xeact.mjs";
|
|
||||||
|
|
||||||
export default function process(
|
|
||||||
data,
|
|
||||||
difficulty = 16,
|
|
||||||
signal = null,
|
|
||||||
pc = null,
|
|
||||||
threads = (navigator.hardwareConcurrency || 1),
|
|
||||||
) {
|
|
||||||
return new Promise(async (resolve, reject) => {
|
|
||||||
let webWorkerURL = URL.createObjectURL(new Blob([
|
|
||||||
'(', processTask(), ')()'
|
|
||||||
], { type: 'application/javascript' }));
|
|
||||||
|
|
||||||
const module = await fetch(u("/.within.website/x/cmd/anubis/static/wasm/argon2id.wasm"))
|
|
||||||
.then(resp => WebAssembly.compileStreaming(resp));
|
|
||||||
|
|
||||||
const workers = [];
|
|
||||||
const terminate = () => {
|
|
||||||
workers.forEach((w) => w.terminate());
|
|
||||||
if (signal != null) {
|
|
||||||
// clean up listener to avoid memory leak
|
|
||||||
signal.removeEventListener("abort", terminate);
|
|
||||||
if (signal.aborted) {
|
|
||||||
console.log("PoW aborted");
|
|
||||||
reject(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if (signal != null) {
|
|
||||||
signal.addEventListener("abort", terminate, { once: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < threads; i++) {
|
|
||||||
let worker = new Worker(webWorkerURL);
|
|
||||||
|
|
||||||
worker.onmessage = (event) => {
|
|
||||||
if (typeof event.data === "number") {
|
|
||||||
pc?.(event.data);
|
|
||||||
} else {
|
|
||||||
terminate();
|
|
||||||
resolve(event.data);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
worker.onerror = (event) => {
|
|
||||||
terminate();
|
|
||||||
reject(event);
|
|
||||||
};
|
|
||||||
|
|
||||||
worker.postMessage({
|
|
||||||
data,
|
|
||||||
difficulty,
|
|
||||||
nonce: i,
|
|
||||||
threads,
|
|
||||||
module,
|
|
||||||
});
|
|
||||||
|
|
||||||
workers.push(worker);
|
|
||||||
}
|
|
||||||
|
|
||||||
URL.revokeObjectURL(webWorkerURL);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function processTask() {
|
|
||||||
return function () {
|
|
||||||
addEventListener('message', async (event) => {
|
|
||||||
const importObject = {
|
|
||||||
anubis: {
|
|
||||||
anubis_update_nonce: (nonce) => postMessage(nonce),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const instance = await WebAssembly.instantiate(event.data.module, importObject);
|
|
||||||
|
|
||||||
// Get exports
|
|
||||||
const {
|
|
||||||
anubis_work,
|
|
||||||
data_ptr,
|
|
||||||
result_hash_ptr,
|
|
||||||
result_hash_size,
|
|
||||||
set_data_length,
|
|
||||||
memory
|
|
||||||
} = instance.exports;
|
|
||||||
|
|
||||||
function uint8ArrayToHex(arr) {
|
|
||||||
return Array.from(arr)
|
|
||||||
.map((c) => c.toString(16).padStart(2, "0"))
|
|
||||||
.join("");
|
|
||||||
}
|
|
||||||
|
|
||||||
function hexToUint8Array(hexString) {
|
|
||||||
// Remove whitespace and optional '0x' prefix
|
|
||||||
hexString = hexString.replace(/\s+/g, '').replace(/^0x/, '');
|
|
||||||
|
|
||||||
// Check for valid length
|
|
||||||
if (hexString.length % 2 !== 0) {
|
|
||||||
throw new Error('Invalid hex string length');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for valid characters
|
|
||||||
if (!/^[0-9a-fA-F]+$/.test(hexString)) {
|
|
||||||
throw new Error('Invalid hex characters');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to Uint8Array
|
|
||||||
const byteArray = new Uint8Array(hexString.length / 2);
|
|
||||||
for (let i = 0; i < byteArray.length; i++) {
|
|
||||||
const byteValue = parseInt(hexString.substr(i * 2, 2), 16);
|
|
||||||
byteArray[i] = byteValue;
|
|
||||||
}
|
|
||||||
|
|
||||||
return byteArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write data to buffer
|
|
||||||
function writeToBuffer(data) {
|
|
||||||
if (data.length > 1024) throw new Error("Data exceeds buffer size");
|
|
||||||
|
|
||||||
// Get pointer and create view
|
|
||||||
const offset = data_ptr();
|
|
||||||
const buffer = new Uint8Array(memory.buffer, offset, data.length);
|
|
||||||
|
|
||||||
// Copy data
|
|
||||||
buffer.set(data);
|
|
||||||
|
|
||||||
// Set data length
|
|
||||||
set_data_length(data.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
function readFromChallenge() {
|
|
||||||
const offset = result_hash_ptr();
|
|
||||||
const buffer = new Uint8Array(memory.buffer, offset, result_hash_size());
|
|
||||||
|
|
||||||
return buffer;
|
|
||||||
}
|
|
||||||
|
|
||||||
let data = event.data.data;
|
|
||||||
let difficulty = event.data.difficulty;
|
|
||||||
let nonce = event.data.nonce;
|
|
||||||
let interand = event.data.threads;
|
|
||||||
|
|
||||||
writeToBuffer(hexToUint8Array(data));
|
|
||||||
|
|
||||||
nonce = anubis_work(difficulty, nonce, interand);
|
|
||||||
const challenge = readFromChallenge();
|
|
||||||
|
|
||||||
data = uint8ArrayToHex(challenge);
|
|
||||||
|
|
||||||
postMessage({
|
|
||||||
hash: data,
|
|
||||||
difficulty,
|
|
||||||
nonce,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}.toString();
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import { u } from "../xeact.mjs";
|
|
||||||
|
|
||||||
export default function process(
|
|
||||||
data,
|
|
||||||
difficulty = 16,
|
|
||||||
signal = null,
|
|
||||||
pc = null,
|
|
||||||
threads = (navigator.hardwareConcurrency || 1),
|
|
||||||
) {
|
|
||||||
return new Promise(async (resolve, reject) => {
|
|
||||||
let webWorkerURL = URL.createObjectURL(new Blob([
|
|
||||||
'(', processTask(), ')()'
|
|
||||||
], { type: 'application/javascript' }));
|
|
||||||
|
|
||||||
const module = await fetch(u("/.within.website/x/cmd/anubis/static/wasm/sha256.wasm"))
|
|
||||||
.then(resp => WebAssembly.compileStreaming(resp));
|
|
||||||
|
|
||||||
const workers = [];
|
|
||||||
const terminate = () => {
|
|
||||||
workers.forEach((w) => w.terminate());
|
|
||||||
if (signal != null) {
|
|
||||||
// clean up listener to avoid memory leak
|
|
||||||
signal.removeEventListener("abort", terminate);
|
|
||||||
if (signal.aborted) {
|
|
||||||
console.log("PoW aborted");
|
|
||||||
reject(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if (signal != null) {
|
|
||||||
signal.addEventListener("abort", terminate, { once: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < threads; i++) {
|
|
||||||
let worker = new Worker(webWorkerURL);
|
|
||||||
|
|
||||||
worker.onmessage = (event) => {
|
|
||||||
if (typeof event.data === "number") {
|
|
||||||
pc?.(event.data);
|
|
||||||
} else {
|
|
||||||
terminate();
|
|
||||||
resolve(event.data);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
worker.onerror = (event) => {
|
|
||||||
terminate();
|
|
||||||
reject(event);
|
|
||||||
};
|
|
||||||
|
|
||||||
worker.postMessage({
|
|
||||||
data,
|
|
||||||
difficulty,
|
|
||||||
nonce: i,
|
|
||||||
threads,
|
|
||||||
module,
|
|
||||||
});
|
|
||||||
|
|
||||||
workers.push(worker);
|
|
||||||
}
|
|
||||||
|
|
||||||
URL.revokeObjectURL(webWorkerURL);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Build the Web Worker body for the WASM proof-of-work as a string.
 *
 * The returned string is a function expression meant to be wrapped as
 * `'(' + processTask() + ')()'` and loaded via a Blob URL, so everything
 * the worker needs must live inside the returned function.
 *
 * Worker protocol (must stay in sync with the spawning code):
 *  - receives one message: { data, difficulty, nonce, threads, module }
 *    where `data` is a hex-encoded challenge and `module` is a compiled
 *    WebAssembly.Module.
 *  - posts a bare number for progress updates (via anubis_update_nonce),
 *    and an object { hash, difficulty, nonce } when a solution is found.
 *
 * @returns {string} source text of the worker entry function
 */
function processTask() {
  return function () {
    addEventListener('message', async (event) => {
      // Host imports exposed to the WASM module: progress callbacks are
      // forwarded to the main thread as bare numbers.
      const importObject = {
        anubis: {
          anubis_update_nonce: (nonce) => postMessage(nonce),
        }
      };

      // event.data.module is a precompiled WebAssembly.Module, so
      // instantiate() resolves directly to an Instance here.
      const instance = await WebAssembly.instantiate(event.data.module, importObject);

      // Get exports
      const {
        anubis_work,
        data_ptr,
        result_hash_ptr,
        result_hash_size,
        set_data_length,
        memory
      } = instance.exports;

      /**
       * Encode a byte array as a lowercase hex string.
       * @param {Uint8Array} arr
       * @returns {string}
       */
      function uint8ArrayToHex(arr) {
        return Array.from(arr)
          .map((c) => c.toString(16).padStart(2, "0"))
          .join("");
      }

      /**
       * Decode a hex string (optionally "0x"-prefixed, whitespace allowed)
       * into a Uint8Array.
       * @param {string} hexString
       * @returns {Uint8Array}
       * @throws {Error} on odd length or non-hex characters
       */
      function hexToUint8Array(hexString) {
        // Remove whitespace and optional '0x' prefix
        hexString = hexString.replace(/\s+/g, '').replace(/^0x/, '');

        // Check for valid length
        if (hexString.length % 2 !== 0) {
          throw new Error('Invalid hex string length');
        }

        // Check for valid characters
        if (!/^[0-9a-fA-F]+$/.test(hexString)) {
          throw new Error('Invalid hex characters');
        }

        // Convert to Uint8Array; slice() replaces the deprecated substr().
        const byteArray = new Uint8Array(hexString.length / 2);
        for (let i = 0; i < byteArray.length; i++) {
          byteArray[i] = parseInt(hexString.slice(i * 2, i * 2 + 2), 16);
        }

        return byteArray;
      }

      /**
       * Copy the challenge bytes into the module's input buffer and record
       * their length. The WASM side reserves 1024 bytes for input.
       * @param {Uint8Array} data
       * @throws {Error} if data does not fit in the module's buffer
       */
      function writeToBuffer(data) {
        if (data.length > 1024) throw new Error("Data exceeds buffer size");

        // Get pointer and create view
        const offset = data_ptr();
        const buffer = new Uint8Array(memory.buffer, offset, data.length);

        // Copy data
        buffer.set(data);

        // Set data length
        set_data_length(data.length);
      }

      /**
       * View the winning hash bytes in WASM memory.
       * NOTE: this is a view, not a copy — read it before the module's
       * memory is touched again.
       * @returns {Uint8Array}
       */
      function readFromChallenge() {
        const offset = result_hash_ptr();
        return new Uint8Array(memory.buffer, offset, result_hash_size());
      }

      let data = event.data.data;
      let difficulty = event.data.difficulty;
      let nonce = event.data.nonce;
      // Each worker starts at its own index and strides by the total
      // worker count so the nonce space is partitioned without overlap.
      let nonceStride = event.data.threads;

      writeToBuffer(hexToUint8Array(data));

      nonce = anubis_work(difficulty, nonce, nonceStride);
      const challenge = readFromChallenge();

      data = uint8ArrayToHex(challenge);

      // Non-number message: the main thread treats this as the solution.
      postMessage({
        hash: data,
        difficulty,
        nonce,
      });
    });
  }.toString();
}
|
|
||||||
|
|
||||||
@@ -1,12 +1,10 @@
|
|||||||
import fast from "./algos/fast.mjs";
|
import processFast from "./proof-of-work.mjs";
|
||||||
import slow from "./algos/slow.mjs";
|
import processSlow from "./proof-of-work-slow.mjs";
|
||||||
import sha256 from "./algos/sha256.mjs";
|
|
||||||
|
|
||||||
const defaultDifficulty = 16;
|
const defaultDifficulty = 4;
|
||||||
const algorithms = {
|
const algorithms = {
|
||||||
sha256: sha256,
|
fast: processFast,
|
||||||
fast: fast,
|
slow: processSlow,
|
||||||
slow: slow,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const status = document.getElementById("status");
|
const status = document.getElementById("status");
|
||||||
@@ -43,13 +41,10 @@ const benchmarkTrial = async (stats, difficulty, algorithm, signal) => {
|
|||||||
.map((c) => c.toString(16).padStart(2, "0"))
|
.map((c) => c.toString(16).padStart(2, "0"))
|
||||||
.join("");
|
.join("");
|
||||||
|
|
||||||
if (algorithm != "sha256") {
|
|
||||||
difficulty = Math.round(difficulty / 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
const t0 = performance.now();
|
const t0 = performance.now();
|
||||||
const { hash, nonce } = await process(challenge, Number(difficulty), signal);
|
const { hash, nonce } = await process(challenge, Number(difficulty), signal);
|
||||||
const t1 = performance.now();
|
const t1 = performance.now();
|
||||||
|
console.log({ hash, nonce });
|
||||||
|
|
||||||
stats.time += t1 - t0;
|
stats.time += t1 - t0;
|
||||||
stats.iters += nonce;
|
stats.iters += nonce;
|
||||||
|
|||||||
@@ -1,15 +1,17 @@
|
|||||||
import argon2id from "./algos/argon2id.mjs";
|
import processFast from "./proof-of-work.mjs";
|
||||||
import fast from "./algos/fast.mjs";
|
import processSlow from "./proof-of-work-slow.mjs";
|
||||||
import slow from "./algos/slow.mjs";
|
|
||||||
import sha256 from "./algos/sha256.mjs";
|
|
||||||
import { testVideo } from "./video.mjs";
|
import { testVideo } from "./video.mjs";
|
||||||
import { u } from "./xeact.mjs";
|
|
||||||
|
|
||||||
const algorithms = {
|
const algorithms = {
|
||||||
"argon2id": argon2id,
|
"fast": processFast,
|
||||||
"fast": fast,
|
"slow": processSlow,
|
||||||
"slow": slow,
|
};
|
||||||
"sha256": sha256,
|
|
||||||
|
// from Xeact
|
||||||
|
const u = (url = "", params = {}) => {
|
||||||
|
let result = new URL(url, window.location.href);
|
||||||
|
Object.entries(params).forEach(([k, v]) => result.searchParams.set(k, v));
|
||||||
|
return result.toString();
|
||||||
};
|
};
|
||||||
|
|
||||||
const imageURL = (mood, cacheBuster) =>
|
const imageURL = (mood, cacheBuster) =>
|
||||||
@@ -26,11 +28,6 @@ const dependencies = [
|
|||||||
msg: "Your browser doesn't support web workers (Anubis uses this to avoid freezing your browser). Do you have a plugin like JShelter installed?",
|
msg: "Your browser doesn't support web workers (Anubis uses this to avoid freezing your browser). Do you have a plugin like JShelter installed?",
|
||||||
value: window.Worker,
|
value: window.Worker,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
name: "WebAssembly",
|
|
||||||
msg: "Your browser doesn't have WebAssembly support. If you are running a big endian system, I'm sorry but this is something we can't work around with a polyfill.",
|
|
||||||
value: window.WebAssembly,
|
|
||||||
},
|
|
||||||
];
|
];
|
||||||
|
|
||||||
function showContinueBar(hash, nonce, t0, t1) {
|
function showContinueBar(hash, nonce, t0, t1) {
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ export default function process(
|
|||||||
progressCallback = null,
|
progressCallback = null,
|
||||||
_threads = 1,
|
_threads = 1,
|
||||||
) {
|
) {
|
||||||
|
console.debug("slow algo");
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
let webWorkerURL = URL.createObjectURL(new Blob([
|
let webWorkerURL = URL.createObjectURL(new Blob([
|
||||||
'(', processTask(), ')()'
|
'(', processTask(), ')()'
|
||||||
@@ -5,6 +5,7 @@ export default function process(
|
|||||||
progressCallback = null,
|
progressCallback = null,
|
||||||
threads = (navigator.hardwareConcurrency || 1),
|
threads = (navigator.hardwareConcurrency || 1),
|
||||||
) {
|
) {
|
||||||
|
console.debug("fast algo");
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
let webWorkerURL = URL.createObjectURL(new Blob([
|
let webWorkerURL = URL.createObjectURL(new Blob([
|
||||||
'(', processTask(), ')()'
|
'(', processTask(), ')()'
|
||||||
@@ -98,6 +99,7 @@ function processTask() {
|
|||||||
|
|
||||||
if (valid) {
|
if (valid) {
|
||||||
hash = uint8ArrayToHexString(thisHash);
|
hash = uint8ArrayToHexString(thisHash);
|
||||||
|
console.log(hash);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
/**
|
|
||||||
* Generate a relative URL from `url`, appending all key-value pairs from `params` as URL-encoded parameters.
|
|
||||||
*
|
|
||||||
* @type{function(string=, Object=): string}
|
|
||||||
*/
|
|
||||||
export const u = (url = "", params = {}) => {
|
|
||||||
let result = new URL(url, window.location.href);
|
|
||||||
Object.entries(params).forEach((kv) => {
|
|
||||||
let [k, v] = kv;
|
|
||||||
result.searchParams.set(k, v);
|
|
||||||
});
|
|
||||||
return result.toString();
|
|
||||||
};
|
|
||||||
2
web/static/wasm/.gitignore
vendored
2
web/static/wasm/.gitignore
vendored
@@ -1,2 +0,0 @@
|
|||||||
*
|
|
||||||
!.gitignore
|
|
||||||
Reference in New Issue
Block a user