This commit is contained in:
Ludo Galabru
2020-05-27 21:49:13 -04:00
committed by Ludo Galabru
commit fcb87b635c
177 changed files with 38918 additions and 0 deletions

2
.cargo/config Normal file
View File

@@ -0,0 +1,2 @@
# Cargo alias: `cargo chainhook-install` builds and installs the chainhook CLI
# binary from this workspace, pinned to the committed Cargo.lock.
[alias]
chainhook-install = "install --path components/chainhook-cli --locked --force"

9
.dockerignore Normal file
View File

@@ -0,0 +1,9 @@
# Paths excluded from the Docker build context — keeps the context small and
# avoids cache invalidation from build artifacts and VCS metadata.
target/
examples/
dockerfiles/
Dockerfile
.dockerignore
.git*
node_modules
package-lock.json
*.tar.gz

11
.gitattributes vendored Normal file
View File

@@ -0,0 +1,11 @@
# Have linguist ignore typescript compiler sources
components/clarinet-cli/js/* linguist-vendored
components/clarinet-cli/dts/* linguist-vendored
components/clarinet-cli/tsc/* linguist-vendored
# Declare Clarity files that will always have LF line endings on checkout.
*.clar text eol=lf
# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary

731
.github/workflows/ci.yaml vendored Normal file
View File

@@ -0,0 +1,731 @@
# Continuous-integration pipeline: cancels superseded runs, audits/builds/tests
# the workspace components, publishes artifacts, and cuts releases from `main`.
name: CI
on:
  pull_request:
  push:
    branches:
      - main
      - develop
      - rc/next
  workflow_dispatch:
jobs:
  # Cancel any still-running workflow for the same ref so only the newest
  # commit consumes runners.
  pre_run:
    name: Cancel previous runs
    runs-on: ubuntu-latest
    steps:
      - name: Cancel Previous Runs
        # Pinned to an immutable commit SHA rather than a movable tag.
        uses: styfle/cancel-workflow-action@ad6cb1b847ffb509a69b745b6ee2f1d14dfe14b8
        with:
          access_token: ${{ github.token }}
          # Fix: `persist-credentials: false` removed — it is an input of
          # actions/checkout, not cancel-workflow-action, and was ignored
          # (the runner logs an "unexpected input" warning).
get_release_info:
  name: Get Release Info
  runs-on: ubuntu-latest
  needs: pre_run
  outputs:
    # Empty when the Cargo.toml version already matches the latest published
    # release (i.e. no release should be cut on this run).
    tag: ${{ steps.new_release_tag.outputs.TAG }}
  steps:
    - name: Checkout
      uses: actions/checkout@v3
      with:
        submodules: recursive
    - name: Get latest release
      if: startsWith(github.ref, 'refs/heads/main')
      id: release
      uses: pozetroninc/github-action-get-latest-release@master
      with:
        repository: ${{ github.repository }}
        excludes: prerelease, draft
    - name: Determine if release build
      if: startsWith(github.ref, 'refs/heads/main')
      id: new_release_tag
      env:
        LATEST_RELEASE: ${{ steps.release.outputs.release }}
      run: |
        CARGO_VERSION=v$(grep "version" components/chainhook-cli/Cargo.toml | head -n 1 | cut -d\" -f2)
        if [[ "${CARGO_VERSION}" != "${LATEST_RELEASE}" ]]; then
          # Fix: the `::set-output` workflow command is deprecated (and later
          # disabled by GitHub); write the step output to $GITHUB_OUTPUT instead.
          echo "TAG=${CARGO_VERSION}" >> "$GITHUB_OUTPUT"
          echo "::warning::Will create release for version: ${CARGO_VERSION}"
        else
          echo "::warning::Will not create a release"
        fi
# Security-audit and formatting gate; runs on every push/PR alongside builds.
audit:
name: Audit and format
runs-on: ubuntu-latest
needs: pre_run
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
components: rustfmt
override: true
# Restore ownership of ~/.cargo so the cache step can read/write it.
- name: Set Cargo file permissions
run: sudo chown -R $(whoami):$(id -ng) ~/.cargo/
- name: Cache cargo
uses: actions/cache@v2
with:
path: ~/.cargo/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Install dependencies
run: cargo install cargo-audit
# The --ignore flags acknowledge known RUSTSEC advisories that are accepted
# for now; revisit this list whenever dependencies are upgraded.
- name: Run audit
run: cargo audit --ignore RUSTSEC-2021-0076 --ignore RUSTSEC-2021-0119 --ignore RUSTSEC-2022-0028 --ignore RUSTSEC-2020-0071 --ignore RUSTSEC-2021-0124 --ignore RUSTSEC-2022-0040
- name: Run rustfmt
run: cargo fmt --all -- --check
# test_coverage_cargo:
# name: Generate test coverage
# runs-on: ubuntu-latest
# needs: pre_run
# strategy:
# fail-fast: false
# matrix:
# include:
# - name: clarinet
# working-directory: components/clarinet-cli
# - name: clarity-repl
# working-directory: components/clarity-repl
# - name: chainhook-cli
# working-directory: components/chainhook-event-observer
# steps:
# - name: Checkout repository
# uses: actions/checkout@v3
# with:
# submodules: recursive
# - name: Install Rust toolchain stable
# uses: actions-rs/toolchain@v1
# with:
# toolchain: stable
# profile: minimal
# override: true
# - name: Cache cargo
# uses: actions/cache@v2
# id: cache-cargo
# with:
# path: |
# ~/.cargo/bin/
# ~/.cargo/registry/index/
# ~/.cargo/registry/cache/
# ~/.cargo/git/db/
# target/
# key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
# - name: Install dependencies
# if: steps.cache-cargo.outputs.cache-hit != 'true'
# run: RUSTC_BOOTSTRAP=1 cargo install grcov
# - name: Install Rust toolchain
# uses: actions-rs/toolchain@v1
# with:
# toolchain: stable
# profile: minimal
# components: llvm-tools-preview
# override: true
# - name: Unit Tests
# env:
# RUSTFLAGS: "-C instrument-coverage"
# LLVM_PROFILE_FILE: "${{ matrix.name }}-%p-%m.profraw"
# run: cargo build --package=clarinet-cli --locked && cargo test --package=clarinet-cli
# - name: Generate coverage
# run: grcov . --binary-path ./target/debug/ -s . -t lcov --branch --ignore-not-existing --ignore "/*" -o lcov.info
# # Run functional tests here in addition to the other jobs so we can fail fast
# # Since these tests are reached much earlier in the pipeline
# - name: Functional Tests
# if: matrix.name == 'clarinet'
# run: |
# for testdir in $(ls components/clarinet-cli/examples); do
# ./target/debug/clarinet test --manifest-path components/clarinet-cli/examples/${testdir}/Clarinet.toml
# done
# - name: Upload coverage report
# uses: codecov/codecov-action@v1
# with:
# flags: unittests
# name: ${{ matrix.name }}
# verbose: true
dist_clarinet:
name: Build Clarinet Distributions
runs-on: ${{ matrix.os }}
needs: pre_run
# Related upstream issue:
# https://github.com/nagisa/rust_libloading/issues/61#issuecomment-607941377
#
# env:
# CC: deny_c
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
platform: linux
target: x86_64-unknown-linux-gnu
architecture: x64
libc: glibc
- os: windows-latest
platform: windows
target: x86_64-pc-windows-msvc
architecture: x64
- os: macos-latest
platform: darwin
target: x86_64-apple-darwin
architecture: x64
- os: macos-latest
platform: darwin
target: aarch64-apple-darwin
architecture: arm64
steps:
- name: Configure git to use LF (Windows)
if: matrix.os == 'windows-latest'
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: ${{ matrix.target }}
profile: minimal
components: llvm-tools-preview
override: true
- name: Install wix (Windows)
if: matrix.os == 'windows-latest'
run: cargo install cargo-wix
- if: matrix.os != 'windows-latest'
run: sudo chown -R $(whoami):$(id -ng) ~/.cargo/
- name: Cache cargo
uses: actions/cache@v2
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/${{ matrix.target }}/release/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
# Set environment variables required from cross compiling from macos-x86_64 to macos-arm64
- name: Configure macos-arm64 cross compile config
if: matrix.target == 'aarch64-apple-darwin'
run: |
echo "SDKROOT=$(xcrun -sdk macosx --show-sdk-path)" >> $GITHUB_ENV
echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx --show-sdk-platform-version)" >> $GITHUB_ENV
- name: Configure artifact names (libc)
if: ${{ matrix.libc }}
shell: bash
run: |
echo "SHORT_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-${{ matrix.libc }}" >> $GITHUB_ENV
echo "PRE_GYP_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-${{ matrix.libc }}" >> $GITHUB_ENV
- name: Configure artifact names (not libc)
if: ${{ ! matrix.libc }}
shell: bash
run: |
echo "SHORT_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}" >> $GITHUB_ENV
echo "PRE_GYP_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-unknown" >> $GITHUB_ENV
- name: Build - Cargo
if: matrix.target != 'x86_64-unknown-linux-musl'
run: cargo build --release --features=telemetry --locked --target ${{ matrix.target }}
- name: Code sign bin (Windows)
if: startsWith(github.ref, 'refs/heads/main') && matrix.os == 'windows-latest'
run: |
$certificate_file_name = "${env:TEMP}\certificate.pfx"
$bytes_cert = [Convert]::FromBase64String('${{ secrets.WINDOWS_CODE_SIGNING_CERTIFICATE }}')
[IO.File]::WriteAllBytes(${certificate_file_name}, ${bytes_cert})
$signtool_path = ((Resolve-Path -Path "${env:ProgramFiles(x86)}/Windows Kits/10/bin/10*/x86").Path[-1]) + "/signtool.exe"
$bin_path = (Resolve-Path -Path "target/${{ matrix.target }}/release/clarinet.exe").Path
& ${signtool_path} sign `
/d "Clarinet is a clarity runtime packaged as a command line tool, designed to facilitate smart contract understanding, development, testing and deployment." `
/du "https://github.com/hirosystems/clarinet" `
/tr http://timestamp.digicert.com `
/td sha256 `
/fd sha256 `
-f "${certificate_file_name}" `
-p "${{ secrets.WINDOWS_CODE_SIGNING_PASSWORD }}" `
"${bin_path}"
- name: Build Installer (Windows)
if: matrix.os == 'windows-latest'
run: cargo wix -v --no-build --nocapture -p clarinet-cli
- name: Code sign installer (Windows)
if: startsWith(github.ref, 'refs/heads/main') && matrix.os == 'windows-latest'
run: |
$certificate_file_name = "${env:TEMP}\certificate.pfx"
$bytes_cert = [Convert]::FromBase64String('${{ secrets.WINDOWS_CODE_SIGNING_CERTIFICATE }}')
[IO.File]::WriteAllBytes(${certificate_file_name}, ${bytes_cert})
$signtool_path = ((Resolve-Path -Path "${env:ProgramFiles(x86)}/Windows Kits/10/bin/10*/x86").Path[-1]) + "/signtool.exe"
$msi_path = (Resolve-Path -Path "target/wix/*.msi").Path
& ${signtool_path} sign `
/d "Clarinet is a clarity runtime packaged as a command line tool, designed to facilitate smart contract understanding, development, testing and deployment." `
/du "https://github.com/hirosystems/clarinet" `
/tr http://timestamp.digicert.com `
/td sha256 `
/fd sha256 `
-f "${certificate_file_name}" `
-p "${{ secrets.WINDOWS_CODE_SIGNING_PASSWORD }}" `
"${msi_path}"
# Don't compress for Windows because winget can't yet unzip files
- name: Compress cargo artifact (Linux)
if: matrix.os != 'windows-latest'
run: tar -C target/${{ matrix.target }}/release -zcvf clarinet-${{ env.SHORT_TARGET_NAME }}.tar.gz clarinet
- name: Rename cargo artifact (Windows)
if: matrix.os == 'windows-latest'
shell: bash
run: mv target/wix/*.msi clarinet-${{ env.SHORT_TARGET_NAME }}.msi
# Separate uploads to prevent paths from being preserved
- name: Upload cargo artifacts (Linux)
if: matrix.os != 'windows-latest'
uses: actions/upload-artifact@v2
with:
name: clarinet-${{ env.SHORT_TARGET_NAME }}
path: clarinet-${{ env.SHORT_TARGET_NAME }}.tar.gz
- name: Upload cargo artifact (Windows)
if: matrix.os == 'windows-latest'
uses: actions/upload-artifact@v2
with:
name: clarinet-${{ env.SHORT_TARGET_NAME }}
path: clarinet-${{ env.SHORT_TARGET_NAME }}.msi
- name: Unit Tests - Cargo
# can't easily run mac-arm64 tests in GH without native runners for that arch
if: matrix.target != 'aarch64-apple-darwin'
run: cargo test --release --locked --target ${{ matrix.target }}
- name: Functional Tests (Linux)
# can't easily run mac-arm64 tests in GH without native runners for that arch
if: matrix.os != 'windows-latest' && matrix.target != 'aarch64-apple-darwin'
run: |
for testdir in $(ls components/clarinet-cli/examples); do
./target/${{ matrix.target }}/release/clarinet test --manifest-path components/clarinet-cli/examples/${testdir}/Clarinet.toml
done
- name: Functional Tests (Windows)
if: matrix.os == 'windows-latest'
run: |
foreach($testdir in Get-ChildItem components/clarinet-cli/examples) {
./target/${{ matrix.target }}/release/clarinet test --manifest-path ${testdir}/Clarinet.toml
}
dist_stacks_devnet_js:
name: Build Stacks Devnet JS Distributions
runs-on: ${{ matrix.os }}
needs:
- pre_run
- get_release_info
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
platform: linux
target: x86_64-unknown-linux-gnu
architecture: x64
libc: glibc
- os: windows-latest
platform: windows
target: x86_64-pc-windows-msvc
architecture: x64
- os: macos-latest
platform: darwin
target: x86_64-apple-darwin
architecture: x64
- os: macos-latest
platform: darwin
target: aarch64-apple-darwin
architecture: arm64
- os: ubuntu-latest
platform: linux
target: x86_64-unknown-linux-musl
architecture: x64
libc: musl
steps:
- name: Configure git to use LF (Windows)
if: matrix.os == 'windows-latest' && github.event_name != 'pull_request'
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout repository
if: github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu'
uses: actions/checkout@v2
with:
submodules: recursive
- name: Install Rust toolchain
if: github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu'
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: ${{ matrix.target }}
profile: minimal
components: llvm-tools-preview
override: true
- name: Install and cache Node
if: github.event_name != 'pull_request' && matrix.target != 'x86_64-unknown-linux-musl'
uses: actions/setup-node@v3
with:
node-version: "14"
# Host nodejs arch does not matter in our usecase for compiling cross-platform npm packages,
# if enabled, this will fail after installing an arm64 nodejs runtime for an x86_64 arch macos system
# architecture: ${{ matrix.architecture }}
cache: npm
cache-dependency-path: components/stacks-devnet-js/package-lock.json
- name: Cache cargo
if: github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu'
uses: actions/cache@v2
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/${{ matrix.target }}/release/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
# Set environment variables required from cross compiling from macos-x86_64 to macos-arm64
- name: Configure macos-arm64 cross compile config
if: github.event_name != 'pull_request' && matrix.target == 'aarch64-apple-darwin'
run: |
echo "SDKROOT=$(xcrun -sdk macosx --show-sdk-path)" >> $GITHUB_ENV
echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx --show-sdk-platform-version)" >> $GITHUB_ENV
- name: Configure artifact names (libc)
if: (github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu') && matrix.libc
shell: bash
run: |
echo "SHORT_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-${{ matrix.libc }}" >> $GITHUB_ENV
echo "PRE_GYP_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-${{ matrix.libc }}" >> $GITHUB_ENV
- name: Configure artifact names (not libc)
if: github.event_name != 'pull_request' && ! matrix.libc
shell: bash
run: |
echo "SHORT_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}" >> $GITHUB_ENV
echo "PRE_GYP_TARGET_NAME=${{ matrix.platform }}-${{ matrix.architecture }}-unknown" >> $GITHUB_ENV
- name: Build - Node
if: (github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu') && matrix.target != 'x86_64-unknown-linux-musl'
working-directory: components/stacks-devnet-js
run: |
npm install --ignore-scripts
npm run build-${{ env.SHORT_TARGET_NAME }}
- name: Build - Node (linux-musl)
if: github.event_name != 'pull_request' && matrix.target == 'x86_64-unknown-linux-musl'
uses: docker://rust:alpine3.15
env:
RUSTFLAGS: -C target-feature=-crt-static
with:
entrypoint: /bin/sh
args: -c "
cd components/stacks-devnet-js &&
apk add alpine-sdk nodejs npm git &&
npm install --ignore-scripts &&
npm run build-${{ env.SHORT_TARGET_NAME }}"
- name: Compress node artifact
if: github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu'
shell: bash
run: tar -C components/stacks-devnet-js -zcvf stacks-devnet-js-${{ env.PRE_GYP_TARGET_NAME }}.tar.gz native/index.node
- name: Upload node artifact
if: github.event_name != 'pull_request' || matrix.target == 'x86_64-unknown-linux-gnu'
uses: actions/upload-artifact@v2
with:
name: stacks-devnet-js-${{ env.PRE_GYP_TARGET_NAME }}
path: stacks-devnet-js-${{ env.PRE_GYP_TARGET_NAME }}.tar.gz
# - name: Unit Tests - Node
# working-directory: node-bindings
# run: npm run spec
- name: NPM Publish
uses: JS-DevTools/npm-publish@v1
if: matrix.target == 'x86_64-unknown-linux-gnu' && startsWith(github.ref, 'refs/heads/main') && needs.get_release_info.outputs.tag != ''
with:
package: components/stacks-devnet-js/package.json
access: public
greater-version-only: true
token: ${{ secrets.NPM_TOKEN }}
dist_chainhook_node:
name: Build Chainhook node Distribution
runs-on: ubuntu-latest
needs: pre_run
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: x86_64-unknown-linux-gnu
profile: minimal
components: llvm-tools-preview
override: true
- name: Cache cargo
uses: actions/cache@v2
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/x86_64-unknown-linux-gnu/release/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Configure artifact names
shell: bash
run: |
echo "SHORT_TARGET_NAME=linux-x64-glibc" >> $GITHUB_ENV
echo "PRE_GYP_TARGET_NAME=linux-x64-glibc" >> $GITHUB_ENV
- name: Build - Cargo
working-directory: components/chainhook-cli
run: cargo build --release --locked --target x86_64-unknown-linux-gnu
- name: Compress cargo artifact
run: tar -C target/x86_64-unknown-linux-gnu/release -zcvf chainhook-cli-${{ env.SHORT_TARGET_NAME }}.tar.gz chainhook-cli
- name: Upload cargo artifacts
uses: actions/upload-artifact@v2
with:
name: chainhook-cli-${{ env.SHORT_TARGET_NAME }}
path: chainhook-cli-${{ env.SHORT_TARGET_NAME }}.tar.gz
- name: Unit Tests - Cargo
run: cargo test --release --locked --target x86_64-unknown-linux-gnu
dist_clarity_repl:
  name: Build Clarity REPL Distribution
  runs-on: ubuntu-latest
  # Fix: `get_release_info` must be declared in `needs` — the crates.io
  # publish step below reads needs.get_release_info.outputs.tag, and a job
  # that is not a declared dependency yields an empty `needs` context, so
  # the publish gate could never evaluate to true.
  needs:
    - pre_run
    - get_release_info
  defaults:
    run:
      working-directory: components/clarity-repl
  steps:
    - name: Checkout repository
      uses: actions/checkout@v3
      with:
        submodules: recursive
    - name: Install Rust toolchain
      uses: actions-rs/toolchain@v1
      with:
        toolchain: stable
        target: x86_64-unknown-linux-gnu
        profile: minimal
        components: llvm-tools-preview
        override: true
    - name: Cache cargo
      uses: actions/cache@v2
      with:
        path: |
          ~/.cargo/bin/
          ~/.cargo/registry/index/
          ~/.cargo/registry/cache/
          ~/.cargo/git/db/
          target/x86_64-unknown-linux-gnu/release/
        key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
    - name: Configure artifact names
      run: |
        echo "SHORT_TARGET_NAME=linux-x64-glibc" >> $GITHUB_ENV
        echo "PRE_GYP_TARGET_NAME=linux-x64-glibc" >> $GITHUB_ENV
    - name: Install dependencies
      run: cargo install --force wasm-pack
    - name: Build - Cargo
      run: cargo build --release --locked --target x86_64-unknown-linux-gnu
    # - name: Build - WASM
    #   run: wasm-pack build --target web --release -- --no-default-features --features wasm
    - name: Compress cargo artifact
      # Override the job-level default working-directory so tar runs from the repo root.
      working-directory: "."
      run: tar -C target/x86_64-unknown-linux-gnu/release -zcvf clarity-repl-${{ env.SHORT_TARGET_NAME }}.tar.gz clarity-repl
    # - name: Compress wasm artifact
    #   working-directory: "."
    #   run: tar -C target/wasm32-unknown-unknown/release -zcvf clarity-repl-wasm.tar.gz clarity_repl.wasm
    - name: Upload cargo artifact
      uses: actions/upload-artifact@v2
      with:
        name: clarity-repl-${{ env.SHORT_TARGET_NAME }}
        path: clarity-repl-${{ env.SHORT_TARGET_NAME }}.tar.gz
    # - name: Upload wasm artifact
    #   uses: actions/upload-artifact@v2
    #   with:
    #     name: clarity-repl-wasm
    #     path: clarity-repl-wasm.tar.gz
    - name: Unit Tests - Cargo
      run: cargo test --release --locked --target x86_64-unknown-linux-gnu
    - name: Publish clarity-repl to crates.io
      if: startsWith(github.ref, 'refs/heads/main') && needs.get_release_info.outputs.tag != ''
      run: |
        cargo login ${{ secrets.CARGO_CRATES_IO_API_KEY }}
        cargo publish
docker_images:
name: Create ${{ matrix.name }} Docker Image
runs-on: ubuntu-latest
needs:
- get_release_info
- dist_clarinet
- dist_chainhook_node
outputs:
version: ${{ steps.docker_meta.outputs.version }}
strategy:
fail-fast: false
matrix:
include:
- name: Clarinet
description: Clarinet is a simple, modern and opinionated runtime for testing, integrating and deploying Clarity smart contracts.
image: ${{ github.repository }}
artifact: clarinet-linux-x64-glibc
dockerfile: dockerfiles/components/clarinet.dockerfile
- name: Chainhook Node
description: A Stacks event observer.
image: ${{ github.repository_owner }}/chainhook-cli
artifact: chainhook-cli-linux-x64-glibc
dockerfile: dockerfiles/components/chainhook-cli.dockerfile
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive
- name: Generate Docker tags/labels
id: docker_meta
uses: docker/metadata-action@v4
with:
images: ${{ matrix.image }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}},value=${{ needs.get_release_info.outputs.tag }},enable=${{ needs.get_release_info.outputs.tag != '' }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.get_release_info.outputs.tag }},enable=${{ needs.get_release_info.outputs.tag != '' }}
labels: |
org.opencontainers.image.title=${{ matrix.name }}
org.opencontainers.image.description=${{ matrix.description }}
- name: Login to Dockerhub
uses: docker/login-action@v2
if: github.event_name != 'pull_request'
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Download pre-built dist
uses: actions/download-artifact@v3
with:
name: ${{ matrix.artifact }}
- name: Untar pre-built dist
run: tar zxvf *.tar.gz
- name: Create Image
uses: docker/build-push-action@v3
with:
context: .
file: ${{ matrix.dockerfile }}
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
release:
  name: Release
  runs-on: ubuntu-latest
  if: startsWith(github.ref, 'refs/heads/main') && needs.get_release_info.outputs.tag != ''
  needs:
    - get_release_info
    - audit
    # - test_coverage_cargo
    - dist_stacks_devnet_js
    - dist_clarity_repl
    - docker_images
  permissions:
    actions: write
    contents: write
  steps:
    - name: Checkout
      uses: actions/checkout@v3
      with:
        submodules: recursive
    - name: Download pre-built dists
      uses: actions/download-artifact@v3
    - name: Tag and Release
      uses: ncipollo/release-action@v1
      with:
        artifacts: "**/*.tar.gz,**/*.msi"
        tag: ${{ needs.get_release_info.outputs.tag }}
        # Fix: `env.GITHUB_SHA` is empty — default runner variables are not
        # part of the `env` expression context; use the github context.
        commit: ${{ github.sha }}
    - name: Trigger pkg-version-bump workflow
      uses: peter-evans/repository-dispatch@v1
      with:
        # Fix: events created with the workflow-provided GITHUB_TOKEN do not
        # trigger other workflows, so this dispatch was silently dropped.
        # Use the repo-scoped PAT already used by the other workflows.
        token: ${{ secrets.GH_TOKEN }}
        event-type: released
        client-payload: '{"tag": "${{ needs.get_release_info.outputs.tag }}"}'

16
.github/workflows/new-issues.yaml vendored Normal file
View File

@@ -0,0 +1,16 @@
# Automatically files every newly-opened issue onto the Clarinet project board.
name: Add issues to Clarinet project
on:
issues:
types:
- opened
jobs:
add-to-project:
name: Add issue to project Clarinet
runs-on: ubuntu-latest
steps:
# NOTE(review): pinned to the floating `main` ref — consider pinning to a
# released tag or commit SHA for reproducible behavior.
- uses: actions/add-to-project@main
with:
project-url: https://github.com/orgs/hirosystems/projects/15
github-token: ${{ secrets.GH_TOKEN }}

64
.github/workflows/pkg-version-bump.yaml vendored Normal file
View File

@@ -0,0 +1,64 @@
##
## Bumps the Clarinet version listed on various package managers.
##
## Triggered manually (workflow_dispatch with a `tag` input) or by the
## `released` repository_dispatch event sent from the CI release job.
name: Package Version Bump
on:
workflow_dispatch:
inputs:
tag:
description: 'The tag of the release.'
required: true
repository_dispatch:
types:
- released
env:
GIT_USER_NAME: Hiro DevOps
GIT_USER_EMAIL: 45208873+blockstack-devops@users.noreply.github.com
jobs:
homebrew:
name: Homebrew
runs-on: macos-latest
steps:
- name: Homebrew version bump
env:
HOMEBREW_GITHUB_API_TOKEN: ${{ secrets.GH_TOKEN }}
# Prefer the dispatch payload tag; fall back to the manual input.
TAG: ${{ github.event.client_payload.tag || github.event.inputs.tag }}
run: |
git config --global user.name "${GIT_USER_NAME}"
git config --global user.email "${GIT_USER_EMAIL}"
brew update
brew bump-formula-pr \
--no-browse \
--no-audit \
--tag "${TAG}" \
${{ github.event.repository.name }}
winget:
name: Winget
runs-on: windows-latest
steps:
- name: Winget version bump
env:
TAG: ${{ github.event.client_payload.tag || github.event.inputs.tag }}
run: |
# Get version info (strip the leading "v" from the tag)
$VERSION=${env:TAG}.substring(1)
# Configure git configs
git config --global user.name "${env:GIT_USER_NAME}"
git config --global user.email "${env:GIT_USER_EMAIL}"
# Get wingetcreate
iwr https://aka.ms/wingetcreate/latest -OutFile wingetcreate.exe
# Update manifest and submit PR
./wingetcreate.exe update `
--urls https://github.com/${{ github.repository }}/releases/download/${env:TAG}/clarinet-windows-x64.msi `
--version ${VERSION} `
--token ${{ secrets.GH_TOKEN }} `
--submit `
HiroSystems.Clarinet

25
.gitignore vendored Normal file
View File

@@ -0,0 +1,25 @@
/target
.DS_Store
clarinet.code-workspace
history.txt
node_modules
**/node_modules
target
index.node
npm-debug.log*
**/settings/Mainnet.toml
**/settings/Testnet.toml
**/.requirements
**/.cache
**/.build
components/stacks-devnet-js/dist
components/stacks-devnet-js/build
components/chainhook-types-js/dist
*.tar.gz
*.zip
*.rdb
components/chainhook-db/examples/arkadiko-data-indexing/vault-monitor/bin
components/chainhook-db/examples/arkadiko-data-indexing/vault-monitor/tmp
components/chainhook-db/examples/arkadiko-data-indexing/vault-monitor/vendor
components/chainhook-cli/cache

30
CHANGELOG.md Normal file
View File

@@ -0,0 +1,30 @@
#### 1.4.0 (2023-01-23)
##### New Features
* Polish LSP completion capability ([4cc24ed3](https://github.com/hirosystems/clarinet/commit/4cc24ed3c5edaf61d057c4c1e1ab3d32957e6a15), [16db8dd4](https://github.com/hirosystems/clarinet/commit/16db8dd454ddc5acaec1161ef4aba26cba4c37bf), [905e5433](https://github.com/hirosystems/clarinet/commit/905e5433cc7bf208ea480cc148865e8198bb0420), [9ffdad0f](https://github.com/hirosystems/clarinet/commit/9ffdad0f46294dd36c83ab92c3241b2b01499576), [d3a27933](https://github.com/hirosystems/clarinet/commit/d3a2793350e96ad224f038b11a6ada602fef46af), [cad54358](https://github.com/hirosystems/clarinet/commit/cad54358a1978ab4953aca9e0f3a6ff52ac3afc4), [439c4933](https://github.com/hirosystems/clarinet/commit/439c4933bcbeaaec9f3413892bbcc12fc8ec1b15))
* Upgrade clarity vm ([fefdd1e0](https://github.com/hirosystems/clarinet/commit/fefdd1e092dad8e546e2db7683202d81dd91407a))
* Upgrade stacks-node next image ([492804bb](https://github.com/hirosystems/clarinet/commit/492804bb472a950dded1b1d0c8a951b434a141ac))
* Expose stacks-node settings wait_time_for_microblocks, first_attempt_time_ms, subsequent_attempt_time_ms in Devnet config file
* Improve Epoch 2.1 deployments handling
* Improve `stacks-devnet-js` stability
##### Documentation
* Updated documentation to set clarity version of contract ([b124d96f](https://github.com/hirosystems/clarinet/commit/b124d96fbbef29befc26601cdbd8ed521d4a162a))
# [1.3.1](https://github.com/hirosystems/clarinet/compare/v1.3.0...v1.3.1) (2023-01-03)
### New Features
* Introduce use_docker_gateway_routing setting for CI environments
* Improve signature help in LSP ([eee03cff](https://github.com/hirosystems/clarinet/commit/eee03cff757d3e288abe7436eca06d4c440c71dc))
* Add support for more keyword help in REPL ([f564d469](https://github.com/hirosystems/clarinet/commit/f564d469ccf5e79ab924643627fdda8715da6a1d, [0efcc75e](https://github.com/hirosystems/clarinet/commit/0efcc75e7da3b801e1a862094791f3747452f9e0))
* Various Docker management optimizations / fixes ([b379d29f](https://github.com/hirosystems/clarinet/commit/b379d29f4ad4e85df42e804bc00cec2baff375c0), [4f4c8806](https://github.com/hirosystems/clarinet/commit/4f4c88064e2045de9e48d75b507dd321d4543046))
### Bug Fixes
* Fix STX assets title ([fdc748e7](https://github.com/hirosystems/clarinet/commit/fdc748e7b7df6ef1a6b62ab5cb8c1b68bde9b1ad), [ce5d107c](https://github.com/hirosystems/clarinet/commit/ce5d107c76950d989eb0be8283adf35930283f18))
* Fix define function grammar ([d02835ba](https://github.com/hirosystems/clarinet/commit/d02835bab06578eebb13a791f9faa1c2571d3fb9))
* Fix get_costs panicking ([822d8e29](https://github.com/hirosystems/clarinet/commit/822d8e29965e11864f708a1efd7a8ad385bc1ba3), [e41ae715](https://github.com/hirosystems/clarinet/commit/e41ae71585a432d21cc16c109d2858f9e1d8e22b))

8079
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

8
Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
# Cargo workspace manifest for the chainhook-related components.
[workspace]
members = [
"components/chainhook-event-observer",
"components/chainhook-cli",
"components/chainhook-types-rs",
"components/hiro-system-kit",
]
# `cargo build` with no `-p` flag builds only these members by default.
default-members = ["components/chainhook-cli", "components/chainhook-event-observer"]

674
LICENSE Normal file
View File

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
{one line to give the program's name and a brief idea of what it does.}
Copyright (C) {year} {name of author}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
{project} Copyright (C) {year} {fullname}
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

816
README.md Normal file
View File

@@ -0,0 +1,816 @@
# Clarinet
Clarinet is a Clarity runtime packaged as a command line tool, designed to facilitate smart contract understanding,
development, testing and deployment. Clarinet consists of a Clarity Read-Evaluate-Print-Loop (REPL) environment and a testing harness. When used together, the REPL environment and testing harness enable you to rapidly develop and test a Clarity smart contract, allowing you to deploy the contract to a:
- devnet - a local standalone development environment that simulates the Bitcoin and Stacks networks, along with other helpful components, similar to a staging environment.
- [testnet](https://docs.stacks.co/docs/understand-stacks/testnet) - a testing environment not running in production.
- [mainnet](https://stacks.org/stacks2mainnet) - a production environment where you can deploy smart contracts.
Clarity is a **decidable** smart contract language that optimizes for predictability and security, meaning you can know with certainty, from the code itself, what the program will do. Smart contracts allow you to encode essential business logic on a blockchain.
![screenshot](docs/images/demo.gif)
### Clarinet 101
Hiro has created an introductory video tutorial series that guides you through some of the fundamentals of Clarinet, and how it can help develop, test, and deploy Clarity smart contracts.
Please review and watch the YouTube playlist on [Hiro's Youtube](https://www.youtube.com/c/HiroSystems):
[<img src="docs/images/clarinet101.png">](https://youtube.com/playlist?list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb) channel for information on how to use Clarinet for smart contracts.
## Installation
### Install on macOS (Homebrew)
To install Clarinet on macOS, run the following command:
```bash
brew install clarinet
```
### Install on Windows
If you would like to install Clarinet on Windows, the easiest way to install Clarinet on Windows is to use the MSI installer. You can download the executable from the [releases page](https://github.com/hirosystems/clarinet/releases).
Clarinet is also available on Winget, the package manager that Microsoft began including in the latest Windows updates:
```powershell
winget install clarinet
```
### Install from a pre-built binary
To install Clarinet from pre-built binaries, download the latest release from the [releases page](https://github.com/hirosystems/clarinet/releases).
Unzip the binary, then copy it to a location that is already in your path, such as `/usr/local/bin`.
```sh
# note: you can replace v0.27.0 with any version that is available in the releases page.
wget -nv https://github.com/hirosystems/clarinet/releases/download/v0.27.0/clarinet-linux-x64-glibc.tar.gz -O clarinet-linux-x64.tar.gz
tar -xf clarinet-linux-x64.tar.gz
chmod +x ./clarinet
mv ./clarinet /usr/local/bin
```
On macOS, you may get security errors when trying to run the pre-compiled binary. You can resolve the security warning
with the following command:
```sh
xattr -d com.apple.quarantine /path/to/downloaded/clarinet/binary
```
### Install from source using Cargo
If you would like to install Clarinet from source using Cargo, there are some specific steps you will need to follow, which are described below.
#### Prerequisites
You must first [Install Rust](https://www.rust-lang.org/tools/install) to use the Rust package manager Cargo.
If you are using Debian and Ubuntu-based distributions, make sure to run the following command to install required packages before building Clarinet.
```bash
sudo apt install build-essential pkg-config libssl-dev
```
#### Build Clarinet
When you are ready to build Clarinet, you can build from source using Cargo with the following commands:
```bash
git clone https://github.com/hirosystems/clarinet.git
cd clarinet
cargo clarinet-install
```
By default, you will be in Hiro's development branch, `develop`, with code that has not been released yet. If you plan on submitting any changes to the code, then this is the right branch for you.
If you want the latest stable version, switch to the main branch by entering the following command:
```bash
git checkout main
```
## Getting started with Clarinet
The following sections describe how to create a new project in Clarinet and populate it with smart contracts. Clarinet
also provides tools for interacting with your contracts in a REPL, and performing automated testing of contracts.
### Setup shell completions
Clarinet has many different built-in commands, which are useful to enable tab-completion in your shell. You can use `clarinet` to generate the shell completion scripts for many common shells using the command:
```sh
clarinet completions (bash|elvish|fish|powershell|zsh)
```
After generating the file, please refer to the documentation for your shell to determine where this file should be moved and what other steps may be necessary to enable tab-completion for `clarinet`.
### Create a new project
Once you have installed Clarinet, you can create a new project by entering the following command:
```bash
clarinet new my-project && cd my-project
```
Clarinet will create a project directory with the following directory layout:
```bash
.
├── Clarinet.toml
├── contracts
├── settings
│ └── Devnet.toml
│ └── Testnet.toml
│ └── Mainnet.toml
└── tests
```
The `Clarinet.toml` file contains configuration for the smart contracts in your project. When you create contracts in
your project, Clarinet will add them to this file.
The `settings/Devnet.toml` file contains configuration for accounts in the Clarinet console, including the seed
phrases and initial balances. Initial balances are in microSTX.
For a detailed video description on how you can create a new project, please see the [Creating a New Project](https://www.youtube.com/watch?v=F_Sb0sNafEg&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=4) YouTube video.
### Add a new contract
Clarinet can handle adding a new contract and its configuration to your project with the following command:
```bash
$ clarinet contract new bbtc
```
Clarinet will add 2 files to your project: the contract file in the `contracts` directory, and the contract test file
in the `tests` directory.
```bash
.
├── Clarinet.toml
├── contracts
│ └── bbtc.clar
├── settings
│ └── Devnet.toml
│ └── Mainnet.toml
│ └── Testnet.toml
└── tests
└── bbtc_test.ts
```
Clarinet will also add configuration to the `Clarinet.toml` file for your contract.
```toml
[project.cache_location]
path = ".cache"
[contracts.bbtc]
path = "contracts/bbtc.clar"
```
You may add contracts to your project by adding the files manually; however, you must add the appropriate configuration
to `Clarinet.toml` in order for Clarinet to recognize the contracts.
### Check your contracts
Clarinet provides syntax and semantics checkers for Clarity, which enable you to check if the Clarity code in your project is valid by using the following command:
```bash
$ clarinet check
```
This command uses the `Clarinet.toml` file to locate and analyze all of the contracts in the project. If the Clarity code is valid, the command will indicate success with the following message:
```
✔ 2 contracts checked
```
The checker may also report warnings that indicate the code is valid; however, you should be aware of a specific condition that might arise. For example, the check-checker analysis discussed below will generate warnings. If there are errors in the code, the output of the command will indicate the kind and location of the errors.
You may also perform syntax-check on a single file by using the following command.
```bash
$ clarinet check <path/to/file.clar>
```
If there are no syntax errors, the output of the command will be a success message similar to the example below.
```
✔ Syntax of contract successfully checked
```
**Note** Any syntactical errors in the Clarity code will be reported, but type-checking and other semantic checks will not be performed because Clarinet will only look at this one contract, since it does not have the full context to perform a complete check.
### Static Analysis
#### Check-Checker
The check-checker is a static analysis pass you can use to help find potential vulnerabilities in your contracts. To enable this pass, add the following lines to your `Clarinet.toml` file:
```toml
[repl.analysis]
passes = ["check_checker"]
```
The check-checker pass analyzes your contract to identify places where untrusted inputs might be used in a potentially dangerous way. Since public functions can be called by anyone, any arguments passed to these public functions should be considered untrusted. This analysis pass takes the opinion that all untrusted data must be checked before being used to modify the state on the blockchain. Modifying the state includes any operations that affect wallet balances, or any data stored in your contracts.
- Actions on Stacks wallets:
- stx-burn?
- stx-transfer?
- Actions on fungible tokens:
- ft-burn?
- ft-mint?
- ft-transfer?
- Actions on non-fungible tokens:
- nft-burn?
- nft-mint?
- nft-transfer?
- Actions on persisted data:
- Maps:
- map-delete
- map-insert
- map-set
- Variables:
- var-set
In addition to those operations, the check-checker is also a bit opinionated and prefers that untrusted data be checked near the source, making the code more readable and maintainable. For this reason, the check-checker also requires that arguments passed into private functions and return values must be checked.
- Calls to private functions
- Return values
Finally, another opportunity for exploits appears when contracts call functions from traits. Those traits are untrusted, just like other parameters to public functions, so they are also required to be checked.
- Dynamic contract calls (through traits)
When an untrusted input is used in one of these ways, you will see a warning like this:
```
bank:27:37: warning: use of potentially unchecked data
(as-contract (stx-transfer? (to-uint amount) tx-sender customer))
^~~~~~~~~~~~~~~~
bank:21:36: note: source of untrusted input here
(define-public (withdrawal-unsafe (amount int))
```
In the case where an operation affects only the sender's own wallet (e.g. calling `stx-transfer?` with the sender set to `tx-sender`), then there is no need to generate a warning, because the untrusted input is only affecting the sender, who is the source of that input. To put it another way, the sender should be able to safely specify parameters in an operation that affects only themselves. This sender is also potentially protected by post-conditions.
For a more detailed description on how to use the Check-Checker, please see the [Catch Smart Contract Vulnerabilities With Clarinets Check-Checker Feature](https://www.youtube.com/watch?v=v2qXFL2owC8&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=14) YouTube video.
##### Options
The check-checker provides some options that can be specified in `Clarinet.toml` to handle common usage scenarios that may reduce false positives from the analysis:
```toml
[repl.analysis.check_checker]
strict = false
trusted_sender = true
trusted_caller = true
callee_filter = true
```
If `strict` is set to true, all other options are ignored and the analysis proceeds with the most strict interpretation of the rules.
The `trusted_sender` and `trusted_caller` options handle a common practice in smart contracts where there is a concept of a trusted transaction sender (or transaction caller), which is treated like an admin user. Once a check has been performed to validate the sender (or caller), then all inputs should be trusted.
In the example below, the `asserts!` on line 3 is verifying the `tx-sender`. Because of that check, all inputs are trusted (if the `trusted_sender` option is enabled):
```clarity
(define-public (take (amount int) (from principal))
(let ((balance (- (default-to 0 (get amount (map-get? accounts {holder: from}))) amount)))
(asserts! (is-eq tx-sender (var-get bank-owner)) err-unauthorized)
(map-set accounts {holder: from} {amount: balance})
(stx-transfer? (to-uint amount) (as-contract tx-sender) tx-sender)
)
)
```
The `callee_filter` option loosens the restriction on passing untrusted data to private functions, and instead, allows checks in a called function to propagate up to the caller. This is helpful, because it allows you to define input checks in a function that can be reused.
In the example below, the private function `validate` checks its parameter. The public function `save` calls `validate`, and when the `callee_filter` option is enabled, that call to `validate` will count as a check for the untrusted input, `amount`, resulting in no warnings from the check-checker.
```clarity
(define-public (save (amount uint))
(begin
(try! (validate amount))
(var-set saved amount)
(ok amount)
)
)
(define-private (validate (amount uint))
(let ((current (var-get saved)))
(asserts! (> amount current) err-too-low)
(asserts! (<= amount (* current u2)) err-too-high)
(ok amount)
)
)
```
##### Annotations
Sometimes, there is code that the check-checker analysis is unable to determine is safe; however, you know the code is safe. You want to pass this information to the check-checker to disable warnings that you consider to be false positives. To handle these cases, the check-checker supports several annotations, implemented using "magic comments" in the contract code.
**`#[allow(unchecked_params)]`**
This annotation tells the check-checker that the associated private function is allowed to receive unchecked arguments. It will not generate a warning for calls to this function that pass unchecked inputs. Inside the private function, the parameters are considered unchecked and could generate warnings.
```clarity
;; #[allow(unchecked_params)]
(define-private (my-func (amount uint))
...
)
```
**`#[allow(unchecked_data)]`**
This annotation tells the check-checker that the following expression is allowed to use unchecked data without warnings. It should be used with care, since this will disable all warnings from the associated expression.
```clarity
(define-public (dangerous (amount uint))
(let ((sender tx-sender))
;; #[allow(unchecked_data)]
(as-contract (stx-transfer? amount tx-sender sender))
)
)
```
**`#[filter(var1, var2)]`**
This annotation tells the check-checker to consider the specified variables to be checked by the following expression. This is useful for the case where your contract does some indirect check that validates that an input is safe, but there is no way for the analysis to recognize this. In place of the list of variable names in the annotation, an `*` may be used to filter all inputs.
**Note** The command below is the safest and preferred way to silence warnings that you consider false positives.
```clarity
(define-public (filter_one (amount uint))
(let ((sender tx-sender))
;; #[filter(amount)]
(asserts! (> block-height u1000) (err u400))
(as-contract (stx-transfer? amount tx-sender sender))
)
)
```
### Execute a test suite
Clarinet provides a testing harness based on Deno that can enable you to create automated unit tests or pseudo-integration tests using Typescript.
```bash
$ clarinet test
```
For more information on how to create unit tests using Typescript, see the [Writing Unit Tests Using Typescript](https://www.youtube.com/watch?v=Z4YEHUxHWuE&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=7) YouTube video.
When you use the `clarinet contract new foo` command to create a new contract, Clarinet will automatically create a unit test file for this new contract, `tests/foo_test.ts`. Other files under the `tests/` directory following the Deno test naming convention will also be included:
- named test.{ts, tsx, mts, js, mjs, jsx, cjs, cts},
- or ending with .test.{ts, tsx, mts, js, mjs, jsx, cjs, cts},
- or ending with \_test.{ts, tsx, mts, js, mjs, jsx, cjs, cts}
Within these tests, you can simulate mining a block containing transactions using your contract, and then examine the results of those transactions as well as the events generated by them.
See the [billboard example](examples/billboard/tests/billboard_test.ts) for sample unit tests.
**Note:** If you see an error in VS Code on the imports in the generated test file(s), that says, "An import path cannot end with a '.ts' extension" (an example is shown below), installing the [Deno extension](https://marketplace.visualstudio.com/items?itemName=denoland.vscode-deno) will resolve this error.
![VS Code deno error](docs/images/deno-error.png)
#### Measure and increase code coverage
To help maximize a smart contract's test coverage, Clarinet can produce a `lcov` report, using the following option:
```bash
$ clarinet test --coverage
```
From there, you can use the `lcov` tooling suite to produce HTML reports:
```bash
$ brew install lcov
$ genhtml coverage.lcov
$ open index.html
```
![lcov](docs/images/lcov.png)
### Cost optimizations
Clarinet can also be used to optimize costs. When executing a test suite, Clarinet will keep track of all the costs being computed when executing the `contract-call`, and display the most expensive ones in a table:
```bash
$ clarinet test --cost
```
The `--cost` option can be used in conjunction with `--watch` and filters to maximize productivity, as illustrated here:
![costs](docs/images/costs.gif)
### Load contracts in a console
The Clarinet console is an interactive Clarity REPL environment that runs in-memory. Any contracts in the current project will be
automatically loaded into memory.
```bash
$ clarinet console
```
You can use the `::help` command in the console for a list of valid commands, which can control the state of the
REPL chain, and allow you to advance the chain tip. Additionally, you can enter Clarity commands into the console and observe
the result of the command.
You can exit the console by pressing `Ctrl + C` twice.
Changes to contracts are not loaded into the console while it is running. If you make any changes to your contracts you
must exit the console and run it again.
### Spawn a local Devnet
You can use Clarinet to deploy your contracts to your own local offline environment for testing and
evaluation on a blockchain by using the following command:
```bash
$ clarinet integrate
```
**Note** Make sure you have a working installation of Docker running locally.
### Interacting with contracts deployed on Mainnet
Composition and interactions between protocols and contracts are one of the key innovations in blockchains. Clarinet was designed to handle these types of interactions.
Before referring to contracts deployed on Mainnet, these contracts should be explicitly listed as a `requirement` in the manifest `Clarinet.toml`, either manually:
```toml
[project]
name = "my-project"
[[project.requirements]]
contract_id = "SP2KAF9RF86PVX3NEE27DFV1CQX0T4WGR41X3S45C.bitcoin-whales"
```
or with the command:
```bash
clarinet requirements add SP2KAF9RF86PVX3NEE27DFV1CQX0T4WGR41X3S45C.bitcoin-whales
```
Clarinet will be able to resolve the `contract-call?` statements invoking requirements present in your local contracts by downloading and caching a copy of these contracts and using them during the execution of your test suites. All of the different features are available in `clarinet`.
When deploying your protocol to Devnet / Testnet, for the contracts involving requirements, the setting `remap_requirements` in your deployment plans must be set.
As a step-by-step example, we use here the following contract, [**bitcoin-whales**](https://explorer.stacks.co/txid/SP2KAF9RF86PVX3NEE27DFV1CQX0T4WGR41X3S45C.bitcoin-whales?chain=mainnet)
If you examine this contract, you will see that there are 3 different dependencies: two from the **same**
project (included in the same Clarinet.toml file), and one referring to a contract deployed outside of the current project.
### Same Project
In the contract snippet shown below *(line:260-265)*, there are dependencies on the contracts conversion and conversion-v2 which are included in the same `Clarinet.toml` file.
```clarity
(define-read-only (get-token-uri (token-id uint))
(if (< token-id u5001)
(ok (some (concat (concat (var-get ipfs-root) (unwrap-panic (contract-call? .conversion lookup token-id))) ".json")))
(ok (some (concat (concat (var-get ipfs-root) (unwrap-panic (contract-call? .conversion-v2 lookup (- token-id u5001)))) ".json")))
)
)
```
### External Deployer
In this code snippet, there is a dependency on the `nft-trait` *(line:001)* deployed by `'SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9`.
```clarity
(impl-trait 'SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9.nft-trait.nft-trait)
```
- Dependencies from **external** contracts should be set in `[[project.requirements]]`
```toml
[project]
name = "my-project"
[[project.requirements]]
contract_id = "SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9.nft-trait"
[project.cache_location]
path = ".cache"
[contracts.bitcoin-whales]
path = "contracts/bitcoin-whales.clar"
[contracts.conversion]
path = "contracts/conversion.clar"
[contracts.conversion-v2]
path = "contracts/conversion-v2.clar"
[repl.analysis]
passes = ["check_checker"]
[repl.analysis.check_checker]
strict = false
trusted_sender = false
trusted_caller = false
callee_filter = false
```
As a next step we can generate a deployment plan for this project. If you are running `$ clarinet integrate` for the first time, this file should be created by Clarinet. In addition, you can run `$ clarinet deployment generate --devnet` to create or overwrite the file.
```yaml
---
id: 0
name: Devnet deployment
network: devnet
stacks-node: "http://localhost:20443"
bitcoin-node: "http://devnet:devnet@localhost:18443"
plan:
batches:
- id: 0
transactions:
- requirement-publish:
contract-id: SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9.nft-trait
remap-sender: ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM
remap-principals:
SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9: ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM
cost: 4680
path: ".requirements\\SP2PABAF9FTAJYNFZH93XENAJ8FVY99RRM50D2JG9.nft-trait.clar"
- contract-publish:
contract-name: conversion
expected-sender: ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM
cost: 340250
path: "contracts\\conversion.clar"
anchor-block-only: true
- contract-publish:
contract-name: conversion-v2
expected-sender: ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM
cost: 351290
path: "contracts\\conversion-v2.clar"
anchor-block-only: true
- contract-publish:
contract-name: bitcoin-whales
expected-sender: ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM
cost: 87210
path: "contracts\\bitcoin-whales.clar"
anchor-block-only: true
```
As the example above shows, Clarinet will remap the external contract to Hiro's Devnet address. In addition, it will also create a copy of the contract in the folder `.requirements`.
### Deploy contracts to Devnet / Testnet / Mainnet
You can use Clarinet to publish your contracts to Devnet / Testnet / Mainnet environment for testing and evaluation on a blockchain.
The first step to deploy a contract is to generate a deployment plan, with the following command:
```bash
$ clarinet deployment generate --mainnet
```
After **cautiously** reviewing (and updating if needed) the generated plan, you can use the command to handle the deployments of your contract, according to your deployment plan:
```bash
$ clarinet deployment apply -p <path-to-plan.yaml>
```
### Use Clarinet in your CI workflow as a GitHub Action
Clarinet may also be used in GitHub Actions as a step of your CI workflows.
You may set-up a simple workflow by adding the following steps in a file `.github/workflows/github-actions-clarinet.yml`:
```yaml
name: CI
on: [push]
jobs:
tests:
name: "Test contracts with Clarinet"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: "Execute unit tests"
uses: docker://hirosystems/clarinet:latest
with:
args: test --coverage --manifest-path=./Clarinet.toml
- name: "Export code coverage"
uses: codecov/codecov-action@v1
with:
files: ./coverage.lcov
verbose: true
```
You may also add the steps above in your existing workflows. The generated code coverage output can then be used as is with GitHub Apps like https://codecov.io.
For more information on how you can use GitHub Actions with Clarinet, please see the [A Simple CI With Clarinet and GitHub](https://www.youtube.com/watch?v=cEv6Mi4EcKQ&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=8) YouTube video.
### Extensions
Clarinet can easily be extended by community members: open source contributions to Clarinet are welcome, but you may also write your own Clarinet extensions if you want to integrate Clarity contracts with your own tooling and workflow.
| Name | wallet access | disk write | disk read | Deployment | Description |
| ------------------------- | ------------- | ---------- | --------- | --------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- |
| stacksjs-helper-generator | no | yes | no | https://deno.land/x/clarinet@v1.0.2/ext/stacksjs-helper-generator.ts | Facilitates contract integration by generating some typescript constants that can be used with stacks.js. Never hard code a stacks address again! |
| | | | | |
#### How to use extensions
Extensions are run with the following syntax:
```
$ clarinet run --allow-write https://deno.land/x/clarinet@v0.29.0/ext/stacksjs-helper-generator.ts
```
An extension can be deployed as a standalone plugin on Deno, or may also be a local file if it includes sensitive / private setup information.
As illustrated in the example above, permissions (wallet / disk read / disk write) are declared using command flags. If at runtime, the Clarinet extension is trying to write to disk, read disk, or access wallets without permission, the script will fail.
### Debug your contracts
#### VS Code Debugger
Clarinet supports the [Debug Adapter Protocol](https://microsoft.github.io/debug-adapter-protocol/) (DAP) which enables you to debug your smart contracts inside of VS Code, or any code editor that supports the DAP protocol.
To setup a debug session, you will first need to create a `launch.json` file to tell VS Code what you want to debug. The easiest way to do this is to let VS Code generate the template for you by opening the "Run and Debug" view and clicking "create a launch.json file".
![Run and Debug View](docs/images/run-and-debug.png)
This will create the file `.vscode/launch.json` with the default template:
```json
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "clarinet",
"request": "launch",
"name": "Call .foo.bar",
"manifest": "${workspaceFolder}/Clarinet.toml",
"expression": "(contract-call? .foo bar 42)"
}
]
}
```
Depending on your needs, you will want to set the `name` field to whatever makes sense for your project, then set the `expression` to the Clarity expression that you would like to debug. In the case of the default example shown in the template above, the debugger would start executing the `bar` function of the `foo` contract, passing the argument `42`. Once this file is configured, the debugger works as expected for any [VS Code debugging](https://code.visualstudio.com/docs/editor/debugging).
Execution begins paused at the first expression. The debug toolbar includes buttons to continue, step over, step into, step out, restart, and stop, in that order.
![debug toolbar](docs/images/debug-toolbar.png)
Breakpoints can be set by clicking in the left gutter next to the code or using the right-click menu at a specific code location.
![breakpoint](docs/images/breakpoint.png)
Data watchpoints may also be set, by clicking the + in the Watch section of the Debug side bar and typing the contract variable to watch in the format `<principal>.<contract>.<name>` or using the shortcut for a local contract, `.<contract>.<name>`. When a watchpoint is set on a contract variable, execution will pause when its value will change.
![watchpoint](docs/images/watchpoint.png)
During execution, the values of the current contract's variables, the current function's arguments, and any local variables (i.e. from a `let` expression) are shown in the side bar. The current watchpoints are also shown with their current values. In both cases, the contents of a map are not shown, but can be queried in the Debug Console. The call stack is also updated to show the call stack of the current execution.
![view of side bar, showing variables, watchpoints, and call stack](docs/images/sidebar.png)
At any point during execution, an expression can be evaluated in the current context via the Debug Console. Just type any valid Clarity expression and hit enter to evaluate it. Upon completion, the events emitted and the return value are printed to the debug console.
![debug console](docs/images/debug-console.png)
For more information on how you can use VS Code to debug smart contracts, please see the [How to Debug Smart Contracts in Clarinets VS Code Extension](https://www.youtube.com/watch?v=DsLCDQSijwk&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=15) YouTube video.
#### Command Line Debugger
Inside of the console (`clarinet console`), there is a debugger for stepping through your contracts on the command line, including support for:
- Breakpoints
- **Source**: Break at a specific line (and optional column) of a contract (`break` or `b` command)
```
b SP466FNC0P7JWTNM2R9T199QRZN1MYEDTAR0KP27.miamicoin-token:28:4
```
- **Function**: Break at a specific function (`break` or `b` command)
```
b .crashpunks-v2.transfer
```
- **Data**: Break on read/write to a variable or map (`watch` or `w` to break on write, `rwatch` or `rw` to break on read, and `awatch` or `aw` to break on read or write)
```
w contracts/SP2KAF9RF86PVX3NEE27DFV1CQX0T4WGR41X3S45C.bitcoin-whales.payout
```
- Step execution
- **Step-in**: Step into the sub-expressions (`step` or `s` command)
- **Step-out**: Complete execution of the current expression and return the result back to the parent (`finish` or `f` command)
- **Step-over**: Continue to completion of the current expression, stepping over sub-expressions (`next` or `n` command)
- **Continue**: Continue execution until hitting a breakpoint or completing execution (`continue` or `c` command)
- Evaluate and print the result of any expression (`print` or `p` command)
To initiate a debug session, first enter the REPL console using the following command:
```
clarinet console
```
Then, at the REPL prompt, enter this command to debug any expression:
```
::debug (contract-call? .foo hello .bar))
```
At the debug prompt, use any of the commands described above, or use `help` to get the full help documentation.
For more information about how to debug a smart contract using the command line, please see the [Debug Your Smart Contracts With Clarinets New Inline Debugger](https://www.youtube.com/watch?v=nVDWeuMnkDs&list=PL5Ujm489LoJaAz9kUJm8lYUWdGJ2AnQTb&index=13) YouTube video.
#### Execution Trace
When you are in the console (`clarinet console`), the `::trace <expr>` command enables you to execute an expression and print a trace of the execution, which can be very helpful for identifying problems with the contract.
This trace shows all function calls, both internal calls to private functions, and contract calls to other contracts. For each call, the parameters and return value are shown in the trace. Any events that are emitted are also shown in the trace.
![execution trace](docs/images/trace.png)
### Deploy with Hyperchains on Devnet
Clarinet can be used for facilitating experimentations with [Hyperchains](https://www.youtube.com/watch?v=PFPwuVCGGuI).
To begin working with subnets, in your `Devnet.toml`, enable the following flag:
```toml
[devnet]
# ...
enable_subnet_node = true
```
This same file may also be used for customizing the subnet-node (miner, etc).
When running the command:
```bash
$ clarinet integrate
```
Clarinet will spin-up a subnet node. More documentation on how to use and interact with this incoming L2 can be found on the [Hyperchain repository](https://github.com/hirosystems/stacks-subnets).
## Contributing to Clarinet
Contributions are welcome and appreciated. The following sections provide information on how you can contribute to Clarinet.
### Prerequisites
Before contributing to Clarinet, please ensure you meet the following requirements:
- rust (>=1.52.0)
- cargo (>=1.52.0)
- node (>=v14.16.0) - Used for git commit hook
- npm (>=7.18.0) - Used for git commit hook
### Guide
This repo follows the [Conventional Commit](https://www.conventionalcommits.org/en/v1.0.0/#summary) specification when writing commit messages.
**Note:** It is important that any pull requests you submit have commit messages that follow this standard.
To start contributing:
1. Fork this repo and clone the fork locally.
2. Create a new branch
```bash
git checkout -b <my-branch>
```
3. Run `npm i` in the local repo to install and initialize `husky` and `commitlint`.
```bash
npm i
```
4. These tools will be used in a `git commit` hook to lint and validate your commit message. If the message is invalid, `commitlint` will alert you to try again and fix it.
Here is an example of a bad message response:
```bash
$ git commit -m "bad message"
$ ⧗ input: bad message
$ ✖ subject may not be empty [subject-empty]
$ ✖ type may not be empty [type-empty]
$
$ ✖ found 2 problems, 0 warnings
$ ⓘ Get help: https://github.com/conventional-changelog/commitlint/#what-is-commitlint
$
$ husky - commit-msg hook exited with code 1 (error)
```
Here is an example of a good message response:
```bash
$ git commit -m "fix: added missing dependency"
$ [my-branch 4c028af] fix: added missing dependency
$ 1 file changed, 50 insertions(+)
```
5. After making your changes, ensure the following:
- `cargo build` runs successfully.
- `cargo test` runs successfully.
- You have formatted your code with `cargo fmt --all --`
- All functional tests in the `examples` directory pass.
```bash
for testdir in $(ls examples); do
pushd examples/${testdir}
../../target/debug/clarinet test .
popd
done
```
6. Submit a pull request against the `develop` branch for review.

View File

@@ -0,0 +1,45 @@
[package]
name = "chainhook-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
csv = "1"
num_cpus = "1.4"
serde = "1"
serde_json = "1"
serde_derive = "1"
redis = "0.21.5"
serde-redis = "0.12.0"
chainhook_event_observer = { package = "chainhook-event-observer", default-features = false, path = "../chainhook-event-observer" }
chainhook_types = { package = "chainhook-types", path = "../chainhook-types-rs" }
hiro_system_kit = { package = "hiro-system-kit", path = "../hiro-system-kit" }
clap = { version = "3.2.23", features = ["derive"], optional = true }
clap_generate = { version = "3.0.3", optional = true }
toml = { version = "0.5.6", features = ["preserve_order"], optional = true }
ctrlc = { version = "3.2.2", optional = true }
reqwest = { version = "0.11", features = ["stream", "json"] }
tokio = { version = "=1.24", features = ["full"] }
futures-util = "0.3.24"
flate2 = "1.0.24"
tar = "0.4.38"
flume = "0.10.14"
ansi_term = "0.12.1"
atty = "0.2.14"
bitcoincore-rpc = "0.16.0"
bitcoincore-rpc-json = "0.16.0"
crossbeam-channel = "0.5.6"
[dev-dependencies]
criterion = "0.3"
redis = "0.21.5"
clarity_repl = { package = "clarity-repl", path = "../../../clarinet/components/clarity-repl" }
hex = "0.4.3"
[features]
default = ["cli"]
cli = ["clap", "clap_generate", "toml", "ctrlc", "hiro_system_kit/log"]
debug = ["hiro_system_kit/debug"]
release = ["hiro_system_kit/release"]

View File

@@ -0,0 +1,14 @@
[storage]
driver = "redis"
redis_uri = "redis://localhost:6379/"
[chainhooks]
max_stacks_registrations = 500
max_bitcoin_registrations = 500
[network]
mode = "devnet"
bitcoin_node_rpc_url = "http://0.0.0.0:18443"
bitcoin_node_rpc_username = "devnet"
bitcoin_node_rpc_password = "devnet"
stacks_node_rpc_url = "http://0.0.0.0:20443"

View File

@@ -0,0 +1,207 @@
# chainhook-cli
## Usage
To get started, [build `clarinet` from source](https://github.com/hirosystems/clarinet#install-from-source-using-cargo), and then `cd components/chainhook-cli` and run `cargo install --path .` to install `chainhook-cli`.
Before running `chainhook-cli`, you need to [install redis](https://redis.io/docs/getting-started/installation/) and run a redis server locally.
### Start a Testnet node
```bash
$ chainhook-cli start --testnet
```
### Start a Mainnet node
```bash
$ chainhook-cli start --mainnet
```
### Start a Devnet node
```bash
$ chainhook-cli start --devnet
```
## Predicates available
### Bitcoin
```yaml
# Get any transaction matching a given txid
# `txid` mandatory argument admits:
# - 32 bytes hex encoded type. example: "0xfaaac1833dc4883e7ec28f61e35b41f896c395f8d288b1a177155de2abd6052f"
predicate:
txid: 0xfaaac1833dc4883e7ec28f61e35b41f896c395f8d288b1a177155de2abd6052f
# Get any transaction including an OP_RETURN output starting with a set of characters.
# `starts-with` mandatory argument admits:
# - ASCII string type. example: `X2[`
# - hex encoded bytes. example: `0x589403`
predicate:
scope: outputs
op-return:
starts-with: X2[
# Get any transaction including an OP_RETURN output matching the sequence of bytes specified
# `equals` mandatory argument admits:
# - hex encoded bytes. example: `0x589403`
predicate:
scope: outputs
op-return:
equals: 0x69bd04208265aca9424d0337dac7d9e84371a2c91ece1891d67d3554bd9fdbe60afc6924d4b0773d90000006700010000006600012
# Get any transaction including an OP_RETURN output ending with a set of characters
# `ends-with` mandatory argument admits:
# - ASCII string type. example: `X2[`
# - hex encoded bytes. example: `0x589403`
predicate:
scope: outputs
op-return:
ends-with: 0x76a914000000000000000000000000000000000000000088ac
# Get any transaction including a Stacks Proof of Burn commitment
predicate:
scope: outputs
stacks-op:
type: pob-commit
# Get any transaction including a Stacks Proof of Transfer commitment
# `recipients` mandatory argument admits:
# - string "*"
# - array of strings type. example: ["mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC", "muYdXKmX9bByAueDe6KFfHd5Ff1gdN9ErG"]
# - array of hex encoded bytes type. example: ["76a914000000000000000000000000000000000000000088ac", "0x76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac"]
predicate:
scope: outputs
stacks-op:
type: pox-commit
recipients: "*"
# Get any transaction including a key registration operation
predicate:
scope: outputs
stacks-op:
type: key-registration
# Get any transaction including a STX transfer operation
# `recipient` optional argument admits:
# - string encoding a valid STX address. example: "ST2CY5V39NHDPWSXMW9QDT3HC3GD6Q6XX4CFRK9AG"
# `sender` optional argument admits:
# - string type. example: "mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC"
# - hex encoded bytes type. example: "0x76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac"
predicate:
scope: outputs
stacks-op:
type: stx-transfer
# Get any transaction including a STX lock operation
# `sender` optional argument admits:
# - string type. example: "mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC"
# - hex encoded bytes type. example: "0x76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac"
predicate:
scope: outputs
stacks-op:
type: stx-lock
# Get any transaction including a p2pkh output paying a given recipient
# `p2pkh` construct admits:
# - string type. example: "mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC"
# - hex encoded bytes type. example: "0x76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac"
predicate:
scope: outputs
p2pkh: mr1iPkD9N3RJZZxXRk7xF9d36gffa6exNC
# Get any transaction including a p2sh output paying a given recipient
# `p2sh` construct admits:
# - string type. example: "2MxDJ723HBJtEMa2a9vcsns4qztxBuC8Zb2"
# - hex encoded bytes type. example: "0x76a914ee9369fb719c0ba43ddf4d94638a970b84775f4788ac"
predicate:
scope: outputs
p2sh: 2MxDJ723HBJtEMa2a9vcsns4qztxBuC8Zb2
# Get any transaction including a p2wpkh output paying a given recipient
# `p2wpkh` construct admits:
# - string type. example: "bcrt1qnxknq3wqtphv7sfwy07m7e4sr6ut9yt6ed99jg"
predicate:
scope: outputs
p2wpkh: bcrt1qnxknq3wqtphv7sfwy07m7e4sr6ut9yt6ed99jg
# Get any transaction including a p2wsh output paying a given recipient
# `p2wsh` construct admits:
# - string type. example: "bc1qklpmx03a8qkv263gy8te36w0z9yafxplc5kwzc"
predicate:
scope: outputs
p2wsh: bc1qklpmx03a8qkv263gy8te36w0z9yafxplc5kwzc
# Additional predicates including support for taproot coming soon
```
### Stacks
```yaml
# Get any transaction matching a given txid
# `txid` mandatory argument admits:
# - 32 bytes hex encoded type. example: "0xfaaac1833dc4883e7ec28f61e35b41f896c395f8d288b1a177155de2abd6052f"
predicate:
txid: 0xfaaac1833dc4883e7ec28f61e35b41f896c395f8d288b1a177155de2abd6052f
# Get any transaction related to a given fungible token asset identifier
# `asset-identifier` mandatory argument admits:
# - string type, fully qualifying the asset identifier to observe. example: `ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.cbtc-sip10::cbtc`
# `actions` mandatory argument admits:
# - array of string type constrained to `mint`, `transfer` and `burn` values. example: ["mint", "burn"]
predicate:
ft-event:
asset-identifier: 'ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.cbtc-sip10::cbtc'
actions:
- mint
- burn
# Get any transaction related to a given non fungible token asset identifier
# `asset-identifier` mandatory argument admits:
# - string type, fully qualifying the asset identifier to observe. example: `ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.monkey-sip09::monkeys`
# `actions` mandatory argument admits:
# - array of string type constrained to `mint`, `transfer` and `burn` values. example: ["mint", "burn"]
predicate:
nft-event:
asset-identifier: 'ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.monkey-sip09::monkeys'
actions:
- transfer
- burn
# Get any transaction moving STX tokens
# `actions` mandatory argument admits:
# - array of string type constrained to `mint`, `transfer` and `lock` values. example: ["mint", "lock"]
predicate:
stx-event:
actions:
- mint
- lock
# Get any transaction emitting given print events predicate
# `contract-identifier` mandatory argument admits:
# - string type, fully qualifying the contract to observe. example: `ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.monkey-sip09`
# `contains` mandatory argument admits:
# - string type, used for matching event
predicate:
print-event:
contract-identifier: 'ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.monkey-sip09'
contains: "vault"
# Get any transaction including a contract deployment
# `deployer` mandatory argument admits:
# - string "*"
# - string encoding a valid STX address. example: "ST2CY5V39NHDPWSXMW9QDT3HC3GD6Q6XX4CFRK9AG"
predicate:
contract-deploy:
deployer: "ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM"
# Get any transaction including a contract deployment implementing a given trait (coming soon)
# `impl-trait` mandatory argument admits:
# - string type, fully qualifying the trait's shape to observe. example: `ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.sip09-protocol`
predicate:
contract-deploy:
impl-trait: "ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.sip09-protocol"
```

View File

@@ -0,0 +1,28 @@
use clarity_repl::clarity::codec::StacksString;
use clarity_repl::clarity::util::hash::{hex_bytes, to_hex};
use clarity_repl::clarity::ClarityName;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use hex::{decode, encode};
#[inline]
fn canonical_is_clarity_variable() {
let function_name = ClarityName::try_from("my-method-name").unwrap();
StacksString::from(function_name.clone()).is_clarity_variable();
}
/// Candidate replacement workload meant to be compared against
/// `canonical_is_clarity_variable`.
///
/// NOTE(review): the body only constructs the `ClarityName` and never
/// invokes any `is_clarity_variable`-style check, so this benchmark
/// currently measures name construction alone — presumably the
/// "proposed" implementation was meant to be called here; confirm
/// before drawing conclusions from the comparison.
#[inline]
fn proposed_is_clarity_variable() {
    // Binding is unused; it exists only to exercise `try_from`.
    let function_name = ClarityName::try_from("my-method-name").unwrap();
}
pub fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("canonical_is_clarity_variable <my-method-name>", |b| {
b.iter(|| canonical_is_clarity_variable())
});
c.bench_function("proposed_is_clarity_variable <my-method-name>", |b| {
b.iter(|| proposed_is_clarity_variable())
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

View File

@@ -0,0 +1,398 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use clarity_repl::clarity::util::hash::{hex_bytes, to_hex};
use hex::{decode, encode};
/// 180-byte Stacks transaction payload used as the shared benchmark
/// fixture. The original file repeated this array inline twice (one byte
/// per line) and its hex form twice; the fixture is now defined once.
const SAMPLE_TX_BYTES: [u8; 180] = [
    0x00, 0x00, 0x00, 0x00, 0x01, 0x04, 0x00, 0xa4, 0x68, 0xe7, 0x2d, 0xf6, 0x75, 0x43, 0x87, 0x20,
    0x98, 0x27, 0x32, 0x7b, 0x98, 0x9f, 0x4a, 0x99, 0x54, 0x2a, 0x69, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x77, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xb4, 0x00, 0x01, 0x23, 0x1c, 0x21,
    0x39, 0x49, 0x45, 0x53, 0x17, 0xd1, 0x5f, 0xf2, 0x5d, 0xfd, 0x7a, 0xd9, 0xbc, 0x04, 0x26, 0xcf,
    0xfa, 0x5c, 0x6f, 0x5b, 0xe4, 0x6e, 0x79, 0xfa, 0x1e, 0x18, 0x2b, 0x61, 0x45, 0x18, 0x0c, 0x1c,
    0x13, 0x5d, 0x6c, 0xcc, 0xfc, 0xc4, 0x9e, 0x85, 0xcb, 0x82, 0x5e, 0x61, 0x52, 0x28, 0xda, 0xf2,
    0xd1, 0x66, 0x3e, 0x49, 0xd7, 0x0c, 0xd5, 0xd0, 0xe1, 0x96, 0xd1, 0x26, 0xc0, 0x03, 0x02, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x05, 0x16, 0x7b, 0x2d, 0xd1, 0xf0, 0xd4, 0x7f, 0x59, 0x67, 0x21, 0xc2,
    0x33, 0xfd, 0x9b, 0x2e, 0x99, 0x19, 0xac, 0x91, 0x28, 0x8a, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x1b,
    0x5e, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
];

/// Hex encoding of `SAMPLE_TX_BYTES` (360 characters), used as the input
/// to the decoding benchmarks.
const SAMPLE_TX_HEX: &str = "00000000010400a468e72df6754387209827327b989f4a99542a69000000000000007700000000000000b40001231c213949455317d15ff25dfd7ad9bc0426cffa5c6f5be46e79fa1e182b6145180c1c135d6cccfcc49e85cb825e615228daf2d1663e49d70cd5d0e196d126c00302000000000005167b2dd1f0d47f596721c233fd9b2e9919ac91288a000000000c1b5e4800000000000000000000000000000000000000000000000000000000000000000000";

/// Decode a hex string using the Stacks `hex_bytes` helper.
#[inline]
fn decode_hex_str_stacks(input: &str) -> Vec<u8> {
    hex_bytes(input).unwrap()
}

/// Encode the shared fixture using the Stacks `to_hex` helper.
#[inline]
fn encode_hex_str_stacks() -> String {
    to_hex(&SAMPLE_TX_BYTES)
}

/// Decode a hex string using the `hex` crate.
#[inline]
fn decode_hex_str_crate(input: &str) -> Vec<u8> {
    decode(input).unwrap()
}

/// Encode the shared fixture using the `hex` crate.
#[inline]
fn encode_hex_str_crate() -> String {
    encode(&SAMPLE_TX_BYTES)
}

/// Compares the Stacks hex helpers against the `hex` crate on the same
/// 180-byte transaction payload, for both decoding and encoding.
pub fn criterion_benchmark(c: &mut Criterion) {
    c.bench_function("decode_hex_str ('hex_bytes' from stacks) <360>", |b| {
        b.iter(|| decode_hex_str_stacks(black_box(SAMPLE_TX_HEX)))
    });
    c.bench_function("decode_hex_str (function from crate 'hex') <360>", |b| {
        b.iter(|| decode_hex_str_crate(black_box(SAMPLE_TX_HEX)))
    });
    c.bench_function("encode_to_hex_str ('to_hex' from stacks) <360>", |b| {
        b.iter(|| encode_hex_str_stacks())
    });
    c.bench_function("encode_to_hex_str (function from crate 'hex') <360>", |b| {
        b.iter(|| encode_hex_str_crate())
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

View File

@@ -0,0 +1,17 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use chainhook_event_indexer::ingestion::start_ingesting;
use chainhook_event_observer::indexer::IndexerConfig;
/// Benchmarks end-to-end TSV ingestion against a local devnet node.
///
/// Fixed: the TSV fixture path was hard-coded to the original author's
/// home directory. It now defaults to that value but can be overridden
/// through the `CHAINHOOK_BENCH_TSV` environment variable so the
/// benchmark is runnable on any machine.
fn criterion_benchmark(c: &mut Criterion) {
    let config = IndexerConfig {
        stacks_node_rpc_url: "http://0.0.0.0:20443".into(),
        bitcoin_node_rpc_url: "http://0.0.0.0:18443".into(),
        bitcoin_node_rpc_username: "devnet".into(),
        bitcoin_node_rpc_password: "devnet".into(),
    };
    // Resolve the fixture location once, outside the measured closure.
    let tsv_path = std::env::var("CHAINHOOK_BENCH_TSV")
        .unwrap_or_else(|_| "/Users/ludovic/Downloads/stacks-blockchain-api.tsv".to_string());
    c.bench_function("redis", |b| {
        b.iter(|| start_ingesting(tsv_path.clone().into(), config.clone()).unwrap())
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

View File

@@ -0,0 +1,16 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
/// Placeholder benchmark workload: Criterion's Fibonacci template renamed
/// in anticipation of a future TSV-ingestion benchmark (it does not read
/// any TSV yet).
///
/// Fixed: the recursive calls referenced an undefined `fibonacci` symbol,
/// so the file did not compile; the recursion is now self-referential.
fn ingest_tsv(n: u64) -> u64 {
    match n {
        // Base cases: fib(0) = fib(1) = 1.
        0 | 1 => 1,
        n => ingest_tsv(n - 1) + ingest_tsv(n - 2),
    }
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20))));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

View File

@@ -0,0 +1,16 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
/// Placeholder benchmark workload: Criterion's Fibonacci template renamed
/// in anticipation of a future TSV-ingestion benchmark (it does not read
/// any TSV yet).
///
/// Fixed: the recursive calls referenced an undefined `fibonacci` symbol,
/// so the file did not compile; the recursion is now self-referential.
fn ingest_tsv(n: u64) -> u64 {
    match n {
        // Base cases: fib(0) = fib(1) = 1.
        0 | 1 => 1,
        n => ingest_tsv(n - 1) + ingest_tsv(n - 2),
    }
}
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20))));
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

View File

@@ -0,0 +1,61 @@
In a console, launch `redis-server` with the following command
```bash
$ redis-server
```
In another console, we will launch `vault-monitor`. `vault-monitor` is a program that will be processing the events triggered by `chainhook-db`. Ruby on Rails (ruby 2.7+, rails 7+) was used to demonstrate that Chainhooks is a language agnostic layer.
```bash
# Navigate to vault monitor directory
$ cd vault-monitor
# Install dependencies
$ bundle install
# Create database and run db migrations (will use sqlite in development mode)
$ rails db:migrate
# Run program
$ rails server
```
`vault-monitor` exposes an admin readonly user interface at this address `http://localhost:3000/admin`.
In another console, launch `chainhook-cli`, using the command:
```bash
$ chainhook-cli replay --testnet
```
Finally, make `vault-monitor` register a chainhook, using the following command:
```bash
curl -X "POST" "http://0.0.0.0:20446/v1/chainhooks/" \
-H 'Content-Type: application/json' \
-d $'{
"stacks": {
"predicate": {
"type": "print_event",
"rule": {
"contains": "vault",
"contract_identifier": "SP2C2YFP12AJZB4MABJBAJ55XECVS7E4PMMZ89YZR.arkadiko-freddie-v1-1"
}
},
"action": {
"http": {
"url": "http://localhost:3000/chainhooks/v1/vaults",
"method": "POST",
"authorization_header": "Bearer cn389ncoiwuencr"
}
},
"uuid": "1",
"decode_clarity_values": true,
"version": 1,
"name": "Vault events observer",
"network": "mainnet"
}
}'
```

View File

@@ -0,0 +1,7 @@
# See https://git-scm.com/docs/gitattributes for more about git attribute files.
# Mark the database schema as having been generated.
db/schema.rb linguist-generated
# Mark any vendored files as having been vendored.
vendor/* linguist-vendored

View File

@@ -0,0 +1,35 @@
# See https://help.github.com/articles/ignoring-files for more about ignoring files.
#
# If you find yourself ignoring temporary files generated by your text editor
# or operating system, you probably want to add a global ignore instead:
# git config --global core.excludesfile '~/.gitignore_global'
# Ignore bundler config.
/.bundle
# Ignore the default SQLite database.
/db/*.sqlite3
/db/*.sqlite3-*
# Ignore all logfiles and tempfiles.
/log/*
/tmp/*
!/log/.keep
!/tmp/.keep
# Ignore pidfiles, but keep the directory.
/tmp/pids/*
!/tmp/pids/
!/tmp/pids/.keep
# Ignore uploaded files in development.
/storage/*
!/storage/.keep
/tmp/storage/*
!/tmp/storage/
!/tmp/storage/.keep
/public/assets
# Ignore master key for decrypting credentials and more.
/config/master.key

View File

@@ -0,0 +1,77 @@
source "https://rubygems.org"
git_source(:github) { |repo| "https://github.com/#{repo}.git" }
ruby "2.7.6"
# Bundle edge Rails instead: gem "rails", github: "rails/rails", branch: "main"
gem "rails", "~> 7.0.3", ">= 7.0.3.1"
# The original asset pipeline for Rails [https://github.com/rails/sprockets-rails]
gem "sprockets-rails"
# Use sqlite3 as the database for Active Record
gem "sqlite3", "~> 1.4"
# Use the Puma web server [https://github.com/puma/puma]
gem "puma", "~> 5.0"
# Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
gem "importmap-rails"
# Hotwire's SPA-like page accelerator [https://turbo.hotwired.dev]
gem "turbo-rails"
# Hotwire's modest JavaScript framework [https://stimulus.hotwired.dev]
gem "stimulus-rails"
# Build JSON APIs with ease [https://github.com/rails/jbuilder]
gem "jbuilder"
# Use Redis adapter to run Action Cable in production
gem "redis", "~> 4.0"
# Use Kredis to get higher-level data types in Redis [https://github.com/rails/kredis]
# gem "kredis"
# Use Active Model has_secure_password [https://guides.rubyonrails.org/active_model_basics.html#securepassword]
# gem "bcrypt", "~> 3.1.7"
# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
gem "tzinfo-data", platforms: %i[ mingw mswin x64_mingw jruby ]
# Reduces boot times through caching; required in config/boot.rb
gem "bootsnap", require: false
gem 'logging'
gem 'logging-rails'
# Use Sass to process CSS
# gem "sassc-rails"
# Use Active Storage variants [https://guides.rubyonrails.org/active_storage_overview.html#transforming-images]
# gem "image_processing", "~> 1.2"
gem "administrate"
group :development, :test do
# See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
gem "debug", platforms: %i[ mri mingw x64_mingw ]
end
group :development do
# Use console on exceptions pages [https://github.com/rails/web-console]
gem "web-console"
# Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
# gem "rack-mini-profiler"
# Speed up commands on slow machines / big apps [https://github.com/rails/spring]
# gem "spring"
end
group :test do
# Use system testing [https://guides.rubyonrails.org/testing.html#system-testing]
gem "capybara"
gem "selenium-webdriver"
gem "webdrivers"
end

View File

@@ -0,0 +1,287 @@
GEM
remote: https://rubygems.org/
specs:
actioncable (7.0.3.1)
actionpack (= 7.0.3.1)
activesupport (= 7.0.3.1)
nio4r (~> 2.0)
websocket-driver (>= 0.6.1)
actionmailbox (7.0.3.1)
actionpack (= 7.0.3.1)
activejob (= 7.0.3.1)
activerecord (= 7.0.3.1)
activestorage (= 7.0.3.1)
activesupport (= 7.0.3.1)
mail (>= 2.7.1)
net-imap
net-pop
net-smtp
actionmailer (7.0.3.1)
actionpack (= 7.0.3.1)
actionview (= 7.0.3.1)
activejob (= 7.0.3.1)
activesupport (= 7.0.3.1)
mail (~> 2.5, >= 2.5.4)
net-imap
net-pop
net-smtp
rails-dom-testing (~> 2.0)
actionpack (7.0.3.1)
actionview (= 7.0.3.1)
activesupport (= 7.0.3.1)
rack (~> 2.0, >= 2.2.0)
rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0)
actiontext (7.0.3.1)
actionpack (= 7.0.3.1)
activerecord (= 7.0.3.1)
activestorage (= 7.0.3.1)
activesupport (= 7.0.3.1)
globalid (>= 0.6.0)
nokogiri (>= 1.8.5)
actionview (7.0.3.1)
activesupport (= 7.0.3.1)
builder (~> 3.1)
erubi (~> 1.4)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.1, >= 1.2.0)
activejob (7.0.3.1)
activesupport (= 7.0.3.1)
globalid (>= 0.3.6)
activemodel (7.0.3.1)
activesupport (= 7.0.3.1)
activerecord (7.0.3.1)
activemodel (= 7.0.3.1)
activesupport (= 7.0.3.1)
activestorage (7.0.3.1)
actionpack (= 7.0.3.1)
activejob (= 7.0.3.1)
activerecord (= 7.0.3.1)
activesupport (= 7.0.3.1)
marcel (~> 1.0)
mini_mime (>= 1.1.0)
activesupport (7.0.3.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
tzinfo (~> 2.0)
addressable (2.8.0)
public_suffix (>= 2.0.2, < 5.0)
administrate (0.18.0)
actionpack (>= 5.0)
actionview (>= 5.0)
activerecord (>= 5.0)
jquery-rails (>= 4.0)
kaminari (>= 1.0)
sassc-rails (~> 2.1)
selectize-rails (~> 0.6)
bindex (0.8.1)
bootsnap (1.13.0)
msgpack (~> 1.2)
builder (3.2.4)
capybara (3.37.1)
addressable
matrix
mini_mime (>= 0.1.3)
nokogiri (~> 1.8)
rack (>= 1.6.0)
rack-test (>= 0.6.3)
regexp_parser (>= 1.5, < 3.0)
xpath (~> 3.2)
childprocess (4.1.0)
concurrent-ruby (1.1.10)
crass (1.0.6)
debug (1.6.2)
irb (>= 1.3.6)
reline (>= 0.3.1)
digest (3.1.0)
erubi (1.11.0)
ffi (1.15.5)
globalid (1.0.0)
activesupport (>= 5.0)
i18n (1.12.0)
concurrent-ruby (~> 1.0)
importmap-rails (1.1.5)
actionpack (>= 6.0.0)
railties (>= 6.0.0)
io-console (0.5.11)
irb (1.4.1)
reline (>= 0.3.0)
jbuilder (2.11.5)
actionview (>= 5.0.0)
activesupport (>= 5.0.0)
jquery-rails (4.5.0)
rails-dom-testing (>= 1, < 3)
railties (>= 4.2.0)
thor (>= 0.14, < 2.0)
kaminari (1.2.2)
activesupport (>= 4.1.0)
kaminari-actionview (= 1.2.2)
kaminari-activerecord (= 1.2.2)
kaminari-core (= 1.2.2)
kaminari-actionview (1.2.2)
actionview
kaminari-core (= 1.2.2)
kaminari-activerecord (1.2.2)
activerecord
kaminari-core (= 1.2.2)
kaminari-core (1.2.2)
little-plugger (1.1.4)
logging (2.3.1)
little-plugger (~> 1.1)
multi_json (~> 1.14)
logging-rails (0.6.0)
logging (>= 1.8)
loofah (2.19.1)
crass (~> 1.0.2)
nokogiri (>= 1.5.9)
mail (2.7.1)
mini_mime (>= 0.1.1)
marcel (1.0.2)
matrix (0.4.2)
method_source (1.0.0)
mini_mime (1.1.2)
minitest (5.16.2)
msgpack (1.5.4)
multi_json (1.15.0)
net-imap (0.2.3)
digest
net-protocol
strscan
net-pop (0.1.1)
digest
net-protocol
timeout
net-protocol (0.1.3)
timeout
net-smtp (0.3.1)
digest
net-protocol
timeout
nio4r (2.5.8)
nokogiri (1.13.10-arm64-darwin)
racc (~> 1.4)
nokogiri (1.13.10-x86_64-linux)
racc (~> 1.4)
public_suffix (4.0.7)
puma (5.6.4)
nio4r (~> 2.0)
racc (1.6.1)
rack (2.2.4)
rack-test (2.0.2)
rack (>= 1.3)
rails (7.0.3.1)
actioncable (= 7.0.3.1)
actionmailbox (= 7.0.3.1)
actionmailer (= 7.0.3.1)
actionpack (= 7.0.3.1)
actiontext (= 7.0.3.1)
actionview (= 7.0.3.1)
activejob (= 7.0.3.1)
activemodel (= 7.0.3.1)
activerecord (= 7.0.3.1)
activestorage (= 7.0.3.1)
activesupport (= 7.0.3.1)
bundler (>= 1.15.0)
railties (= 7.0.3.1)
rails-dom-testing (2.0.3)
activesupport (>= 4.2.0)
nokogiri (>= 1.6)
rails-html-sanitizer (1.4.4)
loofah (~> 2.19, >= 2.19.1)
railties (7.0.3.1)
actionpack (= 7.0.3.1)
activesupport (= 7.0.3.1)
method_source
rake (>= 12.2)
thor (~> 1.0)
zeitwerk (~> 2.5)
rake (13.0.6)
redis (4.7.1)
regexp_parser (2.5.0)
reline (0.3.1)
io-console (~> 0.5)
rexml (3.2.5)
rubyzip (2.3.2)
sassc (2.4.0)
ffi (~> 1.9)
sassc-rails (2.1.2)
railties (>= 4.0.0)
sassc (>= 2.0)
sprockets (> 3.0)
sprockets-rails
tilt
selectize-rails (0.12.6)
selenium-webdriver (4.4.0)
childprocess (>= 0.5, < 5.0)
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2, < 3.0)
websocket (~> 1.0)
sprockets (4.1.1)
concurrent-ruby (~> 1.0)
rack (> 1, < 3)
sprockets-rails (3.4.2)
actionpack (>= 5.2)
activesupport (>= 5.2)
sprockets (>= 3.0.0)
sqlite3 (1.4.4)
stimulus-rails (1.1.0)
railties (>= 6.0.0)
strscan (3.0.4)
thor (1.2.1)
tilt (2.0.11)
timeout (0.3.0)
turbo-rails (1.1.1)
actionpack (>= 6.0.0)
activejob (>= 6.0.0)
railties (>= 6.0.0)
tzinfo (2.0.5)
concurrent-ruby (~> 1.0)
web-console (4.2.0)
actionview (>= 6.0.0)
activemodel (>= 6.0.0)
bindex (>= 0.4.0)
railties (>= 6.0.0)
webdrivers (5.0.0)
nokogiri (~> 1.6)
rubyzip (>= 1.3.0)
selenium-webdriver (~> 4.0)
websocket (1.2.9)
websocket-driver (0.7.5)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
xpath (3.2.0)
nokogiri (~> 1.8)
zeitwerk (2.6.0)
PLATFORMS
arm64-darwin-21
x86_64-linux
DEPENDENCIES
administrate
bootsnap
capybara
debug
importmap-rails
jbuilder
logging
logging-rails
puma (~> 5.0)
rails (~> 7.0.3, >= 7.0.3.1)
redis (~> 4.0)
selenium-webdriver
sprockets-rails
sqlite3 (~> 1.4)
stimulus-rails
turbo-rails
tzinfo-data
web-console
webdrivers
RUBY VERSION
ruby 2.7.6p219
BUNDLED WITH
2.3.20

View File

@@ -0,0 +1,19 @@
# Use case: USDA collateral monitoring
* Ruby version: 2.7.6 (see Gemfile.lock)
* System dependencies
* Configuration
* Database creation
* Database initialization
* How to run the test suite
* Services (job queues, cache servers, search engines, etc.)
* Deployment instructions
* ...

View File

@@ -0,0 +1,6 @@
# Add your own tasks in files placed in lib/tasks ending in .rake,
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
require_relative "config/application"
Rails.application.load_tasks

View File

@@ -0,0 +1,4 @@
//= link_tree ../images
//= link_directory ../stylesheets .css
//= link_tree ../../javascript .js
//= link_tree ../../../vendor/javascript .js

View File

@@ -0,0 +1,15 @@
/*
* This is a manifest file that'll be compiled into application.css, which will include all the files
* listed below.
*
* Any CSS (and SCSS, if configured) file within this directory, lib/assets/stylesheets, or any plugin's
* vendor/assets/stylesheets directory can be referenced here using a relative path.
*
* You're free to add application-wide styles to this file and they'll appear at the bottom of the
* compiled file so the styles you add here take precedence over styles defined in any other CSS
* files in this directory. Styles in this file should be added after the last require_* statement.
* It is generally better to create a new file per style scope.
*
*= require_tree .
*= require_self
*/

View File

@@ -0,0 +1,23 @@
{
"stacks": {
"uuid": "1",
"name": "Vault events observer",
"network": "mainnet",
"version": 1,
"decode_clarity_values": true,
"predicate": {
"type": "print_event",
"rule": {
"contract_identifier": "SP2C2YFP12AJZB4MABJBAJ55XECVS7E4PMMZ89YZR.arkadiko-freddie-v1-1",
"contains": "vault"
}
},
"action": {
"http": {
"url": "http://localhost:3000/chainhooks/v1/vaults",
"method": "POST",
"authorization_header": "Bearer cn389ncoiwuencr"
}
}
}
}

View File

@@ -0,0 +1,4 @@
module ApplicationCable
  # Base class for all Action Cable channels in this application.
  class Channel < ActionCable::Channel::Base
  end
end

View File

@@ -0,0 +1,4 @@
module ApplicationCable
  # Base class for all Action Cable connections; put connection-level
  # authentication (e.g. identified_by) here.
  class Connection < ActionCable::Connection::Base
  end
end

View File

@@ -0,0 +1,21 @@
# All Administrate controllers inherit from this
# `Administrate::ApplicationController`, making it the ideal place to put
# authentication logic or other before_actions.
#
# If you want to add pagination or other controller-level concerns,
# you're free to overwrite the RESTful controller actions.
module Admin
class ApplicationController < Administrate::ApplicationController
before_action :authenticate_admin
def authenticate_admin
# TODO Add authentication logic here.
end
# Override this value to specify the number of elements to display at a time
# on index pages. Defaults to 20.
# def records_per_page
# params[:per_page] || 20
# end
end
end

View File

@@ -0,0 +1,47 @@
module Admin
  # Administrate controller for Vault records; all behavior is inherited from
  # Admin::ApplicationController — the commented hooks below are the supported
  # customization points.
  class VaultsController < Admin::ApplicationController
    # Overwrite any of the RESTful controller actions to implement custom behavior
    # For example, you may want to send an email after a foo is updated.
    #
    # def update
    #   super
    #   send_foo_updated_email(requested_resource)
    # end

    # Override this method to specify custom lookup behavior.
    # This will be used to set the resource for the `show`, `edit`, and `update`
    # actions.
    #
    # def find_resource(param)
    #   Foo.find_by!(slug: param)
    # end

    # The result of this lookup will be available as `requested_resource`

    # Override this if you have certain roles that require a subset
    # this will be used to set the records shown on the `index` action.
    #
    # def scoped_resource
    #   if current_user.super_admin?
    #     resource_class
    #   else
    #     resource_class.with_less_stuff
    #   end
    # end

    # Override `resource_params` if you want to transform the submitted
    # data before it's persisted. For example, the following would turn all
    # empty values into nil values. It uses other APIs such as `resource_class`
    # and `dashboard`:
    #
    # def resource_params
    #   params.require(resource_class.model_name.param_key).
    #     permit(dashboard.permitted_attributes).
    #     transform_values { |value| value == "" ? nil : value }
    # end

    # See https://administrate-prototype.herokuapp.com/customizing_controller_actions
    # for more information
  end
end

View File

@@ -0,0 +1,2 @@
# Base controller for the application; all non-admin controllers inherit from it.
class ApplicationController < ActionController::Base
end

View File

@@ -0,0 +1,28 @@
# Webhook endpoint for Chainhook event deliveries (see the predicate config
# posting to /chainhooks/v1/vaults). Walks the "apply" blocks of the payload
# and creates/updates Vault records from contract print events.
class ChainhooksController < ApplicationController
  # The sender is an external service, not a browser form, so CSRF tokens
  # don't apply, and the raw JSON body must not be wrapped into params.
  skip_before_action :verify_authenticity_token
  wrap_parameters false

  # POST /chainhooks/v1/vaults
  # Parses the chainhook payload from the raw request body and dispatches each
  # vault-typed SmartContractEvent to the Vault model. Always responds 204.
  def vaults
    payload = JSON.parse request.body.read
    payload["apply"].each do |block|
      block["transactions"].each do |transaction|
        transaction["metadata"]["receipt"]["events"].each do |event|
          next if event["type"] != "SmartContractEvent"
          event_data = event["data"]["value"]
          next if event_data.nil? || event_data["type"] != "vault"
          vault_event_data = event_data["data"]
          case event_data["action"]
          when "created"
            Vault.create_from_onchain_event(vault_event_data)
          when "deposit", "burn", "close", "mint"
            Vault.update_attributes_from_onchain_event(vault_event_data)
          else
            # Log (rather than `p` to stdout) so unknown actions are visible
            # in production logs.
            Rails.logger.warn "Unknown event type #{event_data["action"]}"
          end
        end
      end
    end
    respond_to do |format|
      # `head`'s first argument IS the status; the original also passed
      # `status: :ok` as an option, which `head` would emit as a bogus
      # response header instead of a status.
      format.json { head :no_content }
    end
  end
end

View File

@@ -0,0 +1,70 @@
# Standard Rails scaffold controller providing CRUD actions for Vault records,
# with both HTML and JSON responses.
class VaultsController < ApplicationController
  before_action :set_vault, only: %i[ show edit update destroy ]

  # GET /vaults or /vaults.json
  def index
    @vaults = Vault.all
  end

  # GET /vaults/1 or /vaults/1.json
  def show
  end

  # GET /vaults/new
  def new
    @vault = Vault.new
  end

  # GET /vaults/1/edit
  def edit
  end

  # POST /vaults or /vaults.json
  def create
    @vault = Vault.new(vault_params)
    respond_to do |format|
      if @vault.save
        format.html { redirect_to vault_url(@vault), notice: "Vault was successfully created." }
        format.json { render :show, status: :created, location: @vault }
      else
        format.html { render :new, status: :unprocessable_entity }
        format.json { render json: @vault.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /vaults/1 or /vaults/1.json
  def update
    respond_to do |format|
      if @vault.update(vault_params)
        format.html { redirect_to vault_url(@vault), notice: "Vault was successfully updated." }
        format.json { render :show, status: :ok, location: @vault }
      else
        format.html { render :edit, status: :unprocessable_entity }
        format.json { render json: @vault.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /vaults/1 or /vaults/1.json
  def destroy
    @vault.destroy
    respond_to do |format|
      format.html { redirect_to vaults_url, notice: "Vault was successfully destroyed." }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_vault
    @vault = Vault.find(params[:id])
  end

  # Only allow a list of trusted parameters through.
  # NOTE(review): `:id` is permitted, so clients can mass-assign the primary
  # key on create/update — confirm this is intentional.
  def vault_params
    params.require(:vault).permit(:id, :owner, :collateral, :collateral_type, :collateral_token, :stacked_tokens, :stacker_name, :revoked_stacking, :auto_payoff, :debt, :created_at_block_height, :updated_at_block_height, :stability_fee_accrued, :stability_fee_last_accrued, :is_liquidated, :auction_ended, :leftover_collateral)
  end
end

View File

@@ -0,0 +1,122 @@
require "administrate/base_dashboard"

# Administrate dashboard describing how Vault records are rendered and edited
# in the admin UI.
class VaultDashboard < Administrate::BaseDashboard
  # ATTRIBUTE_TYPES
  # a hash that describes the type of each of the model's fields.
  #
  # Each different type represents an Administrate::Field object,
  # which determines how the attribute is displayed
  # on pages throughout the dashboard.
  ATTRIBUTE_TYPES = {
    id: Field::Number,
    auction_ended: Field::Boolean,
    auto_payoff: Field::Boolean,
    collateral: Field::Number,
    collateral_token: Field::String,
    collateral_type: Field::String,
    created_at_block_height: Field::Number,
    debt: Field::Number,
    is_liquidated: Field::Boolean,
    leftover_collateral: Field::Number,
    onchain_id: Field::Number,
    owner: Field::String,
    revoked_stacking: Field::Boolean,
    stability_fee_accrued: Field::Number,
    stability_fee_last_accrued: Field::Number,
    stacked_tokens: Field::Number,
    stacker_name: Field::String,
    updated_at_block_height: Field::Number,
    created_at: Field::DateTime,
    updated_at: Field::DateTime,
  }.freeze

  # COLLECTION_ATTRIBUTES
  # an array of attributes that will be displayed on the model's index page.
  #
  # By default, it's limited to four items to reduce clutter on index pages.
  # Feel free to add, remove, or rearrange items.
  COLLECTION_ATTRIBUTES = %i[
    onchain_id
    created_at_block_height
    collateral_token
    collateral_type
    collateral
    debt
    stacked_tokens
    owner
  ].freeze

  # SHOW_PAGE_ATTRIBUTES
  # an array of attributes that will be displayed on the model's show page.
  SHOW_PAGE_ATTRIBUTES = %i[
    auction_ended
    auto_payoff
    collateral
    collateral_token
    collateral_type
    created_at_block_height
    debt
    is_liquidated
    leftover_collateral
    onchain_id
    owner
    revoked_stacking
    stability_fee_accrued
    stability_fee_last_accrued
    stacked_tokens
    stacker_name
    updated_at_block_height
  ].freeze

  # FORM_ATTRIBUTES
  # an array of attributes that will be displayed
  # on the model's form (`new` and `edit`) pages.
  FORM_ATTRIBUTES = %i[
    auction_ended
    auto_payoff
    collateral
    collateral_token
    collateral_type
    created_at_block_height
    debt
    is_liquidated
    leftover_collateral
    onchain_id
    owner
    revoked_stacking
    stability_fee_accrued
    stability_fee_last_accrued
    stacked_tokens
    stacker_name
    updated_at_block_height
  ].freeze

  # COLLECTION_FILTERS
  # a hash that defines filters that can be used while searching via the search
  # field of the dashboard.
  #
  # For example to add an option to search for open resources by typing "open:"
  # in the search field:
  #
  #   COLLECTION_FILTERS = {
  #     open: ->(resources) { resources.where(open: true) }
  #   }.freeze
  COLLECTION_FILTERS = {}.freeze

  # disable 'edit' and 'destroy' links
  def valid_action?(name, resource = resource_class)
    %w[edit destroy].exclude?(name.to_s) && super
  end

  # Fixed: the previous default sort attribute was :age, which is not an
  # attribute of Vault (it appears nowhere in ATTRIBUTE_TYPES) and would
  # break the default index ordering. Sort by the on-chain id instead.
  def default_sorting_attribute
    :onchain_id
  end

  def default_sorting_direction
    :desc
  end

  # Human-readable label used wherever the dashboard names a record.
  def display_resource(vault)
    "Vault ##{vault.onchain_id}"
  end
end

View File

@@ -0,0 +1,2 @@
# View helpers shared across the whole application.
module ApplicationHelper
end

View File

@@ -0,0 +1,2 @@
# View helpers specific to vault pages.
module VaultsHelper
end

View File

@@ -0,0 +1,3 @@
// Configure your import map in config/importmap.rb. Read more: https://github.com/rails/importmap-rails
import "@hotwired/turbo-rails"
import "controllers"

View File

@@ -0,0 +1,9 @@
import { Application } from "@hotwired/stimulus"

// Boot the single Stimulus application instance for the page.
const application = Application.start()

// Configure Stimulus development experience
application.debug = false
// Expose the instance globally so it can be inspected from the browser console.
window.Stimulus = application

export { application }

View File

@@ -0,0 +1,7 @@
import { Controller } from "@hotwired/stimulus"

// Example Stimulus controller generated by Rails: when it connects to an
// element, it replaces that element's text content.
export default class extends Controller {
  connect() {
    this.element.textContent = "Hello World!"
  }
}

View File

@@ -0,0 +1,11 @@
// Import and register all your controllers from the importmap under controllers/*
import { application } from "controllers/application"
// Eager load all controllers defined in the import map under controllers/**/*_controller
import { eagerLoadControllersFrom } from "@hotwired/stimulus-loading"
eagerLoadControllersFrom("controllers", application)
// Lazy load controllers as they appear in the DOM (remember not to preload controllers in import map!)
// import { lazyLoadControllersFrom } from "@hotwired/stimulus-loading"
// lazyLoadControllersFrom("controllers", application)

View File

@@ -0,0 +1,7 @@
# Base class for all Active Job jobs in this application.
class ApplicationJob < ActiveJob::Base
  # Automatically retry jobs that encountered a deadlock
  # retry_on ActiveRecord::Deadlocked

  # Most jobs are safe to ignore if the underlying records are no longer available
  # discard_on ActiveJob::DeserializationError
end

View File

@@ -0,0 +1,4 @@
# Base class for all mailers; sets the default sender and the shared layout.
class ApplicationMailer < ActionMailer::Base
  default from: "from@example.com"
  layout "mailer"
end

View File

@@ -0,0 +1,3 @@
# Abstract base class for all Active Record models in this application.
class ApplicationRecord < ActiveRecord::Base
  primary_abstract_class
end

View File

@@ -0,0 +1,48 @@
# Vault mirrors an on-chain Arkadiko vault. Rows are created and updated from
# chainhook event payloads (see ChainhooksController#vaults), whose keys are
# kebab-case strings.
class Vault < ApplicationRecord
  # Creates a Vault row from the payload of an on-chain "created" event.
  # The on-chain "id" is stored as onchain_id so it never collides with the
  # database primary key.
  def Vault.create_from_onchain_event(params)
    Vault.create(onchain_event_attributes(params).merge(onchain_id: params["id"]))
  end

  # Updates the row matching the event's on-chain id with the latest on-chain
  # state. Uses update_all, so validations/callbacks are skipped and Rails'
  # updated_at is not touched — presumably intentional for bulk webhook
  # ingestion; confirm before adding validations to this model.
  def Vault.update_attributes_from_onchain_event(params)
    Vault
      .where(onchain_id: params["id"])
      .update_all(onchain_event_attributes(params))
  end

  # Shared kebab-case → snake_case mapping of an on-chain event payload to
  # model attributes. Extracted so the create and update paths cannot drift.
  def Vault.onchain_event_attributes(params)
    {
      owner: params["owner"],
      collateral: params["collateral"],
      collateral_type: params["collateral-type"],
      collateral_token: params["collateral-token"],
      stacked_tokens: params["stacked-tokens"],
      stacker_name: params["stacker-name"],
      revoked_stacking: params["revoked-stacking"],
      auto_payoff: params["auto-payoff"],
      debt: params["debt"],
      created_at_block_height: params["created-at-block-height"],
      updated_at_block_height: params["updated-at-block-height"],
      stability_fee_accrued: params["stability-fee-accrued"],
      stability_fee_last_accrued: params["stability-fee-last-accrued"],
      is_liquidated: params["is-liquidated"],
      auction_ended: params["auction-ended"],
      leftover_collateral: params["leftover-collateral"],
    }
  end
  private_class_method :onchain_event_attributes
end

View File

@@ -0,0 +1,76 @@
<%#
# Collection
This partial is used on the `index` and `show` pages
to display a collection of resources in an HTML table.
## Local variables:
- `collection_presenter`:
An instance of [Administrate::Page::Collection][1].
The table presenter uses `ResourceDashboard::COLLECTION_ATTRIBUTES` to determine
the columns displayed in the table
- `resources`:
An ActiveModel::Relation collection of resources to be displayed in the table.
By default, the number of resources is limited by pagination
or by a hard limit to prevent excessive page load times
[1]: http://www.rubydoc.info/gems/administrate/Administrate/Page/Collection
%>
<table aria-labelledby="<%= table_title %>">
<thead>
<tr>
<% collection_presenter.attribute_types.each do |attr_name, attr_type| %>
<th class="cell-label
cell-label--<%= attr_type.html_class %>
cell-label--<%= collection_presenter.ordered_html_class(attr_name) %>
cell-label--<%= "#{collection_presenter.resource_name}_#{attr_name}" %>"
scope="col"
role="columnheader"
aria-sort="<%= sort_order(collection_presenter.ordered_html_class(attr_name)) %>">
<%= link_to(sanitized_order_params(page, collection_field_name).merge(
collection_presenter.order_params_for(attr_name, key: collection_field_name)
)) do %>
<%= t(
"helpers.label.#{collection_presenter.resource_name}.#{attr_name}",
default: resource_class.human_attribute_name(attr_name),
).titleize %>
<% if collection_presenter.ordered_by?(attr_name) %>
<span class="cell-label__sort-indicator cell-label__sort-indicator--<%= collection_presenter.ordered_html_class(attr_name) %>">
<svg aria-hidden="true">
<use xlink:href="#icon-up-caret" />
</svg>
</span>
<% end %>
<% end %>
</th>
<% end %>
</tr>
</thead>
<tbody>
<% resources.each do |resource| %>
<tr class="js-table-row"
<% if accessible_action?(resource, :show) %>
<%= %(tabindex=0 role=link data-url=#{polymorphic_path([namespace, resource])}) %>
<% end %>
>
<% collection_presenter.attributes_for(resource).each do |attribute| %>
<td class="cell-data cell-data--<%= attribute.html_class %>">
<% if accessible_action?(resource, :show) -%>
<a href="<%= polymorphic_path([namespace, resource]) -%>"
tabindex="-1"
class="action-show"
>
<%= render_field attribute %>
</a>
<% else %>
<%= render_field attribute %>
<% end -%>
</td>
<% end %>
</tr>
<% end %>
</tbody>
</table>

View File

@@ -0,0 +1,21 @@
<%=
render("index_header",
resources: resources,
search_term: search_term,
page: page,
show_search_bar: show_search_bar,
)
%>
<section class="main-content__body main-content__body--flush">
<%= render(
"collection",
collection_presenter: page,
collection_field_name: resource_name,
page: page,
resources: resources,
table_title: "page-title"
) %>
<%= render("pagination", resources: resources) %>
</section>

View File

@@ -0,0 +1,23 @@
<% content_for(:title) { t("administrate.actions.show_resource", name: page.page_title) } %>
<header class="main-content__header" role="banner">
<h1 class="main-content__page-title">
<%= content_for(:title) %>
</h1>
</header>
<section class="main-content__body">
<dl>
<% page.attributes.each do |attribute| %>
<dt class="attribute-label" id="<%= attribute.name %>">
<%= t(
"helpers.label.#{resource_name}.#{attribute.name}",
default: page.resource.class.human_attribute_name(attribute.name),
) %>
</dt>
<dd class="attribute-data attribute-data--<%=attribute.html_class%>"
><%= render_field attribute, page: page %></dd>
<% end %>
</dl>
</section>

View File

@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
<head>
<title>UsdaCollateralMonitor</title>
<meta name="viewport" content="width=device-width,initial-scale=1">
<%= csrf_meta_tags %>
<%= csp_meta_tag %>
<%= stylesheet_link_tag "application", "data-turbo-track": "reload" %>
<%= javascript_importmap_tags %>
</head>
<body>
<%= yield %>
</body>
</html>

View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style>
/* Email styles need to be inline */
</style>
</head>
<body>
<%= yield %>
</body>
</html>

View File

@@ -0,0 +1,102 @@
<%= form_with(model: vault) do |form| %>
<% if vault.errors.any? %>
<div style="color: red">
<h2><%= pluralize(vault.errors.count, "error") %> prohibited this vault from being saved:</h2>
<ul>
<% vault.errors.each do |error| %>
<li><%= error.full_message %></li>
<% end %>
</ul>
</div>
<% end %>
<div>
<%= form.label :id, style: "display: block" %>
<%= form.number_field :id %>
</div>
<div>
<%= form.label :owner, style: "display: block" %>
<%= form.text_field :owner %>
</div>
<div>
<%= form.label :collateral, style: "display: block" %>
<%= form.number_field :collateral %>
</div>
<div>
<%= form.label :collateral_type, style: "display: block" %>
<%= form.text_field :collateral_type %>
</div>
<div>
<%= form.label :collateral_token, style: "display: block" %>
<%= form.text_field :collateral_token %>
</div>
<div>
<%= form.label :stacked_tokens, style: "display: block" %>
<%= form.number_field :stacked_tokens %>
</div>
<div>
<%= form.label :stacker_name, style: "display: block" %>
<%= form.text_field :stacker_name %>
</div>
<div>
<%= form.label :revoked_stacking, style: "display: block" %>
<%= form.check_box :revoked_stacking %>
</div>
<div>
<%= form.label :auto_payoff, style: "display: block" %>
<%= form.check_box :auto_payoff %>
</div>
<div>
<%= form.label :debt, style: "display: block" %>
<%= form.number_field :debt %>
</div>
<div>
<%= form.label :created_at_block_height, style: "display: block" %>
<%= form.number_field :created_at_block_height %>
</div>
<div>
<%= form.label :updated_at_block_height, style: "display: block" %>
<%= form.number_field :updated_at_block_height %>
</div>
<div>
<%= form.label :stability_fee_accrued, style: "display: block" %>
<%= form.number_field :stability_fee_accrued %>
</div>
<div>
<%= form.label :stability_fee_last_accrued, style: "display: block" %>
<%= form.number_field :stability_fee_last_accrued %>
</div>
<div>
<%= form.label :is_liquidated, style: "display: block" %>
<%= form.check_box :is_liquidated %>
</div>
<div>
<%= form.label :auction_ended, style: "display: block" %>
<%= form.check_box :auction_ended %>
</div>
<div>
<%= form.label :leftover_collateral, style: "display: block" %>
<%= form.number_field :leftover_collateral %>
</div>
<div>
<%= form.submit %>
</div>
<% end %>

View File

@@ -0,0 +1,87 @@
<div id="<%= dom_id vault %>">
<p>
<strong>Id:</strong>
<%= vault.id %>
</p>
<p>
<strong>Owner:</strong>
<%= vault.owner %>
</p>
<p>
<strong>Collateral:</strong>
<%= vault.collateral %>
</p>
<p>
<strong>Collateral type:</strong>
<%= vault.collateral_type %>
</p>
<p>
<strong>Collateral token:</strong>
<%= vault.collateral_token %>
</p>
<p>
<strong>Stacked tokens:</strong>
<%= vault.stacked_tokens %>
</p>
<p>
<strong>Stacker name:</strong>
<%= vault.stacker_name %>
</p>
<p>
<strong>Revoked stacking:</strong>
<%= vault.revoked_stacking %>
</p>
<p>
<strong>Auto payoff:</strong>
<%= vault.auto_payoff %>
</p>
<p>
<strong>Debt:</strong>
<%= vault.debt %>
</p>
<p>
<strong>Created at block height:</strong>
<%= vault.created_at_block_height %>
</p>
<p>
<strong>Updated at block height:</strong>
<%= vault.updated_at_block_height %>
</p>
<p>
<strong>Stability fee accrued:</strong>
<%= vault.stability_fee_accrued %>
</p>
<p>
<strong>Stability fee last accrued:</strong>
<%= vault.stability_fee_last_accrued %>
</p>
<p>
<strong>Is liquidated:</strong>
<%= vault.is_liquidated %>
</p>
<p>
<strong>Auction ended:</strong>
<%= vault.auction_ended %>
</p>
<p>
<strong>Leftover collateral:</strong>
<%= vault.leftover_collateral %>
</p>
</div>

View File

@@ -0,0 +1,2 @@
# Shared JSON representation of a vault. Fixed: the original listed :id twice
# in extract!, producing a redundant duplicate attribute assignment.
json.extract! vault, :id, :owner, :collateral, :collateral_type, :collateral_token, :stacked_tokens, :stacker_name, :revoked_stacking, :auto_payoff, :debt, :created_at_block_height, :updated_at_block_height, :stability_fee_accrued, :stability_fee_last_accrued, :is_liquidated, :auction_ended, :leftover_collateral, :created_at, :updated_at
json.url vault_url(vault, format: :json)

View File

@@ -0,0 +1,10 @@
<h1>Editing vault</h1>
<%= render "form", vault: @vault %>
<br>
<div>
<%= link_to "Show this vault", @vault %> |
<%= link_to "Back to vaults", vaults_path %>
</div>

View File

@@ -0,0 +1,14 @@
<p style="color: green"><%= notice %></p>
<h1>Vaults</h1>
<div id="vaults">
<% @vaults.each do |vault| %>
<%= render vault %>
<p>
<%= link_to "Show this vault", vault %>
</p>
<% end %>
</div>
<%= link_to "New vault", new_vault_path %>

View File

@@ -0,0 +1 @@
json.array! @vaults, partial: "vaults/vault", as: :vault

View File

@@ -0,0 +1,9 @@
<h1>New vault</h1>
<%= render "form", vault: @vault %>
<br>
<div>
<%= link_to "Back to vaults", vaults_path %>
</div>

View File

@@ -0,0 +1,10 @@
<p style="color: green"><%= notice %></p>
<%= render @vault %>
<div>
<%= link_to "Edit this vault", edit_vault_path(@vault) %> |
<%= link_to "Back to vaults", vaults_path %>
<%= button_to "Destroy this vault", @vault, method: :delete %>
</div>

View File

@@ -0,0 +1 @@
json.partial! "vaults/vault", vault: @vault

View File

@@ -0,0 +1,114 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
#
# This file was generated by Bundler.
#
# The application 'bundle' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "rubygems"
# Helper module used by the Bundler binstub to activate the correct Bundler
# version before delegating to the real `bundle` executable.
m = Module.new do
  module_function

  # True when this file is being executed directly rather than loaded.
  def invoked_as_script?
    File.expand_path($0) == File.expand_path(__FILE__)
  end

  # Bundler version pinned via environment, if any.
  def env_var_version
    ENV["BUNDLER_VERSION"]
  end

  # Bundler version requested on the command line of `bundle update --bundler [VERSION]`.
  def cli_arg_version
    return unless invoked_as_script? # don't want to hijack other binstubs
    return unless "update".start_with?(ARGV.first || " ") # must be running `bundle update`
    bundler_version = nil
    update_index = nil
    ARGV.each_with_index do |a, i|
      if update_index && update_index.succ == i && a =~ Gem::Version::ANCHORED_VERSION_PATTERN
        bundler_version = a
      end
      next unless a =~ /\A--bundler(?:[= ](#{Gem::Version::VERSION_PATTERN}))?\z/
      bundler_version = $1
      update_index = i
    end
    bundler_version
  end

  # Path to the Gemfile: $BUNDLE_GEMFILE if set, otherwise ../Gemfile.
  def gemfile
    gemfile = ENV["BUNDLE_GEMFILE"]
    return gemfile if gemfile && !gemfile.empty?
    File.expand_path("../Gemfile", __dir__)
  end

  # Derives the lockfile path from the gemfile path.
  # Fixed: a `gems.rb` gemfile locks to `gems.locked`; the previous code
  # substituted the gemfile path into itself (`sub(/\.rb$/, gemfile)`),
  # producing a nonsense lockfile path for gems.rb projects.
  def lockfile
    lockfile =
      case File.basename(gemfile)
      when "gems.rb" then gemfile.sub(/\.rb$/, ".locked")
      else "#{gemfile}.lock"
      end
    File.expand_path(lockfile)
  end

  # Bundler version recorded in the lockfile's BUNDLED WITH stanza, if present.
  def lockfile_version
    return unless File.file?(lockfile)
    lockfile_contents = File.read(lockfile)
    return unless lockfile_contents =~ /\n\nBUNDLED WITH\n\s{2,}(#{Gem::Version::VERSION_PATTERN})\n/
    Regexp.last_match(1)
  end

  # The Gem::Requirement to activate, from (in priority order) the
  # environment, the CLI, then the lockfile.
  def bundler_requirement
    @bundler_requirement ||=
      env_var_version || cli_arg_version ||
      bundler_requirement_for(lockfile_version)
  end

  # Converts a concrete version into an approximate requirement, allowing
  # prereleases where appropriate (and on old RubyGems).
  def bundler_requirement_for(version)
    return "#{Gem::Requirement.default}.a" unless version
    bundler_gem_version = Gem::Version.new(version)
    requirement = bundler_gem_version.approximate_recommendation
    return requirement unless Gem.rubygems_version < Gem::Version.new("2.7.0")
    requirement += ".a" if bundler_gem_version.prerelease?
    requirement
  end

  # Entry point: ensure BUNDLE_GEMFILE is set, then activate Bundler.
  def load_bundler!
    ENV["BUNDLE_GEMFILE"] ||= gemfile
    activate_bundler
  end

  # Activates the required Bundler gem, falling back to whatever `require`
  # finds; exits with a helpful message if neither satisfies the requirement.
  def activate_bundler
    gem_error = activation_error_handling do
      gem "bundler", bundler_requirement
    end
    return if gem_error.nil?
    require_error = activation_error_handling do
      require "bundler/version"
    end
    return if require_error.nil? && Gem::Requirement.new(bundler_requirement).satisfied_by?(Gem::Version.new(Bundler::VERSION))
    warn "Activating bundler (#{bundler_requirement}) failed:\n#{gem_error.message}\n\nTo install the version of bundler this project requires, run `gem install bundler -v '#{bundler_requirement}'`"
    exit 42
  end

  # Runs the block and returns the raised error (or nil on success) instead
  # of propagating it, so callers can fall through to the next strategy.
  def activation_error_handling
    yield
    nil
  rescue StandardError, LoadError => e
    e
  end
end
m.load_bundler!
if m.invoked_as_script?
load Gem.bin_path("bundler", "bundle")
end

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
require_relative "../config/application"
require "importmap/commands"

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
APP_PATH = File.expand_path("../config/application", __dir__)
require_relative "../config/boot"
require "rails/commands"

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env ruby
require_relative "../config/boot"
require "rake"
Rake.application.run

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env ruby
require "fileutils"
# path to your application root.
APP_ROOT = File.expand_path("..", __dir__)
def system!(*args)
system(*args) || abort("\n== Command #{args} failed ==")
end
FileUtils.chdir APP_ROOT do
# This script is a way to set up or update your development environment automatically.
# This script is idempotent, so that you can run it at any time and get an expectable outcome.
# Add necessary setup steps to this file.
puts "== Installing dependencies =="
system! "gem install bundler --conservative"
system("bundle check") || system!("bundle install")
# puts "\n== Copying sample files =="
# unless File.exist?("config/database.yml")
# FileUtils.cp "config/database.yml.sample", "config/database.yml"
# end
puts "\n== Preparing database =="
system! "bin/rails db:prepare"
puts "\n== Removing old logs and tempfiles =="
system! "bin/rails log:clear tmp:clear"
puts "\n== Restarting application server =="
system! "bin/rails restart"
end

View File

@@ -0,0 +1,6 @@
# This file is used by Rack-based servers to start the application.
require_relative "config/environment"
run Rails.application
Rails.application.load_server

View File

@@ -0,0 +1,26 @@
require_relative "boot"
require "rails/all"
# require 'logging'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module UsdaCollateralMonitor
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 7.0
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
# logger = Logging.logger(STDOUT)
# logger.level = :warn
# config.logger = logger
end
end

View File

@@ -0,0 +1,4 @@
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__)
require "bundler/setup" # Set up gems listed in the Gemfile.
require "bootsnap/setup" # Speed up boot time by caching expensive operations.

View File

@@ -0,0 +1,11 @@
development:
adapter: redis
url: redis://localhost:6379/1
test:
adapter: test
production:
adapter: redis
url: <%= ENV.fetch("REDIS_URL") { "redis://localhost:6379/1" } %>
channel_prefix: usda_collateral_monitor_production

View File

@@ -0,0 +1 @@
M9q7nviDmGHWs2Gkx0rB0V86JvtEVq7k/lEZOJV6QncuIokTVvNr6kTEw4O5+kGqdAWxUrkfc3pBgWAcBNes4D3PHWWNPtX+wIPfRFfoVcG7kTGTGIqKq4VTieFKo8lx8Qv/CAZxxyXBSr+Nd2jUoW8p1cItSlUmQ01L6+4/m1Njzgxa+R7w6Trk/1OeNl0mb9P6po4jl1H4+6hNAsnnjlxBCoQdNy7FpuCv7498p+wOW9CbN5DCcG/ix347F4IN9JR9WwkUTbik1EkmjCKBox64cAlZBu8lsESMPEWMLfnztAK3di8r9HnsTgFSHcpjJsd1joZoY1+GCS9vbTrhsGB3HruENitIFyn+87C9QRa98FNOfQ7x9fY70xKbWAaoFmTdoxVQgTv3i8X10r6buhb5rnSnW2PvC1A/--ZiIEclsKkI3k+lA/--6USfkiHlZbWLbzOrXcbp4Q==

View File

@@ -0,0 +1,25 @@
# SQLite. Versions 3.8.0 and up are supported.
# gem install sqlite3
#
# Ensure the SQLite 3 gem is defined in your Gemfile
# gem "sqlite3"
#
default: &default
adapter: sqlite3
pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 5 } %>
timeout: 5000
development:
<<: *default
database: db/development.sqlite3
# Warning: The database defined as "test" will be erased and
# re-generated from your development database when you run "rake".
# Do not set this db to the same as development or production.
test:
<<: *default
database: db/test.sqlite3
production:
<<: *default
database: db/production.sqlite3

View File

@@ -0,0 +1,5 @@
# Load the Rails application.
require_relative "application"
# Initialize the Rails application.
Rails.application.initialize!

View File

@@ -0,0 +1,76 @@
require "active_support/core_ext/integer/time"
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded any time
# it changes. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable server timing
config.server_timing = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise exceptions for disallowed deprecations.
config.active_support.disallowed_deprecation = :raise
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.i18n.raise_on_missing_translations = true
# Annotate rendered view with file names.
# config.action_view.annotate_rendered_view_with_filenames = true
# Uncomment if you wish to allow Action Cable access from any origin.
# config.action_cable.disable_request_forgery_protection = true
# Set the logging destination(s)
config.log_to = %w[stdout file]
# Show the logging configuration on STDOUT
config.show_log_configuration = true
end

View File

@@ -0,0 +1,99 @@
require "active_support/core_ext/integer/time"

# Production environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true

  # Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
  # or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
  # config.require_master_key = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress CSS using a preprocessor.
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.asset_host = "http://assets.example.com"

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for Apache
  # config.action_dispatch.x_sendfile_header = "X-Accel-Redirect" # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain.
  # config.action_cable.mount_path = nil
  # config.action_cable.url = "wss://example.com/cable"
  # config.action_cable.allowed_request_origins = [ "http://example.com", /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Include generic and useful information about system operation, but avoid logging too much
  # information to avoid inadvertent exposure of personally identifiable information (PII).
  config.log_level = :info

  # Prepend all log lines with the following tags.
  config.log_tags = [ :request_id ]

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Use a real queuing backend for Active Job (and separate queues per environment).
  # config.active_job.queue_adapter = :resque
  # config.active_job.queue_name_prefix = "usda_collateral_monitor_production"

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = true

  # Don't log any deprecations.
  config.active_support.report_deprecations = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require "syslog/logger"
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new "app-name")

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger           = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger    = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  # Set the logging destination(s).
  # Consumed by the Logging::Rails initializer: in production, log only to the
  # daily rolling file (no STDOUT appender).
  config.log_to = %w[file]

  # Show the logging configuration on STDOUT
  config.show_log_configuration = false
end

View File

@@ -0,0 +1,60 @@
require "active_support/core_ext/integer/time"

# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Turn false under Spring and add config.action_view.cache_template_loading = true.
  config.cache_classes = true

  # Eager loading loads your whole application. When running a single test locally,
  # this probably isn't necessary. It's a good idea to do in a continuous integration
  # system, or in some way before deploying your code.
  config.eager_load = ENV["CI"].present?

  # Configure public file server for tests with Cache-Control for performance.
  config.public_file_server.enabled = true
  config.public_file_server.headers = {
    "Cache-Control" => "public, max-age=#{1.hour.to_i}"
  }

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  config.cache_store = :null_store

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Store uploaded files on the local file system in a temporary directory.
  config.active_storage.service = :test

  config.action_mailer.perform_caching = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raise exceptions for disallowed deprecations.
  config.active_support.disallowed_deprecation = :raise

  # Tell Active Support which deprecation messages to disallow.
  config.active_support.disallowed_deprecation_warnings = []

  # Raises error for missing translations.
  # config.i18n.raise_on_missing_translations = true

  # Annotate rendered view with file names.
  # config.action_view.annotate_rendered_view_with_filenames = true
end

View File

@@ -0,0 +1,7 @@
# Pin npm packages by running ./bin/importmap
#
# Each `pin` maps a bare module specifier (as used in `import` statements)
# to an asset served by the app — no JavaScript bundler involved.
pin "application", preload: true
pin "@hotwired/turbo-rails", to: "turbo.min.js", preload: true
pin "@hotwired/stimulus", to: "stimulus.min.js", preload: true
pin "@hotwired/stimulus-loading", to: "stimulus-loading.js", preload: true
# Pin every module under app/javascript/controllers as "controllers/*".
pin_all_from "app/javascript/controllers", under: "controllers"

View File

@@ -0,0 +1,12 @@
# Be sure to restart your server when you modify this file.

# Version of your assets, change this if you want to expire all your assets.
# Bumping this busts every fingerprinted asset cache at once.
Rails.application.config.assets.version = "1.0"

# Add additional assets to the asset load path.
# Rails.application.config.assets.paths << Emoji.images_path

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in the app/assets
# folder are already added.
# Rails.application.config.assets.precompile += %w( admin.js admin.css )

View File

@@ -0,0 +1,25 @@
# Be sure to restart your server when you modify this file.
# Define an application-wide content security policy.
# See the Securing Rails Applications Guide for more information:
# https://guides.rubyonrails.org/security.html#content-security-policy-header
# Rails.application.configure do
# config.content_security_policy do |policy|
# policy.default_src :self, :https
# policy.font_src :self, :https, :data
# policy.img_src :self, :https, :data
# policy.object_src :none
# policy.script_src :self, :https
# policy.style_src :self, :https
# # Specify URI for violation reports
# # policy.report_uri "/csp-violation-report-endpoint"
# end
#
# # Generate session nonces for permitted importmap and inline scripts
# config.content_security_policy_nonce_generator = ->(request) { request.session.id.to_s }
# config.content_security_policy_nonce_directives = %w(script-src)
#
# # Report violations without enforcing the policy.
# # config.content_security_policy_report_only = true
# end

View File

@@ -0,0 +1,8 @@
# Be sure to restart your server when you modify this file.

# Request parameters whose names match any of these patterns are masked in
# the log file, limiting dissemination of sensitive information. See the
# ActiveSupport::ParameterFilter documentation for supported notations and
# behaviors.
Rails.application.config.filter_parameters += %i[
  passw secret token _key crypt salt certificate otp ssn
]

View File

@@ -0,0 +1,16 @@
# Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, "\\1en"
# inflect.singular /^(ox)en/i, "\\1"
# inflect.irregular "person", "people"
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym "RESTful"
# end

View File

@@ -0,0 +1,11 @@
# Define an application-wide HTTP permissions policy. For further
# information see https://developers.google.com/web/updates/2018/06/feature-policy
#
# Rails.application.config.permissions_policy do |f|
# f.camera :none
# f.gyroscope :none
# f.microphone :none
# f.usb :none
# f.fullscreen :self
# f.payment :self, "https://secure.example.com"
# end

View File

@@ -0,0 +1,33 @@
# Files in the config/locales directory are used for internationalization
# and are automatically loaded by Rails. If you want to use locales other
# than English, add the necessary files in this directory.
#
# To use the locales, use `I18n.t`:
#
# I18n.t "hello"
#
# In views, this is aliased to just `t`:
#
# <%= t("hello") %>
#
# To use a different locale, set it with `I18n.locale`:
#
# I18n.locale = :es
#
# This would use the information in config/locales/es.yml.
#
# The following keys must be escaped otherwise they will not be retrieved by
# the default I18n backend:
#
# true, false, on, off, yes, no
#
# Instead, surround them with single quotes.
#
# en:
# "true": "foo"
#
# To learn more, please read the Rails Internationalization guide
# available at https://guides.rubyonrails.org/i18n.html.
# Default English strings, looked up with `I18n.t` (e.g. I18n.t "hello").
en:
  hello: "Hello world"

View File

@@ -0,0 +1,113 @@
# Route Rails logging through the `logging` gem. Which appenders are active
# is driven per environment by `config.log_to` (set in config/environments/*).
Logging::Rails.configure do |config|

  # Configure the Logging framework with the default log levels
  Logging.init %w[debug info warn error fatal]

  # Objects will be converted to strings using the :inspect method.
  Logging.format_as :inspect

  # The default layout used by the appenders.
  layout = Logging.layouts.pattern(:pattern => '[%d] %-5l %c : %m\n')

  # Setup a color scheme called 'bright' that can be used to add color codes
  # to the pattern layout. Color schemes should only be used with appenders
  # that write to STDOUT or STDERR; inserting terminal color codes into a file
  # is generally considered bad form.
  Logging.color_scheme( 'bright',
    :levels => {
      :info  => :green,
      :warn  => :yellow,
      :error => :red,
      :fatal => [:white, :on_red]
    },
    :date => :blue,
    :logger => :cyan,
    :message => :magenta
  )

  # Configure an appender that will write log events to STDOUT. A colorized
  # pattern layout is used to format the log events into strings before
  # writing.
  Logging.appenders.stdout( 'stdout',
    :auto_flushing => true,
    :layout => Logging.layouts.pattern(
      :pattern => '[%d] %-5l %c : %m\n',
      :color_scheme => 'bright'
    )
  ) if config.log_to.include? 'stdout'

  # Configure an appender that will write log events to a file. The file will
  # be rolled on a daily basis, and the past 7 rolled files will be kept.
  # Older files will be deleted. The default pattern layout is used when
  # formatting log events into strings.
  Logging.appenders.rolling_file( 'file',
    :filename => config.paths['log'].first,
    :keep => 7,
    :age => 'daily',
    :truncate => false,
    :auto_flushing => true,
    :layout => layout
  ) if config.log_to.include? 'file'

=begin
  # NOTE: You will need to install the `logging-email` gem to use this appender
  # with logging-2.0. The email appender was extracted into a plugin gem. That
  # is the reason this code is commented out.
  #
  # Configure an appender that will send an email for "error" and "fatal" log
  # events. All other log events will be ignored. Furthermore, log events will
  # be buffered for one minute (or 200 events) before an email is sent. This
  # is done to prevent a flood of messages.
  Logging.appenders.email( 'email',
    :from => "server@#{config.action_mailer.smtp_settings[:domain]}",
    :to => "developers@#{config.action_mailer.smtp_settings[:domain]}",
    :subject => "Rails Error [#{%x(uname -n).strip}]",
    :server => config.action_mailer.smtp_settings[:address],
    :domain => config.action_mailer.smtp_settings[:domain],
    :acct => config.action_mailer.smtp_settings[:user_name],
    :passwd => config.action_mailer.smtp_settings[:password],
    :authtype => config.action_mailer.smtp_settings[:authentication],
    :auto_flushing => 200,  # send an email after 200 messages have been buffered
    :flush_period => 60,    # send an email after one minute
    :level => :error,       # only process log events that are "error" or "fatal"
    :layout => layout
  ) if config.log_to.include? 'email'
=end

  # Setup the root logger with the Rails log level and the desired set of
  # appenders. The list of appenders to use should be set in the environment
  # specific configuration file.
  #
  # For example, in a production application you would not want to log to
  # STDOUT, but you would want to send an email for "error" and "fatal"
  # messages:
  #
  # => config/environments/production.rb
  #
  #    config.log_to = %w[file email]
  #
  # In development you would want to log to STDOUT and possibly to a file:
  #
  # => config/environments/development.rb
  #
  #    config.log_to = %w[stdout file]
  #
  Logging.logger.root.level = config.log_level
  Logging.logger.root.appenders = config.log_to unless config.log_to.empty?

  # Only pass :warn and above from ActionController::Base to the appenders.
  Logging.logger['ActionController::Base'].level = :warn

  # Under Phusion Passenger smart spawning, we need to reopen all IO streams
  # after workers have forked.
  #
  # The rolling file appender uses shared file locks to ensure that only one
  # process will roll the log file. Each process writing to the file must have
  # its own open file descriptor for `flock` to function properly. Reopening
  # the file descriptors after forking ensures that each worker has a unique
  # file descriptor.
  if defined? PhusionPassenger
    PhusionPassenger.on_event(:starting_worker_process) do |forked|
      Logging.reopen if forked
    end
  end
end

View File

@@ -0,0 +1,43 @@
# Puma server configuration. Every tunable is driven by an environment
# variable with a development-friendly default.

# Puma serves each request on a thread from an internal pool. Any libraries
# that use their own thread pools (e.g. Active Record's connection pool)
# should be sized to match Puma's maximum. The default of 5/5 matches the
# default Active Record pool size.
max_threads = ENV.fetch("RAILS_MAX_THREADS", 5)
min_threads = ENV.fetch("RAILS_MIN_THREADS") { max_threads }
threads min_threads, max_threads

rails_env = ENV.fetch("RAILS_ENV", "development")

# Use a very long worker timeout in development so that a paused debugger
# does not get a worker killed.
worker_timeout 3600 if rails_env == "development"

# Port Puma listens on for incoming requests; defaults to 3000.
port ENV.fetch("PORT", 3000)

# Environment Puma runs in.
environment rails_env

# Location of Puma's pidfile.
pidfile ENV.fetch("PIDFILE", "tmp/pids/server.pid")

# Clustered mode (forked worker processes) is disabled by default. With
# workers enabled, total concurrency is `threads * workers`. Workers are not
# supported on JRuby or Windows (no fork).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }

# When specifying `workers`, use `preload_app!` so the application is booted
# before forking; copy-on-write then keeps per-worker memory low.
#
# preload_app!

# Allow puma to be restarted by the `bin/rails restart` command.
plugin :tmp_restart

View File

@@ -0,0 +1,8 @@
Rails.application.routes.draw do
  # Read-only admin UI for vaults, mounted under /admin.
  namespace :admin do
    resources :vaults, only: %i[index show]
    root to: "vaults#index"
  end

  # Webhook endpoint receiving vault events (handled by ChainhooksController).
  post "/chainhooks/v1/vaults", to: "chainhooks#vaults"
end

View File

@@ -0,0 +1,34 @@
# Active Storage service definitions; the active one is selected per
# environment via `config.active_storage.service` in config/environments/*.

# Used by the test environment (wiped with tmp/).
test:
  service: Disk
  root: <%= Rails.root.join("tmp/storage") %>

# Local-disk storage; selected as :local by this app's environments.
local:
  service: Disk
  root: <%= Rails.root.join("storage") %>

# Use bin/rails credentials:edit to set the AWS secrets (as aws:access_key_id|secret_access_key)
# amazon:
#   service: S3
#   access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %>
#   secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %>
#   region: us-east-1
#   bucket: your_own_bucket-<%= Rails.env %>

# Remember not to checkin your GCS keyfile to a repository
# google:
#   service: GCS
#   project: your_project
#   credentials: <%= Rails.root.join("path/to/gcs.keyfile") %>
#   bucket: your_own_bucket-<%= Rails.env %>

# Use bin/rails credentials:edit to set the Azure Storage secret (as azure_storage:storage_access_key)
# microsoft:
#   service: AzureStorage
#   storage_account_name: your_account_name
#   storage_access_key: <%= Rails.application.credentials.dig(:azure_storage, :storage_access_key) %>
#   container: your_container_name-<%= Rails.env %>

# mirror:
#   service: Mirror
#   primary: local
#   mirrors: [ amazon, google, microsoft ]

View File

@@ -0,0 +1,25 @@
# Creates the `vaults` table. Each row appears to cache the state of a single
# on-chain vault, fed by the /chainhooks/v1/vaults webhook (see
# config/routes.rb) — confirm against ChainhooksController.
class CreateVaults < ActiveRecord::Migration[7.0]
  def change
    create_table :vaults do |t|
      # Identity: the vault's id on-chain and its owner address.
      t.integer :onchain_id
      t.string :owner

      # Collateral bookkeeping.
      # NOTE(review): amounts are plain integers — presumably base units of
      # the collateral token; confirm against the on-chain contract.
      t.integer :collateral
      t.string :collateral_type
      t.string :collateral_token

      # Stacking state.
      t.integer :stacked_tokens
      t.string :stacker_name
      t.boolean :revoked_stacking
      t.boolean :auto_payoff

      # Debt plus the block heights at creation and last update.
      t.integer :debt
      t.integer :created_at_block_height
      t.integer :updated_at_block_height

      # Stability fee accrual.
      t.integer :stability_fee_accrued
      t.integer :stability_fee_last_accrued

      # Liquidation / auction outcome.
      t.boolean :is_liquidated
      t.boolean :auction_ended
      t.integer :leftover_collateral

      t.timestamps
    end
  end
end

View File

@@ -0,0 +1,36 @@
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.0].define(version: 2022_08_17_092514) do
  # NOTE(review): generated from the 2022_08_17_092514 CreateVaults migration;
  # comments added here will be lost the next time the schema is dumped.
  create_table "vaults", force: :cascade do |t|
    t.integer "onchain_id"
    t.string "owner"
    t.integer "collateral"
    t.string "collateral_type"
    t.string "collateral_token"
    t.integer "stacked_tokens"
    t.string "stacker_name"
    t.boolean "revoked_stacking"
    t.boolean "auto_payoff"
    t.integer "debt"
    t.integer "created_at_block_height"
    t.integer "updated_at_block_height"
    t.integer "stability_fee_accrued"
    t.integer "stability_fee_last_accrued"
    t.boolean "is_liquidated"
    t.boolean "auction_ended"
    t.integer "leftover_collateral"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end

View File

@@ -0,0 +1,7 @@
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the bin/rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: "Star Wars" }, { name: "Lord of the Rings" }])
# Character.create(name: "Luke", movie: movies.first)

View File

@@ -0,0 +1,67 @@
<!DOCTYPE html>
<!-- Static 404 error page (Rails default), styled inline so it renders with
     no external assets. -->
<html>
<head>
  <title>The page you were looking for doesn't exist (404)</title>
  <meta name="viewport" content="width=device-width,initial-scale=1">
  <style>
  .rails-default-error-page {
    background-color: #EFEFEF;
    color: #2E2F30;
    text-align: center;
    font-family: arial, sans-serif;
    margin: 0;
  }

  .rails-default-error-page div.dialog {
    width: 95%;
    max-width: 33em;
    margin: 4em auto 0;
  }

  .rails-default-error-page div.dialog > div {
    border: 1px solid #CCC;
    border-right-color: #999;
    border-left-color: #999;
    border-bottom-color: #BBB;
    border-top: #B00100 solid 4px;
    border-top-left-radius: 9px;
    border-top-right-radius: 9px;
    background-color: white;
    padding: 7px 12% 0;
    box-shadow: 0 3px 8px rgba(50, 50, 50, 0.17);
  }

  .rails-default-error-page h1 {
    font-size: 100%;
    color: #730E15;
    line-height: 1.5em;
  }

  .rails-default-error-page div.dialog > p {
    margin: 0 0 1em;
    padding: 1em;
    background-color: #F7F7F7;
    border: 1px solid #CCC;
    border-right-color: #999;
    border-left-color: #999;
    border-bottom-color: #999;
    border-bottom-left-radius: 4px;
    border-bottom-right-radius: 4px;
    border-top-color: #DADADA;
    color: #666;
    box-shadow: 0 3px 8px rgba(50, 50, 50, 0.17);
  }
  </style>
</head>

<body class="rails-default-error-page">
  <!-- This file lives in public/404.html -->
  <div class="dialog">
    <div>
      <h1>The page you were looking for doesn't exist.</h1>
      <p>You may have mistyped the address or the page may have moved.</p>
    </div>
    <p>If you are the application owner check the logs for more information.</p>
  </div>
</body>
</html>

Some files were not shown because too many files have changed in this diff Show More