This commit is contained in:
Zitao Xiong
2024-02-05 02:58:14 +08:00
commit 4fe8c67149
27 changed files with 3028 additions and 0 deletions

79
.envrc Normal file
View File

@@ -0,0 +1,79 @@
#!/usr/bin/env bash
# direnv entry point: exports the shared configuration consumed by build.sh,
# the docker-compose stacks and deploy/, then regenerates per-module .env files.
set -Eeo pipefail

# Container image names used by build.sh and the compose files.
export DOCKER_IMAGE_GROUP=alexgo-io
export OPI_IMAGE=alexgo-io/opi
export BITCOIND_IMAGE=alexgo-io/bitcoind

# Absolute path of the repo root (the directory containing this file).
export WORKSPACE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export OPI_PG_DATA_PATH="${WORKSPACE_ROOT}/data/opi/postgres-data"
export OPI_BITCOIND_PATH="${WORKSPACE_ROOT}/data/bitcoind"

# Main OPI Postgres settings.
# NOTE(review): credentials are committed in plain text; prefer keeping real
# secrets in .envrc.override (gitignored, sourced at the bottom of this file).
export DB_USER="postgres"
export DB_HOST="postgres-opi-server"
export DB_PORT="5432"
export DB_DATABASE="db_opi"
export DB_PASSWD="passwd_123!@#"
export DB_SSL="false"

# API defaults consumed by the *_api modules via configs/gen.sh.
export API_HOST="127.0.0.1"
export API_PORT="8001"
export API_TRUSTED_PROXY_CNT="0"

# Metaprotocol DB; currently points at the same server/database as the main DB.
# Uncomment the block below (and remove the one after it) to split them out.
# export DB_METAPROTOCOL_USER="postgres"
# export DB_METAPROTOCOL_HOST="postgres-metaprotocol-server"
# export DB_METAPROTOCOL_PORT="5432"
# export DB_METAPROTOCOL_DATABASE="db_metaprotocol"
# export DB_METAPROTOCOL_PASSWD="passwd_456!@#"
export DB_METAPROTOCOL_USER="postgres"
export DB_METAPROTOCOL_HOST="postgres-opi-server"
export DB_METAPROTOCOL_PORT="5432"
export DB_METAPROTOCOL_DATABASE="db_opi"
export DB_METAPROTOCOL_PASSWD="passwd_123!@#"

# FIX: a duplicate API_HOST/API_PORT/API_TRUSTED_PROXY_CNT block and an earlier
# DB_MAX_CONNECTIONS=10 were removed; the later definitions always won, so the
# effective values are unchanged.
export NETWORK_TYPE="mainnet"
export REPORT_TO_INDEXER="true"
export REPORT_URL="https://api.opi.network/report_block"
export REPORT_RETRIES="10"
export REPORT_NAME="alexgo-opi-sir"
export USE_EXTRA_TABLES="true"
export DB_MAX_CONNECTIONS="50"

# bitcoind connection settings (paths are in-container paths).
export BITCOIN_CHAIN_FOLDER="/bitcoind/datadir"
# export COOKIE_FILE="/bitcoind/datadir/.cookie"
export BITCOIN_RPC_USER="bitcoin"
export BITCOIN_RPC_PASSWD="3Pz9zHvEkNrHkKRg"
export BITCOIN_RPC_URL="http://bitcoind:8332"
export BITCOIN_RPC_PORT="8332"
export BITCOIN_ZMQ_PORT="18543"

# ord locations inside the OPI image (see docker/opi/Dockerfile).
export ORD_BINARY="/usr/local/OPI/ord/target/release/ord"
export ORD_FOLDER="/usr/local/OPI/ord/target/release"
export ORD_DATADIR="/ord_data"

export OPI_VOLUME_SIZE="1200"
export OPI_VOLUME_SNAPSHOT_ID=""
# DEPLOY (deploy/src): DigitalOcean credentials and SSH key.
export DIGITAL_OCEAN_SSH_KEY_NAME=""
export DIGITAL_OCEAN_SSH_KEY_ID=""
export DIGITAL_OCEAN_API_KEY=""
# Tilde is expanded by deploy/src/utils.ts getPrivateKey(), not by the shell.
export PRIVATE_KEY_PATH="~/.ssh/id_rsa"

# Local, gitignored overrides win over everything above (direnv's source_env).
if [[ -f .envrc.override ]]; then
  source_env .envrc.override
fi

# Regenerate the per-module .env files from the values exported above.
configs/gen.sh

11
.gitignore vendored Normal file
View File

@@ -0,0 +1,11 @@
data
configs/bitmap_api
configs/bitmap_index
configs/brc20_api
configs/brc20_index
configs/main_index
configs/sns_api
configs/sns_index
.envrc.override
deploy/node_modules
.vscode/sftp.json

3
.tool-versions Normal file
View File

@@ -0,0 +1,3 @@
packer 1.10.1
pulumi 3.104.2
pnpm 8.15.1

8
build.sh Executable file
View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -Eeo pipefail

# Build and push the OPI and bitcoind images.
# FIX: fail fast when the image names are unset (they come from .envrc) and
# quote every expansion; the originals were unquoted and silently empty.
: "${OPI_IMAGE:?OPI_IMAGE must be set (see .envrc)}"
: "${BITCOIND_IMAGE:?BITCOIND_IMAGE must be set (see .envrc)}"

pushd docker/opi && docker build -t "$OPI_IMAGE" . && popd
pushd docker/bitcoind && docker build -t "$BITCOIND_IMAGE" . && popd

docker push "$OPI_IMAGE"
docker push "$BITCOIND_IMAGE"

333
configs/gen.sh Executable file
View File

@@ -0,0 +1,333 @@
#!/usr/bin/env bash
set -Eeo pipefail

# Resolve the directory containing this script and run from there, so the
# generated config folders always land next to gen.sh regardless of caller CWD.
# FIX: quote "$DIR" — the unquoted `cd $DIR` broke on paths containing spaces.
DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
cd "$DIR"
# generate env file for bitmap_api
: '
# .env file
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
API_HOST="127.0.0.1"
API_PORT="8001"
API_TRUSTED_PROXY_CNT="0"
'
generate_env_bitmap_api() {
  # Render bitmap_api/.env from the current environment, falling back to the
  # documented defaults (see the reference block above) when a var is unset.
  mkdir -p bitmap_api
  cat >"bitmap_api/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_SSL="${DB_SSL:-true}"
DB_MAX_CONNECTIONS=${DB_MAX_CONNECTIONS:-10}
API_HOST="${API_HOST:-127.0.0.1}"
API_PORT="${API_PORT:-8001}"
API_TRUSTED_PROXY_CNT="${API_TRUSTED_PROXY_CNT:-0}"
EOF
}
# generate env file for bitmap_index
: '
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
NETWORK_TYPE="mainnet"
## reporting system settings
REPORT_TO_INDEXER="true"
REPORT_URL="https://api.opi.network/report_block"
REPORT_RETRIES="10"
# set a name for report dashboard
REPORT_NAME="opi_bitmap_index"
'
generate_env_bitmap_index() {
  # Render bitmap_index/.env; defaults mirror the reference block above.
  mkdir -p bitmap_index
  cat >"bitmap_index/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_METAPROTOCOL_USER="${DB_METAPROTOCOL_USER:-postgres}"
DB_METAPROTOCOL_HOST="${DB_METAPROTOCOL_HOST:-localhost}"
DB_METAPROTOCOL_PORT="${DB_METAPROTOCOL_PORT:-5432}"
DB_METAPROTOCOL_DATABASE="${DB_METAPROTOCOL_DATABASE:-postgres}"
DB_METAPROTOCOL_PASSWD="${DB_METAPROTOCOL_PASSWD}"
NETWORK_TYPE="${NETWORK_TYPE:-mainnet}"
REPORT_TO_INDEXER="${REPORT_TO_INDEXER:-true}"
REPORT_URL="${REPORT_URL:-https://api.opi.network/report_block}"
REPORT_RETRIES="${REPORT_RETRIES:-10}"
REPORT_NAME="${REPORT_NAME:-opi_bitmap_index}"
EOF
}
# generate env file for brc20_api
: '
# .env file
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
API_HOST="127.0.0.1"
API_PORT="8000"
API_TRUSTED_PROXY_CNT="0"
USE_EXTRA_TABLES="true"
'
generate_env_brc20_api() {
  # Render brc20_api/.env; note the brc20 API defaults to port 8000 (bitmap
  # and sns use 8001/8002) and additionally reads USE_EXTRA_TABLES.
  mkdir -p brc20_api
  cat >"brc20_api/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_SSL="${DB_SSL:-true}"
DB_MAX_CONNECTIONS=${DB_MAX_CONNECTIONS:-10}
API_HOST="${API_HOST:-127.0.0.1}"
API_PORT="${API_PORT:-8000}"
API_TRUSTED_PROXY_CNT="${API_TRUSTED_PROXY_CNT:-0}"
USE_EXTRA_TABLES="${USE_EXTRA_TABLES:-true}"
EOF
}
# generate env file for brc20_index
: '
# .env
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
## main indexer database settings
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
NETWORK_TYPE="mainnet"
## reporting system settings
REPORT_TO_INDEXER="true"
REPORT_URL="https://api.opi.network/report_block"
REPORT_RETRIES="10"
# set a name for report dashboard
REPORT_NAME="opi_brc20_index"
# create brc20_current_balances and brc20_unused_tx_inscrs tables
CREATE_EXTRA_TABLES="true"
'
generate_env_brc20_index() {
  # Render brc20_index/.env; defaults mirror the reference block above.
  mkdir -p brc20_index
  {
    echo "DB_USER=\"${DB_USER:-postgres}\""
    echo "DB_HOST=\"${DB_HOST:-localhost}\""
    echo "DB_PORT=\"${DB_PORT:-5432}\""
    echo "DB_DATABASE=\"${DB_DATABASE:-postgres}\""
    echo "DB_PASSWD=\"${DB_PASSWD}\""
    echo "DB_METAPROTOCOL_USER=\"${DB_METAPROTOCOL_USER:-postgres}\""
    echo "DB_METAPROTOCOL_HOST=\"${DB_METAPROTOCOL_HOST:-localhost}\""
    echo "DB_METAPROTOCOL_PORT=\"${DB_METAPROTOCOL_PORT:-5432}\""
    echo "DB_METAPROTOCOL_DATABASE=\"${DB_METAPROTOCOL_DATABASE:-postgres}\""
    echo "DB_METAPROTOCOL_PASSWD=\"${DB_METAPROTOCOL_PASSWD}\""
    echo "NETWORK_TYPE=\"${NETWORK_TYPE:-mainnet}\""
    echo "REPORT_TO_INDEXER=\"${REPORT_TO_INDEXER:-true}\""
    echo "REPORT_URL=\"${REPORT_URL:-https://api.opi.network/report_block}\""
    echo "REPORT_RETRIES=\"${REPORT_RETRIES:-10}\""
    echo "REPORT_NAME=\"${REPORT_NAME:-opi_brc20_index}\""
    # FIX: the reference config above documents CREATE_EXTRA_TABLES (creates
    # brc20_current_balances / brc20_unused_tx_inscrs), but it was never
    # emitted — so the indexer could not create the extra tables that
    # brc20_api's USE_EXTRA_TABLES=true expects. Default follows
    # USE_EXTRA_TABLES (exported by .envrc), then "true".
    echo "CREATE_EXTRA_TABLES=\"${CREATE_EXTRA_TABLES:-${USE_EXTRA_TABLES:-true}}\""
  } >"brc20_index/.env"
}
# generate env file for main_index
: '
# .env file
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=50
BITCOIN_CHAIN_FOLDER="~/.bitcoin/"
COOKIE_FILE=""
# leave these empty to use .cookie file
BITCOIN_RPC_USER=""
BITCOIN_RPC_PASSWD=""
# `--rpc-url` parameter for `ord`, example: `127.0.0.1:8332`
BITCOIN_RPC_URL=""
# change to ord.exe on Windows (without ./)
ORD_BINARY="./ord"
# leave default if the repository folder structure has not been changed
ORD_FOLDER="../../ord/target/release/"
# relative to ord folder
ORD_DATADIR="."
NETWORK_TYPE="mainnet"
'
generate_env_main_index() {
  # Render main_index/.env (the ord-based meta-protocol indexer). Empty
  # BITCOIN_RPC_* values make the indexer fall back to the .cookie file.
  mkdir -p main_index
  cat >"main_index/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_SSL="${DB_SSL:-true}"
DB_MAX_CONNECTIONS=${DB_MAX_CONNECTIONS:-50}
BITCOIN_CHAIN_FOLDER="${BITCOIN_CHAIN_FOLDER:-~/.bitcoin/}"
COOKIE_FILE="${COOKIE_FILE}"
BITCOIN_RPC_USER="${BITCOIN_RPC_USER}"
BITCOIN_RPC_PASSWD="${BITCOIN_RPC_PASSWD}"
BITCOIN_RPC_URL="${BITCOIN_RPC_URL}"
ORD_BINARY="${ORD_BINARY:-./ord}"
ORD_FOLDER="${ORD_FOLDER:-../../ord/target/release/}"
ORD_DATADIR="${ORD_DATADIR:-.}"
NETWORK_TYPE="${NETWORK_TYPE:-mainnet}"
EOF
}
# generate env file for sns_api
: '
# .env file
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
API_HOST="127.0.0.1"
API_PORT="8002"
API_TRUSTED_PROXY_CNT="0"
'
generate_env_sns_api() {
  # Render sns_api/.env; identical shape to bitmap_api but default port 8002.
  mkdir -p sns_api
  cat >"sns_api/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_SSL="${DB_SSL:-true}"
DB_MAX_CONNECTIONS=${DB_MAX_CONNECTIONS:-10}
API_HOST="${API_HOST:-127.0.0.1}"
API_PORT="${API_PORT:-8002}"
API_TRUSTED_PROXY_CNT="${API_TRUSTED_PROXY_CNT:-0}"
EOF
}
# generate env file for sns_index
: '
# .env
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
NETWORK_TYPE="mainnet"
## reporting system settings
REPORT_TO_INDEXER="true"
REPORT_URL="https://api.opi.network/report_block"
REPORT_RETRIES="10"
# set a name for report dashboard
REPORT_NAME="opi_sns_index"
'
generate_env_sns_index() {
  # Render sns_index/.env; identical shape to bitmap_index with its own
  # default REPORT_NAME.
  mkdir -p sns_index
  cat >"sns_index/.env" <<EOF
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
DB_DATABASE="${DB_DATABASE:-postgres}"
DB_PASSWD="${DB_PASSWD}"
DB_METAPROTOCOL_USER="${DB_METAPROTOCOL_USER:-postgres}"
DB_METAPROTOCOL_HOST="${DB_METAPROTOCOL_HOST:-localhost}"
DB_METAPROTOCOL_PORT="${DB_METAPROTOCOL_PORT:-5432}"
DB_METAPROTOCOL_DATABASE="${DB_METAPROTOCOL_DATABASE:-postgres}"
DB_METAPROTOCOL_PASSWD="${DB_METAPROTOCOL_PASSWD}"
NETWORK_TYPE="${NETWORK_TYPE:-mainnet}"
REPORT_TO_INDEXER="${REPORT_TO_INDEXER:-true}"
REPORT_URL="${REPORT_URL:-https://api.opi.network/report_block}"
REPORT_RETRIES="${REPORT_RETRIES:-10}"
REPORT_NAME="${REPORT_NAME:-opi_sns_index}"
EOF
}
# Wipe every previously generated config dir, then regenerate them all, so
# stale keys never survive a rename in the generators above.
modules="bitmap_api bitmap_index brc20_api brc20_index main_index sns_api sns_index"

for module in $modules; do
  rm -rf "$module"
done

for module in $modules; do
  "generate_env_${module}"
done

# Print a message in green so the success line stands out in terminal output.
green() {
  echo -e "\033[32m$1\033[0m"
}

green "generated env files at $PWD"

3
deploy/Pulumi.dev.yaml Normal file
View File

@@ -0,0 +1,3 @@
config:
digitalocean:token:
secure: AAABADkzEMuJAeOt2sSca5LUIUd8qA1X48n3P6jtlAy4OKkGfsv1kjFBhiGT2op77cocNgUyvO7cY/QI8JrxZMvhB4HPimbarL+PDMYEMKabhjHdCzQ1rIw7pybZn9Zn13YxheJCJg==

3
deploy/Pulumi.yaml Normal file
View File

@@ -0,0 +1,3 @@
name: opi-infra
runtime: nodejs
description: DigitalOcean OPI

13
deploy/package.json Normal file
View File

@@ -0,0 +1,13 @@
{
"name": "pulumi-opi",
"main": "src/index.ts",
"devDependencies": {
"@types/node": "^20",
"dotenv": "^16.3.1"
},
"dependencies": {
"@pulumi/command": "^0.9.2",
"@pulumi/digitalocean": "4.25.1",
"@pulumi/pulumi": "3.104.2"
}
}

1660
deploy/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,87 @@
# Full OPI node stack: one shared Postgres, a bitcoind node, the main
# meta-protocol indexer, and indexer/API pairs for brc20, bitmap and sns.
# NOTE(review): the ${...} placeholders look like compose env interpolation,
# but deploy/src/index.ts also rewrites this file with transformFile() before
# upload — confirm which substitution applies in each deployment path.
version: "3.7"
services:
  # Shared Postgres instance; every module .env points at this host.
  postgres-opi-server:
    image: postgres:15-alpine
    restart: always
    environment:
      POSTGRES_USER: ${DB_USER}
      POSTGRES_PASSWORD: ${DB_PASSWD}
      POSTGRES_DB: ${DB_DATABASE}
    volumes:
      - ${OPI_PG_DATA_PATH}:/var/lib/postgresql/data
    healthcheck:
      # Gates every indexer/API below via depends_on: service_healthy.
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER}"]
      interval: 10s
      timeout: 5s
      retries: 5
    ports:
      - "21432:5432"  # host port 21432 avoids clashing with a local Postgres
  # Bitcoin Core node (custom image, see docker/bitcoind).
  bitcoind:
    image: ${BITCOIND_IMAGE}
    volumes:
      - ${OPI_BITCOIND_PATH}:/usr/local/bitcoind
    environment:
      - BITCOIN_RPC_USER=${BITCOIN_RPC_USER}
      - BITCOIN_RPC_PASSWD=${BITCOIN_RPC_PASSWD}
    ports:
      - "${BITCOIN_RPC_PORT}:${BITCOIN_RPC_PORT}"
  # Main (ord-based) indexer; the per-protocol indexers read its tables.
  meta-protocol-indexer:
    image: ${OPI_IMAGE}
    restart: always
    command: 'bash -c "cd /usr/local/OPI/modules/main_index && node index.js"'
    volumes:
      - ./configs/main_index/.env:/usr/local/OPI/modules/main_index/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  brc20-indexer:
    image: ${OPI_IMAGE}
    restart: always
    command: 'bash -c "cd /usr/local/OPI/modules/brc20_index && python brc20_index.py"'
    volumes:
      - ./configs/brc20_index/.env:/usr/local/OPI/modules/brc20_index/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  # API services get no restart policy — NOTE(review): confirm intentional.
  brc20-api:
    image: ${OPI_IMAGE}
    command: 'bash -c "cd /usr/local/OPI/modules/brc20_api && node api.js"'
    volumes:
      - ./configs/brc20_api/.env:/usr/local/OPI/modules/brc20_api/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  bitmap-indexer:
    image: ${OPI_IMAGE}
    restart: always
    command: 'bash -c "cd /usr/local/OPI/modules/bitmap_index && python bitmap_index.py"'
    volumes:
      - ./configs/bitmap_index/.env:/usr/local/OPI/modules/bitmap_index/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  bitmap-api:
    image: ${OPI_IMAGE}
    command: 'bash -c "cd /usr/local/OPI/modules/bitmap_api && node api.js"'
    volumes:
      - ./configs/bitmap_api/.env:/usr/local/OPI/modules/bitmap_api/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  sns-indexer:
    image: ${OPI_IMAGE}
    restart: always
    command: 'bash -c "cd /usr/local/OPI/modules/sns_index && python sns_index.py"'
    volumes:
      - ./configs/sns_index/.env:/usr/local/OPI/modules/sns_index/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy
  sns-api:
    image: ${OPI_IMAGE}
    command: 'bash -c "cd /usr/local/OPI/modules/sns_api && node api.js"'
    volumes:
      - ./configs/sns_api/.env:/usr/local/OPI/modules/sns_api/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy

View File

@@ -0,0 +1,30 @@
# One-shot restore stack: starts Postgres, then runs OPI's restore.py once
# (no restart policy) with every module's .env mounted in. Torn down by
# deploy/src/scripts/restore.sh after the restore container exits.
# NOTE(review): ${...} placeholders are substituted by transformFile() in
# deploy/src/index.ts before this file is uploaded — confirm.
version: "3.7"
services:
  postgres-opi-server:
    image: postgres:15-alpine
    restart: always
    environment:
      POSTGRES_USER: ${DB_USER}
      POSTGRES_PASSWORD: ${DB_PASSWD}
      POSTGRES_DB: ${DB_DATABASE}
    volumes:
      - ${OPI_PG_DATA_PATH}:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER}"]
      interval: 10s
      timeout: 5s
      retries: 5
  restore:
    image: ${OPI_IMAGE}
    command: "bash -c \"cd /usr/local/OPI/modules/ && python restore.py\""
    volumes:
      - ${WORKSPACE_ROOT}/configs/bitmap_api/.env:/usr/local/OPI/modules/bitmap_api/.env
      - ${WORKSPACE_ROOT}/configs/bitmap_index/.env:/usr/local/OPI/modules/bitmap_index/.env
      - ${WORKSPACE_ROOT}/configs/brc20_api/.env:/usr/local/OPI/modules/brc20_api/.env
      - ${WORKSPACE_ROOT}/configs/brc20_index/.env:/usr/local/OPI/modules/brc20_index/.env
      - ${WORKSPACE_ROOT}/configs/main_index/.env:/usr/local/OPI/modules/main_index/.env
      - ${WORKSPACE_ROOT}/configs/sns_api/.env:/usr/local/OPI/modules/sns_api/.env
      - ${WORKSPACE_ROOT}/configs/sns_index/.env:/usr/local/OPI/modules/sns_index/.env
    depends_on:
      postgres-opi-server:
        condition: service_healthy

230
deploy/src/index.ts Normal file
View File

@@ -0,0 +1,230 @@
import * as digitalocean from "@pulumi/digitalocean";
import { getPrivateKey, getScript, root, sshKey } from './utils'
import assert from "assert";
import { local, remote, types } from "@pulumi/command";
import fs from 'fs'
import path from 'path'
import os from 'os';
import { join } from 'path';
import { createHash } from 'crypto';
import * as pulumi from "@pulumi/pulumi";
import { Output } from '@pulumi/pulumi';
/**
 * Provision one OPI node in a DigitalOcean region:
 * volume + droplet -> attach -> locate the volume mount -> upload configs and
 * the restore compose file -> run restore.sh -> upload the main compose file
 * -> pull & start the stack. Also registers per-instance stack outputs.
 *
 * Reads OPI_VOLUME_SNAPSHOT_ID / OPI_VOLUME_SIZE and several DB_* / BITCOIN_*
 * variables from the environment (expected to come from .envrc).
 */
function create(params: { name: string, region: string, size: string, image: string }) {
  const { region, size, name, image } = params
  // Treat an empty OPI_VOLUME_SNAPSHOT_ID as "no snapshot" (fresh volume);
  // note `undefined` (unset env) also falls through as undefined here.
  const snapshotId = (() => {
    const id = process.env['OPI_VOLUME_SNAPSHOT_ID']
    return id?.length == 0 ? undefined : id
  })()
  const volume = new digitalocean.Volume(`${name}volume`, {
    region,
    // Size in GiB; defaults to 1000 when OPI_VOLUME_SIZE is unset.
    size: parseInt(process.env['OPI_VOLUME_SIZE'] ?? "1000", 10),
    initialFilesystemType: "ext4",
    snapshotId,
  })
  const droplet = new digitalocean.Droplet(`${name}-droplet`, {
    image,
    region,
    size,
    // monitoring: true,
    sshKeys: [sshKey.id],
  });
  // rsync a local directory to the droplet. Runs on the deploy machine, so
  // rsync + an authorized SSH key are required locally.
  const copyFiles = (loc: string, remotePath: pulumi.Output<string>) => {
    return new local.Command(`${name}:copyFiles: ${loc}`, {
      create: pulumi.interpolate`rsync -avP ${root(loc)} ${connection.user}@${droplet.ipv4Address}:${remotePath}`,
    })
  }
  const volumeAttachment = new digitalocean.VolumeAttachment(
    `${name}-volume-attachment`,
    {
      // Droplet IDs are strings in the provider but the attachment wants a number.
      dropletId: droplet.id.apply(id => parseInt(id, 10)),
      volumeId: volume.id,
    }
  );
  const privateKey = getPrivateKey();
  const connection: types.input.remote.ConnectionArgs = {
    host: droplet.ipv4Address,
    user: "root",
    privateKey,
  };
  // Polls /mnt on the droplet until the attached volume's mount directory
  // appears (scripts/print_mnt_name.sh); its stdout is the volume mount path
  // every later step keys off.
  const volumePathPrint = new remote.Command(`${name}-read-volume-path`, {
    connection,
    create: getScript('print_mnt_name.sh'),
  }, { dependsOn: [droplet, volumeAttachment, volume], customTimeouts: { create: '5m' } });
  // Upload restore.docker-compose.yaml with its ${...} placeholders replaced
  // by literal values (see transformFile below).
  const cpRestoreDockerCompose = new remote.CopyFile(`${name}:restore`, {
    connection,
    localPath: volumePathPrint.stdout.apply(volumeName => transformFile(name, './src/docker-composes/restore.docker-compose.yaml', [
      ['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
      ['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
      // DB_USER
      ['${DB_USER}', process.env['DB_USER']!],
      // DB_PASSWORD
      ['${DB_PASSWD}', process.env['DB_PASSWD']!],
      // DB_DATABASE
      ['${DB_DATABASE}', process.env['DB_DATABASE']!],
      // WORKSPACE_ROOT
      ['${WORKSPACE_ROOT}', volumeName],
    ])),
    remotePath: volumePathPrint.stdout.apply(name => (`${name}/restore.docker-compose.yaml`)),
  });
  // Run a local script file's *contents* over SSH, optionally cd'ing to cwd
  // first (the remote never needs the script on disk).
  const execScriptOnRemote = (loc: string, options: { cwd?: pulumi.Output<string>, commandOpts?: any } = {}) => {
    // cwd is the CWD
    const createContent = fs.readFileSync(root(loc), "utf-8");
    if (options.cwd) {
      return new remote.Command(`${name}:run[remote]: ${loc}`, {
        connection,
        create: pulumi.interpolate`mkdir -p ${options.cwd};
cd ${options.cwd};
${createContent}`
      }, options.commandOpts);
    }
    else {
      return new remote.Command(`${name}:run[remote]: ${loc}`, {
        connection,
        create: createContent
      }, options.commandOpts);
    }
  }
  const cpConfig = copyFiles('configs', pulumi.interpolate`${volumePathPrint.stdout}`)
  // NOTE(review): cwd is prefixed with "/" here while cpConfig uses the stdout
  // path bare — presumably stdout is already absolute, making this "//...";
  // harmless for mkdir/cd, but confirm.
  const restore = execScriptOnRemote('deploy/src/scripts/restore.sh', {
    cwd: pulumi.interpolate`/${volumePathPrint.stdout}`,
    commandOpts: {
      dependsOn: [cpConfig, cpRestoreDockerCompose],
    }
  })
  // Upload the main stack compose file, with the same substitution scheme,
  // only after the restore step finished.
  const cpDockerCompose = volumePathPrint.stdout.apply(volumeName => {
    return new remote.CopyFile(`${name}:cp:opi.docker-compose -> ${volumeName}`, {
      connection,
      localPath: transformFile(name, './src/docker-composes/opi.docker-compose.yaml', [
        ['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
        ['${OPI_BITCOIND_PATH}', `${volumeName}/bitcoind_data`],
        ['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
        ['${BITCOIND_IMAGE}', process.env['BITCOIND_IMAGE']!],
        // DB_USER
        ['${DB_USER}', process.env['DB_USER']!],
        // DB_PASSWORD
        ['${DB_PASSWD}', process.env['DB_PASSWD']!],
        // DB_DATABASE
        ['${DB_DATABASE}', process.env['DB_DATABASE']!],
        // WORKSPACE_ROOT
        ['${WORKSPACE_ROOT}', volumeName],
        // BITCOIN_RPC_USER
        ['${BITCOIN_RPC_USER}', process.env['BITCOIN_RPC_USER']!],
        // BITCOIN_RPC_PASSWD
        ['${BITCOIN_RPC_PASSWD}', process.env['BITCOIN_RPC_PASSWD']!],
        // BITCOIN_RPC_PORT
        ['${BITCOIN_RPC_PORT}', process.env['BITCOIN_RPC_PORT']!],
      ]),
      remotePath: `${volumeName}/opi.docker-compose.yaml`,
    }, { dependsOn: [restore] });
  })
  // NOTE(review): cpDockerCompose is an Output<CopyFile> (built inside
  // .apply), not a plain Resource — confirm dependsOn unwraps it as intended.
  new remote.Command(`${name}:start-opi..`, {
    connection,
    create: pulumi.interpolate`cd ${volumePathPrint.stdout} && docker-compose -f opi.docker-compose.yaml pull && docker-compose -f opi.docker-compose.yaml up -d`,
  }, { dependsOn: [cpDockerCompose] })
  // Per-instance stack outputs (CommonJS `exports` — tsconfig uses commonjs).
  exports[`ip_${name}`] = droplet.ipv4Address;
  exports[`name_${name}`] = droplet.name;
  exports[`volume_id_${name}`] = volume.id;
  exports[`volume_attachment_id_${name}`] = volumeAttachment.id;
  exports[`volume_path_${name}`] = volumePathPrint.stdout;
  return { droplet, volume, name };
}
// write takeSnapshot function which input is the output of function create.
/**
 * Snapshot a droplet's data volume so later stacks can bootstrap from it by
 * setting OPI_VOLUME_SNAPSHOT_ID (see create()) instead of a full re-sync.
 * The snapshot id is exposed as a stack output.
 */
function takeSnapshot(params: { name: string, volume: digitalocean.Volume }) {
  const { name, volume } = params;
  const createSnapshot = new digitalocean.VolumeSnapshot(`${name}-snapshot`, {
    volumeId: volume.id,
    name: `${name}-snapshot`,
  });
  exports[`volume_snapshot_${name}`] = createSnapshot.id;
  return { createSnapshot };
}
/**
 * Copy a template file into a deterministic temp directory after applying a
 * list of [search, replacement] pairs (every occurrence is replaced).
 * Returns the path of the written copy. The seed makes concurrent callers
 * (one per instance name) write to distinct directories.
 */
function transformFile(seed: string, filePath: string, transforms: string[][]): string {
  let text = fs.readFileSync(filePath, 'utf8');

  // split/join replaces ALL occurrences without regex-escaping concerns.
  for (const [needle, replacement] of transforms) {
    text = text.split(needle).join(replacement);
  }

  const outDir = createTmpDirFromSeed(filePath + seed);
  const outPath = path.join(outDir, path.basename(filePath));
  fs.writeFileSync(outPath, text);
  return outPath;
}
/**
 * Deterministically map a seed string to /tmp/<sha256(seed)> and ensure the
 * directory exists. The same seed always yields the same path, so repeated
 * runs reuse one directory instead of littering /tmp.
 */
const createTmpDirFromSeed = (seed: string): string => {
  const digest = createHash('sha256').update(seed).digest('hex');
  const target = join('/tmp', digest);
  try {
    fs.mkdirSync(target, { recursive: true });
  } catch (error) {
    throw new Error(`Failed to create temp directory: ${error}`);
  }
  return target;
};
// ===============
// Create Droplet
// ===============
// One droplet + volume per region. The image ids are DigitalOcean custom
// image ids and are region-specific — NOTE(review): confirm the three stay
// in sync when the base image is rebuilt.
const instances = [
  create({
    name: "opi1sfo",
    region: 'sfo3',
    size: 's-8vcpu-16gb-amd',
    image: '149367446'
  }),
  create({
    name: "opi1lon",
    region: 'lon1',
    size: 's-8vcpu-16gb-amd',
    image: '149439505'
  }),
  create({
    name: "opi1sgp",
    region: 'sgp1',
    size: 's-8vcpu-16gb-amd',
    image: '149439499'
  })];
// takeSnapshot(instances[0])

View File

@@ -0,0 +1,17 @@
#!/bin/bash
# Ensure ~/.bashrc sources ~/.env exactly once (idempotent across runs).

# The target file to update
BASHRC_FILE="$HOME/.bashrc"
# Kept literal (single quotes): $HOME is expanded when .bashrc runs, not here.
SOURCE_COMMAND='source $HOME/.env'

# BUG FIX: the original grepped for a printf-%q-escaped form of the command
# ("source\ \$HOME/.env"), which can never match the raw line appended below,
# so every run appended another duplicate. Match the raw line itself; -F/-x
# make it a literal whole-line match. 2>/dev/null covers a missing .bashrc
# (>> creates it).
if ! grep -qxF "$SOURCE_COMMAND" "$BASHRC_FILE" 2>/dev/null; then
  echo "$SOURCE_COMMAND" >> "$BASHRC_FILE"
  echo ".env sourcing added to $BASHRC_FILE"
else
  echo ".env sourcing already exists in $BASHRC_FILE"
fi

View File

@@ -0,0 +1,28 @@
#!/bin/bash
# Locate the DigitalOcean block-storage mount point under /mnt and print it.
# Run remotely by deploy/src/index.ts right after the volume attachment; the
# mount may appear with a delay, hence the retry loop (up to 100 x 5s = ~8.5m,
# bounded by the 5m customTimeout on the Pulumi side).
# Exit codes: 0 = exactly one match printed, 2 = ambiguous (multiple matches),
# 3 = nothing found after all retries.
retry_count=0
max_retries=100
retry_delay=5
while [ $retry_count -lt $max_retries ]; do
  # Search for directories containing 'volume' in their name under /mnt
  directories=$(find /mnt -type d -name "*volume*" 2>/dev/null)
  # Counting matching lines; empty output yields 0.
  dir_count=$(echo "$directories" | grep -c 'volume')
  if [ "$dir_count" -eq 1 ]; then
    # Exactly one directory found: print it (this is the command's stdout
    # consumed as volumePathPrint.stdout) and exit with success.
    echo "$directories"
    exit 0
  elif [ "$dir_count" -gt 1 ]; then
    # More than one candidate: ambiguous, bail out with exit code 2.
    echo "Multiple directories found."
    exit 2
  else
    # No directories found yet: increment the retry counter and wait
    ((retry_count++))
    sleep $retry_delay
  fi
done
# If no directory is found after the maximum number of retries, exit with code 3
exit 3

View File

@@ -0,0 +1,18 @@
#!/bin/bash
# Bring up the restore stack (restore.docker-compose.yaml), wait for the
# one-shot "restore" container to finish, then tear the stack down only if
# the restore succeeded. Intended to run in the directory holding the
# compose file (deploy/src/index.ts cd's there first).

# Start all services
docker-compose -f restore.docker-compose.yaml up -d

# BUG FIX: `docker wait` PRINTS the container's exit code on stdout and
# returns 0 itself, so the original `exit_code=$?` was almost always 0 and
# the stack was torn down even after a failed restore. Capture stdout instead.
exit_code=$(docker wait "$(docker-compose -f restore.docker-compose.yaml ps -q restore)")

# If the restore container succeeded, shut down all services; otherwise keep
# them up for inspection and propagate the failure.
if [ "$exit_code" -eq 0 ]; then
  docker-compose -f restore.docker-compose.yaml down
else
  echo "Restore service exited with code $exit_code"
  exit "$exit_code"
fi

44
deploy/src/utils.ts Normal file
View File

@@ -0,0 +1,44 @@
import * as digitalocean from "@pulumi/digitalocean";
import assert from "assert";
import path from 'path';
import os from 'os';
import fs from 'fs';
export function root(filePath: string) {
const p = path.resolve(__dirname, `../../${filePath}`);
if (fs.existsSync(p)) {
return p;
}
throw new Error(`File not found: ${p}`);
}
const id = process.env['DIGITAL_OCEAN_SSH_KEY_ID'];
const name = process.env['DIGITAL_OCEAN_SSH_KEY_NAME'];
assert(id, "DIGITAL_OCEAN_SSH_KEY_ID is required");
assert(name, "DIGITAL_OCEAN_SSH_KEY_NAME is required");
export const sshKey = digitalocean.SshKey.get(name, id);
export const getPrivateKey = () => {
// Assuming your environment variable is named 'PRIVATE_KEY_PATH'
const privateKeyPath = process.env['PRIVATE_KEY_PATH'];
if (!privateKeyPath) {
console.error('The environment variable PRIVATE_KEY_PATH is not set.');
process.exit(1); // Exit with an error code
}
// Handles the tilde by replacing it with the user's home directory
const resolvedPrivateKeyPath = privateKeyPath.startsWith('~')
? path.join(os.homedir(), privateKeyPath.slice(1))
: path.resolve(privateKeyPath);
const key = fs.readFileSync(resolvedPrivateKeyPath, 'utf-8');
return key;
}
export function getScript(scriptName: string) {
return fs.readFileSync(`./src/scripts/${scriptName}`, "utf-8");
}

17
deploy/tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"strict": true,
"outDir": "bin",
"target": "ES2020",
"module": "commonjs",
"moduleResolution": "node",
"sourceMap": true,
"experimentalDecorators": true,
"esModuleInterop": true,
"pretty": true,
"noFallthroughCasesInSwitch": true,
"noImplicitReturns": true,
"forceConsistentCasingInFileNames": true
},
"include": ["stacks"]
}

View File

@@ -0,0 +1,28 @@
# Minimal bitcoind image: Debian slim plus a pinned Bitcoin Core release
# downloaded from bitcoincore.org at build time. Configuration is rendered
# by docker-entrypoint.sh from BITCOIN_* environment variables.
FROM debian:bullseye-slim

# Base tooling needed to fetch and verify the release tarball.
RUN set -ex; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
    gnupg \
    less \
    curl \
    wget \
    ca-certificates \
    git \
    ; \
    rm -rf /var/lib/apt/lists/*;

# Install Bitcoin Core 25.1 binaries into /usr/local/bin.
# NOTE(review): only HTTPS protects this download — no SHA256/GPG check of
# the tarball; consider pinning the release hash.
RUN set -eux; \
    curl -L https://bitcoincore.org/bin/bitcoin-core-25.1/bitcoin-25.1-x86_64-linux-gnu.tar.gz -o bitcoin.tar.gz; \
    tar -xzvf bitcoin.tar.gz; \
    install -m 0755 -o root -g root -t /usr/local/bin bitcoin-25.1/bin/*; \
    rm -rf bitcoin.tar.gz bitcoin-25.1; \
    bitcoind --version;

# Chain data lives under this volume (mounted from the host / DO volume).
ENV BITCOIN_HOME /usr/local/bitcoind
VOLUME /usr/local/bitcoind

# 8332 RPC, 8333 p2p, 8334, 18543 ZMQ (see .envrc BITCOIN_ZMQ_PORT).
EXPOSE 8332 8333 8334 18543

COPY docker-entrypoint.sh /usr/local/bin/
ENTRYPOINT ["docker-entrypoint.sh"]

View File

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
set -Eeo pipefail

# Container entrypoint for the bitcoind image: renders /bitcoin.conf from the
# environment and execs bitcoind as PID 1 so it receives signals directly.

# Defaults; override via the container environment (see docker-compose).
# NOTE(review): the fallback RPC password is weak — always override it.
export BITCOIN_RPC_PORT=${BITCOIN_RPC_PORT:-"8332"}
export BITCOIN_ZMQ_PORT=${BITCOIN_ZMQ_PORT:-"18543"}
export BITCOIN_RPC_USER=${BITCOIN_RPC_USER:-"bitcoin"}
export BITCOIN_RPC_PASSWD=${BITCOIN_RPC_PASSWD:-"abcd1234"}
export BITCOIN_HOME="/usr/local/bitcoind"

_main() {
  # FIX: quote the paths (they were unquoted) and create both dirs at once.
  mkdir -p "$BITCOIN_HOME/datadir" "$BITCOIN_HOME/blocksdir"
  echo "
# Generated by https://jlopp.github.io/bitcoin-core-config-generator/
# [core]
blocksdir=$BITCOIN_HOME/blocksdir
datadir=$BITCOIN_HOME/datadir
dbcache=14000
txindex=1
pid=/bitcoind.pid
daemon=0
# [rpc]
server=1
rest=1
rpcuser=$BITCOIN_RPC_USER
rpcpassword=$BITCOIN_RPC_PASSWD
# [wallet]
disablewallet=1
rpcport=$BITCOIN_RPC_PORT
rpcallowip=0.0.0.0/0
rpcallowip=::/0
listen=1
discover=0
dns=0
dnsseed=0
listenonion=0
rpcserialversion=1
fallbackfee=0.00001
rpcthreads=8
blocksonly=1
" > /bitcoin.conf
  # FIX: pass through any extra container args (CMD) to bitcoind; the
  # original exec dropped them silently.
  exec /usr/local/bin/bitcoind -conf=/bitcoin.conf "$@"
}

_main "$@"

66
docker/opi/Dockerfile Normal file
View File

@@ -0,0 +1,66 @@
# OPI all-in-one image: Python base plus Node.js (for the JS indexer/APIs)
# and Rust (to build ord). A patched OPI fork is cloned and built at image
# build time; every compose service runs one module from /usr/local/OPI.
FROM python:3.9.18-bookworm

# PATH gains cargo and the built ord binary (see .envrc ORD_* paths).
ENV NODE_MAJOR=20 \
    RUSTUP_HOME=/usr/local/rustup \
    CARGO_HOME=/usr/local/cargo \
    PATH=$PATH:/usr/local/cargo/bin:/usr/local/OPI/ord/target/release \
    RUST_VERSION=1.75.0

# System build/runtime dependencies, incl. the Postgres 15 client tools used
# by OPI's restore tooling and pbzip2 for unpacking backups.
RUN set -eux; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
    postgresql-client-common \
    postgresql-client-15 \
    build-essential \
    ca-certificates \
    curl \
    gnupg \
    wget \
    git \
    pbzip2 \
    ; \
    rm -rf /var/lib/apt/lists/*

# install nodejs
RUN set -eux; \
    mkdir -p /etc/apt/keyrings; \
    curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg; \
    echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list; \
    apt-get update; \
    apt-get install nodejs -y; \
    rm -rf /var/lib/apt/lists/*

# install rust (pinned rustup 1.26.0 with per-arch SHA256 verification)
RUN set -eux; \
    dpkgArch="$(dpkg --print-architecture)"; \
    case "${dpkgArch##*-}" in \
    amd64) rustArch='x86_64-unknown-linux-gnu'; rustupSha256='0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db' ;; \
    armhf) rustArch='armv7-unknown-linux-gnueabihf'; rustupSha256='f21c44b01678c645d8fbba1e55e4180a01ac5af2d38bcbd14aa665e0d96ed69a' ;; \
    arm64) rustArch='aarch64-unknown-linux-gnu'; rustupSha256='673e336c81c65e6b16dcdede33f4cc9ed0f08bde1dbe7a935f113605292dc800' ;; \
    i386) rustArch='i686-unknown-linux-gnu'; rustupSha256='e7b0f47557c1afcd86939b118cbcf7fb95a5d1d917bdd355157b63ca00fc4333' ;; \
    *) echo >&2 "unsupported architecture: ${dpkgArch}"; exit 1 ;; \
    esac; \
    url="https://static.rust-lang.org/rustup/archive/1.26.0/${rustArch}/rustup-init"; \
    wget "$url"; \
    echo "${rustupSha256} *rustup-init" | sha256sum -c -; \
    chmod +x rustup-init; \
    ./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION --default-host ${rustArch}; \
    rm rustup-init; \
    chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \
    rustup default stable; \
    rustup --version; \
    cargo --version; \
    rustc --version;

# Python deps for the OPI indexer modules and restore tooling.
RUN pip install --no-cache-dir python-dotenv psycopg2 psycopg2-binary boto3 tqdm json5 stdiomask requests;

# Clone the OPI fork (non-interactive branch), build ord, and install the
# node modules for every JS service.
# NOTE(review): the branch is not pinned to a commit, so rebuilds are not
# reproducible — consider pinning.
RUN git clone https://github.com/alexgo-io/OPI.git --branch feat/none-interactive /usr/local/OPI; \
    cd /usr/local/OPI/ord; cargo build --release; \
    cd /usr/local/OPI/modules/main_index; npm install; \
    cd /usr/local/OPI/modules/brc20_api; npm install; \
    cd /usr/local/OPI/modules/bitmap_api; npm install; \
    cd /usr/local/OPI/modules/sns_api; npm install;

# Apply the bitcoinjs-lib p2tr patch once, then remove the patch script.
# The "remvoe" typo is the actual filename of the script in this repo.
COPY remvoe_pubkey_validate.js /usr/local/OPI/modules/main_index
RUN node /usr/local/OPI/modules/main_index/remvoe_pubkey_validate.js && rm /usr/local/OPI/modules/main_index/remvoe_pubkey_validate.js

# Data mount points used by the compose services (see .envrc ORD_DATADIR).
RUN mkdir /ord_data; mkdir -p /bitcoind

View File

@@ -0,0 +1,39 @@
// Patches the bitcoinjs-lib copy inside main_index's node_modules: removes
// the x-only pubkey validation from the p2tr payment so the indexer does not
// throw "Invalid pubkey for p2tr" while processing on-chain data.
// NOTE(review): run once at image build time (see docker/opi/Dockerfile);
// a .backup of the original file is kept next to the target.
const fs = require('fs');
const path = require('path')

// Target is resolved relative to this script's install location
// (/usr/local/OPI/modules/main_index inside the image).
const TARGET_FILE = path.resolve(__dirname, 'node_modules/bitcoinjs-lib/src/payments/p2tr.js');
const BACKUP_FILE = `${TARGET_FILE}.backup`;

// Exact content to be removed.
// NOTE(review): `includes` needs this literal to match p2tr.js byte-for-byte,
// including indentation — confirm it was not mangled when this file was edited.
const contentToRemove = `
if (pubkey && pubkey.length) {
if (!(0, ecc_lib_1.getEccLib)().isXOnlyPoint(pubkey))
throw new TypeError('Invalid pubkey for p2tr');
}
`.trim(); // trim() to remove the extra newlines

fs.readFile(TARGET_FILE, 'utf8', (err, data) => {
  if (err) {
    // Missing target is reported but not fatal (process still exits 0).
    return console.error(`Error reading ${TARGET_FILE}: ${err.message}`);
  }
  // Check if the content is present; makes the script idempotent.
  if (!data.includes(contentToRemove)) {
    return console.log('The specified content is not present in the file.');
  }
  // Create a backup if it doesn't exist
  if (!fs.existsSync(BACKUP_FILE)) {
    fs.writeFileSync(BACKUP_FILE, data);
    console.log(`Backup created at ${BACKUP_FILE}`);
  }
  // Remove the specified content (first occurrence only).
  const updatedData = data.replace(contentToRemove, '');
  fs.writeFile(TARGET_FILE, updatedData, 'utf8', (writeErr) => {
    if (writeErr) {
      return console.error(`Error writing to ${TARGET_FILE}: ${writeErr.message}`);
    }
    console.log(`The specified content has been removed from ${TARGET_FILE}.`);
  });
});

View File

@@ -0,0 +1,44 @@
#!/bin/bash
################################################################################
## File: cleanup.sh
## Desc: Perform cleanup — reclaim disk space after provisioning and report
##       how much was freed.
################################################################################
# Options set here (not on the shebang) so they survive `bash cleanup.sh`.
set -euo pipefail

# Free space (MB) on / before cleanup.
before=$(df / -Pm | awk 'NR==2{print $4}')

# Clear the local repository of retrieved package files: removes everything but
# the lock file from /var/cache/apt/archives/ and /var/cache/apt/archives/partial.
apt-get clean
rm -rf /tmp/*
rm -rf /root/.cache

# Rotate and drop journald logs (probe quietly: command -v prints the path).
if command -v journalctl >/dev/null 2>&1; then
  journalctl --rotate
  journalctl --vacuum-time=1s
fi

# Delete all .gz and rotated log files.
find /var/log -type f -regex ".*\.gz$" -delete
find /var/log -type f -regex ".*\.[0-9]$" -delete

# Wipe remaining log files in place (keeps ownership and permissions).
find /var/log/ -type f -exec cp /dev/null {} \;

# Delete symlink for tests running.
rm -f /usr/local/bin/invoke_tests

# Remove the apt mock wrappers from /usr/local/bin.
prefix=/usr/local/bin
for tool in apt apt-get apt-fast apt-key; do
  sudo rm -f "$prefix/$tool"
done

# Free space (MB) on / after cleanup.
after=$(df / -Pm | awk 'NR==2{print $4}')

# Display sizes (positive delta = space reclaimed).
echo "Before: $before MB"
echo "After : $after MB"
echo "Delta : $((after - before)) MB"

View File

@@ -0,0 +1,54 @@
#!/bin/bash -e
################################################################################
## File: configure-apt-mock.sh
## Desc: A temporary workaround for https://github.com/Azure/azure-linux-extensions/issues/1238.
## Cleaned up during configure-cleanup.sh.
################################################################################
# Installs wrapper scripts for the apt tools into /usr/local/bin (which shadows
# /usr/bin on PATH). Each wrapper retries the real tool up to 30 times, with a
# 5 s delay, whenever stderr matches a known transient failure (apt/dpkg lock
# contention, incomplete `apt update`, unready gpg-agent, DNS not propagated).
prefix=/usr/local/bin
for real_tool in /usr/bin/apt /usr/bin/apt-get /usr/bin/apt-fast /usr/bin/apt-key; do
tool=$(basename $real_tool)
# Unquoted EOT delimiter: the \$-escaped expansions below are evaluated when
# the generated wrapper runs, while $real_tool is baked in right now.
cat >$prefix/$tool <<EOT
#!/bin/sh
i=1
while [ \$i -le 30 ];do
err=\$(mktemp)
$real_tool "\$@" 2>\$err
# no errors, break the loop and continue normal flow
test -f \$err || break
cat \$err >&2
retry=false
if grep -q 'Could not get lock' \$err;then
# apt db locked needs retry
retry=true
elif grep -q 'Could not open file /var/lib/apt/lists' \$err;then
# apt update is not completed, needs retry
retry=true
elif grep -q 'IPC connect call failed' \$err;then
# the delay should help with gpg-agent not ready
retry=true
elif grep -q 'Temporary failure in name resolution' \$err;then
# It looks like DNS is not updated with random generated hostname yet
retry=true
elif grep -q 'dpkg frontend is locked by another process' \$err;then
# dpkg process is busy by another process
retry=true
fi
rm \$err
if [ \$retry = false ]; then
break
fi
sleep 5
echo "...retry \$i"
i=\$((i + 1))
done
EOT
# NOTE(review): the generated wrapper always exits with the loop's status (0),
# masking the real tool's exit code, and `test -f \$err` is always true because
# mktemp creates the file (the no-match path still breaks via retry=false) —
# confirm against the upstream runner-images version of this script.
chmod +x $prefix/$tool
done

View File

@@ -0,0 +1,43 @@
#!/bin/bash -e
################################################################################
## File: configure-apt.sh
## Desc: Make apt fully non-interactive and resilient for unattended image
##       builds, then upgrade the base system and install apt-fast.
################################################################################
export DEBIAN_FRONTEND=noninteractive
# Stop and disable apt-daily upgrade services;
# keeps the scheduled jobs from grabbing the dpkg lock mid-build.
systemctl stop apt-daily.timer
systemctl disable apt-daily.timer
systemctl disable apt-daily.service
systemctl stop apt-daily-upgrade.timer
systemctl disable apt-daily-upgrade.timer
systemctl disable apt-daily-upgrade.service
# Enable retry logic for apt up to 10 times
echo "APT::Acquire::Retries \"10\";" > /etc/apt/apt.conf.d/80-retries
# Configure apt to always assume Y
echo "APT::Get::Assume-Yes \"true\";" > /etc/apt/apt.conf.d/90assumeyes
# APT understands a field called Phased-Update-Percentage which can be used to control the rollout of a new version. It is an integer between 0 and 100.
# In case you have multiple systems that you want to receive the same set of updates,
# you can set APT::Machine-ID to a UUID such that they all phase the same,
# or set APT::Get::Never-Include-Phased-Updates or APT::Get::Always-Include-Phased-Updates to true such that APT will never/always consider phased updates.
# apt-cache policy pkgname
echo 'APT::Get::Always-Include-Phased-Updates "true";' > /etc/apt/apt.conf.d/99-phased-updates
# Fix bad proxy and http headers settings
cat <<EOF >> /etc/apt/apt.conf.d/99bad_proxy
Acquire::http::Pipeline-Depth 0;
Acquire::http::No-Cache true;
Acquire::BrokenProxy true;
EOF
# Uninstall unattended-upgrades
rm -rf /var/log/unattended-upgrades
apt-get purge unattended-upgrades -y
# Base tooling needed before third-party repositories/scripts are used.
apt-get install -y apt-transport-https ca-certificates curl software-properties-common
apt-get -yq update
apt-get -yq dist-upgrade
# Install apt-fast using quick-install.sh
# https://github.com/ilikenwf/apt-fast
# NOTE(review): this pipes a remote script straight into bash as root — the
# documented apt-fast install method, but consider pinning to a commit hash
# for reproducible image builds.
bash -c "$(curl -fsSL https://raw.githubusercontent.com/ilikenwf/apt-fast/master/quick-install.sh)"

View File

@@ -0,0 +1,4 @@
#!/bin/bash -e
################################################################################
## File: pull.sh
## Desc: Pre-pull the Docker images baked into the VM image so the first boot
##       does not have to download them.
################################################################################
# NOTE(review): these pull from the `caoer` Docker Hub namespace while .envrc
# names images under `alexgo-io` — confirm which registry/namespace is intended.
docker pull caoer/opi:latest
docker pull caoer/bitcoind:latest

View File

@@ -0,0 +1,42 @@
#!/bin/bash
################################################################################
## File: setup.sh
## Desc: Install base packages, Docker Engine and docker-compose on the VM.
################################################################################
# Options set here (not on the shebang) so they survive `bash setup.sh`.
set -euo pipefail

## Common
# while sudo lsof /var/lib/dpkg/lock-frontend ; do sleep 1; done;
# Install packages to allow apt to use a repository over HTTPS
sudo apt-get install -y \
  ca-certificates \
  curl \
  gnupg \
  wget \
  git \
  build-essential \
  ncdu \
  bpytop \
  pbzip2 \
  lsb-release

## Install Docker (official convenience script)
curl -fsSL https://get.docker.com | sh

# Enable docker.service
systemctl is-active --quiet docker.service || systemctl start docker.service
systemctl is-enabled --quiet docker.service || systemctl enable docker.service

# Docker daemon takes time to come up after installing
sleep 10
docker info

## Install docker-compose
# Resolve the latest release tag ONCE so the binary and its bash completion are
# guaranteed to come from the same version. The original queried the GitHub API
# twice, which could race a new release landing between the calls and doubles
# the (unauthenticated, rate-limited) API cost.
compose_version=$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep -Po '"tag_name": "\K.*?(?=")')

# Download the current stable release of Docker Compose
sudo curl -L "https://github.com/docker/compose/releases/download/${compose_version}/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose

# Apply executable permissions to the binary
sudo chmod +x /usr/local/bin/docker-compose

# Optionally, install command completion for the bash shell
sudo curl -L "https://raw.githubusercontent.com/docker/compose/${compose_version}/contrib/completion/bash/docker-compose" -o /etc/bash_completion.d/docker-compose

# Check the installation
docker-compose version

View File

@@ -0,0 +1,74 @@
# Plugin requirements: the DigitalOcean builder plugin for Packer.
packer {
required_plugins {
digitalocean = {
version = ">= 1.0.4"
source = "github.com/digitalocean/digitalocean"
}
}
}
# DigitalOcean API token, sourced from the DIGITAL_OCEAN_API_KEY environment
# variable (kept out of the template so it is never committed).
variable "digitalocean_api_token" {
type = string
default = "${env("DIGITAL_OCEAN_API_KEY")}"
}
# Builder: Ubuntu 22.04 droplet in San Francisco (sfo3) used to bake the image.
source "digitalocean" "opi-sfo3" {
api_token = "${var.digitalocean_api_token}"
image = "ubuntu-22-04-x64"
region = "sfo3"
size = "s-1vcpu-1gb"
ssh_username = "root"
}
# Builder: same image/size in Singapore (sgp1).
source "digitalocean" "opi-sgp1" {
api_token = "${var.digitalocean_api_token}"
image = "ubuntu-22-04-x64"
region = "sgp1"
size = "s-1vcpu-1gb"
ssh_username = "root"
}
# Builder: same image/size in London (lon1).
source "digitalocean" "opi-lon1" {
api_token = "${var.digitalocean_api_token}"
image = "ubuntu-22-04-x64"
region = "lon1"
size = "s-1vcpu-1gb"
ssh_username = "root"
}
# Bake the OPI base image in all three regions.
# Fixed: the third source reference was truncated to "source.digitalocean.opi-",
# which matches no declared builder; the declared opi-sgp1 source was unused.
build {
  name = "opi-ubuntu-22-04-x64"
  sources = [
    "source.digitalocean.opi-sfo3",
    "source.digitalocean.opi-lon1",
    "source.digitalocean.opi-sgp1",
  ]

  # Install the apt mock first so all later apt calls retry transient failures.
  # NOTE(review): configure-apt-mock.sh also appears in the next provisioner's
  # script list, so it runs twice; harmless (it just rewrites the wrappers) but
  # likely unintended — confirm.
  provisioner "shell" {
    execute_command = "sudo sh -c '{{ .Vars }} {{ .Path }}'"
    script          = "${path.root}/../scripts/build/configure-apt-mock.sh"
  }

  # Main provisioning: apt configuration, base setup (Docker), image pre-pull.
  provisioner "shell" {
    environment_vars = ["DEBIAN_FRONTEND=noninteractive"]
    execute_command  = "sudo sh -c '{{ .Vars }} {{ .Path }}'"
    scripts = [
      "${path.root}/../scripts/build/configure-apt-mock.sh",
      "${path.root}/../scripts/build/configure-apt.sh",
      "${path.root}/../scripts/build/setup.sh",
      "${path.root}/../scripts/build/pull.sh",
    ]
  }

  # Reboot to apply the dist-upgrade; expect_disconnect keeps Packer waiting
  # for the VM to come back instead of treating the drop as an error.
  provisioner "shell" {
    execute_command   = "sudo sh -c '{{ .Vars }} {{ .Path }}'"
    expect_disconnect = true
    inline            = ["echo 'Reboot VM'", "sudo reboot"]
  }

  # Final cleanup once the VM is back up (pause gives it time to boot).
  provisioner "shell" {
    execute_command     = "sudo sh -c '{{ .Vars }} {{ .Path }}'"
    pause_before        = "1m0s"
    scripts             = ["${path.root}/../scripts/build/cleanup.sh"]
    start_retry_timeout = "10m"
  }
}