Mirror of https://github.com/alexgo-io/opi-on-bitcoin-oracle.git (synced 2026-01-12 22:23:20 +08:00)

use biome
.envrc | 1 +
@@ -9,6 +9,7 @@ export BITCOIND_IMAGE=caoer/bitcoind
 export WORKSPACE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

 PATH_add $WORKSPACE_ROOT/tools/bin
+PATH_add $WORKSPACE_ROOT/deploy/node_modules/.bin

 export OPI_PG_DATA_PATH="${WORKSPACE_ROOT}/data/opi/postgres-data"
 export OPI_BITCOIND_PATH="${WORKSPACE_ROOT}/data/bitcoind"
@@ -7,12 +7,7 @@
 },
 "files": {
 "ignoreUnknown": false,
-"ignore": [
-"data/**/*",
-".cursorrules",
-"**/dist/**",
-"**/node_modules/**"
-]
+"ignore": ["data/**/*", ".cursorrules", "**/dist/**", "**/node_modules/**"]
 },
 "formatter": {
 "enabled": true,
BIN deploy/bun.lockb
Binary file not shown.
@@ -2,6 +2,7 @@
 "name": "pulumi-opi",
 "main": "src/index.ts",
 "devDependencies": {
+"@biomejs/biome": "^1.9.4",
 "@types/node": "^20",
 "dotenv": "^16.3.1",
 "prettier": "3.2.5",
@@ -1,12 +1,40 @@
 const inquirer = require('inquirer');
-const fs = require('fs');
-const path = require('path');
+const fs = require('node:fs');
+const path = require('node:path');

-const regions = ['nyc1', 'sfo1', 'nyc2', 'ams2', 'sgp1', 'lon1', 'nyc3', 'ams3', 'fra1', 'tor1', 'sfo2', 'blr1', 'sfo3', 'syd1'];
+const regions = [
+'nyc1',
+'sfo1',
+'nyc2',
+'ams2',
+'sgp1',
+'lon1',
+'nyc3',
+'ams3',
+'fra1',
+'tor1',
+'sfo2',
+'blr1',
+'sfo3',
+'syd1',
+];
 const sizes = [
-'s-4vcpu-8gb', 's-4vcpu-8gb-amd', 's-4vcpu-8gb-intel', 'g-2vcpu-8gb', 's-4vcpu-8gb-240gb-intel',
-'gd-2vcpu-8gb', 'g-2vcpu-8gb-intel', 'gd-2vcpu-8gb-intel', 's-4vcpu-16gb-amd', 'm-2vcpu-16gb',
-'c-4', 'c2-4vcpu-8gb', 's-4vcpu-16gb-320gb-intel', 's-8vcpu-16gb', 'm3-2vcpu-16gb', 'c-4-intel',
+'s-4vcpu-8gb',
+'s-4vcpu-8gb-amd',
+'s-4vcpu-8gb-intel',
+'g-2vcpu-8gb',
+'s-4vcpu-8gb-240gb-intel',
+'gd-2vcpu-8gb',
+'g-2vcpu-8gb-intel',
+'gd-2vcpu-8gb-intel',
+'s-4vcpu-16gb-amd',
+'m-2vcpu-16gb',
+'c-4',
+'c2-4vcpu-8gb',
+'s-4vcpu-16gb-320gb-intel',
+'s-8vcpu-16gb',
+'m3-2vcpu-16gb',
+'c-4-intel',
 // ... Include all sizes here
 ];

@@ -30,8 +58,9 @@ const questionSize = {
 choices: sizes,
 };

-inquirer.prompt([questionService, questionRegion, questionSize])
-.then(answers => {
+inquirer
+.prompt([questionService, questionRegion, questionSize])
+.then((answers) => {
 const { serviceName, region, size } = answers;
 const outputTemplate = `
 services:
@@ -43,5 +72,3 @@ services:
 fs.writeFileSync(outputPath, outputTemplate);
 console.log(`file generated at: ${path.resolve(outputPath)}`);
 });
-
-
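
Note (not from the commit itself): the recurring require('fs') -> require('node:fs') and 'path' -> 'node:path' rewrites in the hunks above and below match Biome's preferred node: scheme for Node built-ins (its useNodejsImportProtocol rule). A minimal standalone TypeScript sketch of the same style, assuming a CommonJS/ts-node setup like this repo's deploy code:

import { createHash } from 'node:crypto';
import fs from 'node:fs';

// The node: scheme makes it explicit that fs and crypto are Node built-ins,
// not npm packages that happen to share the same name.
const digest = createHash('sha256').update(fs.readFileSync(__filename)).digest('hex');
console.log(`self hash: ${digest}`);
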
@@ -1,4 +1,11 @@
-import * as digitalocean from '@pulumi/digitalocean'
+import { createHash } from 'node:crypto';
+import fs from 'node:fs';
+import { local, remote } from '@pulumi/command';
+import type { types } from '@pulumi/command';
+import * as digitalocean from '@pulumi/digitalocean';
+import * as pulumi from '@pulumi/pulumi';
+import * as time from '@pulumiverse/time';
+import YAML from 'yaml';
 import {
 generateDirectoryHash,
 getScript,
@@ -6,28 +13,22 @@ import {
 root$,
 transformFile,
 unroot,
-} from './utils'
-import * as pulumi from '@pulumi/pulumi'
-import fs from 'fs'
-import { local, remote, types } from '@pulumi/command'
-import { createHash } from 'crypto'
-import * as time from '@pulumiverse/time'
-import YAML from 'yaml'
+} from './utils';

-import { getPrivateKey, sshKey } from './keys'
+import { getPrivateKey, sshKey } from './keys';

 export function provisionInstance(params: {
-name: string
-connection: types.input.remote.ConnectionArgs
+name: string;
+connection: types.input.remote.ConnectionArgs;
 }) {
-const { connection, name } = params
+const { connection, name } = params;

 const setupCommands = execScriptsOnRemote(name, connection, [
 root('deploy/src/provision/configure-apt-mock.sh'),
 root('deploy/src/provision/configure-apt.sh'),
 root('deploy/src/provision/setup.sh'),
 root('deploy/src/provision/pull.sh'),
-])
+]);

 const reboot = new remote.Command(
 `${name}:reboot`,
@@ -37,7 +38,7 @@ export function provisionInstance(params: {
 environment: { DEBIAN_FRONTEND: 'noninteractive' },
 },
 { dependsOn: setupCommands },
-)
+);

 const wait = new time.Sleep(
 `${name}:wait60Seconds`,
@@ -45,7 +46,7 @@ export function provisionInstance(params: {
 {
 dependsOn: [reboot],
 },
-)
+);

 return execScriptOnRemote(
 name,
@@ -54,7 +55,7 @@ export function provisionInstance(params: {
 {
 commandOpts: { dependsOn: [wait] },
 },
-)
+);
 }

 export function execScriptsOnRemote(
@@ -62,23 +63,23 @@ export function execScriptsOnRemote(
 connection: types.input.remote.ConnectionArgs,
 locations: string[],
 ) {
-let command: remote.Command | null = null
-const commands: remote.Command[] = []
+let command: remote.Command | null = null;
+const commands: remote.Command[] = [];
 for (const loc of locations) {
 if (command == null) {
-command = execScriptOnRemote(name, connection, loc)
+command = execScriptOnRemote(name, connection, loc);
 } else {
 command = execScriptOnRemote(name, connection, loc, {
 commandOpts: {
 dependsOn: [command],
 },
-})
+});
 }

-commands.push(command)
+commands.push(command);
 }

-return commands
+return commands;
 }

 export function execScriptOnRemote(
@@ -86,15 +87,14 @@ export function execScriptOnRemote(
 connection: types.input.remote.ConnectionArgs,
 loc: string,
 options: {
-cwd?: pulumi.Output<string>
-commandOpts?: pulumi.CustomResourceOptions
+cwd?: pulumi.Output<string>;
+commandOpts?: pulumi.CustomResourceOptions;
 } = {},
 ) {
-// cwd is the CWD
-const createContent = fs.readFileSync(loc, 'utf-8')
+const createContent = fs.readFileSync(loc, 'utf-8');
 const createContentHash = createHash('md5')
 .update(createContent)
-.digest('hex')
+.digest('hex');

 if (options.cwd) {
 return new remote.Command(
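
Aside (an illustration, not part of the diff): execScriptOnRemote hashes the script body with md5, presumably so the digest can act as a change trigger — the remote command only re-runs when the script's content changes, not on every pulumi up. A sketch of that pattern with a hypothetical helper name:

import { createHash } from 'node:crypto';
import fs from 'node:fs';

// Hypothetical helper: a stable trigger value derived from a script's
// contents; it changes only when the file itself changes.
function scriptTrigger(scriptPath: string): string {
  const content = fs.readFileSync(scriptPath, 'utf-8');
  return createHash('md5').update(content).digest('hex');
}

console.log(scriptTrigger(__filename));
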
@@ -110,8 +110,9 @@ export function execScriptOnRemote(
 customTimeouts: { create: '240m' },
 ...options.commandOpts,
 },
-)
-} else {
+);
+}
+
 return new remote.Command(
 `${name}:run:remote: ${unroot(loc)}`,
 {
@@ -123,18 +124,17 @@ export function execScriptOnRemote(
 customTimeouts: { create: '240m' },
 ...options.commandOpts,
 },
-)
-}
+);
 }

-const image = 'ubuntu-22-04-x64'
+const image = 'ubuntu-22-04-x64';

 export function create(params: { name: string; region: string; size: string }) {
-const { region, size, name } = params
+const { region, size, name } = params;
 const snapshotId = (() => {
-const id = process.env['OPI_VOLUME_SNAPSHOT_ID']
-return id?.length == 0 ? undefined : id
-})()
+const id = process.env.OPI_VOLUME_SNAPSHOT_ID;
+return id?.length === 0 ? undefined : id;
+})();

 // create instance

@@ -143,47 +143,47 @@ export function create(params: { name: string; region: string; size: string }) {
 region,
 size,
 sshKeys: [sshKey.id],
-})
-const privateKey = getPrivateKey()
+});
+const privateKey = getPrivateKey();

 const connection: types.input.remote.ConnectionArgs = {
 host: droplet.ipv4Address,
 user: 'root',
 privateKey,
 dialErrorLimit: 50,
-}
+};

-const provision = provisionInstance({ name, connection })
+const provision = provisionInstance({ name, connection });

 const copyConfigDir = (loc: string, remotePath: pulumi.Output<string>) => {
 if (!fs.existsSync(loc)) {
-throw new Error(`not found: ${loc}`)
+throw new Error(`not found: ${loc}`);
 }
-const hash = generateDirectoryHash(loc).slice(0, 5)
+const hash = generateDirectoryHash(loc).slice(0, 5);
 return new local.Command(`${name}:copyFiles ${unroot(loc)}`, {
-create: pulumi.interpolate`rsync -avP -e "ssh -i ${process.env['PRIVATE_KEY_PATH']}" ${loc} ${connection.user}@${droplet.ipv4Address}:${remotePath}`,
+create: pulumi.interpolate`rsync -avP -e "ssh -i ${process.env.PRIVATE_KEY_PATH}" ${loc} ${connection.user}@${droplet.ipv4Address}:${remotePath}`,
 triggers: [hash, loc, remotePath],
-})
-}
+});
+};

 const volume = new digitalocean.Volume(
 `${name}volume`,
 {
 region,
-size: parseInt(process.env['OPI_VOLUME_SIZE'] ?? '1000', 10),
+size: Number.parseInt(process.env.OPI_VOLUME_SIZE ?? '1000', 10),
 initialFilesystemType: 'ext4',
 snapshotId,
 },
 { dependsOn: [provision, droplet] },
-)
+);
 // mount disk
 const volumeAttachment = new digitalocean.VolumeAttachment(
 `${name}-volume-attachment`,
 {
-dropletId: droplet.id.apply((id) => parseInt(id, 10)),
+dropletId: droplet.id.apply((id) => Number.parseInt(id, 10)),
 volumeId: volume.id,
 },
-)
+);

 const volumePathPrint = new remote.Command(
 `${name}-read-volume-path`,
@@ -195,7 +195,7 @@ export function create(params: { name: string; region: string; size: string }) {
 dependsOn: [droplet, volumeAttachment, volume],
 customTimeouts: { create: '5m' },
 },
-)
+);

 // cp restore files
 const cpRestoreDockerCompose = volumePathPrint.stdout.apply((volumeName) => {
@@ -204,41 +204,36 @@ export function create(params: { name: string; region: string; size: string }) {
 './src/docker-composes/restore.docker-compose.yaml',
 [
 ['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
-['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
-// DB_USER
-['${DB_USER}', process.env['DB_USER']!],
-// DB_PASSWORD
-['${DB_PASSWD}', process.env['DB_PASSWD']!],
-// DB_DATABASE
-['${DB_DATABASE}', process.env['DB_DATABASE']!],
-// WORKSPACE_ROOT
+['${OPI_IMAGE}', process.env.OPI_IMAGE!],
+['${DB_USER}', process.env.DB_USER!],
+['${DB_PASSWD}', process.env.DB_PASSWD!],
+['${DB_DATABASE}', process.env.DB_DATABASE!],
 ['${WORKSPACE_ROOT}', volumeName],
-// ORD_DATADIR
 ['${ORD_DATADIR}', `${volumeName}/ord_data`],
 ],
-)
+);

-const file = fs.readFileSync(localPath, 'utf-8')
-const hash = createHash('md5').update(file).digest('hex').slice(0, 5)
-const remotePath = `${volumeName}/restore.docker-compose.yaml`
+const file = fs.readFileSync(localPath, 'utf-8');
+const hash = createHash('md5').update(file).digest('hex').slice(0, 5);
+const remotePath = `${volumeName}/restore.docker-compose.yaml`;

 return new remote.CopyFile(`${name}:restore`, {
 connection,
 localPath,
 remotePath,
 triggers: [hash, localPath],
-})
-})
+});
+});

 const cpConfig = copyConfigDir(
 root('configs'),
 pulumi.interpolate`${volumePathPrint.stdout}`,
-)
+);

 // create swap space
 execScriptOnRemote(name, connection, root('deploy/src/scripts/mkswap.sh'), {
 commandOpts: { dependsOn: [provision] },
-})
+});

 // restore pg database and ord_data
 const restore = execScriptOnRemote(
@@ -251,7 +246,7 @@ export function create(params: { name: string; region: string; size: string }) {
 dependsOn: [cpConfig, cpRestoreDockerCompose],
 },
 },
-)
+);

 // cp service docker-compose file
 /**
@@ -266,30 +261,21 @@ export function create(params: { name: string; region: string; size: string }) {
 [
 ['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
 ['${OPI_BITCOIND_PATH}', `${volumeName}/bitcoind_data`],
-['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
-['${BITCOIND_IMAGE}', process.env['BITCOIND_IMAGE']!],
-// DB_USER
-['${DB_USER}', process.env['DB_USER']!],
-// DB_PASSWORD
-['${DB_PASSWD}', process.env['DB_PASSWD']!],
-// DB_DATABASE
-['${DB_DATABASE}', process.env['DB_DATABASE']!],
-// WORKSPACE_ROOT
+['${OPI_IMAGE}', process.env.OPI_IMAGE!],
+['${BITCOIND_IMAGE}', process.env.BITCOIND_IMAGE!],
+['${DB_USER}', process.env.DB_USER!],
+['${DB_PASSWD}', process.env.DB_PASSWD!],
+['${DB_DATABASE}', process.env.DB_DATABASE!],
 ['${WORKSPACE_ROOT}', volumeName],
-// BITCOIN_RPC_USER
-['${BITCOIN_RPC_USER}', process.env['BITCOIN_RPC_USER']!],
-// BITCOIN_RPC_PASSWD
-['${BITCOIN_RPC_PASSWD}', process.env['BITCOIN_RPC_PASSWD']!],
-// BITCOIN_RPC_POR
-['${BITCOIN_RPC_PORT}', process.env['BITCOIN_RPC_PORT']!],
-// ORD_DATADIR
+['${BITCOIN_RPC_USER}', process.env.BITCOIN_RPC_USER!],
+['${BITCOIN_RPC_PASSWD}', process.env.BITCOIN_RPC_PASSWD!],
+['${BITCOIN_RPC_PORT}', process.env.BITCOIN_RPC_PORT!],
 ['${ORD_DATADIR}', `${volumeName}/ord_data`],
-// BITCOIN_CHAIN_FOLDER
 ['${BITCOIN_CHAIN_FOLDER}', `${volumeName}/bitcoind_data/datadir`],
 ],
-)
-const file = fs.readFileSync(localPath, 'utf-8')
-const hash = createHash('md5').update(file).digest('hex').slice(0, 5)
+);
+const file = fs.readFileSync(localPath, 'utf-8');
+const hash = createHash('md5').update(file).digest('hex').slice(0, 5);

 const cpDockerCompose = new remote.CopyFile(
 `${name}:cp:opi.docker-compose. -> ${volumeName}`,
@@ -300,7 +286,7 @@ export function create(params: { name: string; region: string; size: string }) {
 triggers: [hash, localPath],
 },
 { dependsOn: [restore] },
-)
+);

 // start opi
 new remote.Command(
@@ -311,67 +297,69 @@ export function create(params: { name: string; region: string; size: string }) {
 triggers: [hash],
 },
 { dependsOn: [cpDockerCompose] },
-)
-})
+);
+});

-exports[`ip_${name}`] = droplet.ipv4Address
-exports[`name_${name}`] = droplet.name
-exports[`volume_id_${name}`] = volume.id
-exports[`volume_attachment_id_${name}`] = volumeAttachment.id
-exports[`volume_path_${name}`] = volumePathPrint.stdout
+exports[`ip_${name}`] = droplet.ipv4Address;
+exports[`name_${name}`] = droplet.name;
+exports[`volume_id_${name}`] = volume.id;
+exports[`volume_attachment_id_${name}`] = volumeAttachment.id;
+exports[`volume_path_${name}`] = volumePathPrint.stdout;

-return { droplet, volume, name }
+return { droplet, volume, name };
 }

-type Instance = {
-name: string
-region: string
-size: string
+interface Instance {
+name: string;
+region: string;
+size: string;
 }

-function validateInstance(instance: any): instance is Instance {
+function validateInstance(instance: unknown): instance is Instance {
 return (
-typeof instance.name === 'string' &&
-typeof instance.region === 'string' &&
-typeof instance.size === 'string'
-)
+typeof instance === 'object' &&
+instance !== null &&
+'name' in instance &&
+'region' in instance &&
+'size' in instance &&
+typeof (instance as Instance).name === 'string' &&
+typeof (instance as Instance).region === 'string' &&
+typeof (instance as Instance).size === 'string'
+);
 }

 function readYamlAndCreateInstance() {
 // read yaml file

 const file = (() => {
 if (fs.existsSync(root$('deploy/src/config.user.yaml'))) {
-return fs.readFileSync(root('deploy/src/config.user.yaml'), 'utf8')
+return fs.readFileSync(root('deploy/src/config.user.yaml'), 'utf8');
 }
-return fs.readFileSync(root('deploy/src/config.yaml'), 'utf8')
-})()
+return fs.readFileSync(root('deploy/src/config.yaml'), 'utf8');
+})();

 // parse yaml file
-const data = YAML.parse(file)
+const data = YAML.parse(file);
+const instances = [];

-let instances = []
+for (const serviceName in data.services) {

-for (let serviceName in data.services) {
 // validate required fields
 const instance = {
 name: serviceName,
 region: data.services[serviceName].region,
 size: data.services[serviceName].size,
-}
+};

 if (validateInstance(instance)) {
 // create instance and push to instances array
-instances.push(create(instance))
+instances.push(create(instance));
 } else {
-throw new Error(`Invalid instance data '${JSON.stringify(instance)}'`)
+throw new Error(`Invalid instance data '${JSON.stringify(instance)}'`);
 }
 }

-return instances
+return instances;
 }

-const instances = readYamlAndCreateInstance()
+const instances = readYamlAndCreateInstance();

-console.log(`created: ${instances.length} instances`)
+console.log(`created: ${instances.length} instances`);
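
For context (a standalone sketch, not part of the commit): validateInstance now takes unknown instead of any, so the shape has to be proven before the fields are read. The same narrowing pattern in isolation, with hypothetical names:

interface InstanceLike {
  name: string;
  region: string;
  size: string;
}

// Type guard: checks the runtime shape so the compiler can narrow `unknown`.
function isInstanceLike(value: unknown): value is InstanceLike {
  return (
    typeof value === 'object' &&
    value !== null &&
    typeof (value as InstanceLike).name === 'string' &&
    typeof (value as InstanceLike).region === 'string' &&
    typeof (value as InstanceLike).size === 'string'
  );
}

const candidate: unknown = { name: 'opi-1', region: 'sgp1', size: 's-8vcpu-16gb' };
if (isInstanceLike(candidate)) {
  // Narrowed: candidate is treated as InstanceLike inside this branch.
  console.log(`${candidate.name} -> ${candidate.region}/${candidate.size}`);
}
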
@@ -1,27 +1,27 @@
-import * as digitalocean from '@pulumi/digitalocean'
-import path from 'path'
-import os from 'os'
-import fs from 'fs'
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+import * as digitalocean from '@pulumi/digitalocean';

-export const id = process.env['DIGITAL_OCEAN_SSH_KEY_ID']!
-export const name = process.env['DIGITAL_OCEAN_SSH_KEY_NAME']!
+export const id = process.env.DIGITAL_OCEAN_SSH_KEY_ID!;
+export const name = process.env.DIGITAL_OCEAN_SSH_KEY_NAME!;

-export const sshKey = digitalocean.SshKey.get(name, id)
+export const sshKey = digitalocean.SshKey.get(name, id);
 export const getPrivateKey = () => {
 // Assuming your environment variable is named 'PRIVATE_KEY_PATH'
-const privateKeyPath = process.env['PRIVATE_KEY_PATH']
+const privateKeyPath = process.env.PRIVATE_KEY_PATH;

 if (!privateKeyPath) {
-console.error('The environment variable PRIVATE_KEY_PATH is not set.')
-process.exit(1) // Exit with an error code
+console.error('The environment variable PRIVATE_KEY_PATH is not set.');
+process.exit(1); // Exit with an error code
 }

 // Handles the tilde by replacing it with the user's home directory
 const resolvedPrivateKeyPath = privateKeyPath.startsWith('~')
 ? path.join(os.homedir(), privateKeyPath.slice(1))
-: path.resolve(privateKeyPath)
+: path.resolve(privateKeyPath);

-const key = fs.readFileSync(resolvedPrivateKeyPath, 'utf-8')
+const key = fs.readFileSync(resolvedPrivateKeyPath, 'utf-8');

-return key
-}
+return key;
+};
@@ -1,45 +1,50 @@
-import assert from 'assert'
-import path, { join } from 'path'
-import fs, { readdirSync, readFileSync, statSync } from 'fs'
-import { id, name } from './keys'
-import { createHash } from 'crypto'
-import * as digitalocean from '@pulumi/digitalocean'
+import assert from 'node:assert';
+import { createHash } from 'node:crypto';
+import fs, { readdirSync, readFileSync, statSync } from 'node:fs';
+import path, { join } from 'node:path';
+import * as digitalocean from '@pulumi/digitalocean';
+import { id, name } from './keys';

 export function root(filePath: string) {
-const p = path.resolve(__dirname, `../../${filePath}`)
+const p = path.resolve(__dirname, `../../${filePath}`);
 if (fs.existsSync(p)) {
-return p
+return p;
 }
-throw new Error(`File not found: ${p}`)
+throw new Error(`File not found: ${p}`);
 }

 export function root$(filePath: string) {
-return path.resolve(__dirname, `../../${filePath}`)
+return path.resolve(__dirname, `../../${filePath}`);
 }

 // convert the absolute path from root(filePath: string) to relative path
 // example: unroot(root(a)) === a
 export function unroot(filePath: string) {
-return filePath.replace(root('') + '/', '')
+return filePath.replace(root('') + '/', '');
 }

-assert(id, 'DIGITAL_OCEAN_SSH_KEY_ID is required')
-assert(name, 'DIGITAL_OCEAN_SSH_KEY_NAME is required')
+assert(id, 'DIGITAL_OCEAN_SSH_KEY_ID is required');
+assert(name, 'DIGITAL_OCEAN_SSH_KEY_NAME is required');

 export function getScript(scriptName: string) {
-return fs.readFileSync(`./src/scripts/${scriptName}`, 'utf-8')
-} // write takeSnapshot function which input is the output of function create.
-function takeSnapshot(params: { name: string; volume: digitalocean.Volume }) {
-const { name, volume } = params
+return fs.readFileSync(`./src/scripts/${scriptName}`, 'utf-8');
+}
+
+// write takeSnapshot function which input is the output of function create.
+export function takeSnapshot(params: {
+name: string;
+volume: digitalocean.Volume;
+}) {
+const { name, volume } = params;

 const createSnapshot = new digitalocean.VolumeSnapshot(`${name}-snapshot`, {
 volumeId: volume.id,
 name: `${name}-snapshot`,
-})
+});

-exports[`volume_snapshot_${name}`] = createSnapshot.id
+exports[`volume_snapshot_${name}`] = createSnapshot.id;

-return { createSnapshot }
+return { createSnapshot };
 }

 export function transformFile(
@@ -48,62 +53,62 @@ export function transformFile(
 transforms: string[][],
 ): string {
 // Read the content of the source file
-const content = fs.readFileSync(filePath, 'utf8')
+const content = fs.readFileSync(filePath, 'utf8');

 // Apply all transformations
-let transformedContent = content
+let transformedContent = content;
 for (const transform of transforms) {
-const [original, replacement] = transform
-transformedContent = transformedContent.split(original).join(replacement)
+const [original, replacement] = transform;
+transformedContent = transformedContent.split(original).join(replacement);
 }

 // Create a temp file in a random location
-const tempDir = createTmpDirFromSeed(filePath + seed)
-const tempFilePath = path.join(tempDir, path.basename(filePath))
+const tempDir = createTmpDirFromSeed(filePath + seed);
+const tempFilePath = path.join(tempDir, path.basename(filePath));

 // Write the transformed content to the temp file
-fs.writeFileSync(tempFilePath, transformedContent)
+fs.writeFileSync(tempFilePath, transformedContent);

 // Return the path of the temp file
-return tempFilePath
+return tempFilePath;
 }

 const createTmpDirFromSeed = (seed: string): string => {
-const hash = createHash('sha256').update(seed).digest('hex')
-const tmpBaseDir = '/tmp'
-const dirPath = join(tmpBaseDir, hash)
+const hash = createHash('sha256').update(seed).digest('hex');
+const tmpBaseDir = '/tmp';
+const dirPath = join(tmpBaseDir, hash);

 try {
-fs.mkdirSync(dirPath, { recursive: true })
-return dirPath
+fs.mkdirSync(dirPath, { recursive: true });
+return dirPath;
 } catch (error) {
-throw new Error(`Failed to create temp directory: ${error}`)
-}
+throw new Error(`Failed to create temp directory: ${error}`);
 }
+};

 export function hashFile(filePath: string): string {
-const fileBuffer = readFileSync(filePath)
-const hashSum = createHash('sha256')
-hashSum.update(fileBuffer)
-return hashSum.digest('hex')
+const fileBuffer = readFileSync(filePath);
+const hashSum = createHash('sha256');
+hashSum.update(fileBuffer);
+return hashSum.digest('hex');
 }

 export function generateDirectoryHash(dirPath: string): string {
-let hashString = ''
+let hashString = '';

-const files = readdirSync(dirPath)
+const files = readdirSync(dirPath);
 for (const file of files) {
-const filePath = join(dirPath, file)
-const fileStat = statSync(filePath)
+const filePath = join(dirPath, file);
+const fileStat = statSync(filePath);

 if (fileStat.isDirectory()) {
-hashString += `${file}:${generateDirectoryHash(filePath)}`
+hashString += `${file}:${generateDirectoryHash(filePath)}`;
 } else {
-hashString += `${file}:${hashFile(filePath)}`
+hashString += `${file}:${hashFile(filePath)}`;
 }
 }

-const hashSum = createHash('sha256')
-hashSum.update(hashString)
-return hashSum.digest('hex')
+const hashSum = createHash('sha256');
+hashSum.update(hashString);
+return hashSum.digest('hex');
 }
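
Aside (illustration only, names and values hypothetical): transformFile above renders the docker-compose templates by plain split/join substitution of each [placeholder, value] pair; the core of that loop, standalone:

// Standalone sketch of the substitution performed by transformFile: every
// occurrence of each placeholder is replaced via split/join.
function applyTransforms(content: string, transforms: string[][]): string {
  let result = content;
  for (const [original, replacement] of transforms) {
    result = result.split(original).join(replacement);
  }
  return result;
}

const rendered = applyTransforms('image: ${OPI_IMAGE}', [['${OPI_IMAGE}', 'example/opi:latest']]);
console.log(rendered); // image: example/opi:latest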