mirror of https://github.com/alexgo-io/opi-on-bitcoin-oracle.git
synced 2026-01-12 14:44:16 +08:00
use biome
.envrc (1 line changed)
@@ -9,6 +9,7 @@ export BITCOIND_IMAGE=caoer/bitcoind
export WORKSPACE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

PATH_add $WORKSPACE_ROOT/tools/bin
+PATH_add $WORKSPACE_ROOT/deploy/node_modules/.bin

export OPI_PG_DATA_PATH="${WORKSPACE_ROOT}/data/opi/postgres-data"
export OPI_BITCOIND_PATH="${WORKSPACE_ROOT}/data/bitcoind"
@@ -7,12 +7,7 @@
},
"files": {
"ignoreUnknown": false,
-"ignore": [
-"data/**/*",
-".cursorrules",
-"**/dist/**",
-"**/node_modules/**"
-]
+"ignore": ["data/**/*", ".cursorrules", "**/dist/**", "**/node_modules/**"]
},
"formatter": {
"enabled": true,
deploy/bun.lockb (BIN)
Binary file not shown.
@@ -2,6 +2,7 @@
"name": "pulumi-opi",
"main": "src/index.ts",
"devDependencies": {
+"@biomejs/biome": "^1.9.4",
"@types/node": "^20",
"dotenv": "^16.3.1",
"prettier": "3.2.5",
@@ -1,47 +1,74 @@
const inquirer = require('inquirer');
-const fs = require('fs');
-const path = require('path');
+const fs = require('node:fs');
+const path = require('node:path');

-const regions = ['nyc1', 'sfo1', 'nyc2', 'ams2', 'sgp1', 'lon1', 'nyc3', 'ams3', 'fra1', 'tor1', 'sfo2', 'blr1', 'sfo3', 'syd1'];
+const regions = [
+'nyc1',
+'sfo1',
+'nyc2',
+'ams2',
+'sgp1',
+'lon1',
+'nyc3',
+'ams3',
+'fra1',
+'tor1',
+'sfo2',
+'blr1',
+'sfo3',
+'syd1',
+];
const sizes = [
-'s-4vcpu-8gb', 's-4vcpu-8gb-amd', 's-4vcpu-8gb-intel', 'g-2vcpu-8gb', 's-4vcpu-8gb-240gb-intel',
-'gd-2vcpu-8gb', 'g-2vcpu-8gb-intel', 'gd-2vcpu-8gb-intel', 's-4vcpu-16gb-amd', 'm-2vcpu-16gb',
-'c-4', 'c2-4vcpu-8gb', 's-4vcpu-16gb-320gb-intel', 's-8vcpu-16gb', 'm3-2vcpu-16gb', 'c-4-intel',
-// ... Include all sizes here
+'s-4vcpu-8gb',
+'s-4vcpu-8gb-amd',
+'s-4vcpu-8gb-intel',
+'g-2vcpu-8gb',
+'s-4vcpu-8gb-240gb-intel',
+'gd-2vcpu-8gb',
+'g-2vcpu-8gb-intel',
+'gd-2vcpu-8gb-intel',
+'s-4vcpu-16gb-amd',
+'m-2vcpu-16gb',
+'c-4',
+'c2-4vcpu-8gb',
+'s-4vcpu-16gb-320gb-intel',
+'s-8vcpu-16gb',
+'m3-2vcpu-16gb',
+'c-4-intel',
+// ... Include all sizes here
];

const questionService = {
-type: 'input',
-name: 'serviceName',
-message: 'What is the service name?',
+type: 'input',
+name: 'serviceName',
+message: 'What is the service name?',
};

const questionRegion = {
-type: 'list',
-name: 'region',
-message: 'Select the region:',
-choices: regions,
+type: 'list',
+name: 'region',
+message: 'Select the region:',
+choices: regions,
};

const questionSize = {
-type: 'list',
-name: 'size',
-message: 'Select the size:',
-choices: sizes,
+type: 'list',
+name: 'size',
+message: 'Select the size:',
+choices: sizes,
};

-inquirer.prompt([questionService, questionRegion, questionSize])
-.then(answers => {
-const { serviceName, region, size } = answers;
-const outputTemplate = `
+inquirer
+.prompt([questionService, questionRegion, questionSize])
+.then((answers) => {
+const { serviceName, region, size } = answers;
+const outputTemplate = `
services:
${serviceName}:
region: '${region}'
size: '${size}'
`;
-const outputPath = path.resolve(__dirname, 'config.user.yaml');
-fs.writeFileSync(outputPath, outputTemplate);
-console.log(`file generated at: ${path.resolve(outputPath)}`);
-});
+const outputPath = path.resolve(__dirname, 'config.user.yaml');
+fs.writeFileSync(outputPath, outputTemplate);
+console.log(`file generated at: ${path.resolve(outputPath)}`);
+});
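The script above prompts for a service name, region, and size, then writes config.user.yaml next to itself. As a cross-check, a minimal sketch of reading that file back with the same `yaml` package the Pulumi code already uses; it assumes the generator lives in deploy/src/ (where the Pulumi entry point looks for config.user.yaml), and the printed sample values are hypothetical:

    import fs from 'node:fs';
    import YAML from 'yaml';

    // Parse the generated config and pull out the per-service fields that the
    // Pulumi entry point validates before creating droplets.
    const doc = YAML.parse(fs.readFileSync('deploy/src/config.user.yaml', 'utf8'));
    for (const serviceName in doc.services) {
      const { region, size } = doc.services[serviceName];
      console.log(serviceName, region, size); // e.g. "opi nyc3 s-8vcpu-16gb"
    }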
@@ -1,4 +1,11 @@
-import * as digitalocean from '@pulumi/digitalocean'
+import { createHash } from 'node:crypto';
+import fs from 'node:fs';
+import { local, remote } from '@pulumi/command';
+import type { types } from '@pulumi/command';
+import * as digitalocean from '@pulumi/digitalocean';
+import * as pulumi from '@pulumi/pulumi';
+import * as time from '@pulumiverse/time';
+import YAML from 'yaml';
import {
generateDirectoryHash,
getScript,
@@ -6,28 +13,22 @@ import {
root$,
transformFile,
unroot,
-} from './utils'
-import * as pulumi from '@pulumi/pulumi'
-import fs from 'fs'
-import { local, remote, types } from '@pulumi/command'
-import { createHash } from 'crypto'
-import * as time from '@pulumiverse/time'
-import YAML from 'yaml'
+} from './utils';

-import { getPrivateKey, sshKey } from './keys'
+import { getPrivateKey, sshKey } from './keys';

export function provisionInstance(params: {
-name: string
-connection: types.input.remote.ConnectionArgs
+name: string;
+connection: types.input.remote.ConnectionArgs;
}) {
-const { connection, name } = params
+const { connection, name } = params;

const setupCommands = execScriptsOnRemote(name, connection, [
root('deploy/src/provision/configure-apt-mock.sh'),
root('deploy/src/provision/configure-apt.sh'),
root('deploy/src/provision/setup.sh'),
root('deploy/src/provision/pull.sh'),
-])
+]);

const reboot = new remote.Command(
`${name}:reboot`,
@@ -37,7 +38,7 @@ export function provisionInstance(params: {
environment: { DEBIAN_FRONTEND: 'noninteractive' },
},
{ dependsOn: setupCommands },
-)
+);

const wait = new time.Sleep(
`${name}:wait60Seconds`,
@@ -45,7 +46,7 @@
{
dependsOn: [reboot],
},
-)
+);

return execScriptOnRemote(
name,
@@ -54,7 +55,7 @@
{
commandOpts: { dependsOn: [wait] },
},
-)
+);
}

export function execScriptsOnRemote(
@@ -62,23 +63,23 @@ export function execScriptsOnRemote(
connection: types.input.remote.ConnectionArgs,
locations: string[],
) {
-let command: remote.Command | null = null
-const commands: remote.Command[] = []
+let command: remote.Command | null = null;
+const commands: remote.Command[] = [];
for (const loc of locations) {
if (command == null) {
-command = execScriptOnRemote(name, connection, loc)
+command = execScriptOnRemote(name, connection, loc);
} else {
command = execScriptOnRemote(name, connection, loc, {
commandOpts: {
dependsOn: [command],
},
-})
+});
}

-commands.push(command)
+commands.push(command);
}

-return commands
+return commands;
}

export function execScriptOnRemote(
@@ -86,15 +87,14 @@ export function execScriptOnRemote(
connection: types.input.remote.ConnectionArgs,
loc: string,
options: {
-cwd?: pulumi.Output<string>
-commandOpts?: pulumi.CustomResourceOptions
+cwd?: pulumi.Output<string>;
+commandOpts?: pulumi.CustomResourceOptions;
} = {},
) {
-// cwd is the CWD
-const createContent = fs.readFileSync(loc, 'utf-8')
+const createContent = fs.readFileSync(loc, 'utf-8');
const createContentHash = createHash('md5')
.update(createContent)
-.digest('hex')
+.digest('hex');

if (options.cwd) {
return new remote.Command(
@@ -110,31 +110,31 @@ export function execScriptOnRemote(
customTimeouts: { create: '240m' },
...options.commandOpts,
},
-)
-} else {
-return new remote.Command(
-`${name}:run:remote: ${unroot(loc)}`,
-{
-connection,
-create: createContent,
-triggers: [createContentHash, loc],
-},
-{
-customTimeouts: { create: '240m' },
-...options.commandOpts,
-},
-)
+);
}

+return new remote.Command(
+`${name}:run:remote: ${unroot(loc)}`,
+{
+connection,
+create: createContent,
+triggers: [createContentHash, loc],
+},
+{
+customTimeouts: { create: '240m' },
+...options.commandOpts,
+},
+);
}

-const image = 'ubuntu-22-04-x64'
+const image = 'ubuntu-22-04-x64';

export function create(params: { name: string; region: string; size: string }) {
-const { region, size, name } = params
+const { region, size, name } = params;
const snapshotId = (() => {
-const id = process.env['OPI_VOLUME_SNAPSHOT_ID']
-return id?.length == 0 ? undefined : id
-})()
+const id = process.env.OPI_VOLUME_SNAPSHOT_ID;
+return id?.length === 0 ? undefined : id;
+})();

// create instance

@@ -143,47 +143,47 @@ export function create(params: { name: string; region: string; size: string }) {
region,
size,
sshKeys: [sshKey.id],
-})
-const privateKey = getPrivateKey()
+});
+const privateKey = getPrivateKey();

const connection: types.input.remote.ConnectionArgs = {
host: droplet.ipv4Address,
user: 'root',
privateKey,
dialErrorLimit: 50,
-}
+};

-const provision = provisionInstance({ name, connection })
+const provision = provisionInstance({ name, connection });

const copyConfigDir = (loc: string, remotePath: pulumi.Output<string>) => {
if (!fs.existsSync(loc)) {
-throw new Error(`not found: ${loc}`)
+throw new Error(`not found: ${loc}`);
}
-const hash = generateDirectoryHash(loc).slice(0, 5)
+const hash = generateDirectoryHash(loc).slice(0, 5);
return new local.Command(`${name}:copyFiles ${unroot(loc)}`, {
-create: pulumi.interpolate`rsync -avP -e "ssh -i ${process.env['PRIVATE_KEY_PATH']}" ${loc} ${connection.user}@${droplet.ipv4Address}:${remotePath}`,
+create: pulumi.interpolate`rsync -avP -e "ssh -i ${process.env.PRIVATE_KEY_PATH}" ${loc} ${connection.user}@${droplet.ipv4Address}:${remotePath}`,
triggers: [hash, loc, remotePath],
-})
-}
+});
+};

const volume = new digitalocean.Volume(
`${name}volume`,
{
region,
-size: parseInt(process.env['OPI_VOLUME_SIZE'] ?? '1000', 10),
+size: Number.parseInt(process.env.OPI_VOLUME_SIZE ?? '1000', 10),
initialFilesystemType: 'ext4',
snapshotId,
},
{ dependsOn: [provision, droplet] },
-)
+);
// mount disk
const volumeAttachment = new digitalocean.VolumeAttachment(
`${name}-volume-attachment`,
{
-dropletId: droplet.id.apply((id) => parseInt(id, 10)),
+dropletId: droplet.id.apply((id) => Number.parseInt(id, 10)),
volumeId: volume.id,
},
-)
+);

const volumePathPrint = new remote.Command(
`${name}-read-volume-path`,
@@ -195,7 +195,7 @@ export function create(params: { name: string; region: string; size: string }) {
dependsOn: [droplet, volumeAttachment, volume],
customTimeouts: { create: '5m' },
},
-)
+);

// cp restore files
const cpRestoreDockerCompose = volumePathPrint.stdout.apply((volumeName) => {
@@ -204,41 +204,36 @@ export function create(params: { name: string; region: string; size: string }) {
'./src/docker-composes/restore.docker-compose.yaml',
[
['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
-['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
-// DB_USER
-['${DB_USER}', process.env['DB_USER']!],
-// DB_PASSWORD
-['${DB_PASSWD}', process.env['DB_PASSWD']!],
-// DB_DATABASE
-['${DB_DATABASE}', process.env['DB_DATABASE']!],
-// WORKSPACE_ROOT
+['${OPI_IMAGE}', process.env.OPI_IMAGE!],
+['${DB_USER}', process.env.DB_USER!],
+['${DB_PASSWD}', process.env.DB_PASSWD!],
+['${DB_DATABASE}', process.env.DB_DATABASE!],
['${WORKSPACE_ROOT}', volumeName],
-// ORD_DATADIR
['${ORD_DATADIR}', `${volumeName}/ord_data`],
],
-)
+);

-const file = fs.readFileSync(localPath, 'utf-8')
-const hash = createHash('md5').update(file).digest('hex').slice(0, 5)
-const remotePath = `${volumeName}/restore.docker-compose.yaml`
+const file = fs.readFileSync(localPath, 'utf-8');
+const hash = createHash('md5').update(file).digest('hex').slice(0, 5);
+const remotePath = `${volumeName}/restore.docker-compose.yaml`;

return new remote.CopyFile(`${name}:restore`, {
connection,
localPath,
remotePath,
triggers: [hash, localPath],
-})
-})
+});
+});

const cpConfig = copyConfigDir(
root('configs'),
pulumi.interpolate`${volumePathPrint.stdout}`,
-)
+);

// create swap space
execScriptOnRemote(name, connection, root('deploy/src/scripts/mkswap.sh'), {
commandOpts: { dependsOn: [provision] },
-})
+});

// restore pg database and ord_data
const restore = execScriptOnRemote(
@@ -251,7 +246,7 @@ export function create(params: { name: string; region: string; size: string }) {
dependsOn: [cpConfig, cpRestoreDockerCompose],
},
},
-)
+);

// cp service docker-compose file
/**
@@ -266,30 +261,21 @@ export function create(params: { name: string; region: string; size: string }) {
[
['${OPI_PG_DATA_PATH}', `${volumeName}/pg_data`],
['${OPI_BITCOIND_PATH}', `${volumeName}/bitcoind_data`],
-['${OPI_IMAGE}', process.env['OPI_IMAGE']!],
-['${BITCOIND_IMAGE}', process.env['BITCOIND_IMAGE']!],
-// DB_USER
-['${DB_USER}', process.env['DB_USER']!],
-// DB_PASSWORD
-['${DB_PASSWD}', process.env['DB_PASSWD']!],
-// DB_DATABASE
-['${DB_DATABASE}', process.env['DB_DATABASE']!],
-// WORKSPACE_ROOT
+['${OPI_IMAGE}', process.env.OPI_IMAGE!],
+['${BITCOIND_IMAGE}', process.env.BITCOIND_IMAGE!],
+['${DB_USER}', process.env.DB_USER!],
+['${DB_PASSWD}', process.env.DB_PASSWD!],
+['${DB_DATABASE}', process.env.DB_DATABASE!],
['${WORKSPACE_ROOT}', volumeName],
-// BITCOIN_RPC_USER
-['${BITCOIN_RPC_USER}', process.env['BITCOIN_RPC_USER']!],
-// BITCOIN_RPC_PASSWD
-['${BITCOIN_RPC_PASSWD}', process.env['BITCOIN_RPC_PASSWD']!],
-// BITCOIN_RPC_POR
-['${BITCOIN_RPC_PORT}', process.env['BITCOIN_RPC_PORT']!],
-// ORD_DATADIR
+['${BITCOIN_RPC_USER}', process.env.BITCOIN_RPC_USER!],
+['${BITCOIN_RPC_PASSWD}', process.env.BITCOIN_RPC_PASSWD!],
+['${BITCOIN_RPC_PORT}', process.env.BITCOIN_RPC_PORT!],
['${ORD_DATADIR}', `${volumeName}/ord_data`],
-// BITCOIN_CHAIN_FOLDER
['${BITCOIN_CHAIN_FOLDER}', `${volumeName}/bitcoind_data/datadir`],
],
-)
-const file = fs.readFileSync(localPath, 'utf-8')
-const hash = createHash('md5').update(file).digest('hex').slice(0, 5)
+);
+const file = fs.readFileSync(localPath, 'utf-8');
+const hash = createHash('md5').update(file).digest('hex').slice(0, 5);

const cpDockerCompose = new remote.CopyFile(
`${name}:cp:opi.docker-compose. -> ${volumeName}`,
@@ -300,7 +286,7 @@ export function create(params: { name: string; region: string; size: string }) {
triggers: [hash, localPath],
},
{ dependsOn: [restore] },
-)
+);

// start opi
new remote.Command(
@@ -311,67 +297,69 @@
triggers: [hash],
},
{ dependsOn: [cpDockerCompose] },
-)
-})
+);
+});

-exports[`ip_${name}`] = droplet.ipv4Address
-exports[`name_${name}`] = droplet.name
-exports[`volume_id_${name}`] = volume.id
-exports[`volume_attachment_id_${name}`] = volumeAttachment.id
-exports[`volume_path_${name}`] = volumePathPrint.stdout
+exports[`ip_${name}`] = droplet.ipv4Address;
+exports[`name_${name}`] = droplet.name;
+exports[`volume_id_${name}`] = volume.id;
+exports[`volume_attachment_id_${name}`] = volumeAttachment.id;
+exports[`volume_path_${name}`] = volumePathPrint.stdout;

-return { droplet, volume, name }
+return { droplet, volume, name };
}

-type Instance = {
-name: string
-region: string
-size: string
+interface Instance {
+name: string;
+region: string;
+size: string;
}

-function validateInstance(instance: any): instance is Instance {
+function validateInstance(instance: unknown): instance is Instance {
return (
-typeof instance.name === 'string' &&
-typeof instance.region === 'string' &&
-typeof instance.size === 'string'
-)
+typeof instance === 'object' &&
+instance !== null &&
+'name' in instance &&
+'region' in instance &&
+'size' in instance &&
+typeof (instance as Instance).name === 'string' &&
+typeof (instance as Instance).region === 'string' &&
+typeof (instance as Instance).size === 'string'
+);
}

function readYamlAndCreateInstance() {
// read yaml file

const file = (() => {
if (fs.existsSync(root$('deploy/src/config.user.yaml'))) {
-return fs.readFileSync(root('deploy/src/config.user.yaml'), 'utf8')
+return fs.readFileSync(root('deploy/src/config.user.yaml'), 'utf8');
}

-return fs.readFileSync(root('deploy/src/config.yaml'), 'utf8')
-})()
+return fs.readFileSync(root('deploy/src/config.yaml'), 'utf8');
+})();

// parse yaml file
-const data = YAML.parse(file)
+const data = YAML.parse(file);
+const instances = [];

-let instances = []

-for (let serviceName in data.services) {
+for (const serviceName in data.services) {
// validate required fields
const instance = {
name: serviceName,
region: data.services[serviceName].region,
size: data.services[serviceName].size,
-}
+};

if (validateInstance(instance)) {
// create instance and push to instances array
-instances.push(create(instance))
+instances.push(create(instance));
} else {
-throw new Error(`Invalid instance data '${JSON.stringify(instance)}'`)
+throw new Error(`Invalid instance data '${JSON.stringify(instance)}'`);
}
}

-return instances
+return instances;
}

-const instances = readYamlAndCreateInstance()
+const instances = readYamlAndCreateInstance();

-console.log(`created: ${instances.length} instances`)
+console.log(`created: ${instances.length} instances`);
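Beyond formatting, one substantive change above is validateInstance taking `unknown` instead of `any`, which forces explicit narrowing before any property access. A standalone sketch of the same type-guard pattern, using a hypothetical `Config` shape rather than this repo's `Instance`:

    interface Config {
      name: string;
    }

    // User-defined type guard: on the true branch TypeScript narrows `value` to Config.
    function isConfig(value: unknown): value is Config {
      return (
        typeof value === 'object' &&
        value !== null &&
        'name' in value &&
        typeof (value as Config).name === 'string'
      );
    }

    const raw: unknown = JSON.parse('{"name":"opi"}');
    if (isConfig(raw)) {
      console.log(raw.name); // safely typed as string here
    }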
@@ -1,27 +1,27 @@
-import * as digitalocean from '@pulumi/digitalocean'
-import path from 'path'
-import os from 'os'
-import fs from 'fs'
+import fs from 'node:fs';
+import os from 'node:os';
+import path from 'node:path';
+import * as digitalocean from '@pulumi/digitalocean';

-export const id = process.env['DIGITAL_OCEAN_SSH_KEY_ID']!
-export const name = process.env['DIGITAL_OCEAN_SSH_KEY_NAME']!
+export const id = process.env.DIGITAL_OCEAN_SSH_KEY_ID!;
+export const name = process.env.DIGITAL_OCEAN_SSH_KEY_NAME!;

-export const sshKey = digitalocean.SshKey.get(name, id)
+export const sshKey = digitalocean.SshKey.get(name, id);
export const getPrivateKey = () => {
// Assuming your environment variable is named 'PRIVATE_KEY_PATH'
-const privateKeyPath = process.env['PRIVATE_KEY_PATH']
+const privateKeyPath = process.env.PRIVATE_KEY_PATH;

if (!privateKeyPath) {
-console.error('The environment variable PRIVATE_KEY_PATH is not set.')
-process.exit(1) // Exit with an error code
+console.error('The environment variable PRIVATE_KEY_PATH is not set.');
+process.exit(1); // Exit with an error code
}

// Handles the tilde by replacing it with the user's home directory
const resolvedPrivateKeyPath = privateKeyPath.startsWith('~')
? path.join(os.homedir(), privateKeyPath.slice(1))
-: path.resolve(privateKeyPath)
+: path.resolve(privateKeyPath);

-const key = fs.readFileSync(resolvedPrivateKeyPath, 'utf-8')
+const key = fs.readFileSync(resolvedPrivateKeyPath, 'utf-8');

-return key
-}
+return key;
+};
@@ -1,45 +1,50 @@
-import assert from 'assert'
-import path, { join } from 'path'
-import fs, { readdirSync, readFileSync, statSync } from 'fs'
-import { id, name } from './keys'
-import { createHash } from 'crypto'
-import * as digitalocean from '@pulumi/digitalocean'
+import assert from 'node:assert';
+import { createHash } from 'node:crypto';
+import fs, { readdirSync, readFileSync, statSync } from 'node:fs';
+import path, { join } from 'node:path';
+import * as digitalocean from '@pulumi/digitalocean';
+import { id, name } from './keys';

export function root(filePath: string) {
-const p = path.resolve(__dirname, `../../${filePath}`)
+const p = path.resolve(__dirname, `../../${filePath}`);
if (fs.existsSync(p)) {
-return p
+return p;
}
-throw new Error(`File not found: ${p}`)
+throw new Error(`File not found: ${p}`);
}

export function root$(filePath: string) {
-return path.resolve(__dirname, `../../${filePath}`)
+return path.resolve(__dirname, `../../${filePath}`);
}

// convert the absolute path from root(filePath: string) to relative path
// example: unroot(root(a)) === a
export function unroot(filePath: string) {
-return filePath.replace(root('') + '/', '')
+return filePath.replace(root('') + '/', '');
}

-assert(id, 'DIGITAL_OCEAN_SSH_KEY_ID is required')
-assert(name, 'DIGITAL_OCEAN_SSH_KEY_NAME is required')
+assert(id, 'DIGITAL_OCEAN_SSH_KEY_ID is required');
+assert(name, 'DIGITAL_OCEAN_SSH_KEY_NAME is required');

export function getScript(scriptName: string) {
-return fs.readFileSync(`./src/scripts/${scriptName}`, 'utf-8')
-} // write takeSnapshot function which input is the output of function create.
-function takeSnapshot(params: { name: string; volume: digitalocean.Volume }) {
-const { name, volume } = params
+return fs.readFileSync(`./src/scripts/${scriptName}`, 'utf-8');
+}
+
+// write takeSnapshot function which input is the output of function create.
+export function takeSnapshot(params: {
+name: string;
+volume: digitalocean.Volume;
+}) {
+const { name, volume } = params;

const createSnapshot = new digitalocean.VolumeSnapshot(`${name}-snapshot`, {
volumeId: volume.id,
name: `${name}-snapshot`,
-})
+});

-exports[`volume_snapshot_${name}`] = createSnapshot.id
+exports[`volume_snapshot_${name}`] = createSnapshot.id;

-return { createSnapshot }
+return { createSnapshot };
}

export function transformFile(
@@ -48,62 +53,62 @@ export function transformFile(
transforms: string[][],
): string {
// Read the content of the source file
-const content = fs.readFileSync(filePath, 'utf8')
+const content = fs.readFileSync(filePath, 'utf8');

// Apply all transformations
-let transformedContent = content
+let transformedContent = content;
for (const transform of transforms) {
-const [original, replacement] = transform
-transformedContent = transformedContent.split(original).join(replacement)
+const [original, replacement] = transform;
+transformedContent = transformedContent.split(original).join(replacement);
}

// Create a temp file in a random location
-const tempDir = createTmpDirFromSeed(filePath + seed)
-const tempFilePath = path.join(tempDir, path.basename(filePath))
+const tempDir = createTmpDirFromSeed(filePath + seed);
+const tempFilePath = path.join(tempDir, path.basename(filePath));

// Write the transformed content to the temp file
-fs.writeFileSync(tempFilePath, transformedContent)
+fs.writeFileSync(tempFilePath, transformedContent);

// Return the path of the temp file
-return tempFilePath
+return tempFilePath;
}

const createTmpDirFromSeed = (seed: string): string => {
-const hash = createHash('sha256').update(seed).digest('hex')
-const tmpBaseDir = '/tmp'
-const dirPath = join(tmpBaseDir, hash)
+const hash = createHash('sha256').update(seed).digest('hex');
+const tmpBaseDir = '/tmp';
+const dirPath = join(tmpBaseDir, hash);

try {
-fs.mkdirSync(dirPath, { recursive: true })
-return dirPath
+fs.mkdirSync(dirPath, { recursive: true });
+return dirPath;
} catch (error) {
-throw new Error(`Failed to create temp directory: ${error}`)
+throw new Error(`Failed to create temp directory: ${error}`);
}
-}
+};

export function hashFile(filePath: string): string {
-const fileBuffer = readFileSync(filePath)
-const hashSum = createHash('sha256')
-hashSum.update(fileBuffer)
-return hashSum.digest('hex')
+const fileBuffer = readFileSync(filePath);
+const hashSum = createHash('sha256');
+hashSum.update(fileBuffer);
+return hashSum.digest('hex');
}

export function generateDirectoryHash(dirPath: string): string {
-let hashString = ''
+let hashString = '';

-const files = readdirSync(dirPath)
+const files = readdirSync(dirPath);
for (const file of files) {
-const filePath = join(dirPath, file)
-const fileStat = statSync(filePath)
+const filePath = join(dirPath, file);
+const fileStat = statSync(filePath);

if (fileStat.isDirectory()) {
-hashString += `${file}:${generateDirectoryHash(filePath)}`
+hashString += `${file}:${generateDirectoryHash(filePath)}`;
} else {
-hashString += `${file}:${hashFile(filePath)}`
+hashString += `${file}:${hashFile(filePath)}`;
}
}

-const hashSum = createHash('sha256')
-hashSum.update(hashString)
-return hashSum.digest('hex')
+const hashSum = createHash('sha256');
+hashSum.update(hashString);
+return hashSum.digest('hex');
}
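generateDirectoryHash above folds per-file SHA-256 digests (recursing into subdirectories) into a single directory-level digest, which the droplet code slices to five characters and lists in a resource's triggers so that any config change re-runs the rsync copy. A minimal usage sketch; the directory path is hypothetical:

    import { generateDirectoryHash } from './utils';

    // Any change to a file under the directory changes this digest, which in turn
    // invalidates whatever Pulumi resource lists it in `triggers`.
    const digest = generateDirectoryHash('/path/to/configs'); // hypothetical path
    console.log(digest.slice(0, 5));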
@@ -1,17 +1,17 @@
{
-"compilerOptions": {
-"strict": true,
-"outDir": "bin",
-"target": "ES2020",
-"module": "commonjs",
-"moduleResolution": "node",
-"sourceMap": true,
-"experimentalDecorators": true,
-"esModuleInterop": true,
-"pretty": true,
-"noFallthroughCasesInSwitch": true,
-"noImplicitReturns": true,
-"forceConsistentCasingInFileNames": true
-},
-"include": ["src"]
+"compilerOptions": {
+"strict": true,
+"outDir": "bin",
+"target": "ES2020",
+"module": "commonjs",
+"moduleResolution": "node",
+"sourceMap": true,
+"experimentalDecorators": true,
+"esModuleInterop": true,
+"pretty": true,
+"noFallthroughCasesInSwitch": true,
+"noImplicitReturns": true,
+"forceConsistentCasingInFileNames": true
+},
+"include": ["src"]
}