Merge pull request #1 from t4t5/new-jubilee-height

New jubilee height
Tristan authored on 2024-01-15 20:52:10 +00:00; committed by GitHub.
20 changed files with 1184 additions and 86 deletions

View File

@@ -108,6 +108,14 @@ To update cargo & rust:
rustup update stable
```
## Cloning the repository
```bash
git clone https://github.com/bestinslot-xyz/OPI.git
```
All of the following shell commands assume that you are in the OPI folder cloned by the above command.
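For example, right after cloning:
```bash
cd OPI
```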
## Installing node modules
```bash
cd modules/main_index; npm install;
@@ -126,7 +134,7 @@ Otherwise, it cannot decode some addresses such as `512057cd4cfa03f27f7b18c2fe45
## Installing python libraries
If you don't have pip installed, start by installing pip. [guide](https://pip.pypa.io/en/stable/installation/).
1) If you don't have pip installed, start by installing pip. [guide](https://pip.pypa.io/en/stable/installation/).
```bash
wget https://bootstrap.pypa.io/get-pip.py
@@ -134,6 +142,14 @@ python3 get-pip.py
rm get-pip.py
```
or
```sh
sudo apt install python3-pip
```
2) Install dependencies
```bash
python3 -m pip install python-dotenv;
python3 -m pip install psycopg2-binary;
@@ -148,18 +164,9 @@ cd ord; cargo build --release;
**Do not run the ord binary directly. The main indexer will run ord periodically.**
## Setup .env files
## Initialise .env configuration and databases
Copy `.env_sample` in main_index, brc20_index, brc20_api, bitmap_index and bitmap_api as `.env` and fill in the necessary information.
- Do not change `FIRST_INSCRIPTION_HEIGHT` if you want to report hashes; cumulative hash calculation starts from this height and will be faulty if you change this variable.
- All scripts can use the same database. In the sample env files we used different `DB_DATABASE` values, but using postgres for all of them also works correctly.
- `BITCOIN_CHAIN_FOLDER` is the datadir folder that is set when starting bitcoind.
- `ORD_BINARY`, `ORD_FOLDER` and `ORD_DATADIR` can stay the same if you do not change the folder structure after `git clone`.
## Initialise databases
After setting the .env files, you can run `reset_init.py` in each indexer folder to initialise the databases and set up other necessary files.
Run `reset_init.py` in each module folder (preferably starting from main_index) to initialise the .env file and databases and to set up other necessary files.
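A possible run order is sketched below, starting from main_index as suggested; the paths assume the other modules sit under `modules/` like main_index does (adjust if your layout differs):
```bash
cd modules/main_index   && python3 reset_init.py && cd ../..
cd modules/brc20_index  && python3 reset_init.py && cd ../..
cd modules/brc20_api    && python3 reset_init.py && cd ../..
cd modules/bitmap_index && python3 reset_init.py && cd ../..
cd modules/bitmap_api   && python3 reset_init.py && cd ../..
```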
# Run

View File

@@ -69,7 +69,7 @@ cumulative_hash = sha256_hex(last_cumulative_hash + block_hash)
For detailed installation guides:
- Ubuntu: [installation guide](INSTALL.ubuntu.md)
OPI uses PostgreSQL as its DB. Before running the indexer, set up a PostgreSQL DB (all modules can write into different databases or share a single database). Run reset_init.py in each indexer module after setting the .env files; it will initialise the DBs.
OPI uses PostgreSQL as its DB. Before running the indexer, set up a PostgreSQL DB (all modules can write into different databases or share a single database).
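A minimal sketch of such a setup on an Ubuntu/Debian-style machine, assuming a local PostgreSQL install and the default `postgres` superuser (the database name `opi` below is only an example; the sample .env files default to the `postgres` database, so a single shared DB also works):
```bash
sudo apt install postgresql           # install the server if it is not already present
sudo -u postgres createdb opi         # optional: a dedicated database instead of the default "postgres"
sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'change-me';"   # set the password you will put into .env
```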
**Build ord:**
```bash
@@ -98,9 +98,9 @@ pip3 install python-dotenv;
pip3 install psycopg2-binary;
```
**Setup .env files**
**Setup .env files and DBs**
Copy `.env_sample` in main_index, brc20_index, brc20_api, bitmap_index and bitmap_api as `.env` and fill in the necessary information.
Run `reset_init.py` in each module folder (preferably starting from main_index) to initialise the .env file and databases and to set up other necessary files.
# Run

View File

@@ -2,7 +2,7 @@
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres_bitmap"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10

View File

@@ -0,0 +1,81 @@
# pip install python-dotenv
import os
from dotenv import load_dotenv
init_env = True
# does .env file exist?
if os.path.isfile('.env'):
res = input("Do you want to re-initialise .env file? (y/n) ")
if res != 'y':
init_env = False
if init_env:
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
API_HOST="127.0.0.1"
API_PORT="8001"
API_TRUSTED_PROXY_CNT="0"
print("Initialising .env file")
print("leave blank to use default values")
use_other_env = False
other_env_exists = os.path.isfile('../bitmap_index/.env')
if other_env_exists:
res = input(".env on bitmap_index already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_other_env = True
if use_other_env:
load_dotenv(dotenv_path='../bitmap_index/.env')
DB_USER = os.getenv("DB_USER") or "postgres"
DB_HOST = os.getenv("DB_HOST") or "localhost"
DB_PORT = os.getenv("DB_PORT") or "5432"
DB_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_PASSWD = os.getenv("DB_PASSWD")
else:
res = input("Bitmap Postgres DB username (Default: postgres): ")
if res != '':
DB_USER = res
res = input("Bitmap Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_HOST = res
res = input("Bitmap Postgres DB port (Default: 5432): ")
if res != '':
DB_PORT = res
res = input("Bitmap Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_DATABASE = res
res = input("Bitmap Postgres DB password: ")
DB_PASSWD = res
res = input("Bitmap Postgres DB use SSL (Default: true) may need to be set to false on Windows machines: ")
if res != '':
DB_SSL = res
res = input("Bitmap Postgres DB max connections (Default: 10): ")
if res != '':
DB_MAX_CONNECTIONS = res
res = input("API host (Default: 127.0.0.1): ")
if res != '':
API_HOST = res
res = input("API port (Default: 8001): ")
if res != '':
API_PORT = res
res = input("API trusted proxy count (Default: 0) if there are known proxies such as nginx in front of API, set this to the number of proxies: ")
if res != '':
API_TRUSTED_PROXY_CNT = res
f = open('.env', 'w')
f.write("DB_USER=\"" + DB_USER + "\"\n")
f.write("DB_HOST=\"" + DB_HOST + "\"\n")
f.write("DB_PORT=\"" + str(DB_PORT) + "\"\n")
f.write("DB_DATABASE=\"" + DB_DATABASE + "\"\n")
f.write("DB_PASSWD=\"" + DB_PASSWD + "\"\n")
f.write("DB_SSL=\"" + DB_SSL + "\"\n")
f.write("DB_MAX_CONNECTIONS=" + str(DB_MAX_CONNECTIONS) + "\n")
f.write("API_HOST=\"" + API_HOST + "\"\n")
f.write("API_PORT=\"" + API_PORT + "\"\n")
f.write("API_TRUSTED_PROXY_CNT=\"" + API_TRUSTED_PROXY_CNT + "\"\n")
f.close()

View File

@@ -2,13 +2,13 @@
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres_bitmap"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres_metaprotocol"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
FIRST_INSCRIPTION_HEIGHT="767430"

View File

@@ -4,7 +4,103 @@
import os, psycopg2
from dotenv import load_dotenv
res = input("Are you sure you want to reset the bitmaps database? (y/n) ")
init_env = True
# does .env file exist?
if os.path.isfile('.env'):
res = input("Do you want to re-initialise .env file? (y/n) ")
if res != 'y':
init_env = False
if init_env:
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
FIRST_INSCRIPTION_HEIGHT="767430"
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
print("Initialising .env file")
print("leave blank to use default values")
use_other_env = False
other_env_exists = os.path.isfile('../bitmap_api/.env')
if other_env_exists:
res = input(".env on bitmap_api already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_other_env = True
if use_other_env:
load_dotenv(dotenv_path='../bitmap_api/.env')
DB_USER = os.getenv("DB_USER") or "postgres"
DB_HOST = os.getenv("DB_HOST") or "localhost"
DB_PORT = os.getenv("DB_PORT") or "5432"
DB_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_PASSWD = os.getenv("DB_PASSWD")
else:
res = input("Bitmap Postgres DB username (Default: postgres): ")
if res != '':
DB_USER = res
res = input("Bitmap Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_HOST = res
res = input("Bitmap Postgres DB port (Default: 5432): ")
if res != '':
DB_PORT = res
res = input("Bitmap Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_DATABASE = res
res = input("Bitmap Postgres DB password: ")
DB_PASSWD = res
use_main_env = False
main_env_exists = os.path.isfile('../main_index/.env')
if main_env_exists:
res = input(".env on main_index already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_main_env = True
if use_main_env:
load_dotenv(dotenv_path='../main_index/.env')
DB_METAPROTOCOL_USER = os.getenv("DB_USER") or "postgres"
DB_METAPROTOCOL_HOST = os.getenv("DB_HOST") or "localhost"
DB_METAPROTOCOL_PORT = os.getenv("DB_PORT") or "5432"
DB_METAPROTOCOL_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_METAPROTOCOL_PASSWD = os.getenv("DB_PASSWD")
FIRST_INSCRIPTION_HEIGHT = os.getenv("FIRST_INSCRIPTION_HEIGHT") or "767430"
else:
res = input("Main Postgres DB username (Default: postgres): ")
if res != '':
DB_METAPROTOCOL_USER = res
res = input("Main Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_METAPROTOCOL_HOST = res
res = input("Main Postgres DB port (Default: 5432): ")
if res != '':
DB_METAPROTOCOL_PORT = res
res = input("Main Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_METAPROTOCOL_DATABASE = res
res = input("Main Postgres DB password: ")
DB_METAPROTOCOL_PASSWD = res
res = input("First inscription height (Default: 767430) leave default for correct hash reporting: ")
if res != '':
FIRST_INSCRIPTION_HEIGHT = res
f = open('.env', 'w')
f.write('DB_USER="' + DB_USER + '"\n')
f.write('DB_HOST="' + DB_HOST + '"\n')
f.write('DB_PORT="' + DB_PORT + '"\n')
f.write('DB_DATABASE="' + DB_DATABASE + '"\n')
f.write('DB_PASSWD="' + DB_PASSWD + '"\n')
f.write('FIRST_INSCRIPTION_HEIGHT="' + FIRST_INSCRIPTION_HEIGHT + '"\n')
f.write('DB_METAPROTOCOL_USER="' + DB_METAPROTOCOL_USER + '"\n')
f.write('DB_METAPROTOCOL_HOST="' + DB_METAPROTOCOL_HOST + '"\n')
f.write('DB_METAPROTOCOL_PORT="' + str(DB_METAPROTOCOL_PORT) + '"\n')
f.write('DB_METAPROTOCOL_DATABASE="' + DB_METAPROTOCOL_DATABASE + '"\n')
f.write('DB_METAPROTOCOL_PASSWD="' + DB_METAPROTOCOL_PASSWD + '"\n')
f.close()
res = input("Are you sure you want to initialise/reset the bitmaps database? (y/n) ")
if res != 'y':
print('aborting')
exit(1)
@@ -26,6 +122,21 @@ conn = psycopg2.connect(
conn.autocommit = True
cur = conn.cursor()
db_exists = False
try:
cur.execute('select count(*) from bitmap_block_hashes;')
hash_cnt = cur.fetchone()[0]
if hash_cnt > 0:
db_exists = True
except:
pass
if db_exists:
res = input("It seems like you have entries on DB, are you sure to reset databases? This WILL RESET indexing progress. (y/n) ")
if res != 'y':
print('aborting')
exit(1)
## reset db
sqls = open('db_reset.sql', 'r').read().split(';')
for sql in sqls:

View File

@@ -2,7 +2,7 @@
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres_brc20"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
@@ -10,3 +10,5 @@ DB_MAX_CONNECTIONS=10
API_HOST="127.0.0.1"
API_PORT="8000"
API_TRUSTED_PROXY_CNT="0"
USE_EXTRA_TABLES="true"

View File

@@ -18,6 +18,9 @@ var db_pool = new Pool({
max: process.env.DB_MAX_CONNECTIONS || 10, // maximum number of clients!!
ssl: process.env.DB_SSL == 'true' ? true : false
})
var use_extra_tables = process.env.USE_EXTRA_TABLES == 'true' ? true : false
const api_port = parseInt(process.env.API_PORT || "8000")
const api_host = process.env.API_HOST || '127.0.0.1'
@@ -32,9 +35,13 @@ app.use([cors(corsOptions)])
app.get('/v1/brc20/ip', (request, response) => response.send(request.ip))
async function query_db(query, params = []) {
return await db_pool.query(query, params)
}
async function get_block_height_of_db() {
try {
let res = await db_pool.query('SELECT max(block_height) as max_block_height FROM brc20_block_hashes;')
let res = await query_db('SELECT max(block_height) as max_block_height FROM brc20_block_hashes;')
return res.rows[0].max_block_height
} catch (err) {
console.log(err)
@@ -74,7 +81,7 @@ app.get('/v1/brc20/balance_on_block', async (request, response) => {
and tick = $3
order by id desc
limit 1;`
let res = await db_pool.query(query, [block_height, pkscript, tick])
let res = await query_db(query, [block_height, pkscript, tick])
if (res.rows.length == 0) {
response.status(400).send({ error: 'no balance found', result: null })
return
@@ -98,7 +105,7 @@ app.get('/v1/brc20/activity_on_block', async (request, response) => {
return
}
let res1 = await db_pool.query('select event_type_name, event_type_id from brc20_event_types;')
let res1 = await query_db('select event_type_name, event_type_id from brc20_event_types;')
let event_type_id_to_name = {}
res1.rows.forEach((row) => {
event_type_id_to_name[row.event_type_id] = row.event_type_name
@@ -108,7 +115,7 @@ app.get('/v1/brc20/activity_on_block', async (request, response) => {
from brc20_events
where block_height = $1
order by id asc;`
let res = await db_pool.query(query, [block_height])
let res = await query_db(query, [block_height])
let result = []
for (const row of res.rows) {
let event = row.event
@@ -134,24 +141,46 @@ app.get('/v1/brc20/get_current_balance_of_wallet', async (request, response) =>
let tick = request.query.ticker.toLowerCase()
let current_block_height = await get_block_height_of_db()
let query = ` select overall_balance, available_balance
from brc20_historic_balances
where pkscript = $1
and tick = $2
order by id desc
limit 1;`
let params = [pkscript, tick]
if (address != '') {
query = query.replace('pkscript', 'wallet')
params = [address, tick]
let balance = null
if (!use_extra_tables) {
let query = ` select overall_balance, available_balance
from brc20_historic_balances
where pkscript = $1
and tick = $2
order by id desc
limit 1;`
let params = [pkscript, tick]
if (address != '') {
query = query.replace('pkscript', 'wallet')
params = [address, tick]
}
let res = await query_db(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no balance found', result: null })
return
}
balance = res.rows[0]
} else {
let query = ` select overall_balance, available_balance
from brc20_current_balances
where pkscript = $1
and tick = $2
limit 1;`
let params = [pkscript, tick]
if (address != '') {
query = query.replace('pkscript', 'wallet')
params = [address, tick]
}
let res = await query_db(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no balance found', result: null })
return
}
balance = res.rows[0]
}
let res = await db_pool.query(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no balance found', result: null })
return
}
let balance = res.rows[0]
balance.block_height = current_block_height
response.send({ error: null, result: balance })
} catch (err) {
@@ -160,6 +189,120 @@ app.get('/v1/brc20/get_current_balance_of_wallet', async (request, response) =>
}
});
app.get('/v1/brc20/get_valid_tx_notes_of_wallet', async (request, response) => {
try {
console.log(`${request.protocol}://${request.get('host')}${request.originalUrl}`)
if (!use_extra_tables) {
response.status(400).send({ error: 'not supported', result: null })
return
}
let address = request.query.address || ''
let pkscript = request.query.pkscript || ''
let current_block_height = await get_block_height_of_db()
let query = ` select tick, inscription_id, amount, block_height as genesis_height
from brc20_unused_tx_inscrs
where current_holder_pkscript = $1
order by tick asc;`
let params = [pkscript]
if (address != '') {
query = query.replace('pkscript', 'wallet')
params = [address]
}
let res = await query_db(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no unused tx found', result: null })
return
}
let result = {
unused_txes: res.rows,
block_height: current_block_height
}
response.send({ error: null, result: result })
} catch (err) {
console.log(err)
response.status(500).send({ error: 'internal error', result: null })
}
});
app.get('/v1/brc20/get_valid_tx_notes_of_ticker', async (request, response) => {
try {
console.log(`${request.protocol}://${request.get('host')}${request.originalUrl}`)
if (!use_extra_tables) {
response.status(400).send({ error: 'not supported', result: null })
return
}
let tick = request.query.ticker.toLowerCase() || ''
let current_block_height = await get_block_height_of_db()
let query = ` select current_holder_pkscript, current_holder_wallet, inscription_id, amount, block_height as genesis_height
from brc20_unused_tx_inscrs
where tick = $1
order by current_holder_pkscript asc;`
let params = [tick]
let res = await query_db(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no unused tx found', result: null })
return
}
let result = {
unused_txes: res.rows,
block_height: current_block_height
}
response.send({ error: null, result: result })
} catch (err) {
console.log(err)
response.status(500).send({ error: 'internal error', result: null })
}
});
app.get('/v1/brc20/holders', async (request, response) => {
try {
console.log(`${request.protocol}://${request.get('host')}${request.originalUrl}`)
if (!use_extra_tables) {
response.status(400).send({ error: 'not supported', result: null })
return
}
let tick = request.query.ticker.toLowerCase() || ''
let current_block_height = await get_block_height_of_db()
let query = ` select pkscript, wallet, overall_balance, available_balance
from brc20_current_balances
where tick = $1
order by overall_balance asc;`
let params = [tick]
let res = await query_db(query, params)
if (res.rows.length == 0) {
response.status(400).send({ error: 'no unused tx found', result: null })
return
}
let rows = res.rows
// order rows using parseInt(overall_balance) desc
rows.sort((a, b) => parseInt(b.overall_balance) - parseInt(a.overall_balance))
// remove rows with parseInt(overall_balance) == 0
rows = rows.filter((row) => parseInt(row.overall_balance) != 0)
let result = {
unused_txes: rows,
block_height: current_block_height
}
response.send({ error: null, result: result })
} catch (err) {
console.log(err)
response.status(500).send({ error: 'internal error', result: null })
}
});
app.get('/v1/brc20/get_hash_of_all_activity', async (request, response) => {
try {
console.log(`${request.protocol}://${request.get('host')}${request.originalUrl}`)
@@ -174,11 +317,11 @@ app.get('/v1/brc20/get_hash_of_all_activity', async (request, response) => {
let query = `select cumulative_event_hash, block_event_hash
from brc20_cumulative_event_hashes
where block_height = $1;`
let res = await db_pool.query(query, [block_height])
let res = await query_db(query, [block_height])
let cumulative_event_hash = res.rows[0].cumulative_event_hash
let block_event_hash = res.rows[0].block_event_hash
let res2 = await db_pool.query('select indexer_version from brc20_indexer_version;')
let res2 = await query_db('select indexer_version from brc20_indexer_version;')
let indexer_version = res2.rows[0].indexer_version
response.send({ error: null, result: {
@@ -199,30 +342,83 @@ app.get('/v1/brc20/get_hash_of_all_current_balances', async (request, response)
try {
console.log(`${request.protocol}://${request.get('host')}${request.originalUrl}`)
let current_block_height = await get_block_height_of_db()
let query = ` with tempp as (
select max(id) as id
from brc20_historic_balances
where block_height <= $1
group by pkscript, tick
)
select bhb.pkscript, bhb.tick, bhb.overall_balance, bhb.available_balance
from tempp t
left join brc20_historic_balances bhb on bhb.id = t.id
order by bhb.pkscript asc, bhb.tick asc;`
let params = [current_block_height]
let hash_hex = null
if (!use_extra_tables) {
let query = ` with tempp as (
select max(id) as id
from brc20_historic_balances
where block_height <= $1
group by pkscript, tick
)
select bhb.pkscript, bhb.tick, bhb.overall_balance, bhb.available_balance
from tempp t
left join brc20_historic_balances bhb on bhb.id = t.id
order by bhb.pkscript asc, bhb.tick asc;`
let params = [current_block_height]
let res = await db_pool.query(query, params)
let whole_str = ''
res.rows.forEach((row) => {
whole_str += row.pkscript + ';' + row.tick + ';' + row.overall_balance + ';' + row.available_balance + EVENT_SEPARATOR
})
whole_str = whole_str.slice(0, -1)
// get sha256 hash hex of the whole string
const hash = crypto.createHash('sha256');
hash.update(whole_str);
let hash_hex = hash.digest('hex');
let res = await query_db(query, params)
res.rows.sort((a, b) => {
if (a.pkscript < b.pkscript) {
return -1
} else if (a.pkscript > b.pkscript) {
return 1
} else {
if (a.tick < b.tick) {
return -1
} else if (a.tick > b.tick) {
return 1
} else {
return 0
}
}
})
let whole_str = ''
res.rows.forEach((row) => {
if (parseInt(row.overall_balance) != 0) {
whole_str += row.pkscript + ';' + row.tick + ';' + row.overall_balance + ';' + row.available_balance + EVENT_SEPARATOR
}
})
whole_str = whole_str.slice(0, -1)
// get sha256 hash hex of the whole string
const hash = crypto.createHash('sha256');
hash.update(whole_str);
hash_hex = hash.digest('hex');
} else {
let query = ` select pkscript, tick, overall_balance, available_balance
from brc20_current_balances
order by pkscript asc, tick asc;`
let params = []
let res2 = await db_pool.query('select indexer_version from brc20_indexer_version;')
let res = await query_db(query, params)
res.rows.sort((a, b) => {
if (a.pkscript < b.pkscript) {
return -1
} else if (a.pkscript > b.pkscript) {
return 1
} else {
if (a.tick < b.tick) {
return -1
} else if (a.tick > b.tick) {
return 1
} else {
return 0
}
}
})
let whole_str = ''
res.rows.forEach((row) => {
if (parseInt(row.overall_balance) != 0) {
whole_str += row.pkscript + ';' + row.tick + ';' + row.overall_balance + ';' + row.available_balance + EVENT_SEPARATOR
}
})
whole_str = whole_str.slice(0, -1)
// get sha256 hash hex of the whole string
const hash = crypto.createHash('sha256');
hash.update(whole_str);
hash_hex = hash.digest('hex');
}
let res2 = await query_db('select indexer_version from brc20_indexer_version;')
let indexer_version = res2.rows[0].indexer_version
response.send({ error: null, result: {

View File

@@ -0,0 +1,87 @@
# pip install python-dotenv
import os
from dotenv import load_dotenv
init_env = True
# does .env file exist?
if os.path.isfile('.env'):
res = input("Do you want to re-initialise .env file? (y/n) ")
if res != 'y':
init_env = False
if init_env:
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=10
USE_EXTRA_TABLES="true"
API_HOST="127.0.0.1"
API_PORT="8000"
API_TRUSTED_PROXY_CNT="0"
print("Initialising .env file")
print("leave blank to use default values")
use_other_env = False
other_env_exists = os.path.isfile('../brc20_index/.env')
if other_env_exists:
res = input(".env on brc20_index already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_other_env = True
if use_other_env:
load_dotenv(dotenv_path='../brc20_index/.env')
DB_USER = os.getenv("DB_USER") or "postgres"
DB_HOST = os.getenv("DB_HOST") or "localhost"
DB_PORT = os.getenv("DB_PORT") or "5432"
DB_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_PASSWD = os.getenv("DB_PASSWD")
USE_EXTRA_TABLES = os.getenv("CREATE_EXTRA_TABLES") or "false"
else:
res = input("BRC20 Postgres DB username (Default: postgres): ")
if res != '':
DB_USER = res
res = input("BRC20 Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_HOST = res
res = input("BRC20 Postgres DB port (Default: 5432): ")
if res != '':
DB_PORT = res
res = input("BRC20 Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_DATABASE = res
res = input("BRC20 Postgres DB password: ")
DB_PASSWD = res
res = input("Use extra tables (Default: true): ")
if res != '':
USE_EXTRA_TABLES = res
res = input("BRC20 Postgres DB use SSL (Default: true) may need to be set to false on Windows machines: ")
if res != '':
DB_SSL = res
res = input("BRC20 Postgres DB max connections (Default: 10): ")
if res != '':
DB_MAX_CONNECTIONS = res
res = input("API host (Default: 127.0.0.1): ")
if res != '':
API_HOST = res
res = input("API port (Default: 8000): ")
if res != '':
API_PORT = res
res = input("API trusted proxy count (Default: 0) if there are known proxies such as nginx in front of API, set this to the number of proxies: ")
if res != '':
API_TRUSTED_PROXY_CNT = res
f = open('.env', 'w')
f.write("DB_USER=\"" + DB_USER + "\"\n")
f.write("DB_HOST=\"" + DB_HOST + "\"\n")
f.write("DB_PORT=\"" + str(DB_PORT) + "\"\n")
f.write("DB_DATABASE=\"" + DB_DATABASE + "\"\n")
f.write("DB_PASSWD=\"" + DB_PASSWD + "\"\n")
f.write("DB_SSL=\"" + DB_SSL + "\"\n")
f.write("DB_MAX_CONNECTIONS=" + str(DB_MAX_CONNECTIONS) + "\n")
f.write("USE_EXTRA_TABLES=\"" + USE_EXTRA_TABLES + "\"\n")
f.write("API_HOST=\"" + API_HOST + "\"\n")
f.write("API_PORT=\"" + API_PORT + "\"\n")
f.write("API_TRUSTED_PROXY_CNT=\"" + API_TRUSTED_PROXY_CNT + "\"\n")
f.close()

View File

@@ -2,20 +2,25 @@
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres_brc20"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres_metaprotocol"
DB_METAPROTOCOL_PASSWD=""
## must not be changed for correct hash calculation
FIRST_INSCRIPTION_HEIGHT="767430"
## main indexer database settings
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
## reporting system settings
REPORT_TO_INDEXER="true"
REPORT_URL="https://api.opi.network/report_block"
REPORT_RETRIES="10"
# set a name for report dashboard
REPORT_NAME="opi_brc20_index"
REPORT_NAME="opi_brc20_index"
# create brc20_current_balances and brc20_unused_tx_inscrs tables
CREATE_EXTRA_TABLES="true"

View File

@@ -7,6 +7,10 @@ import traceback, time, codecs, json
import psycopg2
import hashlib
if not os.path.isfile('.env'):
print(".env file not found, please run \"python3 reset_init.py\" first")
sys.exit(1)
## global variables
ticks = {}
in_commit = False
@@ -40,6 +44,7 @@ report_to_indexer = (os.getenv("REPORT_TO_INDEXER") or "true") == "true"
report_url = os.getenv("REPORT_URL") or "https://api.opi.network/report_block"
report_retries = int(os.getenv("REPORT_RETRIES") or "10")
report_name = os.getenv("REPORT_NAME") or "opi_brc20_indexer"
create_extra_tables = (os.getenv("CREATE_EXTRA_TABLES") or "false") == "true"
## connect to db
conn = psycopg2.connect(
@@ -60,6 +65,25 @@ conn_metaprotocol = psycopg2.connect(
conn_metaprotocol.autocommit = True
cur_metaprotocol = conn_metaprotocol.cursor()
## create tables if not exists
## does brc20_block_hashes table exist?
cur.execute('''SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'brc20_block_hashes') AS table_existence;''')
if cur.fetchone()[0] == False:
print("Initialising database...")
with open('db_init.sql', 'r') as f:
sql = f.read()
cur.execute(sql)
conn.commit()
if create_extra_tables:
cur.execute('''SELECT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = 'brc20_extras_block_hashes') AS table_existence;''')
if cur.fetchone()[0] == False:
print("Initialising extra tables...")
with open('db_init_extra.sql', 'r') as f:
sql = f.read()
cur.execute(sql)
conn.commit()
## helper functions
def utf8len(s):
@@ -593,6 +617,27 @@ def check_if_there_is_residue_from_last_run():
print("Rolled back to " + str(current_block - 1))
return
def check_if_there_is_residue_on_extra_tables_from_last_run():
cur.execute('''select max(block_height) from brc20_extras_block_hashes;''')
row = cur.fetchone()
current_block = None
if row[0] is None: current_block = first_inscription_height
else: current_block = row[0] + 1
residue_found = False
cur.execute('''select coalesce(max(block_height), -1) from brc20_unused_tx_inscrs;''')
if cur.rowcount != 0 and cur.fetchone()[0] >= current_block:
residue_found = True
print("residue on brc20_unused_tx_inscrs")
cur.execute('''select coalesce(max(block_height), -1) from brc20_current_balances;''')
if cur.rowcount != 0 and cur.fetchone()[0] >= current_block:
residue_found = True
print("residue on brc20_current_balances")
if residue_found:
print("There is residue on extra tables from last run, rolling back to " + str(current_block - 1))
reorg_on_extra_tables(current_block - 1)
print("Rolled back to " + str(current_block - 1))
return
cur.execute('select event_type_name, event_type_id from brc20_event_types;')
event_types = {}
for row in cur.fetchall():
@@ -680,6 +725,7 @@ def report_hashes(block_height):
to_send = {
"name": report_name,
"type": "brc20",
"node_type": "full_node",
"version": INDEXER_VERSION,
"db_version": DB_VERSION,
"block_height": block_height,
@@ -690,8 +736,255 @@ def report_hashes(block_height):
print("Sending hashes to metaprotocol indexer indexer...")
try_to_report_with_retries(to_send)
def reorg_on_extra_tables(reorg_height):
cur.execute('begin;')
cur.execute('delete from brc20_current_balances where block_height > %s RETURNING pkscript, tick;', (reorg_height,)) ## delete new balances
rows = cur.fetchall()
## fetch balances of deleted rows for reverting balances
for r in rows:
pkscript = r[0]
tick = r[1]
cur.execute(''' select overall_balance, available_balance, wallet, block_height
from brc20_historic_balances
where block_height <= %s and pkscript = %s and tick = %s
order by id desc
limit 1;''', (reorg_height, pkscript, tick))
if cur.rowcount != 0:
balance = cur.fetchone()
cur.execute('''insert into brc20_current_balances (pkscript, wallet, tick, overall_balance, available_balance, block_height)
values (%s, %s, %s, %s, %s, %s);''', (pkscript, balance[2], tick, balance[0], balance[1], balance[3]))
cur.execute('truncate table brc20_unused_tx_inscrs restart identity;')
cur.execute('''with tempp as (
select inscription_id, event, id, block_height
from brc20_events
where event_type = %s and block_height <= %s
), tempp2 as (
select inscription_id, event
from brc20_events
where event_type = %s and block_height <= %s
)
select t.event, t.id, t.block_height, t.inscription_id
from tempp t
left join tempp2 t2 on t.inscription_id = t2.inscription_id
where t2.inscription_id is null;''', (event_types['transfer-inscribe'], reorg_height, event_types['transfer-transfer'], reorg_height))
rows = cur.fetchall()
for row in rows:
new_event = row[0]
event_id = row[1]
block_height = row[2]
inscription_id = row[3]
cur.execute('''INSERT INTO brc20_unused_tx_inscrs (inscription_id, tick, amount, current_holder_pkscript, current_holder_wallet, event_id, block_height)
VALUES (%s, %s, %s, %s, %s, %s, %s)''',
(inscription_id, new_event["tick"], int(new_event["amount"]), new_event["source_pkScript"], new_event["source_wallet"], event_id, block_height))
cur.execute('delete from brc20_block_hashes_current_balances where block_height > %s;', (reorg_height,)) ## delete new block hashes
cur.execute("SELECT setval('brc20_block_hashes_current_balances_id_seq', max(id)) from brc20_block_hashes_current_balances;") ## reset id sequence
cur.execute('commit;')
def initial_index_of_extra_tables():
cur.execute('begin;')
print("resetting brc20_unused_tx_inscrs")
cur.execute('truncate table brc20_unused_tx_inscrs restart identity;')
print("selecting unused txes")
cur.execute('''with tempp as (
select inscription_id, event, id, block_height
from brc20_events
where event_type = %s
), tempp2 as (
select inscription_id, event
from brc20_events
where event_type = %s
)
select t.event, t.id, t.block_height, t.inscription_id
from tempp t
left join tempp2 t2 on t.inscription_id = t2.inscription_id
where t2.inscription_id is null;''', (event_types['transfer-inscribe'], event_types['transfer-transfer']))
rows = cur.fetchall()
print("inserting unused txes")
idx = 0
for row in rows:
idx += 1
if idx % 200 == 0: print(idx, '/', len(rows))
new_event = row[0]
event_id = row[1]
block_height = row[2]
inscription_id = row[3]
cur.execute('''INSERT INTO brc20_unused_tx_inscrs (inscription_id, tick, amount, current_holder_pkscript, current_holder_wallet, event_id, block_height)
VALUES (%s, %s, %s, %s, %s, %s, %s)''',
(inscription_id, new_event["tick"], int(new_event["amount"]), new_event["source_pkScript"], new_event["source_wallet"], event_id, block_height))
print("resetting brc20_current_balances")
cur.execute('truncate table brc20_current_balances restart identity;')
print("selecting current balances")
cur.execute('''with tempp as (
select max(id) as id
from brc20_historic_balances
group by pkscript, tick
)
select bhb.pkscript, bhb.tick, bhb.overall_balance, bhb.available_balance, bhb.wallet, bhb.block_height
from tempp t
left join brc20_historic_balances bhb on bhb.id = t.id
order by bhb.pkscript asc, bhb.tick asc;''')
rows = cur.fetchall()
print("inserting current balances")
idx = 0
for r in rows:
idx += 1
if idx % 200 == 0: print(idx, '/', len(rows))
pkscript = r[0]
tick = r[1]
overall_balance = r[2]
available_balance = r[3]
wallet = r[4]
block_height = r[5]
cur.execute('''insert into brc20_current_balances (pkscript, wallet, tick, overall_balance, available_balance, block_height)
values (%s, %s, %s, %s, %s, %s);''', (pkscript, wallet, tick, overall_balance, available_balance, block_height))
print("resetting brc20_extras_block_hashes")
cur.execute('truncate table brc20_extras_block_hashes restart identity;')
print("inserting brc20_extras_block_hashes")
cur.execute('''select block_height, block_hash from brc20_block_hashes order by block_height asc;''')
rows = cur.fetchall()
idx = 0
for row in rows:
idx += 1
if idx % 200 == 0: print(idx, '/', len(rows))
block_height = row[0]
block_hash = row[1]
cur.execute('''INSERT INTO brc20_extras_block_hashes (block_height, block_hash) VALUES (%s, %s);''', (block_height, block_hash))
cur.execute('commit;')
def index_extra_tables(block_height, block_hash):
ebh_current_height = 0
cur.execute('select max(block_height) as current_ebh_height from brc20_extras_block_hashes;')
if cur.rowcount > 0:
res = cur.fetchone()[0]
if res is not None:
ebh_current_height = res
if ebh_current_height >= block_height:
print("reorg detected on extra tables, rolling back to: " + str(block_height))
reorg_on_extra_tables(block_height - 1)
print("updating extra tables for block: " + str(block_height))
cur.execute('''select pkscript, wallet, tick, overall_balance, available_balance
from brc20_historic_balances
where block_height = %s
order by id asc;''', (block_height,))
balance_changes = cur.fetchall()
if len(balance_changes) == 0:
print("No balance_changes found for block " + str(block_height))
else:
balance_changes_map = {}
for balance_change in balance_changes:
pkscript = balance_change[0]
tick = balance_change[2]
key = pkscript + '_' + tick
balance_changes_map[key] = balance_change
print("Balance_change count: ", len(balance_changes_map))
idx = 0
for key in balance_changes_map:
new_balance = balance_changes_map[key]
idx += 1
if idx % 200 == 0: print(idx, '/', len(balance_changes_map))
cur.execute('''INSERT INTO brc20_current_balances (pkscript, wallet, tick, overall_balance, available_balance, block_height) VALUES (%s, %s, %s, %s, %s, %s)
ON CONFLICT (pkscript, tick)
DO UPDATE SET overall_balance = EXCLUDED.overall_balance
, available_balance = EXCLUDED.available_balance
, block_height = EXCLUDED.block_height;''', new_balance + (block_height,))
cur.execute('''select event, id, event_type, inscription_id
from brc20_events where block_height = %s and (event_type = %s or event_type = %s)
order by id asc;''', (block_height, event_types['transfer-inscribe'], event_types['transfer-transfer'],))
events = cur.fetchall()
if len(events) == 0:
print("No events found for block " + str(block_height))
else:
print("Events count: ", len(events))
idx = 0
for row in events:
new_event = row[0]
event_id = row[1]
new_event["event_type"] = event_types_rev[row[2]]
new_event["inscription_id"] = row[3]
idx += 1
if idx % 200 == 0: print(idx, '/', len(events))
if new_event["event_type"] == 'transfer-inscribe':
cur.execute('''INSERT INTO brc20_unused_tx_inscrs (inscription_id, tick, amount, current_holder_pkscript, current_holder_wallet, event_id, block_height)
VALUES (%s, %s, %s, %s, %s, %s, %s) ON CONFLICT (inscription_id) DO NOTHING''',
(new_event["inscription_id"], new_event["tick"], int(new_event["amount"]), new_event["source_pkScript"], new_event["source_wallet"], event_id, block_height))
elif new_event["event_type"] == 'transfer-transfer':
cur.execute('''DELETE FROM brc20_unused_tx_inscrs WHERE inscription_id = %s;''', (new_event["inscription_id"],))
else:
print("Unknown event type: " + new_event["event_type"])
sys.exit(1)
cur.execute('''INSERT INTO brc20_extras_block_hashes (block_height, block_hash) VALUES (%s, %s);''', (block_height, block_hash))
return True
def check_extra_tables():
global first_inscription_height
try:
cur.execute('''
select min(ebh.block_height) as ebh_tocheck_height
from brc20_extras_block_hashes ebh
left join brc20_block_hashes bh on bh.block_height = ebh.block_height
where bh.block_hash != ebh.block_hash
''')
ebh_tocheck_height = 0
if cur.rowcount > 0:
res = cur.fetchone()[0]
if res is not None:
ebh_tocheck_height = res
print("hash diff found on block: " + str(ebh_tocheck_height))
if ebh_tocheck_height == 0:
cur.execute('select max(block_height) as current_ebh_height from brc20_extras_block_hashes;')
if cur.rowcount > 0:
res = cur.fetchone()[0]
if res is not None:
ebh_tocheck_height = res + 1
if ebh_tocheck_height == 0:
print("no extra table data found")
ebh_tocheck_height = first_inscription_height
cur.execute('''select max(block_height) from brc20_block_hashes;''')
main_block_height = first_inscription_height
if cur.rowcount > 0:
res = cur.fetchone()[0]
if res is not None:
main_block_height = res
if ebh_tocheck_height > main_block_height:
print("no new extra table data found")
return
while ebh_tocheck_height <= main_block_height:
if ebh_tocheck_height == first_inscription_height:
print("initial indexing of extra tables, may take a few minutes")
initial_index_of_extra_tables()
return
cur.execute('''select block_hash from brc20_block_hashes where block_height = %s;''', (ebh_tocheck_height,))
block_hash = cur.fetchone()[0]
if index_extra_tables(ebh_tocheck_height, block_hash):
print("extra table data indexed for block: " + str(ebh_tocheck_height))
ebh_tocheck_height += 1
else:
print("extra table data index failed for block: " + str(ebh_tocheck_height))
return
except:
traceback.print_exc()
return
check_if_there_is_residue_from_last_run()
if create_extra_tables:
check_if_there_is_residue_on_extra_tables_from_last_run()
print("checking extra tables")
check_extra_tables()
last_report_height = 0
while True:
check_if_there_is_residue_from_last_run()
if create_extra_tables:
check_if_there_is_residue_on_extra_tables_from_last_run()
## check if a new block is indexed
cur_metaprotocol.execute('''SELECT coalesce(max(block_height), -1) as max_height from block_hashes;''')
max_block_of_metaprotocol_db = cur_metaprotocol.fetchone()[0]
@@ -716,8 +1009,12 @@ while True:
continue
try:
index_block(current_block, current_block_hash)
if max_block_of_metaprotocol_db - current_block < 10: ## do not report if there are more than 10 blocks to index
if create_extra_tables:
print("checking extra tables")
check_extra_tables()
if max_block_of_metaprotocol_db - current_block < 10 or current_block - last_report_height > 100: ## report only when fewer than 10 blocks remain to index, or at least once every 100 blocks
report_hashes(current_block)
last_report_height = current_block
except:
traceback.print_exc()
if in_commit: ## rollback commit if any

View File

@@ -0,0 +1,39 @@
CREATE TABLE public.brc20_current_balances (
id bigserial NOT NULL,
pkscript text NOT NULL,
wallet text NULL,
tick varchar(4) NOT NULL,
overall_balance numeric(40) NOT NULL,
available_balance numeric(40) NOT NULL,
block_height int4 NOT NULL,
CONSTRAINT brc20_current_balances_pk PRIMARY KEY (id)
);
CREATE UNIQUE INDEX brc20_current_balances_pkscript_tick_idx ON public.brc20_current_balances USING btree (pkscript, tick);
CREATE INDEX brc20_current_balances_block_height_idx ON public.brc20_current_balances USING btree (block_height);
CREATE INDEX brc20_current_balances_pkscript_idx ON public.brc20_current_balances USING btree (pkscript);
CREATE INDEX brc20_current_balances_tick_idx ON public.brc20_current_balances USING btree (tick);
CREATE INDEX brc20_current_balances_wallet_idx ON public.brc20_current_balances USING btree (wallet);
CREATE TABLE public.brc20_unused_tx_inscrs (
id bigserial NOT NULL,
inscription_id text NOT NULL,
tick varchar(4) NOT NULL,
amount numeric(40) NOT NULL,
current_holder_pkscript text NOT NULL,
current_holder_wallet text NULL,
event_id int8 NOT NULL,
block_height int4 NOT NULL,
CONSTRAINT brc20_unused_tx_inscrs_pk PRIMARY KEY (id)
);
CREATE UNIQUE INDEX brc20_unused_tx_inscrs_inscription_id_idx ON public.brc20_unused_tx_inscrs USING btree (inscription_id);
CREATE INDEX brc20_unused_tx_inscrs_tick_idx ON public.brc20_unused_tx_inscrs USING btree (tick);
CREATE INDEX brc20_unused_tx_inscrs_pkscript_idx ON public.brc20_unused_tx_inscrs USING btree (current_holder_pkscript);
CREATE INDEX brc20_unused_tx_inscrs_wallet_idx ON public.brc20_unused_tx_inscrs USING btree (current_holder_wallet);
CREATE TABLE public.brc20_extras_block_hashes (
id bigserial NOT NULL,
block_height int4 NOT NULL,
block_hash text NOT NULL,
CONSTRAINT brc20_extras_block_hashes_pk PRIMARY KEY (id)
);
CREATE UNIQUE INDEX brc20_extras_block_hashes_block_height_idx ON public.brc20_extras_block_hashes USING btree (block_height);

View File

@@ -0,0 +1,3 @@
drop table if exists brc20_current_balances;
drop table if exists brc20_unused_tx_inscrs;
drop table if exists brc20_extras_block_hashes;

View File

@@ -4,7 +4,133 @@
import os, psycopg2
from dotenv import load_dotenv
res = input("Are you sure you want to reset the brc20 database? (y/n) ")
init_env = True
# does .env file exist?
if os.path.isfile('.env'):
res = input("Do you want to re-initialise .env file? (y/n) ")
if res != 'y':
init_env = False
if init_env:
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
FIRST_INSCRIPTION_HEIGHT="767430"
DB_METAPROTOCOL_USER="postgres"
DB_METAPROTOCOL_HOST="localhost"
DB_METAPROTOCOL_PORT="5432"
DB_METAPROTOCOL_DATABASE="postgres"
DB_METAPROTOCOL_PASSWD=""
REPORT_TO_INDEXER="true"
REPORT_URL="https://api.opi.network/report_block"
REPORT_RETRIES="10"
REPORT_NAME="opi_brc20_index"
CREATE_EXTRA_TABLES="true"
print("Initialising .env file")
print("leave blank to use default values")
use_other_env = False
other_env_exists = os.path.isfile('../brc20_api/.env')
if other_env_exists:
res = input(".env on brc20_api already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_other_env = True
if use_other_env:
load_dotenv(dotenv_path='../brc20_api/.env')
DB_USER = os.getenv("DB_USER") or "postgres"
DB_HOST = os.getenv("DB_HOST") or "localhost"
DB_PORT = os.getenv("DB_PORT") or "5432"
DB_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_PASSWD = os.getenv("DB_PASSWD")
else:
res = input("BRC20 Postgres DB username (Default: postgres): ")
if res != '':
DB_USER = res
res = input("BRC20 Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_HOST = res
res = input("BRC20 Postgres DB port (Default: 5432): ")
if res != '':
DB_PORT = res
res = input("BRC20 Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_DATABASE = res
res = input("BRC20 Postgres DB password: ")
DB_PASSWD = res
use_main_env = False
main_env_exists = os.path.isfile('../main_index/.env')
if main_env_exists:
res = input(".env on main_index already exists, do you want to use values from there? (y/n) ")
if res == 'y':
use_main_env = True
if use_main_env:
load_dotenv(dotenv_path='../main_index/.env')
DB_METAPROTOCOL_USER = os.getenv("DB_USER") or "postgres"
DB_METAPROTOCOL_HOST = os.getenv("DB_HOST") or "localhost"
DB_METAPROTOCOL_PORT = os.getenv("DB_PORT") or "5432"
DB_METAPROTOCOL_DATABASE = os.getenv("DB_DATABASE") or "postgres"
DB_METAPROTOCOL_PASSWD = os.getenv("DB_PASSWD")
FIRST_INSCRIPTION_HEIGHT = os.getenv("FIRST_INSCRIPTION_HEIGHT") or "767430"
else:
res = input("Main Postgres DB username (Default: postgres): ")
if res != '':
DB_METAPROTOCOL_USER = res
res = input("Main Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_METAPROTOCOL_HOST = res
res = input("Main Postgres DB port (Default: 5432): ")
if res != '':
DB_METAPROTOCOL_PORT = res
res = input("Main Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_METAPROTOCOL_DATABASE = res
res = input("Main Postgres DB password: ")
DB_METAPROTOCOL_PASSWD = res
res = input("First inscription height (Default: 767430) leave default for correct hash reporting: ")
if res != '':
FIRST_INSCRIPTION_HEIGHT = res
res = input("Report to main indexer (Default: true): ")
if res != '':
REPORT_TO_INDEXER = res
if REPORT_TO_INDEXER == 'true':
res = input("Report URL (Default: https://api.opi.network/report_block): ")
if res != '':
REPORT_URL = res
res = input("Report retries (Default: 10): ")
if res != '':
REPORT_RETRIES = res
while True:
res = input("Report name: ")
if res != '':
REPORT_NAME = res
break
else:
print('Report name cannot be empty')
res = input("Create extra tables for faster queries (Default: true) set to true for creating brc20_current_balances and brc20_unused_tx_inscrs tables: ")
if res != '':
CREATE_EXTRA_TABLES = res
f = open('.env', 'w')
f.write('DB_USER="' + DB_USER + '"\n')
f.write('DB_HOST="' + DB_HOST + '"\n')
f.write('DB_PORT="' + DB_PORT + '"\n')
f.write('DB_DATABASE="' + DB_DATABASE + '"\n')
f.write('DB_PASSWD="' + DB_PASSWD + '"\n')
f.write('FIRST_INSCRIPTION_HEIGHT="' + FIRST_INSCRIPTION_HEIGHT + '"\n')
f.write('DB_METAPROTOCOL_USER="' + DB_METAPROTOCOL_USER + '"\n')
f.write('DB_METAPROTOCOL_HOST="' + DB_METAPROTOCOL_HOST + '"\n')
f.write('DB_METAPROTOCOL_PORT="' + str(DB_METAPROTOCOL_PORT) + '"\n')
f.write('DB_METAPROTOCOL_DATABASE="' + DB_METAPROTOCOL_DATABASE + '"\n')
f.write('DB_METAPROTOCOL_PASSWD="' + DB_METAPROTOCOL_PASSWD + '"\n')
f.write('REPORT_TO_INDEXER="' + REPORT_TO_INDEXER + '"\n')
f.write('REPORT_URL="' + REPORT_URL + '"\n')
f.write('REPORT_RETRIES="' + REPORT_RETRIES + '"\n')
f.write('REPORT_NAME="' + REPORT_NAME + '"\n')
f.write('CREATE_EXTRA_TABLES="' + CREATE_EXTRA_TABLES + '"\n')
f.close()
res = input("Are you sure you want to initialise/reset the brc20 database? (y/n) ")
if res != 'y':
print('aborting')
exit(1)
@@ -16,6 +142,8 @@ db_port = int(os.getenv("DB_PORT") or "5432")
db_database = os.getenv("DB_DATABASE") or "postgres"
db_password = os.getenv("DB_PASSWD")
create_extra_tables = (os.getenv("CREATE_EXTRA_TABLES") or "false") == "true"
## connect to db
conn = psycopg2.connect(
host=db_host,
@@ -26,6 +154,21 @@ conn = psycopg2.connect(
conn.autocommit = True
cur = conn.cursor()
db_exists = False
try:
cur.execute('select count(*) from brc20_block_hashes;')
hash_cnt = cur.fetchone()[0]
if hash_cnt > 0:
db_exists = True
except:
pass
if db_exists:
res = input("It seems like you have entries on DB, are you sure to reset databases? This WILL RESET indexing progress. (y/n) ")
if res != 'y':
print('aborting')
exit(1)
## reset db
sqls = open('db_reset.sql', 'r').read().split(';')
for sql in sqls:
@@ -37,6 +180,16 @@ for sql in sqls:
if sql.strip() != '':
cur.execute(sql)
if create_extra_tables:
sqls = open('db_reset_extra.sql', 'r').read().split(';')
for sql in sqls:
if sql.strip() != '':
cur.execute(sql)
sqls = open('db_init_extra.sql', 'r').read().split(';')
for sql in sqls:
if sql.strip() != '':
cur.execute(sql)
## close db
cur.close()
conn.close()

View File

@@ -2,15 +2,24 @@
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres_metaprotocol"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS=50
BITCOIN_CHAIN_FOLDER="~/.bitcoin/"
# leave these empty to use .cookie file
BITCOIN_RPC_USER=""
BITCOIN_RPC_PASSWD=""
# change to ord.exe on Windows (without ./)
ORD_BINARY="./ord"
# leave default if repository folder structure hasn't been changed
ORD_FOLDER="../../ord/target/release/"
# relative to ord folder
ORD_DATADIR="."
# leave default for correct indexing
FIRST_INSCRIPTION_HEIGHT="767430"

View File

@@ -32,6 +32,9 @@ var db_pool = new Pool({
})
var chain_folder = process.env.BITCOIN_CHAIN_FOLDER || "~/.bitcoin/"
var bitcoin_rpc_user = process.env.BITCOIN_RPC_USER || ""
var bitcoin_rpc_password = process.env.BITCOIN_RPC_PASSWD || ""
var ord_binary = process.env.ORD_BINARY || "ord"
var ord_folder = process.env.ORD_FOLDER || "../../ord/target/release/"
var ord_datadir = process.env.ORD_DATADIR || "."
@@ -89,7 +92,14 @@ async function main_index() {
process.chdir(ord_folder);
let ord_version_cmd = ord_binary + " --version"
let ord_index_cmd = ord_binary + " --bitcoin-data-dir " + chain_folder + " --data-dir " + ord_datadir + cookie_arg + " --height-limit " + (ord_end_block_height) + " index run"
let rpc_argument = ""
if (bitcoin_rpc_user != "") {
rpc_argument = " --bitcoin-rpc-user " + bitcoin_rpc_user + " --bitcoin-rpc-pass " + bitcoin_rpc_password
}
let ord_index_cmd = ord_binary + " --bitcoin-data-dir " + chain_folder + " --data-dir " + ord_datadir + cookie_arg + " --height-limit " + (ord_end_block_height) + " " + rpc_argument + " index run"
try {
let version_string = execSync(ord_version_cmd).toString()

View File

@@ -4,7 +4,90 @@
import os, psycopg2, pathlib
from dotenv import load_dotenv
res = input("Are you sure you want to reset the metaprotocol database? (y/n) ")
init_env = True
# does .env file exist?
if os.path.isfile('.env'):
res = input("Do you want to re-initialise .env file? (y/n) ")
if res != 'y':
init_env = False
if init_env:
DB_USER="postgres"
DB_HOST="localhost"
DB_PORT="5432"
DB_DATABASE="postgres"
DB_PASSWD=""
DB_SSL="true"
DB_MAX_CONNECTIONS="50"
BITCOIN_CHAIN_FOLDER="~/.bitcoin/"
BITCOIN_RPC_USER=""
BITCOIN_RPC_PASSWD=""
ORD_BINARY="./ord"
ORD_FOLDER="../../ord/target/release/"
ORD_DATADIR="."
FIRST_INSCRIPTION_HEIGHT="767430"
print("Initialising .env file")
print("leave blank to use default values")
res = input("Main Postgres DB username (Default: postgres): ")
if res != '':
DB_USER = res
res = input("Main Postgres DB host (Default: localhost) leave default if postgres is installed on the same machine: ")
if res != '':
DB_HOST = res
res = input("Main Postgres DB port (Default: 5432): ")
if res != '':
DB_PORT = res
res = input("Main Postgres DB name (Default: postgres) leave default if no new dbs are created: ")
if res != '':
DB_DATABASE = res
res = input("Main Postgres DB password: ")
DB_PASSWD = res
res = input("Main Postgres DB use SSL (Default: true) may need to be set to false on Windows machines: ")
if res != '':
DB_SSL = res
res = input("Main Postgres DB max connections (Default: 50): ")
if res != '':
DB_MAX_CONNECTIONS = res
res = input("Bitcoin datadir (Default: ~/.bitcoin/) use forward-slashes(/) even on Windows: ")
if res != '':
BITCOIN_CHAIN_FOLDER = res
res = input("Bitcoin RPC username (Default: (empty)) leave default to use .cookie file: ")
if res != '':
BITCOIN_RPC_USER = res
res = input("Bitcoin RPC password (Default: (empty)) leave default to use .cookie file: ")
if res != '':
BITCOIN_RPC_PASSWD = res
res = input("Ord binary command (Default: ./ord) change to ord.exe on Windows (without ./): ")
if res != '':
ORD_BINARY = res
res = input("Path to ord folder (Default: ../../ord/target/release/) leave default if repository folder structure hasn't been changed: ")
if res != '':
ORD_FOLDER = res
res = input("Ord datadir (relative to ord folder) (Default: .) leave default if repository folder structure hasn't been changed: ")
if res != '':
ORD_DATADIR = res
res = input("First inscription height (Default: 767430) leave default for correct indexing: ")
if res != '':
FIRST_INSCRIPTION_HEIGHT = res
f = open(".env", "w")
f.write("DB_USER=\"" + DB_USER + "\"\n")
f.write("DB_HOST=\"" + DB_HOST + "\"\n")
f.write("DB_PORT=\"" + DB_PORT + "\"\n")
f.write("DB_DATABASE=\"" + DB_DATABASE + "\"\n")
f.write("DB_PASSWD=\"" + DB_PASSWD + "\"\n")
f.write("DB_SSL=\"" + DB_SSL + "\"\n")
f.write("DB_MAX_CONNECTIONS=" + DB_MAX_CONNECTIONS + "\n")
f.write("BITCOIN_CHAIN_FOLDER=\"" + BITCOIN_CHAIN_FOLDER + "\"\n")
f.write("BITCOIN_RPC_USER=\"" + BITCOIN_RPC_USER + "\"\n")
f.write("BITCOIN_RPC_PASSWD=\"" + BITCOIN_RPC_PASSWD + "\"\n")
f.write("ORD_BINARY=\"" + ORD_BINARY + "\"\n")
f.write("ORD_FOLDER=\"" + ORD_FOLDER + "\"\n")
f.write("ORD_DATADIR=\"" + ORD_DATADIR + "\"\n")
f.write("FIRST_INSCRIPTION_HEIGHT=\"" + FIRST_INSCRIPTION_HEIGHT + "\"\n")
f.close()
res = input("Are you sure you want to initialise/reset the main database? (y/n) ")
if res != 'y':
print('aborting')
exit(1)
@@ -26,6 +109,21 @@ conn = psycopg2.connect(
conn.autocommit = True
cur = conn.cursor()
db_exists = False
try:
cur.execute('select count(*) from block_hashes;')
hash_cnt = cur.fetchone()[0]
if hash_cnt > 0:
db_exists = True
except:
pass
if db_exists:
res = input("It seems like you have entries on DB, are you sure to reset databases? This WILL RESET indexing progress. (y/n) ")
if res != 'y':
print('aborting')
exit(1)
## reset db
sqls = open('db_reset.sql', 'r').read().split(';')
for sql in sqls:

ord/Cargo.lock (generated)
View File

@@ -2141,13 +2141,7 @@ dependencies = [
]
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ord"
name = "opi-ord"
version = "0.14.0"
dependencies = [
"anyhow",
@@ -2206,6 +2200,12 @@ dependencies = [
"unindent",
]
[[package]]
name = "option-ext"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ord-bitcoincore-rpc"
version = "0.17.1"

View File

@@ -1,5 +1,5 @@
[package]
name = "ord"
name = "opi-ord"
description = "◉ Ordinal wallet and block explorer"
version = "0.14.0"
license = "CC0-1.0"

View File

@@ -59,7 +59,7 @@ impl Chain {
pub(crate) fn jubilee_height(self) -> u32 {
match self {
Self::Mainnet => 767430,
Self::Mainnet => 824544,
Self::Regtest => 110,
Self::Signet => 175392,
Self::Testnet => 2544192,