BLD: ipfs - blockstack addressing

This commit is contained in:
Victor Grau Serrat
2017-11-06 09:59:36 -07:00
parent 18a11ff78f
commit ebc00511b6
2 changed files with 117 additions and 88 deletions

View File

@@ -358,14 +358,14 @@ def get_url_type(url):
return ('http', url)
def index_make_mutable_url(host, data_id, scheme='https'):
def index_make_mutable_url(dvconf, host, data_id, scheme='ipfs'):
"""
Make a faux-mutable URL that will be identified
by the indexer as referring to indexed data.
"""
data_id = urllib.quote( data_id.replace('/', '-2f') )
#url = "{}://{}/blockstack/{}".format(scheme, host, data_id)
url = "{}://{}".format(scheme, data_id)
url = "{}://blockstack/{}/{}".format(scheme, dvconf['driver_info']['blockstack_id'], data_id)
return url
@@ -657,15 +657,21 @@ def index_get_page(dvconf, blockchain_id=None, path=None, url=None):
assert blockchain_id
serialized_index_page = None
if url and blockchain_id:
log.debug("Fetch index page {} via HTTP".format(url))
serialized_index_page = get_chunk_via_http(url, blockchain_id=blockchain_id)
else:
assert path
log.debug("Fetch index page {} via driver".format(path))
assert dvconf
get_chunk = dvconf['get_chunk']
serialized_index_page = get_chunk(dvconf, path)
# if url and blockchain_id:
# log.debug("Fetch index page {} via HTTP".format(url))
# serialized_index_page = get_chunk_via_http(url, blockchain_id=blockchain_id)
# else:
# assert path
# log.debug("Fetch index page {} via driver".format(path))
# assert dvconf
# get_chunk = dvconf['get_chunk']
# serialized_index_page = get_chunk(dvconf, path)
assert path
log.debug("Fetch index page {} via driver".format(path))
assert dvconf
get_chunk = dvconf['get_chunk']
serialized_index_page = get_chunk(dvconf, path)
if serialized_index_page is None:
# failed to get index
@@ -1047,10 +1053,10 @@ def _get_indexed_data_impl( dvconf, blockchain_id, name, raw=False, index_manife
return False, {}
log.debug("Fetch {} via HTTP at {} (cached url: {})".format(name, data_url, cache_hit))
data = get_chunk_via_http(data_url, blockchain_id=blockchain_id)
log.debug("Fetch {} via driver at {} (cached url: {})".format(name, data_url, cache_hit))
data = dvconf['get_chunk'](dvconf, data_url[len('ipfs://'):]) #data = get_chunk_via_http(data_url, blockchain_id=blockchain_id)
if data is None:
log.error("Failed to load {} from {}".format(name, data_url))
log.error("Failed to load {} from {}".format(name, data_url[len]))
if cache_hit:
# might be due to stale cached index data
@@ -1093,6 +1099,7 @@ def get_indexed_data(dvconf, blockchain_id, name, raw=False, index_manifest_url=
# try cache path first
data, pages = _get_indexed_data_impl(dvconf, blockchain_id, name, raw=raw, index_manifest_url=index_manifest_url)
if data == False:
if blockchain_id:
# reading someone else's datastore
@@ -1224,7 +1231,10 @@ def index_get_immutable_handler( dvconf, key, **kw ):
name = 'immutable-{}'.format(key)
name = name.replace('/', r'-2f')
path = '/{}'.format(name)
path = '/blockstack/{}/{}'.format(
dvconf['driver_info']['blockstack_id'],
name
)
return get_indexed_data(dvconf, blockchain_id, path, index_manifest_url=index_manifest_url)
@@ -1250,7 +1260,8 @@ def index_get_mutable_handler( dvconf, url, default_get_data=http_get_data, **kw
if urltype == 'blockstack':
# get via index
data_id = '/' + urlres.replace('/', r'-2f')
urlres = '/' + urlres
data_id = os.path.dirname(urlres) + '/' + os.path.basename(urlres).replace('/', r'-2f')
return get_indexed_data(dvconf, blockchain_id, data_id, index_manifest_url=index_manifest_url)
else:
@@ -1269,7 +1280,10 @@ def index_put_immutable_handler( dvconf, key, data, txid, **kw ):
name = 'immutable-{}'.format(key)
name = name.replace('/', r'-2f')
path = '/{}'.format(name)
path = '/blockstack/{}/{}'.format(
dvconf['driver_info']['blockstack_id'],
name
)
return put_indexed_data(dvconf, path, data)
@@ -1282,7 +1296,10 @@ def index_put_mutable_handler( dvconf, data_id, data_bin, **kw ):
Return None on error
"""
data_id = urllib.quote( data_id.replace('/', '-2f') )
path = '/{}'.format(data_id)
path = '/blockstack/{}/{}'.format(
dvconf['driver_info']['blockstack_id'],
data_id,
)
return put_indexed_data(dvconf, path, data_bin)
@@ -1297,7 +1314,10 @@ def index_delete_immutable_handler( dvconf, key, txid, sig_key_txid, **kw ):
"""
name = 'immutable-{}'.format(key)
name = name.replace('/', r'-2f')
path = '/{}'.format(name)
path = '/blockstack/{}/{}'.format(
dvconf['driver_info']['blockstack_id'],
name
)
return delete_indexed_data(dvconf, path)
@@ -1306,8 +1326,8 @@ def index_delete_mutable_handler( dvconf, data_id, signature, **kw ):
"""
Delete by data ID
"""
data_id = data_id.replace('/', r'-2f')
path = '/{}'.format(data_id)
data_id = '/' + data_id[len('ipfs://'):]
path = os.path.dirname(data_id) + '/' + os.path.basename(data_id).replace('/', r'-2f')
return delete_indexed_data(dvconf, path)

View File

@@ -34,7 +34,8 @@ from common_ipfs import get_logger, driver_config, DEBUG, \
index_get_manifest_page_path, index_insert, put_indexed_data, \
get_indexed_data, index_put_mutable_handler, \
index_get_immutable_handler, index_get_mutable_handler, \
index_put_immutable_handler, index_make_mutable_url
index_put_immutable_handler, index_make_mutable_url, \
index_delete_immutable_handler, index_delete_mutable_handler
from ConfigParser import SafeConfigParser
log = get_logger("blockstack-storage-driver-ipfs")
@@ -47,23 +48,21 @@ IPFS_DEFAULT_COMPRESS = False
INDEX_DIRNAME = 'index'
DVCONF = None
ipfs_api = None
# def ipfs_key_gen(key):
# """
# We need a custom wrapper for this API endpoint, because the ipfsapi
# does not provide it, yet. Instead of checking whether a key exists
# before creating it, we create it, and ignore the error that throws
# if it exists already.
# """
def ipfs_key_gen(key):
"""
We need a custom wrapper for this API endpoint, because the ipfsapi
does not provide it, yet. Instead of checking whether a key exists
before creating it, we create it, and ignore the error that throws
if it exists already.
"""
try:
r = ipfs_api._client.request('/key/gen', (key,), decoder='json',
opts={'type':'rsa','size':'2048'})
except ipfsapi.exceptions.ErrorResponse:
# An exception is thrown when the key already exists, we ignore it
pass
# try:
# r = ipfs_api._client.request('/key/gen', (key,), decoder='json',
# opts={'type':'rsa','size':'2048'})
# except ipfsapi.exceptions.ErrorResponse:
# # An exception is thrown when the key already exists, we ignore it
# pass
def ipfs_put_chunk( dvconf, chunk_buf, chunk_path ):
@@ -81,18 +80,20 @@ def ipfs_put_chunk( dvconf, chunk_buf, chunk_path ):
# )
# chunk_path = os.path.join(base_path, os.path.basename(chunk_path))
try:
ipfs_api.files_mkdir(os.path.dirname(chunk_path), parents = True)
dvconf['driver_info']['api'].files_mkdir(os.path.dirname(chunk_path), parents = True)
except Exception, e:
log.error('Failed to create {}'.format(base_path))
log.error('Failed to create {}'.format(chunk_path))
log.exception(e)
rc = False
else:
try:
r = ipfs_api.files_write( chunk_path,
r = dvconf['driver_info']['api'].files_write( chunk_path,
io.BytesIO(str(chunk_buf)),
create = True )
h = ipfs_api.files_stat (chunk_path)['Hash']
h = dvconf['driver_info']['api'].files_stat (chunk_path)['Hash']
rc = 'ipfs://{}'.format(h)
log.debug("{} available at {}".format(chunk_path, rc))
#rc = 'ipfs:/{}'.format(chunk_path)
@@ -175,11 +176,13 @@ def ipfs_get_chunk(dvconf, chunk_path):
data = None
compressed_data = None
log.debug('Getting chunk at {}'.format(chunk_path))
try:
compressed_data = ipfs_api.files_read(chunk_path)
compressed_data = dvconf['driver_info']['api'].files_read(chunk_path)
except:
try:
compressed_data = ipfs_api.cat(chunk_path)
compressed_data = dvconf['driver_info']['api'].cat(chunk_path)
except Exception, e:
log.error("Failed to read file '%s'" % chunk_path)
log.exception(e)
@@ -216,25 +219,19 @@ def ipfs_get_chunk(dvconf, chunk_path):
# return data
def ipfs_delete_chunk( chunk_path, is_mutable ):
def ipfs_delete_chunk(dvconf, chunk_path):
"""
Delete a chunk of data from IPFS.
"""
if is_mutable:
try:
ipfs_api.files_rm( chunk_path )
except Exception, e:
log.error("Failed to delete file '%s'" % data_id )
log.exception(e)
return False
else:
try:
ipfs_api.pin_rm( chunk_path )
except Exception, e:
log.error("Failed to delete '%s'" % data_hash )
log.exception(e)
return False
try:
dvconf['driver_info']['api'].files_rm(chunk_path)
except Exception, e:
try:
dvconf['driver_info']['api'].pin_rm(chunk_path)
except Exception, e:
log.error('Failed to delete {}'.format(chunk_path) )
log.exception(e)
return False
return True
@@ -261,7 +258,7 @@ def storage_init(conf, index=False, force_index=False, **kwargs):
"""
Initialize IPFS storage driver
"""
global DVCONF, ipfs_api
global DVCONF
ipfs_server = IPFS_DEFAULT_SERVER
ipfs_port = IPFS_DEFAULT_PORT
@@ -302,6 +299,8 @@ def storage_init(conf, index=False, force_index=False, **kwargs):
log.error("Blockstack.id is missing to initalize IPFS storage driver")
return False
ipfs_api = ipfsapi.connect( ipfs_server, ipfs_port )
DVCONF = driver_config(
driver_name = 'ipfs',
config_path = config_path,
@@ -310,14 +309,12 @@ def storage_init(conf, index=False, force_index=False, **kwargs):
delete_chunk = ipfs_delete_chunk,
driver_info={
'blockstack_id': blockstack_id,
'api': ipfs_api,
},
#index_stem='/blockstack/{}/{}'.format(blockstack_id, INDEX_DIRNAME),
index_stem=INDEX_DIRNAME,
index_stem='/blockstack/{}/{}'.format(blockstack_id,INDEX_DIRNAME),
compress=ipfs_compress,
)
ipfs_api = ipfsapi.connect( ipfs_server, ipfs_port )
d = '/blockstack/'+blockstack_id
try:
@@ -360,7 +357,7 @@ def make_mutable_url( data_id, **kw ):
"""
Get data by URL
"""
return index_make_mutable_url(None, data_id, scheme='ipfs')
return index_make_mutable_url(DVCONF, None, data_id, scheme='ipfs')
# blockstack_id = kw.get('fqu', None)
# if blockstack_id is None:
# return 'ipfs://blockstack/' + data_id.replace( "/", r"\x2f" )
@@ -374,7 +371,11 @@ def get_immutable_handler( key, **kw ):
Get data by hash
"""
#return ipfs_get_chunk_immutable(DVCONF, key)
kw.pop('fqu')
try:
kw.pop('fqu')
except:
pass
return index_get_immutable_handler(DVCONF, key, **kw)
#return ipfs_index_get_immutable_handler(DVCONF, key, **kw)
@@ -385,7 +386,10 @@ def get_mutable_handler( url, **kw ):
Get data by dynamic hash
"""
#url = url.replace('/', r'-2f')
kw.pop('fqu')
try:
kw.pop('fqu')
except:
pass
log.debug(url)
return index_get_mutable_handler(DVCONF, url, **kw)
#return ipfs_get_chunk(DVCONF, url)
@@ -405,38 +409,40 @@ def put_mutable_handler( data_id, data_txt, **kw ):
return index_put_mutable_handler(DVCONF, data_id, data_txt, **kw)
def delete_immutable_handler( data_hash, txid, tombstone, **kw ):
def delete_immutable_handler( key, txid, sig_key_txid, **kw ):
"""
Delete by hash
"""
return ipfs_delete_chunk( data_hash, False )
return index_delete_immutable_handler(DVCONF, key, txid, sig_key_txid, **kw)
#return ipfs_delete_chunk( data_hash, False )
def delete_mutable_handler( data_id, tombstone, **kw ):
def delete_mutable_handler( data_id, signature, **kw ):
"""
Delete by dynamic hash
"""
return ipfs_delete_chunk( data_id, True )
return index_delete_mutable_handler(DVCONF, data_id, signature, **kw)
#return ipfs_delete_chunk( data_id, True )
def hash_data( d ):
# def hash_data( d ):
h = None
# h = None
if DVCONF['compress']:
try:
h = ipfs_api.add_str(compress_chunk(d), opts={'only-hash':True})
except Exception, e:
log.error("Failed to get hash for '%s'" % d )
log.exception(e)
else:
try:
h = ipfs_api.add_str(d, opts={'only-hash':True})
except Exception, e:
log.error("Failed to get hash for '%s'" % d )
log.exception(e)
# if DVCONF['compress']:
# try:
# h = ipfs_api.add_str(compress_chunk(d), opts={'only-hash':True})
# except Exception, e:
# log.error("Failed to get hash for '%s'" % d )
# log.exception(e)
# else:
# try:
# h = ipfs_api.add_str(d, opts={'only-hash':True})
# except Exception, e:
# log.error("Failed to get hash for '%s'" % d )
# log.exception(e)
return h
# return h
if __name__ == "__main__":
@@ -446,7 +452,7 @@ if __name__ == "__main__":
import keylib
import json
import virtualchain
#from virtualchain.lib.hashing import hex_hash160
from virtualchain.lib.hashing import hex_hash160
from blockstack_client.storage import parse_mutable_data, \
serialize_mutable_data
@@ -474,6 +480,9 @@ if __name__ == "__main__":
["empty_string", "", 4, "unused", None],
]
def hash_data( d ):
return hex_hash160( d )
rc = storage_init(conf, fqu = 'test.id')
if not rc:
raise Exception("Failed to initialize")
@@ -484,7 +493,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
# try to get these profiles
for name in sys.argv[1:]:
prof = get_mutable_handler( make_mutable_url( name ) )
prof = get_mutable_handler( make_mutable_url( name ), index_manifest_url=index_manifest_url, blockchain_id='test.id' )
if prof is None:
raise Exception("Failed to get %s" % name)