This commit is contained in:
Jude Nelson
2015-08-16 21:28:25 -04:00
9 changed files with 171 additions and 101 deletions

View File

@@ -7,6 +7,8 @@ This package contains the client library for Blockstore. It talks to blockstore
The blockstore-cli currently supports the following commands:
* delete_immutable
* delete_mutable
* get_immutable
* get_mutable
* getindex

View File

@@ -61,8 +61,8 @@ def get_sorted_commands():
'put_immutable', 'get_mutable', 'get_immutable',
'lookup', 'getindex']
for x in sorted(command_list):
print x
for cmd in sorted(command_list):
print cmd
def run_cli():
@@ -79,15 +79,15 @@ def run_cli():
parser.add_argument(
'--blockstored-server',
help="""the hostname or IP address of the blockstored RPC server (default: {})""".format(config.BLOCKSTORED_SERVER))
help="""the hostname/IP of server (default: {})""".format(config.BLOCKSTORED_SERVER))
parser.add_argument(
'--blockstored-port', type=int,
help="""the blockstored RPC port to connect to (default: {})""".format(config.BLOCKSTORED_PORT))
help="""the server port to connect to (default: {})""".format(config.BLOCKSTORED_PORT))
parser.add_argument(
'--txid', type=str,
help="the transaction hash for a partially-failed storage operation")
help="tx hash of a partially-failed storage operation")
subparsers = parser.add_subparsers(
dest='action',
@@ -126,7 +126,7 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'get_immutable',
help='<name> <hash> | Get immutable data from the storage providers, and verify that the named user wrote it.')
help='<name> <hash> | get immutable data from storage')
subparser.add_argument(
'name', type=str,
help='the name of the user')
@@ -137,7 +137,7 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'get_mutable',
help='<name> <data_id> | Get mutable data from the storage providers, and verify that the named user wrote it.')
help='<name> <data_id> | get mutable data from storage')
subparser.add_argument(
'name', type=str,
help='the name associated with the data')
@@ -168,8 +168,8 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'namespace_begin',
help='<namespace_id> <privatekey> | begin the namespace, completing its definition and opening it for registration by other parties.')
'namespace_preorder',
help='preorder a namespace and claim the name')
subparser.add_argument(
'namespace_id', type=str,
help='the human-readable namespace identifier')
@@ -179,8 +179,8 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'namespace_define',
help='<namespace_id> <lifetime> <base_name_cost> <cost_decay_rate> <privatekey> | define a namespace\'s parameters, in preparation for importing names.')
'namespace_reveal',
help='define a namespace\'s parameters once preorder succeeds')
subparser.add_argument(
'namespace_id', type=str,
help='the human-readable namespace identifier')
@@ -199,8 +199,8 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'namespace_preorder',
help='<namespace_id> <privatekey> | preorder a namespace, in order to claim the namespace ID and begin populating it.')
'namespace_ready',
help='open namespace for registrations')
subparser.add_argument(
'namespace_id', type=str,
help='the human-readable namespace identifier')
@@ -227,7 +227,7 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'put_immutable',
help='<name> <data> <privatekey> | Store immutable data into the storage providers, creating it if it does not exist.')
help='store immutable data into storage')
subparser.add_argument(
'name', type=str,
help='the name that owns this data')
@@ -280,7 +280,7 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'transfer',
help='<name> <address> <keepdata> <privatekey> | transfer a name')
help='transfer a name')
subparser.add_argument(
'name', type=str,
help='the name that you want to register/claim')
@@ -297,7 +297,7 @@ def run_cli():
# ------------------------------------
subparser = subparsers.add_parser(
'update',
help='<name> <storage_index_json> <privatekey> | update storage index data and store it into the storage providers')
help='update storage index data and store it')
subparser.add_argument(
'name', type=str,
help='the name that you want to update')
@@ -352,15 +352,15 @@ def run_cli():
result = client.namespace_preorder(str(args.namespace_id),
str(args.privatekey))
elif args.action == 'namespace_define':
result = client.namespace_define(str(args.namespace_id),
elif args.action == 'namespace_reveal':
result = client.namespace_reveal(str(args.namespace_id),
int(args.lifetime),
int(args.base_name_cost),
float(args.cost_decay_rate),
str(args.privatekey))
elif args.action == 'namespace_begin':
result = client.namespace_begin(str(args.namespace_id),
elif args.action == 'namespace_ready':
result = client.namespace_ready(str(args.namespace_id),
str(args.privatekey))
elif args.action == 'put_mutable':

View File

@@ -530,7 +530,7 @@ def namespace_preorder(namespace_id, privatekey, proxy=None):
return proxy.namespace_preorder(namespace_id, privatekey)
def namespace_define(namespace_id, lifetime, base_name_cost, cost_decay_rate,
def namespace_reveal(namespace_id, lifetime, base_name_cost, cost_decay_rate,
privatekey, proxy=None):
"""
namespace_reveal
@@ -543,7 +543,7 @@ def namespace_define(namespace_id, lifetime, base_name_cost, cost_decay_rate,
cost_decay_rate, privatekey)
def namespace_begin(namespace_id, privatekey, proxy=None):
def namespace_ready(namespace_id, privatekey, proxy=None):
"""
namespace_ready
"""

View File

@@ -91,7 +91,7 @@ def get_immutable_handler( key ):
return None
def get_mutable_handler( data_id ):
def get_mutable_handler( url ):
"""
Local disk implementation of the get_mutable_handler API call.
Given a route URL to data, return the data itself.
@@ -102,9 +102,12 @@ def get_mutable_handler( data_id ):
global MUTABLE_STORAGE_ROOT
# replace all /'s with \x2f's
data_id_noslash = data_id.replace( "/", r"\x2f" )
path = os.path.join( MUTABLE_STORAGE_ROOT, data_id_noslash )
if not url.startswith( "file://" ):
# invalid
return None
# get path from URL
path = url[ len("file://"): ]
try:
with open( path, "r" ) as f:

View File

@@ -161,7 +161,7 @@ BITCOIN_ADDRESS = BitcoinAddressType()
EMAIL = EmailType()
OPTIONAL = OptionalField
def schema_match( schema, obj, allow_extra=True, verbose=True ):
def schema_match( schema, obj, allow_extra=True, verbose=False ):
"""
Recursively verify that the given object has the given schema.
@@ -176,7 +176,7 @@ def schema_match( schema, obj, allow_extra=True, verbose=True ):
# object is literal?
if type(obj) != types.DictType:
if obj != schema:
if not schema.valid( obj ):
debug( "Literal '%s' does not match '%s'" % (obj, schema) )
return False
@@ -214,7 +214,7 @@ def schema_match( schema, obj, allow_extra=True, verbose=True ):
sub_schema = schema[field]
is_match = False
# debug("%s =~ %s" % (sub_object, sub_schema))
debug("%s =~ %s" % (sub_object, sub_schema))
if type(sub_schema) != types.DictType:
@@ -228,7 +228,7 @@ def schema_match( schema, obj, allow_extra=True, verbose=True ):
elif isinstance( sub_schema, types.ListType ) and len(sub_schema) == 1:
# array of objects with a given schema
sub_schema = sub_object[0]
sub_schema = sub_schema[0]
if not isinstance( sub_object, types.ListType ):
is_match = False
@@ -238,21 +238,21 @@ def schema_match( schema, obj, allow_extra=True, verbose=True ):
for so in sub_object:
# match each object in the list to this schema
is_match = schema_match( sub_schema, so )
is_match = schema_match( sub_schema, so, verbose=verbose )
if not is_match:
debug("%s is not %s" (sub_object, sub_schema))
debug("[%s] is not [%s]" % (so, sub_schema))
break
else:
# check type
is_match = sub_schema.valid( sub_object ) # (type(sub_schema) == type(sub_object))
if is_match is False:
debug( "%s is not %s" % (sub_object, sub_schema) )
# invalid schema
raise Exception("Invalid schema: '%s' ('%s') is neither a SchemaType nor a list of SchemaType instances" % (sub_schema, field))
else:
# recursively verify match
is_match = schema_match( sub_schema, sub_object )
is_match = schema_match( sub_schema, sub_object, verbose=verbose )
if not is_match:
debug( "%s is not %s" % (sub_object, sub_schema) )
if not is_match:
debug( "Mismatch on key '%s'" % literal)

View File

@@ -105,7 +105,7 @@ def make_mutable_urls( data_id ):
urls.append( new_url )
return new_urls
return urls
def mutable_data_route( data_id, data_urls, writer_pubkey=None ):
@@ -130,7 +130,7 @@ def mutable_data_route( data_id, data_urls, writer_pubkey=None ):
return None
if writer_pubkey is not None:
if writer_pubkey not in [types.StringType, types.UnicodeType]:
if type(writer_pubkey) not in [types.StringType, types.UnicodeType]:
log.error("Writer public key must be encoded as a string (got '%s')" % str(writer_pubkey))
return None
@@ -139,7 +139,7 @@ def mutable_data_route( data_id, data_urls, writer_pubkey=None ):
"urls": data_urls
}
if write_pubkey is not None:
if writer_pubkey is not None:
route['pubkey'] = writer_pubkey
return route
@@ -172,7 +172,29 @@ def mutable_data_route_parse( route_json_text ):
return route_object
def mutable_data( data_id, data_text, nonce, privkey=None, sig=None ):
def mutable_data_encode( data ):
"""
Encode the 'data' field of a mutable data dict, making
it suitable for storing and printing to a console.
Call this method after mutable_data( encode=False )
to encode the data.
"""
data['data'] = base64.b64encode( data['data'] )
def mutable_data_decode( data ):
"""
Decode the 'data' field of a mutable data dict.
Call this method after mutable_data_parse( decode=False )
to recover the data.
"""
data['data'] = base64.b64decode( data['data'] )
def mutable_data( data_id, data_text, nonce, privkey=None, sig=None, encode=True ):
"""
Generate a mutable data dict from the given information.
If sig is given, use sig
@@ -181,10 +203,13 @@ def mutable_data( data_id, data_text, nonce, privkey=None, sig=None ):
"""
data = {
"id": str(data_id),
"data": base64.b64encode( str(data_text) ),
"data": data_text,
"nonce": int(nonce)
}
if encode:
mutable_data_encode( data )
if sig is not None:
data['sig'] = sig
@@ -197,12 +222,14 @@ def mutable_data( data_id, data_text, nonce, privkey=None, sig=None ):
return data
def mutable_data_parse( mutable_data_json_text ):
def mutable_data_parse( mutable_data_json_text, decode=True ):
"""
Given the serialized JSON for a piece of mutable data,
parse it into a JSON document.
If decode is True, then decode the data string as well.
Return the parsed JSON dict on success
Return None on error
"""
@@ -236,11 +263,12 @@ def mutable_data_parse( mutable_data_json_text ):
return None
# decode data
data_object['data'] = base64.b64decode( data_object['data'] )
if decode:
mutable_data_decode( data_object )
return data_object
def register_storage( storage_impl ):
"""
Given a class, module, etc. with the methods,
@@ -309,7 +337,7 @@ def get_mutable_data_route_hash( route ):
Return the hash on success
Return None on error
"""
route_json = parsing.json_stable_serialize( route )
route_json = json_stable_serialize( route )
if route_json is None:
return None
@@ -361,6 +389,41 @@ def get_mutable_data_route( data_id, route_hash ):
return None
def sign_raw_data( raw_data, privatekey ):
"""
Sign a string of data.
Return a base64-encoded signature.
"""
data_hash = get_data_hash( raw_data )
data_sig_bin = pybitcointools.ecdsa_raw_sign( data_hash, privatekey )
return pybitcointools.encode_sig( data_sig_bin[0], data_sig_bin[1], data_sig_bin[2] )
def verify_raw_data( raw_data, pubkey, sigb64 ):
"""
Verify the signature over a string, given the public key
and base64-encoded signature.
Return True on success.
Return False on error.
"""
data_hash = get_data_hash( raw_data )
return pybitcointools.ecdsa_raw_verify( data_hash, pybitcointools.decode_sig( sigb64 ), pubkey )
def sign_mutable_data( data, privatekey ):
"""
Given a mutable data dict and an ECDSA private key,
generate and return a base64-encoded signature over the fields that matter (i.e. the data_id, nonce, and data).
Return the signature (base64-encoded)
"""
data_str = str(data['id']) + str(data['nonce']) + str(data['data'])
return sign_raw_data( data_str, privatekey )
def verify_mutable_data( data, pubkey ):
"""
Given the data (as a dict) and the base64-encoded signature,
@@ -369,21 +432,18 @@ def verify_mutable_data( data, pubkey ):
"""
sigb64 = data['sig']
data['sig'] = ""
data_text = parsing.json_stable_serialize( data )
data_hash = get_data_hash( data )
data_str = str(data['id']) + str(data['nonce']) + str(data['data'])
rc = pybitcointools.ecdsa_raw_verify( data_hash, pybitcointools.decode_sig( sigb64 ), pubkey )
return rc
return verify_raw_data( data_str, pubkey, sigb64 )
def get_mutable_data( data_route, min_nonce=None, max_nonce=None, nonce_check=None ):
def get_mutable_data( data_route, nonce_min=None, nonce_max=None, nonce_check=None ):
"""
Given a data's route, go fetch the data.
Optionally verify that the nonce in the data returned is within [min_nonce, max_nonce],
or no less than min_nonce, or no greater than max_nonce.
Optionally verify that the nonce in the data returned is within [nonce_min, nonce_max],
or no less than nonce_min, or no greater than nonce_max.
Optionally evaluate nonce with nonce_check, which takes the data structure and returns true if the nonce is valid.
@@ -425,29 +485,31 @@ def get_mutable_data( data_route, min_nonce=None, max_nonce=None, nonce_check=No
# no data
continue
# parse it
data = mutable_data_parse( data_json )
# parse it, but don't decode it yet
data = mutable_data_parse( data_json, decode=False )
if data is None:
log.error("Unparseable data")
continue
# if the route includes a private key, verify it
if pubkey is not None:
if data_pubkey is not None:
rc = verify_mutable_data( data, pubkey )
rc = verify_mutable_data( data, data_pubkey )
if not rc:
log.error("Invalid signature")
continue
# can decode the data now, since we've checked the sig
mutable_data_decode( data )
# verify nonce, if need be
if min_nonce is not None:
if data['nonce'] < min_nonce:
if nonce_min is not None:
if data['nonce'] < nonce_min:
continue
if max_nonce is not None:
if data['nonce'] > max_nonce:
if nonce_max is not None:
if data['nonce'] > nonce_max:
continue
if nonce_check is not None:
@@ -504,24 +566,12 @@ def put_immutable_data( data_text, txid, replication_strategy=REPLICATE_ALL ):
else:
continue
# succeeded
return data_hash
if replication_strategy == REPLICATE_ANY:
# succeeded once
return data_hash
return None
def sign_mutable_data( data, privatekey ):
"""
Given a mutable data dict and an ECDSA private key,
generate and return a base64-encoded signature over the fields that matter (i.e. the data_id, nonce, and data).
Return the signature.
"""
data_str = str(data['id']) + str(data['nonce']) + str(data['data'])
data_hash = get_data_hash( data_str )
data_sig_bin = pybitcointools.ecdsa_raw_sign( data_hash, privatekey )
return pybitcointools.encode_sig( data_sig_bin )
# succeeded everywhere
return data_hash
def put_mutable_data( data, privatekey, replication_strategy=REPLICATE_ALL ):
@@ -545,7 +595,7 @@ def put_mutable_data( data, privatekey, replication_strategy=REPLICATE_ALL ):
sig = sign_mutable_data( data, privatekey )
data['sig'] = sig
data_json = parsing.json_stable_serialize( data )
data_json = json_stable_serialize( data )
for handler in storage_handlers:
@@ -556,7 +606,7 @@ def put_mutable_data( data, privatekey, replication_strategy=REPLICATE_ALL ):
try:
handler.put_mutable_handler( data_id, nonce, sig, data_json )
rc = handler.put_mutable_handler( data_id, nonce, sig, data_json )
except Exception, e:
log.exception( e )
@@ -575,9 +625,12 @@ def put_mutable_data( data, privatekey, replication_strategy=REPLICATE_ALL ):
else:
continue
return True
if replication_strategy == REPLICATE_ANY:
# succeeded once
return True
return False
# succeeded for all storage providers
return True
def delete_immutable_data( data_hash, txid ):
@@ -614,8 +667,7 @@ def delete_mutable_data( data_id, privatekey ):
global storage_handlers
# sign the data_id to prove authenticity to the storage system
signature = sign_mutable_data( data_id, privatekey )
sigb64 = sign_raw_data( data_id, privatekey )
# remove data
for handler in storage_handlers:
@@ -625,7 +677,7 @@ def delete_mutable_data( data_id, privatekey ):
try:
handler.delete_mutable_handler( data_id, signature )
handler.delete_mutable_handler( data_id, sigb64 )
except Exception, e:
log.exception( e )

View File

@@ -164,10 +164,15 @@ def get_mutable_data_route( user, data_id ):
Return the route (as a dict) on success
Return None if not found
"""
if not has_mutable_data_route( user, data_id ):
return None
return user['mutable_data'][data_id]
if not user.has_key('mutable_data'):
return None
for route in user['mutable_data']:
if route['id'] == data_id:
return route
return None
def add_mutable_data_route( user, data_route ):

12
requirements.txt Normal file
View File

@@ -0,0 +1,12 @@
commontools==0.1.0
ecdsa==0.11
kademlia==0.5
pybitcoin==0.8.2
pybitcointools==1.1.15
python-bitcoinrpc==0.1
requests==2.7.0
rpcudp==1.0
Twisted==15.3.0
u-msgpack-python==2.1
utilitybelt==0.2.6
zope.interface==4.1.2

View File

@@ -2,13 +2,13 @@
from setuptools import setup, find_packages
setup(
name='blockstore',
name='blockstore-client',
version='0.0.3',
url='https://github.com/blockstack/blockstore-client',
license='MIT',
author='Onename',
license='GPLv3',
author='Blockstack.org',
author_email='support@onename.com',
description='Python client library to Blockstore',
description='Python client library for Blockstore',
keywords='blockchain bitcoin btc cryptocurrency name key value store data',
packages=find_packages(),
scripts=['bin/blockstore-cli'],
@@ -16,16 +16,12 @@ setup(
zip_safe=False,
include_package_data=True,
install_requires=[
'pybitcoin>=0.8.3',
'kademlia>=0.2',
'python-bitcoinrpc>=0.1',
'jsonrpc>=1.2',
'utilitybelt>=0.2.2'
'virtualchain>=0.0.1'
'pybitcoin>=0.8.2',
'kademlia>=0.5'
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'License :: GPLv3 License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',