Wrap most long lines: fix all bad hanging indents.
* Fixes many E501s; more work needs to be done.
* Fixes all E127s.
Renelvon committed Sep 13, 2014
1 parent 96132fe commit ee33e49
Showing 14 changed files with 481 additions and 208 deletions.
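
This commit targets two pycodestyle checks: E501 (line too long) and E127 (continuation line over-indented for visual indent). Below is a minimal before/after sketch of the pattern applied throughout the diff; the function and argument names are illustrative, not from the codebase:

def some_function(a, b, c):
    return a + b + c

first, second, third = 1, 2, 3

# Before: the continuation line drifts past the opening parenthesis,
# which pycodestyle reports as E127; with real names the call often
# also runs past 79 columns (E501).
result = some_function(first, second,
                           third)

# After: a hanging indent, one argument per line, with the closing
# parenthesis on its own line.
result = some_function(
    first,
    second,
    third
)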
4 changes: 3 additions & 1 deletion node/arithmetic.py
@@ -103,7 +103,9 @@ def point_to_hex(p):


def multiply(privkey, pubkey):
return point_to_hex(base10_multiply(hex_to_point(pubkey), decode(privkey, 16)))
return point_to_hex(
base10_multiply(hex_to_point(pubkey), decode(privkey, 16))
)


def privtopub(privkey):
64 changes: 41 additions & 23 deletions node/connection.py
@@ -80,7 +80,8 @@ def cb(stream, msg):
stream.on_recv_stream(cb)
except Exception as e:
self.log.error(e)
# shouldn't we raise the exception here???? I think not doing this could cause buggy behavior on top
# Shouldn't we raise the exception here?
# I think not doing this could cause buggy behavior on top.
raise


@@ -95,13 +96,14 @@ def __init__(self, transport, address, pub=None, guid=None, nickname=None,
self.port = urlparse(address).port
self.nickname = nickname
self.sin = sin
self.peer_alive = False # not used for any logic, might remove it later if unnecessary
self.peer_alive = False # unused; might remove it later if unnecessary
self.guid = guid

PeerConnection.__init__(self, transport, address)

self.log = logging.getLogger('[%s] %s' % (transport.market_id,
self.__class__.__name__))
self.log = logging.getLogger(
'[%s] %s' % (transport.market_id, self.__class__.__name__)
)

def start_handshake(self, handshake_cb=None):
if self.check_port():
@@ -124,11 +126,13 @@ def cb(msg):
for idx, peer in enumerate(self.transport.dht.activePeers):
if peer.guid == self.guid or peer.address == self.address:
self.transport.dht.activePeers[idx] = self
self.transport.dht.add_peer(self.transport,
self.address,
self.pub,
self.guid,
self.nickname)
self.transport.dht.add_peer(
self.transport,
self.address,
self.pub,
self.guid,
self.nickname
)
return

self.transport.dht.activePeers.append(self)
@@ -137,16 +141,23 @@ def cb(msg):
if handshake_cb is not None:
handshake_cb()

self.send_raw(json.dumps({'type': 'hello',
'pubkey': self.transport.pubkey,
'uri': self.transport.uri,
'senderGUID': self.transport.guid,
'senderNick': self.transport.nickname}), cb)
self.send_raw(
json.dumps({
'type': 'hello',
'pubkey': self.transport.pubkey,
'uri': self.transport.uri,
'senderGUID': self.transport.guid,
'senderNick': self.transport.nickname
}),
cb
)
else:
self.log.error('CryptoPeerConnection.check_port() failed.')

def __repr__(self):
return '{ guid: %s, ip: %s, port: %s, pubkey: %s }' % (self.guid, self.ip, self.port, self.pub)
return '{ guid: %s, ip: %s, port: %s, pubkey: %s }' % (
self.guid, self.ip, self.port, self.pub
)

def generate_sin(self, guid):
return obelisk.EncodeBase58Check('\x0F\x02%s' + guid.decode('hex'))
@@ -182,10 +193,8 @@ def sign(self, data):
def encrypt(self, data):
try:
if self.pub is not None:
result = ec.ECC(curve='secp256k1').encrypt(data,
hexToPubkey(self.pub))

return result
hexkey = hexToPubkey(self.pub)
return ec.ECC(curve='secp256k1').encrypt(data, hexkey)
else:
self.log.error('Public Key is missing')
return False
@@ -205,7 +214,9 @@ def send(self, data, callback=lambda msg: None):
data['pubkey'] = self.transport.pubkey
data['senderNick'] = self.transport.nickname

self.log.debug('Sending to peer: %s %s' % (self.ip, pformat(data)))
self.log.debug(
'Sending to peer: %s %s' % (self.ip, pformat(data))
)

if self.pub == '':
self.log.info('There is no public key for encryption')
@@ -215,12 +226,19 @@

try:
if data is not None:
encoded_data = data.encode('hex')
self.send_raw(json.dumps({'sig': signature.encode('hex'), 'data': encoded_data}), callback)
self.send_raw(
json.dumps({
'sig': signature.encode('hex'),
'data': data.encode('hex')
}),
callback
)
else:
self.log.error('Data was empty')
except Exception as e:
self.log.error("Was not able to encode empty data: %s" % e)
self.log.error(
"Was not able to encode empty data: %s" % e
)
else:
self.log.error('Peer is not available for sending data')
else:
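
For orientation, here is a minimal sketch of the signed wire envelope that send() builds above: the signature and payload are hex-encoded and wrapped in JSON. The make_envelope helper and the hashlib stand-in signer are hypothetical, for illustration only; in connection.py the signature comes from ec.ECC. Python 2, matching the codebase:

import hashlib
import json

def make_envelope(data, sign):
    # 'sign' stands in for the peer's ECC signer; it must return a
    # raw byte-string signature over 'data'.
    return json.dumps({
        'sig': sign(data).encode('hex'),
        'data': data.encode('hex')
    })

# Hypothetical signer: a plain SHA-256 digest instead of a real
# ECC signature.
print make_envelope('hello world', lambda d: hashlib.sha256(d).digest())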
38 changes: 26 additions & 12 deletions node/constants.py
@@ -1,33 +1,47 @@
# ######## KADEMLIA CONSTANTS ###########

# Small number Representing the degree of parallelism in network calls
# Small number representing the degree of
# parallelism in network calls
alpha = 3

# Maximum number of contacts stored in a bucket; this should be an even number
# Maximum number of contacts stored in a bucket
# NOTE: Should be an even number
k = 80

# Timeout for network operations (in seconds)
# Timeout for network operations
# [seconds]
rpcTimeout = 0.1

# Delay between iterations of iterative node lookups (for loose parallelism) (in seconds)
# Delay between iterations of iterative node lookups
# (for loose parallelism)
# [seconds]
iterativeLookupDelay = rpcTimeout / 2

# If a k-bucket has not been used for this amount of time, refresh it (in seconds)
# If a k-bucket has not been used for this amount of time, refresh it.
# [seconds]
refreshTimeout = 60 * 60 * 1000 # 1 hour
# The interval at which nodes replicate (republish/refresh) data they are holding

# The interval at which nodes replicate (republish/refresh)
# the data they hold
# [seconds]
replicateInterval = refreshTimeout
# The time it takes for data to expire in the network; the original publisher of the data
# will also republish the data at this time if it is still valid

# The time it takes for data to expire in the network;
# the original publisher of the data will also republish
# the data at this time if it is still valid
# [seconds]
dataExpireTimeout = 86400 # 24 hours

# ####### IMPLEMENTATION-SPECIFIC CONSTANTS ###########

# The interval in which the node should check its whether any buckets need refreshing,
# or whether any data needs to be republished (in seconds)
# The interval in which the node should check whether any buckets
# need refreshing or whether any data needs to be republished
# [seconds]
checkRefreshInterval = refreshTimeout / 5

# Max size of a single UDP datagram, in bytes. If a message is larger than this, it will
# be spread accross several UDP packets.
# Max size of a single UDP datagram.
# Any larger message will be spread across several UDP packets.
# [bytes]
udpDatagramMaxSize = 8192 # 8 KB

DB_PATH = "db/ob.db"
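
To make the timing relationships concrete, here is a hypothetical maintenance loop (not part of the diff) showing how checkRefreshInterval and refreshTimeout are meant to interact; the bucket objects, their lastAccessed attribute, and the refresh callback are assumptions for the sketch:

import time

# Values mirror node/constants.py above.
refreshTimeout = 60 * 60 * 1000
checkRefreshInterval = refreshTimeout / 5

def refresh_loop(buckets, refresh_bucket):
    # Every checkRefreshInterval, refresh any k-bucket that has not
    # been used within refreshTimeout (each bucket is assumed to
    # expose a lastAccessed timestamp in seconds).
    while True:
        now = time.time()
        for bucket in buckets:
            if now - bucket.lastAccessed > refreshTimeout:
                refresh_bucket(bucket)
        time.sleep(checkRefreshInterval)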
140 changes: 120 additions & 20 deletions node/datastore.py
@@ -49,8 +49,10 @@ def __getitem__(self, key):
""" Get the value identified by C{key} """

def __setitem__(self, key, value):
""" Convenience wrapper to C{setItem}; this accepts a tuple in the
format: (value, lastPublished, originallyPublished, originalPublisherID) """
"""
Convenience wrapper to C{setItem}; this accepts a tuple in the format:
(value, lastPublished, originallyPublished, originalPublisherID).
"""
self.setItem(key, *value)

def __delitem__(self, key):
Expand All @@ -61,7 +63,12 @@ class DictDataStore(DataStore):
""" A datastore using an in-memory Python dictionary """
def __init__(self):
# Dictionary format:
# { <key>: (<value>, <lastPublished>, <originallyPublished> <originalPublisherID>) }
# <key>: (
# <value>,
# <lastPublished>,
# <originallyPublished>,
# <originalPublisherID>
# )
self.dict = {}
self.log = logging.getLogger(self.__class__.__name__)

@@ -96,7 +103,12 @@ def setItem(self, key, value, lastPublished, originallyPublished, originalPublis
pair to the current time
"""
print 'Here is the key: %s' % key
self.dict[key] = (value, lastPublished, originallyPublished, originalPublisherID)
self.dict[key] = (
value,
lastPublished,
originallyPublished,
originalPublisherID
)

def __getitem__(self, key):
""" Get the value identified by C{key} """
@@ -150,18 +162,62 @@ def originalPublishTime(self, key):

def setItem(self, key, value, lastPublished, originallyPublished, originalPublisherID, market_id=1):

rows = self.db.selectEntries("datastore", {"key": key, "market_id": market_id})
rows = self.db.selectEntries(
"datastore",
{"key": key, "market_id": market_id}
)

if len(rows) == 0:
# FIXME: Wrap text.
self.db.insertEntry("datastore", {'key': key, 'market_id': market_id, 'key': key, 'value': value, 'lastPublished': lastPublished, 'originallyPublished': originallyPublished, 'originalPublisherID': originalPublisherID, 'market_id': market_id})
self.db.insertEntry(
"datastore",
{
'key': key,
'value': value,
'lastPublished': lastPublished,
'originallyPublished': originallyPublished,
'originalPublisherID': originalPublisherID,
'market_id': market_id
}
)
else:
self.db.updateEntries("datastore", {'key': key, 'market_id': market_id}, {'key': key, 'value': value, 'lastPublished': lastPublished, 'originallyPublished': originallyPublished, 'originalPublisherID': originalPublisherID, 'market_id': market_id})

# if self._cursor.fetchone() == None:
# self._cursor.execute('INSERT INTO data(key, value, lastPublished, originallyPublished, originalPublisherID) VALUES (?, ?, ?, ?, ?)', (encodedKey, buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)), lastPublished, originallyPublished, originalPublisherID))
self.db.updateEntries(
"datastore",
{
'key': key,
'market_id': market_id
},
{
'key': key,
'value': value,
'lastPublished': lastPublished,
'originallyPublished': originallyPublished,
'originalPublisherID': originalPublisherID,
'market_id': market_id
}
)

# if self._cursor.fetchone() is None:
# self._cursor.execute(
# 'INSERT INTO data(key, value, lastPublished, originallyPublished, originalPublisherID) VALUES (?, ?, ?, ?, ?)',
# (
# encodedKey,
# buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)),
# lastPublished,
# originallyPublished,
# originalPublisherID
# )
# )
# else:
# self._cursor.execute('UPDATE data SET value=?, lastPublished=?, originallyPublished=?, originalPublisherID=? WHERE key=?', (buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)), lastPublished, originallyPublished, originalPublisherID, encodedKey))
# self._cursor.execute(
# 'UPDATE data SET value=?, lastPublished=?, originallyPublished=?, originalPublisherID=? WHERE key=?',
# (
# buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)),
# lastPublished,
# originallyPublished,
# originalPublisherID,
# encodedKey
# )
# )

def _dbQuery(self, key, columnName):

@@ -225,17 +281,61 @@ def originalPublishTime(self, key):

def setItem(self, key, value, lastPublished, originallyPublished, originalPublisherID, market_id=1):

rows = self.db.selectEntries("datastore", {"key": key, "market_id": market_id})
rows = self.db.selectEntries(
"datastore",
{"key": key, "market_id": market_id}
)
if len(rows) == 0:
# FIXME: Wrap text.
self.db.insertEntry("datastore", {'key': key, 'market_id': market_id, 'key': key, 'value': value, 'lastPublished': lastPublished, 'originallyPublished': originallyPublished, 'originalPublisherID': originalPublisherID, 'market_id': market_id})
self.db.insertEntry(
"datastore",
{
'key': key,
'value': value,
'lastPublished': lastPublished,
'originallyPublished': originallyPublished,
'originalPublisherID': originalPublisherID,
'market_id': market_id
}
)
else:
self.db.updateEntries("datastore", {'key': key, 'market_id': market_id}, {'key': key, 'value': value, 'lastPublished': lastPublished, 'originallyPublished': originallyPublished, 'originalPublisherID': originalPublisherID, 'market_id': market_id})

# if self._cursor.fetchone() == None:
# self._cursor.execute('INSERT INTO data(key, value, lastPublished, originallyPublished, originalPublisherID) VALUES (?, ?, ?, ?, ?)', (encodedKey, buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)), lastPublished, originallyPublished, originalPublisherID))
self.db.updateEntries(
"datastore",
{
'key': key,
'market_id': market_id
},
{
'key': key,
'value': value,
'lastPublished': lastPublished,
'originallyPublished': originallyPublished,
'originalPublisherID': originalPublisherID,
'market_id': market_id
}
)

# if self._cursor.fetchone() is None:
# self._cursor.execute(
# 'INSERT INTO data(key, value, lastPublished, originallyPublished, originalPublisherID) VALUES (?, ?, ?, ?, ?)',
# (
# encodedKey,
# buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)),
# lastPublished,
# originallyPublished,
# originalPublisherID
# )
# )
# else:
# self._cursor.execute('UPDATE data SET value=?, lastPublished=?, originallyPublished=?, originalPublisherID=? WHERE key=?', (buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)), lastPublished, originallyPublished, originalPublisherID, encodedKey))
# self._cursor.execute(
# 'UPDATE data SET value=?, lastPublished=?, originallyPublished=?, originalPublisherID=? WHERE key=?',
# (
# buffer(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)),
# lastPublished,
# originallyPublished,
# originalPublisherID,
# encodedKey
# )
# )

def _dbQuery(self, key, columnName):

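
The setItem() bodies above implement a select-then-insert-or-update upsert through the project's db wrapper. Here is a minimal equivalent against the standard sqlite3 module, assuming a datastore table with the same columns; the function name and table layout are illustrative:

import time

def upsert_datastore(conn, key, value, publisher_id, market_id=1):
    now = int(time.time())
    cur = conn.cursor()
    cur.execute(
        "SELECT 1 FROM datastore WHERE key = ? AND market_id = ?",
        (key, market_id)
    )
    if cur.fetchone() is None:
        cur.execute(
            "INSERT INTO datastore"
            " (key, value, lastPublished, originallyPublished,"
            " originalPublisherID, market_id)"
            " VALUES (?, ?, ?, ?, ?, ?)",
            (key, value, now, now, publisher_id, market_id)
        )
    else:
        cur.execute(
            "UPDATE datastore"
            " SET value = ?, lastPublished = ?, originalPublisherID = ?"
            " WHERE key = ? AND market_id = ?",
            (value, now, publisher_id, key, market_id)
        )
    conn.commit()

# Usage sketch:
# import sqlite3
# conn = sqlite3.connect('db/ob.db')  # DB_PATH from node/constants.py
# upsert_datastore(conn, 'some-key', 'some-value', 'publisher-guid')

Note one deliberate difference: the sketch leaves originallyPublished untouched on update, whereas the wrapped updateEntries call above overwrites every column; either is defensible, but preserving the original publish time is the more common choice for republished DHT data.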