2016-04-24 09:22:02 +00:00
|
|
|
"""Decoders for GPG v2 data structures."""
|
2016-05-06 19:16:03 +00:00
|
|
|
import base64
|
2016-07-22 19:51:02 +00:00
|
|
|
import copy
|
2016-04-30 18:34:12 +00:00
|
|
|
import functools
|
2016-04-17 19:18:31 +00:00
|
|
|
import hashlib
|
|
|
|
import io
|
|
|
|
import logging
|
|
|
|
import struct
|
|
|
|
|
|
|
|
import ecdsa
|
2016-04-22 18:43:54 +00:00
|
|
|
import ed25519
|
|
|
|
|
2016-07-26 16:37:42 +00:00
|
|
|
from . import protocol
|
2016-04-24 10:04:53 +00:00
|
|
|
from .. import util
|
2016-04-17 19:18:31 +00:00
|
|
|
|
|
|
|
# Module-level logger, named after this module per the project convention.
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
def parse_subpackets(s):
    """Read a length-prefixed subpacket region and split it into subpackets.

    See https://tools.ietf.org/html/rfc4880#section-5.2.3.1 for details.
    """
    region_size = s.readfmt('>H')
    # Re-wrap the region so EOF cleanly terminates the subpacket loop.
    region = util.Reader(io.BytesIO(s.read(region_size)))
    result = []
    while True:
        try:
            size = region.readfmt('B')
        except EOFError:
            return result
        result.append(region.read(size))
|
|
|
|
|
|
|
|
|
|
|
|
def parse_mpi(s):
    """Parse a single multi-precision integer from the stream.

    See https://tools.ietf.org/html/rfc4880#section-3.2 for details.
    """
    bit_count = s.readfmt('>H')
    # The MPI body occupies ceil(bit_count / 8) octets, big-endian.
    octets = bytearray(s.read((bit_count + 7) // 8))
    value = 0
    for octet in octets:
        value = (value << 8) | octet
    return value
|
|
|
|
|
|
|
|
|
2016-05-18 20:07:57 +00:00
|
|
|
def parse_mpis(s, n):
    """Parse multiple MPIs from stream."""
    values = []
    for _ in range(n):
        values.append(parse_mpi(s))
    return values
|
|
|
|
|
|
|
|
|
2016-04-24 07:33:29 +00:00
|
|
|
def _parse_nist256p1_verifier(mpi):
    """Build a NIST P-256 verifier callable from a public-point MPI.

    Returns a (verify_callable, verifying_key) pair.
    """
    prefix, x, y = util.split_bits(mpi, 4, 256, 256)
    # Prefix 0x04 marks an uncompressed SEC1 point encoding.
    assert prefix == 4
    pub_point = ecdsa.ellipticcurve.Point(curve=ecdsa.NIST256p.curve,
                                          x=x, y=y)
    vk = ecdsa.VerifyingKey.from_public_point(point=pub_point,
                                              curve=ecdsa.curves.NIST256p,
                                              hashfunc=hashlib.sha256)

    def _verify(signature, digest):
        # Signature is already an (r, s) pair, so sigdecode is a no-op.
        outcome = vk.verify_digest(signature=signature,
                                   digest=digest,
                                   sigdecode=lambda rs, order: rs)
        log.debug('nist256p1 ECDSA signature is OK (%s)', outcome)

    return _verify, vk
|
2016-04-24 07:33:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _parse_ed25519_verifier(mpi):
    """Build an Ed25519 verifier callable from a prefixed public-key MPI.

    Returns a (verify_callable, verifying_key) pair.
    """
    prefix, value = util.split_bits(mpi, 8, 256)
    # Prefix 0x40 marks the native (compressed) EdDSA point encoding.
    assert prefix == 0x40
    vk = ed25519.VerifyingKey(util.num2bytes(value, size=32))

    def _verify(signature, digest):
        # Concatenate the two 32-byte halves into the raw 64-byte signature.
        raw = b''.join(util.num2bytes(part, size=32) for part in signature)
        outcome = vk.verify(raw, digest)
        log.debug('ed25519 ECDSA signature is OK (%s)', outcome)

    return _verify, vk
|
2016-04-24 07:33:29 +00:00
|
|
|
|
|
|
|
|
2016-04-22 18:43:54 +00:00
|
|
|
# Maps a curve OID (raw DER bytes, per RFC 6637 section 11) to the parser
# that builds a verifier for public keys on that curve.
SUPPORTED_CURVES = {
    b'\x2A\x86\x48\xCE\x3D\x03\x01\x07': _parse_nist256p1_verifier,
    b'\x2B\x06\x01\x04\x01\xDA\x47\x0F\x01': _parse_ed25519_verifier,
}
|
2016-04-17 19:18:31 +00:00
|
|
|
|
2016-05-18 20:07:57 +00:00
|
|
|
# Public-key algorithm IDs (https://tools.ietf.org/html/rfc4880#section-9.1).
RSA_ALGO_IDS = {1, 2, 3}
ELGAMAL_ALGO_ID = 16
DSA_ALGO_ID = 17
# All algorithms parsed here as elliptic-curve keys.
ECDSA_ALGO_IDS = {18, 19, 22}  # {ecdsa, nist256, ed25519}
|
2016-04-30 07:56:15 +00:00
|
|
|
|
2016-04-22 20:37:04 +00:00
|
|
|
|
2016-04-26 18:35:05 +00:00
|
|
|
def _parse_literal(stream):
|
|
|
|
"""See https://tools.ietf.org/html/rfc4880#section-5.9 for details."""
|
|
|
|
p = {'type': 'literal'}
|
|
|
|
p['format'] = stream.readfmt('c')
|
|
|
|
filename_len = stream.readfmt('B')
|
|
|
|
p['filename'] = stream.read(filename_len)
|
|
|
|
p['date'] = stream.readfmt('>L')
|
|
|
|
p['content'] = stream.read()
|
|
|
|
p['_to_hash'] = p['content']
|
|
|
|
return p
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_embedded_signatures(subpackets):
    """Yield parsed signatures found in 'embedded signature' subpackets."""
    for blob in subpackets:
        raw = bytearray(blob)
        # Subpacket type 32 is an embedded signature:
        # https://tools.ietf.org/html/rfc4880#section-5.2.3.26
        if raw[0] != 32:
            continue
        yield _parse_signature(util.Reader(io.BytesIO(raw[1:])))
|
|
|
|
|
|
|
|
|
|
|
|
def _parse_signature(stream):
    """Parse a GPG signature packet from *stream*.

    See https://tools.ietf.org/html/rfc4880#section-5.2 for details.
    Returns a dict of parsed fields plus internal '_to_hash' and
    '_is_custom' entries used during verification.
    """
    p = {'type': 'signature'}

    # Capture exactly the bytes covered by the signature digest:
    # the fixed header fields and the hashed subpacket area.
    to_hash = io.BytesIO()
    with stream.capture(to_hash):
        p['version'] = stream.readfmt('B')
        p['sig_type'] = stream.readfmt('B')
        p['pubkey_alg'] = stream.readfmt('B')
        p['hash_alg'] = stream.readfmt('B')
        p['hashed_subpackets'] = parse_subpackets(stream)

    # v4 signature trailer appended to the hashed data:
    # https://tools.ietf.org/html/rfc4880#section-5.2.4
    tail_to_hash = b'\x04\xff' + struct.pack('>L', to_hash.tell())

    p['_to_hash'] = to_hash.getvalue() + tail_to_hash

    # Unhashed subpackets are NOT covered by the signature digest.
    p['unhashed_subpackets'] = parse_subpackets(stream)
    embedded = list(_parse_embedded_signatures(p['unhashed_subpackets']))
    if embedded:
        log.debug('embedded sigs: %s', embedded)
        p['embedded'] = embedded

    # Marker subpacket used by this project to tag its own signatures.
    p['_is_custom'] = (protocol.CUSTOM_SUBPACKET in p['unhashed_subpackets'])

    # First two bytes of the expected digest, for a quick sanity check.
    p['hash_prefix'] = stream.readfmt('2s')
    if p['pubkey_alg'] in ECDSA_ALGO_IDS:
        p['sig'] = (parse_mpi(stream), parse_mpi(stream))
    elif p['pubkey_alg'] in RSA_ALGO_IDS:  # RSA
        p['sig'] = (parse_mpi(stream),)
    elif p['pubkey_alg'] == DSA_ALGO_ID:
        p['sig'] = (parse_mpi(stream), parse_mpi(stream))
    else:
        # NOTE(review): 'sig' stays unset on this path, so later access to
        # p['sig'] would raise KeyError - confirm callers tolerate this.
        log.error('unsupported public key algo: %d', p['pubkey_alg'])

    # The whole packet must be consumed by now.
    assert not stream.read()
    return p
|
|
|
|
|
|
|
|
|
2016-04-30 18:34:12 +00:00
|
|
|
def _parse_pubkey(stream, packet_type='pubkey'):
    """Parse a public-key (or subkey) packet from *stream*.

    See https://tools.ietf.org/html/rfc4880#section-5.5 for details.
    For supported elliptic curves, attaches a 'verifier' callable and
    'verifying_key'; other algorithms are parsed but not verified.
    """
    p = {'type': packet_type}
    # Capture the raw packet bytes: they feed the key-ID fingerprint below.
    packet = io.BytesIO()
    with stream.capture(packet):
        p['version'] = stream.readfmt('B')
        p['created'] = stream.readfmt('>L')
        p['algo'] = stream.readfmt('B')
        if p['algo'] in ECDSA_ALGO_IDS:
            log.debug('parsing elliptic curve key')
            # https://tools.ietf.org/html/rfc6637#section-11
            oid_size = stream.readfmt('B')
            oid = stream.read(oid_size)
            assert oid in SUPPORTED_CURVES, util.hexlify(oid)
            parser = SUPPORTED_CURVES[oid]

            mpi = parse_mpi(stream)
            log.debug('mpi: %x (%d bits)', mpi, mpi.bit_length())
            p['verifier'], p['verifying_key'] = parser(mpi)
            # ECDH keys carry an extra KDF parameters field.
            leftover = stream.read()
            if leftover:
                leftover = io.BytesIO(leftover)
                # https://tools.ietf.org/html/rfc6637#section-8
                # should be b'\x03\x01\x08\x07': SHA256 + AES128
                size, = util.readfmt(leftover, 'B')
                p['kdf'] = leftover.read(size)
                assert not leftover.read()
        elif p['algo'] == DSA_ALGO_ID:
            log.warning('DSA signatures are not verified')
            parse_mpis(stream, n=4)
        elif p['algo'] == ELGAMAL_ALGO_ID:
            log.warning('ElGamal signatures are not verified')
            parse_mpis(stream, n=3)
        else:  # assume RSA
            log.warning('RSA signatures are not verified')
            parse_mpis(stream, n=2)
        assert not stream.read()

    # Key ID is the tail of the SHA-1 fingerprint over the framed packet:
    # https://tools.ietf.org/html/rfc4880#section-12.2
    packet_data = packet.getvalue()
    data_to_hash = (b'\x99' + struct.pack('>H', len(packet_data)) +
                    packet_data)
    p['key_id'] = hashlib.sha1(data_to_hash).digest()[-8:]
    p['_to_hash'] = data_to_hash
    log.debug('key ID: %s', util.hexlify(p['key_id']))
    return p
|
|
|
|
|
2016-04-30 18:34:12 +00:00
|
|
|
# Subkeys share the public-key wire format; only the type label differs.
_parse_subkey = functools.partial(_parse_pubkey, packet_type='subkey')
|
2016-04-26 18:35:05 +00:00
|
|
|
|
|
|
|
|
2016-07-22 19:44:54 +00:00
|
|
|
def _parse_user_id(stream, packet_type='user_id'):
    """Parse a user ID packet (the whole payload is the ID string).

    See https://tools.ietf.org/html/rfc4880#section-5.11 for details.
    """
    payload = stream.read()
    # 0xb4 is the constant framing byte used when hashing user ID packets.
    digest_input = b'\xb4' + util.prefix_len('>L', payload)
    return {'type': packet_type, 'value': payload, '_to_hash': digest_input}
|
2016-04-26 18:35:05 +00:00
|
|
|
|
2016-07-22 19:44:54 +00:00
|
|
|
# User attribute is handled as an opaque user ID
_parse_attribute = functools.partial(_parse_user_id,
                                     packet_type='user_attribute')
|
2016-04-26 18:35:05 +00:00
|
|
|
|
|
|
|
# Maps RFC 4880 packet tag to its parser
# (tag values: https://tools.ietf.org/html/rfc4880#section-4.3).
PACKET_TYPES = {
    2: _parse_signature,
    6: _parse_pubkey,
    11: _parse_literal,
    13: _parse_user_id,
    14: _parse_subkey,
    17: _parse_attribute,
}
|
|
|
|
|
|
|
|
|
|
|
|
def parse_packets(stream):
    """
    Support iterative parsing of available GPG packets.

    See https://tools.ietf.org/html/rfc4880#section-4.2 for details.

    Yields one dict per packet; tags without a registered parser yield
    {'type': 'unknown', 'tag': ..., 'raw': ...}.
    Raises ValueError on a Partial Body Length header (unsupported).
    """
    reader = util.Reader(stream)
    while True:
        try:
            value = reader.readfmt('B')
        except EOFError:
            return

        log.debug('prefix byte: %s', bin(value))
        # Bit 7 must be set on every packet header byte.
        assert util.bit(value, 7) == 1

        tag = util.low_bits(value, 6)
        if util.bit(value, 6) == 0:
            # Old-format header: low 2 bits select the length field size.
            length_type = util.low_bits(tag, 2)
            tag = tag >> 2
            fmt = {0: '>B', 1: '>H', 2: '>L'}[length_type]
            packet_size = reader.readfmt(fmt)
        else:
            # New-format header:
            # https://tools.ietf.org/html/rfc4880#section-4.2.2
            first = reader.readfmt('B')
            if first < 192:
                packet_size = first
            elif first < 224:
                packet_size = ((first - 192) << 8) + reader.readfmt('B') + 192
            elif first == 255:
                packet_size = reader.readfmt('>L')
            else:
                # BUG FIX: this branch previously only logged and fell
                # through, crashing with NameError ('packet_size' unbound).
                # Fail explicitly instead.
                log.error('Partial Body Lengths unsupported')
                raise ValueError('Partial Body Lengths unsupported')

        log.debug('packet length: %d', packet_size)
        packet_data = reader.read(packet_size)
        packet_type = PACKET_TYPES.get(tag)

        if packet_type is not None:
            p = packet_type(util.Reader(io.BytesIO(packet_data)))
            p['tag'] = tag
        else:
            p = {'type': 'unknown', 'tag': tag, 'raw': packet_data}

        log.debug('packet "%s": %s', p['type'], p)
        yield p
|
2016-04-17 19:18:31 +00:00
|
|
|
|
|
|
|
|
2016-07-09 09:25:05 +00:00
|
|
|
def digest_packets(packets, hasher):
    """Compute digest on specified packets, according to '_to_hash' field."""
    hasher.update(b''.join(p['_to_hash'] for p in packets))
    return hasher.digest()
|
2016-06-21 22:18:11 +00:00
|
|
|
|
|
|
|
|
2016-07-22 19:51:02 +00:00
|
|
|
def collect_packets(packets, types_to_collect):
    """Collect specified packet types into their leading packet."""
    result = []
    leader = None
    for p in packets:
        if p['type'] not in types_to_collect:
            # A non-collected packet starts a new group; deep-copy it so
            # the attached lists never mutate the caller's packets.
            leader = copy.deepcopy(p)
            result.append(leader)
        else:
            leader.setdefault(p['type'], []).append(p)
    return result
|
|
|
|
|
|
|
|
|
2016-07-26 14:35:07 +00:00
|
|
|
def parse_public_keys(stream):
    """Parse GPG public key into hierarchy of packets."""
    result = list(parse_packets(stream))
    # Fold packets bottom-up: signatures into their owners, then user IDs
    # and attributes into keys, then subkeys into the primary key.
    for group in ({'signature'}, {'user_id', 'user_attribute'}, {'subkey'}):
        result = collect_packets(result, group)
    return result
|
2016-07-22 19:51:02 +00:00
|
|
|
|
|
|
|
|
2016-06-21 22:18:11 +00:00
|
|
|
# Maps RFC 4880 hash algorithm ID to the hashlib algorithm name
# (https://tools.ietf.org/html/rfc4880#section-9.4).
HASH_ALGORITHMS = {
    1: 'md5',
    2: 'sha1',
    3: 'ripemd160',
    8: 'sha256',
    9: 'sha384',
    10: 'sha512',
    11: 'sha224',
}
|
2016-04-26 09:34:50 +00:00
|
|
|
|
|
|
|
|
2016-06-17 06:32:43 +00:00
|
|
|
def load_public_key(pubkey_bytes, use_custom=False, ecdh=False):
    """Parse and validate GPG public key from an input stream.

    pubkey_bytes: binary (dearmored) public key data.
    use_custom: if True, walk the key/signature pairs and select the first
        (sub)key whose self-signature carries the project's custom marker.
    ecdh: with use_custom, select an ECDH key when True, otherwise a
        signing key.
    Returns the selected key packet, annotated with 'user_id' and
    '_is_custom' entries.
    """
    stream = io.BytesIO(pubkey_bytes)
    packets = list(parse_packets(stream))
    pubkey, userid, signature = packets[:3]
    packets = packets[3:]

    hash_alg = HASH_ALGORITHMS.get(signature['hash_alg'])
    if hash_alg is not None:
        # Cheap sanity check: the signature stores the digest's first 2 bytes.
        digest = digest_packets(packets=[pubkey, userid, signature],
                                hasher=hashlib.new(hash_alg))
        assert signature['hash_prefix'] == digest[:2]

    log.debug('loaded public key "%s"', userid['value'])
    if hash_alg is not None and pubkey.get('verifier'):
        verify_digest(pubkey=pubkey, digest=digest,
                      signature=signature['sig'], label='GPG public key')
    else:
        log.warning('public key %s is not verified!',
                    util.hexlify(pubkey['key_id']))

    packet = pubkey
    while use_custom:
        # Stop at the first (sub)key whose self-signature is custom-tagged
        # and whose algorithm kind matches the request (ECDH vs signing).
        if packet['type'] in ('pubkey', 'subkey') and signature['_is_custom']:
            if ecdh == (packet['algo'] == protocol.ECDH_ALGO_ID):
                log.debug('found custom %s', packet['type'])
                break

        # Advance to the next (key packet, signature) pair.
        # NOTE(review): if no matching key exists, this raises IndexError
        # once 'packets' is exhausted - confirm callers expect that.
        while packets[1]['type'] != 'signature':
            packets = packets[1:]
        packet, signature = packets[:2]
        packets = packets[2:]

    packet['user_id'] = userid['value']
    packet['_is_custom'] = signature['_is_custom']
    return packet
|
2016-04-17 19:18:31 +00:00
|
|
|
|
2016-04-26 09:53:51 +00:00
|
|
|
|
2016-04-26 09:34:50 +00:00
|
|
|
def load_signature(stream, original_data):
    """Load signature from stream, and compute GPG digest for verification."""
    # Exactly one signature packet is expected in the stream.
    sig, = parse_packets(stream)
    hasher = hashlib.new(HASH_ALGORITHMS[sig['hash_alg']])
    digest = digest_packets([{'_to_hash': original_data}, sig], hasher=hasher)
    # The stored 2-byte prefix lets us fail fast on a wrong digest.
    assert sig['hash_prefix'] == digest[:2]
    return sig, digest
|
|
|
|
|
2016-04-17 19:18:31 +00:00
|
|
|
|
|
|
|
def verify_digest(pubkey, digest, signature, label):
    """Verify a digest signature from a specified public key.

    Raises ValueError when the signature does not match the digest.
    """
    verifier = pubkey['verifier']
    try:
        verifier(signature, digest)
        log.debug('%s is OK', label)
    except (ecdsa.keys.BadSignatureError, ed25519.BadSignatureError):
        # BUG FIX: previously only ecdsa's BadSignatureError was caught, so
        # a bad Ed25519 signature escaped as ed25519.BadSignatureError
        # instead of the ValueError documented here.
        log.error('Bad %s!', label)
        raise ValueError('Invalid ECDSA signature for {}'.format(label))
|
2016-05-06 19:16:03 +00:00
|
|
|
|
|
|
|
|
2016-05-27 13:59:10 +00:00
|
|
|
def remove_armor(armored_data):
    """Decode armored data into its binary form."""
    # Drop the 3 armor header lines and the trailing armor tail line.
    body_lines = io.BytesIO(armored_data).readlines()[3:-1]
    decoded = base64.b64decode(b''.join(body_lines))
    payload = decoded[:-3]
    checksum = decoded[-3:]
    # The final 3 bytes carry the CRC-24 of the payload.
    assert util.crc24(payload) == checksum
    return payload
|
2016-05-06 19:16:03 +00:00
|
|
|
|
2016-05-26 19:26:52 +00:00
|
|
|
|
|
|
|
def verify(pubkey, signature, original_data):
    """Verify correctness of public key and signature."""
    sig_stream = io.BytesIO(remove_armor(signature))
    sig_packet, digest = load_signature(sig_stream, original_data)
    verify_digest(pubkey=pubkey, digest=digest,
                  signature=sig_packet['sig'], label='GPG signature')
|