from google.protobuf.descriptor import FieldDescriptor as FD
import opts_pb2
import ipaddr
import socket
import collections
import os

# pb2dict and dict2pb are methods to convert pb to/from dict.
# Inspired by:
#   protobuf-to-dict - https://github.com/benhodgson/protobuf-to-dict
#   protobuf-json    - https://code.google.com/p/protobuf-json/
#   protobuf source  - https://code.google.com/p/protobuf/
# Both protobuf-to-dict/json do not fit here for several reasons,
# here are some of them:
#  - both have a common bug in treating an optional field with an empty
#    repeated field inside.
#  - protobuf-to-json is not available in pip or in any other python
#    repo, so it is hard to distribute and we can't rely on it.
#  - both do not treat enums the way we would like to. They convert
#    a protobuf enum to int, but we need a string here, because it is
#    much more informative. BTW, protobuf text_format converts pb
#    enums to string values too (i.e. "march : x86_64" is better than
#    "march : 1").

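# A minimal usage sketch (illustrative only: "pstree_pb2" and "raw_entry"
# are hypothetical stand-ins for any compiled CRIU image .proto module
# and the raw bytes of one entry read from an image):
#
#   import pstree_pb2
#
#   entry = pstree_pb2.pstree_entry()
#   entry.ParseFromString(raw_entry)     # build the pb message from raw bytes
#   d = pb2dict(entry, pretty = True)    # pb -> dict, enums become strings
#
#   entry2 = pstree_pb2.pstree_entry()
#   dict2pb(d, entry2)                   # dict -> pb, merged into entry2
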
_basic_cast = {
    # The native pb engine only accepts int/long for these fields, so the
    # fixed and sfixed types get explicit casts as well:
    FD.TYPE_FIXED64  : long,
    FD.TYPE_FIXED32  : int,
    FD.TYPE_SFIXED64 : long,
    FD.TYPE_SFIXED32 : int,

    FD.TYPE_INT64  : long,
    FD.TYPE_UINT64 : long,
    FD.TYPE_SINT64 : long,

    FD.TYPE_INT32  : int,
    FD.TYPE_UINT32 : int,
    FD.TYPE_SINT32 : int,

    FD.TYPE_BOOL   : bool,

    FD.TYPE_STRING : unicode
}

# Helpers checking the criu-specific field options (extensions from opts_pb2).
def _marked_as_hex(field):
    return field.GetOptions().Extensions[opts_pb2.criu].hex

def _marked_as_ip(field):
    return field.GetOptions().Extensions[opts_pb2.criu].ipadd

def _marked_as_flags(field):
    return field.GetOptions().Extensions[opts_pb2.criu].flags

def _marked_as_dev(field):
    return field.GetOptions().Extensions[opts_pb2.criu].dev

def _marked_as_odev(field):
    return field.GetOptions().Extensions[opts_pb2.criu].odev

def _marked_as_dict(field):
    return field.GetOptions().Extensions[opts_pb2.criu].dict

def _custom_conv(field):
    return field.GetOptions().Extensions[opts_pb2.criu].conv

mmap_prot_map = [
    ('PROT_READ',   0x1),
    ('PROT_WRITE',  0x2),
    ('PROT_EXEC',   0x4),
]

mmap_flags_map = [
    ('MAP_SHARED',    0x1),
    ('MAP_PRIVATE',   0x2),
    ('MAP_ANON',      0x20),
    ('MAP_GROWSDOWN', 0x0100),
]

mmap_status_map = [
    ('VMA_AREA_NONE',     0 << 0),
    ('VMA_AREA_REGULAR',  1 << 0),
    ('VMA_AREA_STACK',    1 << 1),
    ('VMA_AREA_VSYSCALL', 1 << 2),
    ('VMA_AREA_VDSO',     1 << 3),
    ('VMA_AREA_HEAP',     1 << 5),

    ('VMA_FILE_PRIVATE',  1 << 6),
    ('VMA_FILE_SHARED',   1 << 7),
    ('VMA_ANON_SHARED',   1 << 8),
    ('VMA_ANON_PRIVATE',  1 << 9),

    ('VMA_AREA_SYSVIPC',  1 << 10),
    ('VMA_AREA_SOCKET',   1 << 11),
    ('VMA_AREA_VVAR',     1 << 12),
    ('VMA_AREA_AIORING',  1 << 13),

    ('VMA_UNSUPP',        1 << 31),
]

rfile_flags_map = [
    ('O_WRONLY',    01),
    ('O_RDWR',      02),
    ('O_APPEND',    02000),
    ('O_DIRECT',    040000),
    ('O_LARGEFILE', 0100000),
]

pmap_flags_map = [
    ('PE_PARENT',  1 << 0),
    ('PE_LAZY',    1 << 1),
    ('PE_PRESENT', 1 << 2),
]

flags_maps = {
    'mmap.prot'   : mmap_prot_map,
    'mmap.flags'  : mmap_flags_map,
    'mmap.status' : mmap_status_map,
    'rfile.flags' : rfile_flags_map,
    'pmap.flags'  : pmap_flags_map,
}

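# Each key above matches the string a .proto field carries in its
# (criu).flags option; _pb2dict_cast() and _dict2pb_cast() below look the
# string up here to pick the right name/value table.
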
gen_maps = {
    'task_state' : { 1: 'Alive', 3: 'Zombie', 6: 'Stopped' },
}

sk_maps = {
    'family' : { 2: 'INET', 10: 'INET6' },
    'type'   : { 1: 'STREAM', 2: 'DGRAM' },
    'state'  : { 1: 'ESTABLISHED', 7: 'CLOSE', 10: 'LISTEN' },
    'proto'  : { 6: 'TCP' },
}

# Reverse maps for translating the symbolic names back into numbers.
gen_rmaps = { k: {v2: k2 for k2, v2 in v.items()} for k, v in gen_maps.items() }
sk_rmaps = { k: {v2: k2 for k2, v2 in v.items()} for k, v in sk_maps.items() }

dict_maps = {
    'gen' : ( gen_maps, gen_rmaps ),
    'sk'  : ( sk_maps, sk_rmaps ),
}

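# For example, a field named "task_state" whose (criu).dict option is set
# to "gen" is decoded through gen_maps as 'Alive' instead of 1, and
# gen_rmaps translates 'Alive' back to 1 on the encode path.
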
def map_flags(value, flags_map):
    bs = map(lambda x: x[0], filter(lambda x: value & x[1], flags_map))
    value &= ~sum(map(lambda x: x[1], flags_map))
    if value:
        bs.append("0x%x" % value)
    return " | ".join(bs)

def unmap_flags(value, flags_map):
    if value == '':
        return 0

    bd = dict(flags_map)
    return sum(map(lambda x: int(str(bd.get(x, x)), 0), map(lambda x: x.strip(), value.split('|'))))

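# Worked example: map_flags(0x7, mmap_prot_map) gives
# "PROT_READ | PROT_WRITE | PROT_EXEC"; unknown bits are kept as a hex
# tail, so 0x9 becomes "PROT_READ | 0x8", and unmap_flags() reverses both.
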
kern_minorbits = 20  # This is how the kernel encodes dev_t in the new format

def decode_dev(field, value):
    if _marked_as_odev(field):
        return "%d:%d" % (os.major(value), os.minor(value))
    else:
        return "%d:%d" % (value >> kern_minorbits, value & ((1 << kern_minorbits) - 1))

def encode_dev(field, value):
    dev = map(lambda x: int(x), value.split(':'))

    if _marked_as_odev(field):
        return os.makedev(dev[0], dev[1])
    else:
        return dev[0] << kern_minorbits | dev[1]

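# Illustrative example for the new-format encoding (minor number in the
# low 20 bits): (8 << 20) | 16 == 0x800010, so decode_dev() yields "8:16"
# and encode_dev() turns "8:16" back into 0x800010.
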
def encode_base64(value):
    return value.encode('base64')

def decode_base64(value):
    return value.decode('base64')

def encode_unix(value):
    return value.encode('quopri')

def decode_unix(value):
    return value.decode('quopri')

encode = { 'unix_name': encode_unix }
decode = { 'unix_name': decode_unix }

def get_bytes_enc(field):
    c = _custom_conv(field)
    if c:
        return encode[c]
    else:
        return encode_base64

def get_bytes_dec(field):
    c = _custom_conv(field)
    if c:
        return decode[c]
    else:
        return decode_base64

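# Bytes fields are dumped as base64 strings by default; fields whose
# (criu).conv option is "unix_name" go through the quoted-printable codec
# instead, so printable characters stay readable in the resulting dict.
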
def is_string(value):
    return isinstance(value, unicode) or isinstance(value, str)

def _pb2dict_cast(field, value, pretty = False, is_hex = False):
    if not is_hex:
        is_hex = _marked_as_hex(field)

    if field.type == FD.TYPE_MESSAGE:
        return pb2dict(value, pretty, is_hex)
    elif field.type == FD.TYPE_BYTES:
        return get_bytes_enc(field)(value)
    elif field.type == FD.TYPE_ENUM:
        return field.enum_type.values_by_number.get(value, None).name
    elif field.type in _basic_cast:
        cast = _basic_cast[field.type]
        if pretty and (cast == int or cast == long):
            if is_hex:
                # Fields that have (criu).hex = true option set
                # should be stored in hex string format.
                return "0x%x" % value

            if _marked_as_dev(field):
                return decode_dev(field, value)

            flags = _marked_as_flags(field)
            if flags:
                try:
                    flags_map = flags_maps[flags]
                except:
                    return "0x%x" % value  # flags are better seen as hex anyway
                else:
                    return map_flags(value, flags_map)

            dct = _marked_as_dict(field)
            if dct:
                return dict_maps[dct][0][field.name].get(value, cast(value))

        return cast(value)
    else:
        raise Exception("Field(%s) has unsupported type %d" % (field.name, field.type))

def pb2dict(pb, pretty = False, is_hex = False):
    """
    Convert protobuf msg to dictionary.
    Takes a protobuf message and returns a dict.
    """
    d = collections.OrderedDict() if pretty else {}
    for field, value in pb.ListFields():
        if field.label == FD.LABEL_REPEATED:
            d_val = []
            if pretty and _marked_as_ip(field):
                if len(value) == 1:
                    v = socket.ntohl(value[0])
                    addr = ipaddr.IPv4Address(v)
                else:
                    v = 0 + (socket.ntohl(value[0]) << (32 * 3)) + \
                        (socket.ntohl(value[1]) << (32 * 2)) + \
                        (socket.ntohl(value[2]) << (32 * 1)) + \
                        (socket.ntohl(value[3]))
                    addr = ipaddr.IPv6Address(v)

                d_val.append(addr.compressed)
            else:
                for v in value:
                    d_val.append(_pb2dict_cast(field, v, pretty, is_hex))
        else:
            d_val = _pb2dict_cast(field, value, pretty, is_hex)

        d[field.name] = d_val
    return d

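# Note on (criu).ipadd fields: an address is stored as one (IPv4) or four
# (IPv6) network-order 32-bit words. In pretty mode pb2dict() above shows
# it as a compressed string (e.g. "10.0.0.1" or "::1"), and dict2pb()
# below packs such a string back into the word array.
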
def _dict2pb_cast(field, value):
    # Not considering TYPE_MESSAGE here, as repeated
    # and non-repeated messages need special treatment
    # in this case, and are handled separately.
    if field.type == FD.TYPE_BYTES:
        return get_bytes_dec(field)(value)
    elif field.type == FD.TYPE_ENUM:
        return field.enum_type.values_by_name.get(value, None).number
    elif field.type in _basic_cast:
        cast = _basic_cast[field.type]
        if (cast == int or cast == long) and is_string(value):
            if _marked_as_dev(field):
                return encode_dev(field, value)

            flags = _marked_as_flags(field)
            if flags:
                try:
                    flags_map = flags_maps[flags]
                except:
                    pass  # Try to use plain string cast
                else:
                    return unmap_flags(value, flags_map)

            dct = _marked_as_dict(field)
            if dct:
                ret = dict_maps[dct][1][field.name].get(value, None)
                if ret is None:
                    ret = cast(value, 0)
                return ret

            # Some int or long fields might be stored as hex
            # strings. See _pb2dict_cast.
            return cast(value, 0)
        else:
            return cast(value)
    else:
        raise Exception("Field(%s) has unsupported type %d" % (field.name, field.type))

def dict2pb(d, pb):
    """
    Convert dictionary to protobuf msg.
    Takes a dict and a protobuf message to be merged into.
    """
    for field in pb.DESCRIPTOR.fields:
        if field.name not in d:
            continue
        value = d[field.name]
        if field.label == FD.LABEL_REPEATED:
            pb_val = getattr(pb, field.name, None)
            if is_string(value[0]) and _marked_as_ip(field):
                val = ipaddr.IPAddress(value[0])
                if val.version == 4:
                    pb_val.append(socket.htonl(int(val)))
                elif val.version == 6:
                    ival = int(val)
                    pb_val.append(socket.htonl((ival >> (32 * 3)) & 0xFFFFFFFF))
                    pb_val.append(socket.htonl((ival >> (32 * 2)) & 0xFFFFFFFF))
                    pb_val.append(socket.htonl((ival >> (32 * 1)) & 0xFFFFFFFF))
                    pb_val.append(socket.htonl((ival >> (32 * 0)) & 0xFFFFFFFF))
                else:
                    raise Exception("Unknown IP address version %d" % val.version)
                continue

            for v in value:
                if field.type == FD.TYPE_MESSAGE:
                    dict2pb(v, pb_val.add())
                else:
                    pb_val.append(_dict2pb_cast(field, v))
        else:
            if field.type == FD.TYPE_MESSAGE:
                # The SetInParent method acts just like has_* = true in C,
                # and helps to properly treat cases when we have an optional
                # field with an empty repeated field inside.
                getattr(pb, field.name).SetInParent()

                dict2pb(value, getattr(pb, field.name, None))
            else:
                setattr(pb, field.name, _dict2pb_cast(field, value))
    return pb