# This file contains methods to deal with criu images.
#
# According to http://criu.org/Images, criu images can be described
# with the following grammar:
#
# IMAGE_FILE ::= MAGIC { ENTRY }
# ENTRY ::= SIZE PAYLOAD [ EXTRA ]
# PAYLOAD ::= "message encoded in ProtocolBuffer format"
# EXTRA ::= "arbitrary blob, depends on the PAYLOAD contents"
#
# MAGIC ::= "32 bit integer"
# SIZE ::= "32 bit integer, equals the PAYLOAD length"
#
# Images v1.1 NOTE: MAGIC now consists of two 32-bit integers: the first
# one is MAGIC_COMMON or MAGIC_SERVICE and the second one is the same as
# MAGIC in images v1.0. We don't keep the "first" magic in json images.
#
# In order to convert images to a human-readable format, we use dict(json).
# Using json not only allows us to easily read/write images, but also
# to use a great variety of tools out there to manipulate them.
# It also allows us to clearly describe the criu images structure.
#
# Using the dict(json) format, criu images can be described like:
#
# {
#     'magic' : 'FOO',
#     'entries' : [
#         entry,
#         ...
#     ]
# }
#
# An entry, in its turn, can be described as:
#
# {
#     pb_msg,
#     'extra' : extra_msg
# }
#
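
# For illustration only (a minimal sketch of the framing described above):
# a single ENTRY can be read by hand like this, assuming `f` is a binary
# file-like object positioned right past the magic word(s):
#
#     size, = struct.unpack('i', f.read(4))  # SIZE: payload length
#     payload = f.read(size)                 # PAYLOAD: raw protobuf message
#     # EXTRA, if present, follows; its length depends on the payload
#
# This is exactly what entry_handler.load() below does, plus the protobuf
# decoding and the dict(json) conversion.
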
import io
import base64
import struct
import os
import array

from . import magic
from . import pb
from . import pb2dict

if "encodebytes" not in dir(base64):
    # Python 2 fallback: encodebytes/decodebytes only exist on Python 3
    base64.encodebytes = base64.encodestring
    base64.decodebytes = base64.decodestring

#
# Predefined hardcoded constants
sizeof_u16 = 2
sizeof_u32 = 4
sizeof_u64 = 8


# A helper for rounding
def round_up(x, y):
    return (((x - 1) | (y - 1)) + 1)
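
# E.g. round_up(10, sizeof_u64) == 16 (a purely illustrative note: the bit
# trick above relies on y being a power of two, which holds for all the
# sizeof_* constants it is used with in this file).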


class MagicException(Exception):
    def __init__(self, magic):
        self.magic = magic


# Generic class to handle loading/dumping criu images entries from/to bin
# format to/from dict(json).
class entry_handler:
    """
    Generic class to handle loading/dumping criu images
    entries from/to bin format to/from dict(json).
    """
    def __init__(self, payload, extra_handler=None):
        """
        Sets payload class and extra handler class.
        """
        self.payload = payload
        self.extra_handler = extra_handler

    def load(self, f, pretty=False, no_payload=False):
        """
        Convert criu image entries from binary format to dict(json).
        Takes a file-like object and returns a list with entries in
        dict(json) format.
        """
        entries = []

        while True:
            entry = {}

            # Read payload
            pbuff = self.payload()
            buf = f.read(4)
            if buf == b'':
                break
            size, = struct.unpack('i', buf)
            pbuff.ParseFromString(f.read(size))
            entry = pb2dict.pb2dict(pbuff, pretty)

            # Read extra
            if self.extra_handler:
                if no_payload:
                    def human_readable(num):
                        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
                            if num < 1024.0:
                                if int(num) == num:
                                    return "%d%sB" % (num, unit)
                                else:
                                    return "%.1f%sB" % (num, unit)
                            num /= 1024.0
                        return "%.1fYB" % num
                    pl_size = self.extra_handler.skip(f, pbuff)
                    entry['extra'] = '... <%s>' % human_readable(pl_size)
                else:
                    entry['extra'] = self.extra_handler.load(f, pbuff)

            entries.append(entry)

        return entries

    def loads(self, s, pretty=False):
        """
        Same as load(), but takes a string as an argument.
        """
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        """
        Convert criu image entries from dict(json) format to binary.
        Takes a list of entries and a file-like object to write entries
        in binary format to.
        """
        for entry in entries:
            extra = entry.pop('extra', None)

            # Write payload
            pbuff = self.payload()
            pb2dict.dict2pb(entry, pbuff)
            pb_str = pbuff.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            # Write extra
            if self.extra_handler and extra:
                self.extra_handler.dump(extra, f, pbuff)

    def dumps(self, entries):
        """
        Same as dump(), but doesn't take a file-like object and just
        returns a string.
        """
        f = io.BytesIO(b'')
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        """
        Counts the number of top-level objects in the image file
        """
        entries = 0

        while True:
            buf = f.read(4)
            if buf == b'':
                break
            size, = struct.unpack('i', buf)
            f.seek(size, 1)
            entries += 1

        return entries
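

# For illustration (a minimal sketch, not part of the module API): an
# entry_handler can be used stand-alone to round-trip the body of an
# image (everything past the magic words), e.g. for an UTSNS image:
#
#     h = entry_handler(pb.utsns_entry)
#     entries = h.loads(data)    # list of entries in dict(json) form
#     blob = h.dumps(entries)    # back to the SIZE+PAYLOAD framing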


# Special handler for pagemap.img
class pagemap_handler:
    """
    Special entry handler for pagemap.img, which is unique in a way
    that it has a header of pagemap_head type followed by entries
    of pagemap_entry type.
    """
    def load(self, f, pretty=False, no_payload=False):
        entries = []

        pbuff = pb.pagemap_head()
        while True:
            buf = f.read(4)
            if buf == b'':
                break
            size, = struct.unpack('i', buf)
            pbuff.ParseFromString(f.read(size))
            entries.append(pb2dict.pb2dict(pbuff, pretty))

            # After the head, every subsequent entry is a pagemap_entry
            pbuff = pb.pagemap_entry()

        return entries

    def loads(self, s, pretty=False):
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        pbuff = pb.pagemap_head()
        for item in entries:
            pb2dict.dict2pb(item, pbuff)
            pb_str = pbuff.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            pbuff = pb.pagemap_entry()

    def dumps(self, entries):
        f = io.BytesIO(b'')
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        # Subtract one for the pagemap_head entry
        return entry_handler(None).count(f) - 1


# Special handler for ghost-file.img
class ghost_file_handler:
    def load(self, f, pretty=False, no_payload=False):
        entries = []

        gf = pb.ghost_file_entry()
        buf = f.read(4)
        size, = struct.unpack('i', buf)
        gf.ParseFromString(f.read(size))
        g_entry = pb2dict.pb2dict(gf, pretty)

        if gf.chunks:
            entries.append(g_entry)
            while True:
                gc = pb.ghost_chunk_entry()
                buf = f.read(4)
                if buf == b'':
                    break
                size, = struct.unpack('i', buf)
                gc.ParseFromString(f.read(size))
                entry = pb2dict.pb2dict(gc, pretty)
                if no_payload:
                    f.seek(gc.len, os.SEEK_CUR)
                else:
                    entry['extra'] = base64.encodebytes(f.read(gc.len))
                entries.append(entry)
        else:
            if no_payload:
                f.seek(0, os.SEEK_END)
            else:
                g_entry['extra'] = base64.encodebytes(f.read())
            entries.append(g_entry)

        return entries

    def loads(self, s, pretty=False):
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        pbuff = pb.ghost_file_entry()
        item = entries.pop(0)
        pb2dict.dict2pb(item, pbuff)
        pb_str = pbuff.SerializeToString()
        size = len(pb_str)
        f.write(struct.pack('i', size))
        f.write(pb_str)

        if pbuff.chunks:
            for item in entries:
                pbuff = pb.ghost_chunk_entry()
                pb2dict.dict2pb(item, pbuff)
                pb_str = pbuff.SerializeToString()
                size = len(pb_str)
                f.write(struct.pack('i', size))
                f.write(pb_str)
                f.write(base64.decodebytes(item['extra']))
        else:
            f.write(base64.decodebytes(item['extra']))

    def dumps(self, entries):
        f = io.BytesIO(b'')
        self.dump(entries, f)
        return f.getvalue()
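

# On-disk sketch of ghost-file.img (descriptive only): a ghost_file_entry
# header, then either one raw data blob running to EOF (no chunks), or a
# sequence of ghost_chunk_entry records, each followed by `len` bytes.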


# In the following extra handlers we use base64 encoding to store binary
# data. Even though base64 increases the total size, it doesn't really
# matter, because our images do not store big amounts of binary data; it
# is negligible compared to the pages size.
class pipes_data_extra_handler:
    def load(self, f, pload):
        size = pload.bytes
        data = f.read(size)
        return base64.encodebytes(data)

    def dump(self, extra, f, pload):
        data = base64.decodebytes(extra)
        f.write(data)

    def skip(self, f, pload):
        f.seek(pload.bytes, os.SEEK_CUR)
        return pload.bytes
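

# All extra handlers follow the same informal protocol (a descriptive
# sketch inferred from the classes in this file, not an enforced
# interface): load(f, pbuff) reads the blob that follows the payload and
# returns its json representation, dump(extra, f, pbuff) writes it back,
# and skip(f, pbuff) seeks past the blob and returns its length. A
# minimal, hypothetical no-op handler would look like:
#
#     class null_extra_handler:
#         def load(self, f, pbuff):
#             return None
#
#         def dump(self, extra, f, pbuff):
#             pass
#
#         def skip(self, f, pbuff):
#             return 0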


class sk_queues_extra_handler:
    def load(self, f, pload):
        size = pload.length
        data = f.read(size)
        return base64.encodebytes(data)

    def dump(self, extra, f, _unused):
        data = base64.decodebytes(extra)
        f.write(data)

    def skip(self, f, pload):
        f.seek(pload.length, os.SEEK_CUR)
        return pload.length


class tcp_stream_extra_handler:
    def load(self, f, pbuff):
        d = {}

        inq = f.read(pbuff.inq_len)
        outq = f.read(pbuff.outq_len)

        d['inq'] = base64.encodebytes(inq)
        d['outq'] = base64.encodebytes(outq)

        return d

    def dump(self, extra, f, _unused):
        inq = base64.decodebytes(extra['inq'])
        outq = base64.decodebytes(extra['outq'])

        f.write(inq)
        f.write(outq)

    def skip(self, f, pbuff):
        f.seek(0, os.SEEK_END)
        return pbuff.inq_len + pbuff.outq_len


class ipc_sem_set_handler:
    def load(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        rounded = round_up(size, sizeof_u64)
        s = array.array('H')
        if s.itemsize != sizeof_u16:
            raise Exception("Array size mismatch")
        # array.fromstring()/tostring() were renamed to frombytes()/
        # tobytes(); the old names were removed in Python 3.9
        s.frombytes(f.read(size))
        f.seek(rounded - size, 1)
        return s.tolist()

    def dump(self, extra, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        rounded = round_up(size, sizeof_u64)
        s = array.array('H')
        if s.itemsize != sizeof_u16:
            raise Exception("Array size mismatch")
        s.fromlist(extra)
        if len(s) != entry['nsems']:
            raise Exception("Number of semaphores mismatch")
        f.write(s.tobytes())
        f.write(b'\0' * (rounded - size))

    def skip(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        f.seek(round_up(size, sizeof_u64), os.SEEK_CUR)
        return size


class ipc_msg_queue_handler:
    def load(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        messages = []
        for x in range(0, entry['qnum']):
            buf = f.read(4)
            if buf == b'':
                break
            size, = struct.unpack('i', buf)
            msg = pb.ipc_msg()
            msg.ParseFromString(f.read(size))
            rounded = round_up(msg.msize, sizeof_u64)
            data = f.read(msg.msize)
            f.seek(rounded - msg.msize, 1)
            # Each message becomes a pair in the resulting list: the pb
            # message descriptor followed by the base64-encoded body
            messages.append(pb2dict.pb2dict(msg))
            messages.append(base64.encodebytes(data))
        return messages

    def dump(self, extra, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        for i in range(0, len(extra), 2):
            msg = pb.ipc_msg()
            pb2dict.dict2pb(extra[i], msg)
            msg_str = msg.SerializeToString()
            size = len(msg_str)
            f.write(struct.pack('i', size))
            f.write(msg_str)
            rounded = round_up(msg.msize, sizeof_u64)
            data = base64.decodebytes(extra[i + 1])
            f.write(data[:msg.msize])
            f.write(b'\0' * (rounded - msg.msize))

    def skip(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        pl_len = 0
        for x in range(0, entry['qnum']):
            buf = f.read(4)
            if buf == b'':
                break
            size, = struct.unpack('i', buf)
            msg = pb.ipc_msg()
            msg.ParseFromString(f.read(size))
            rounded = round_up(msg.msize, sizeof_u64)
            f.seek(rounded, os.SEEK_CUR)
            pl_len += size + msg.msize

        return pl_len


class ipc_shm_handler:
    def load(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        data = f.read(size)
        rounded = round_up(size, sizeof_u32)
        f.seek(rounded - size, 1)
        return base64.encodebytes(data)

    def dump(self, extra, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        data = base64.decodebytes(extra)
        rounded = round_up(size, sizeof_u32)
        f.write(data[:size])
        f.write(b'\0' * (rounded - size))

    def skip(self, f, pbuff):
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        rounded = round_up(size, sizeof_u32)
        f.seek(rounded, os.SEEK_CUR)
        return size


handlers = {
    'INVENTORY' : entry_handler(pb.inventory_entry),
    'CORE' : entry_handler(pb.core_entry),
    'IDS' : entry_handler(pb.task_kobj_ids_entry),
    'CREDS' : entry_handler(pb.creds_entry),
    'UTSNS' : entry_handler(pb.utsns_entry),
    'IPC_VAR' : entry_handler(pb.ipc_var_entry),
    'FS' : entry_handler(pb.fs_entry),
    'GHOST_FILE' : ghost_file_handler(),
    'MM' : entry_handler(pb.mm_entry),
    'CGROUP' : entry_handler(pb.cgroup_entry),
    'TCP_STREAM' : entry_handler(pb.tcp_stream_entry, tcp_stream_extra_handler()),
    'STATS' : entry_handler(pb.stats_entry),
    'PAGEMAP' : pagemap_handler(),  # Special one
    'PSTREE' : entry_handler(pb.pstree_entry),
    'REG_FILES' : entry_handler(pb.reg_file_entry),
    'NS_FILES' : entry_handler(pb.ns_file_entry),
    'EVENTFD_FILE' : entry_handler(pb.eventfd_file_entry),
    'EVENTPOLL_FILE' : entry_handler(pb.eventpoll_file_entry),
    'EVENTPOLL_TFD' : entry_handler(pb.eventpoll_tfd_entry),
    'SIGNALFD' : entry_handler(pb.signalfd_entry),
    'TIMERFD' : entry_handler(pb.timerfd_entry),
    'INOTIFY_FILE' : entry_handler(pb.inotify_file_entry),
    'INOTIFY_WD' : entry_handler(pb.inotify_wd_entry),
    'FANOTIFY_FILE' : entry_handler(pb.fanotify_file_entry),
    'FANOTIFY_MARK' : entry_handler(pb.fanotify_mark_entry),
    'VMAS' : entry_handler(pb.vma_entry),
    'PIPES' : entry_handler(pb.pipe_entry),
    'FIFO' : entry_handler(pb.fifo_entry),
    'SIGACT' : entry_handler(pb.sa_entry),
    'NETLINK_SK' : entry_handler(pb.netlink_sk_entry),
    'REMAP_FPATH' : entry_handler(pb.remap_file_path_entry),
    'MNTS' : entry_handler(pb.mnt_entry),
    'TTY_FILES' : entry_handler(pb.tty_file_entry),
    'TTY_INFO' : entry_handler(pb.tty_info_entry),
    'TTY_DATA' : entry_handler(pb.tty_data_entry),
    'RLIMIT' : entry_handler(pb.rlimit_entry),
    'TUNFILE' : entry_handler(pb.tunfile_entry),
    'EXT_FILES' : entry_handler(pb.ext_file_entry),
    'IRMAP_CACHE' : entry_handler(pb.irmap_cache_entry),
    'FILE_LOCKS' : entry_handler(pb.file_lock_entry),
    'FDINFO' : entry_handler(pb.fdinfo_entry),
    'UNIXSK' : entry_handler(pb.unix_sk_entry),
    'INETSK' : entry_handler(pb.inet_sk_entry),
    'PACKETSK' : entry_handler(pb.packet_sock_entry),
    'ITIMERS' : entry_handler(pb.itimer_entry),
    'POSIX_TIMERS' : entry_handler(pb.posix_timer_entry),
    'NETDEV' : entry_handler(pb.net_device_entry),
    'PIPES_DATA' : entry_handler(pb.pipe_data_entry, pipes_data_extra_handler()),
    'FIFO_DATA' : entry_handler(pb.pipe_data_entry, pipes_data_extra_handler()),
    'SK_QUEUES' : entry_handler(pb.sk_packet_entry, sk_queues_extra_handler()),
    'IPCNS_SHM' : entry_handler(pb.ipc_shm_entry, ipc_shm_handler()),
    'IPCNS_SEM' : entry_handler(pb.ipc_sem_entry, ipc_sem_set_handler()),
    'IPCNS_MSG' : entry_handler(pb.ipc_msg_entry, ipc_msg_queue_handler()),
    'NETNS' : entry_handler(pb.netns_entry),
    'USERNS' : entry_handler(pb.userns_entry),
    'SECCOMP' : entry_handler(pb.seccomp_entry),
    'AUTOFS' : entry_handler(pb.autofs_entry),
    'FILES' : entry_handler(pb.file_entry),
    'CPUINFO' : entry_handler(pb.cpuinfo_entry),
}


def __rhandler(f):
    # Images v1.1 NOTE: first read the "first" magic.
    img_magic, = struct.unpack('i', f.read(4))
    if img_magic in (magic.by_name['IMG_COMMON'], magic.by_name['IMG_SERVICE']):
        img_magic, = struct.unpack('i', f.read(4))

    try:
        m = magic.by_val[img_magic]
    except KeyError:
        raise MagicException(img_magic)

    try:
        handler = handlers[m]
    except KeyError:
        raise Exception("No handler found for image with magic " + m)

    return m, handler


def load(f, pretty=False, no_payload=False):
    """
    Convert criu image from binary format to dict(json).
    Takes a file-like object to read criu image from.
    Returns criu image in dict(json) format.
    """
    image = {}

    m, handler = __rhandler(f)

    image['magic'] = m
    image['entries'] = handler.load(f, pretty, no_payload)

    return image


def info(f):
    res = {}

    m, handler = __rhandler(f)

    res['magic'] = m
    res['count'] = handler.count(f)

    return res


def loads(s, pretty=False):
    """
    Same as load(), but takes a string.
    """
    f = io.BytesIO(s)
    return load(f, pretty)


def dump(img, f):
    """
    Convert criu image from dict(json) format to binary.
    Takes an image in dict(json) format and a file-like
    object to write to.
    """
    m = img['magic']
    magic_val = magic.by_name[img['magic']]

    # Images v1.1 NOTE: use the "second" magic to identify which "first"
    # magic should be written.
    if m != 'INVENTORY':
        if m in ('STATS', 'IRMAP_CACHE'):
            f.write(struct.pack('i', magic.by_name['IMG_SERVICE']))
        else:
            f.write(struct.pack('i', magic.by_name['IMG_COMMON']))

    f.write(struct.pack('i', magic_val))

    try:
        handler = handlers[m]
    except KeyError:
        raise Exception("No handler found for image with magic " + m)

    handler.dump(img['entries'], f)


def dumps(img):
    """
    Same as dump(), but takes only an image and returns
    a string.
    """
    f = io.BytesIO(b'')
    dump(img, f)
    return f.getvalue()
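

# For illustration (a minimal sketch, not part of the module API): the
# typical way to consume this module is through the top-level helpers,
# assuming this file is importable as part of the pycriu package and
# 'fs-1234.img' is an existing image file:
#
#     import pycriu
#
#     with open('fs-1234.img', 'rb') as imgf:
#         img = pycriu.images.load(imgf, pretty=True)
#     print(img['magic'], len(img['entries']))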