#!/usr/bin/env python

# This file contains methods to deal with criu images.
#
# According to http://criu.org/Images, criu images can be described
# with the following grammar:
#
# IMAGE_FILE ::= MAGIC { ENTRY }
# ENTRY      ::= SIZE PAYLOAD [ EXTRA ]
# PAYLOAD    ::= "message encoded in ProtocolBuffer format"
# EXTRA      ::= "arbitrary blob, depends on the PAYLOAD contents"
#
# MAGIC      ::= "32 bit integer"
# SIZE       ::= "32 bit integer, equals the PAYLOAD length"
#
# Images v1.1 NOTE: MAGIC now consists of two 32-bit integers: the first one
# is MAGIC_COMMON or MAGIC_SERVICE and the second one is the same as MAGIC
# in images v1.0. We don't keep the "first" magic in json images.
#
# In order to convert images to a human-readable format, we use dict(json).
# Using json not only allows us to easily read/write images, but also
# to use a great variety of tools out there to manipulate them.
# It also allows us to clearly describe the structure of criu images.
#
# Using dict(json) format, criu images can be described like:
#
# {
#     'magic'   : 'FOO',
#     'entries' : [
#         entry,
#         ...
#     ]
# }
#
# An entry, in its turn, can be described as:
#
# {
#     pb_msg,
#     'extra' : extra_msg
# }
#

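# A minimal usage sketch (an added illustration, not part of the original
# comment; it assumes this module is importable as pycriu.images and that
# "fdinfo.img" is an existing criu image file):
#
#     import json
#     import pycriu
#
#     with open("fdinfo.img") as f:
#         img = pycriu.images.load(f, pretty=True)  # {'magic': ..., 'entries': [...]}
#     print json.dumps(img, indent=4)               # human-readable json
#
#     with open("fdinfo.img.new", "w+") as f:
#         pycriu.images.dump(img, f)                # back to the binary format
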
import io
import google
import struct
import os
import sys
import json
import pb2dict
import array

import magic
from pb import *

#
# Predefined hardcoded constants
sizeof_u16 = 2
sizeof_u32 = 4
sizeof_u64 = 8

# A helper for rounding
def round_up(x, y):
    return (((x - 1) | (y - 1)) + 1)
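# For example, round_up(10, sizeof_u64) == 16 and round_up(16, sizeof_u64) == 16
# (y is expected to be a power of two).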

class MagicException(Exception):
    def __init__(self, magic):
        self.magic = magic

# Generic class to handle loading/dumping criu images entries from/to bin
# format to/from dict(json).
class entry_handler:
    """
    Generic class to handle loading/dumping criu images
    entries from/to bin format to/from dict(json).
    """
    def __init__(self, payload, extra_handler=None):
        """
        Sets payload class and extra handler class.
        """
        self.payload = payload
        self.extra_handler = extra_handler

    def load(self, f, pretty = False):
        """
        Convert criu image entries from binary format to dict(json).
        Takes a file-like object and returns a list with entries in
        dict(json) format.
        """
        entries = []

        while True:
            entry = {}

            # Read payload
            pb = self.payload()
            buf = f.read(4)
            if buf == '':
                break
            size, = struct.unpack('i', buf)
            pb.ParseFromString(f.read(size))
            entry = pb2dict.pb2dict(pb, pretty)

            # Read extra
            if self.extra_handler:
                entry['extra'] = self.extra_handler.load(f, pb)

            entries.append(entry)

        return entries

    def loads(self, s, pretty = False):
        """
        Same as load(), but takes a string as an argument.
        """
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        """
        Convert criu image entries from dict(json) format to binary.
        Takes a list of entries and a file-like object to write entries
        in binary format to.
        """
        for entry in entries:
            extra = entry.pop('extra', None)

            # Write payload
            pb = self.payload()
            pb2dict.dict2pb(entry, pb)
            pb_str = pb.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            # Write extra
            if self.extra_handler and extra:
                self.extra_handler.dump(extra, f, pb)

    def dumps(self, entries):
        """
        Same as dump(), but doesn't take a file-like object and just
        returns a string.
        """
        f = io.BytesIO('')
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        """
        Counts the number of top-level objects in the image file.
        """
        entries = 0

        while True:
            buf = f.read(4)
            if buf == '':
                break
            size, = struct.unpack('i', buf)
            f.seek(size, 1)
            entries += 1

        return entries

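# A short usage sketch for entry_handler (an added illustration, not part of
# the original file; fdinfo_entry is one of the pb messages pulled in by
# "from pb import *" and is the payload type of FDINFO images, as listed in
# the handlers dict below):
#
#     h = entry_handler(fdinfo_entry)
#     entries = h.load(f)        # f positioned right after the magic(s)
#     data = h.dumps(entries)    # back to the binary entry stream
#
# The module-level load()/dump() below pick the right handler from the
# handlers dict based on the image magic and call these methods.
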
# Special handler for pagemap.img
class pagemap_handler:
    """
    Special entry handler for pagemap.img, which is unique in that it has
    a header of pagemap_head type followed by entries of pagemap_entry
    type.
    """
    def load(self, f, pretty = False):
        entries = []

        pb = pagemap_head()
        while True:
            buf = f.read(4)
            if buf == '':
                break
            size, = struct.unpack('i', buf)
            pb.ParseFromString(f.read(size))
            entries.append(pb2dict.pb2dict(pb, pretty))

            pb = pagemap_entry()

        return entries

    def loads(self, s, pretty = False):
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        pb = pagemap_head()
        for item in entries:
            pb2dict.dict2pb(item, pb)
            pb_str = pb.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            pb = pagemap_entry()

    def dumps(self, entries):
        f = io.BytesIO('')
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        return entry_handler(None).count(f) - 1

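# Note on pagemap_handler.load() above: the resulting list looks like
# [head, entry, entry, ...], i.e. the pagemap_head message comes first and is
# followed by one dict per pagemap_entry; that is also why count() subtracts
# one from the raw top-level object count.
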
# In the following extra handlers we use base64 encoding
# to store binary data. Even though base64 increases the
# total size, it doesn't really matter, because our images
# do not store big amounts of binary data; it is negligible
# compared to the size of the pages.
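# For example, with the Python 2 base64 codec used below,
# 'abc'.encode('base64') == 'YWJj\n' and 'YWJj\n'.decode('base64') == 'abc'.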
class pipes_data_extra_handler:
    def load(self, f, pload):
        size = pload.bytes
        data = f.read(size)
        return data.encode('base64')

    def dump(self, extra, f, pload):
        data = extra.decode('base64')
        f.write(data)

class sk_queues_extra_handler:
    def load(self, f, pload):
        size = pload.length
        data = f.read(size)
        return data.encode('base64')

    def dump(self, extra, f, pb):
        data = extra.decode('base64')
        f.write(data)

class ghost_file_extra_handler:
    def load(self, f, pb):
        data = f.read()
        return data.encode('base64')

    def dump(self, extra, f, pb):
        data = extra.decode('base64')
        f.write(data)

class tcp_stream_extra_handler:
    def load(self, f, pb):
        d = {}

        inq = f.read(pb.inq_len)
        outq = f.read(pb.outq_len)

        d['inq'] = inq.encode('base64')
        d['outq'] = outq.encode('base64')

        return d

    def dump(self, extra, f, pb):
        inq = extra['inq'].decode('base64')
        outq = extra['outq'].decode('base64')

        f.write(inq)
        f.write(outq)

class ipc_sem_set_handler:
    def load(self, f, pb):
        entry = pb2dict.pb2dict(pb)
        size = sizeof_u16 * entry['nsems']
        rounded = round_up(size, sizeof_u64)
        s = array.array('H')
        if s.itemsize != sizeof_u16:
            raise Exception("Array size mismatch")
        s.fromfile(f, entry['nsems'])
        f.seek(rounded - size, 1)
        return s.tolist()

    def dump(self, extra, f, pb):
        raise Exception("Not yet implemented")

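# A worked example for ipc_sem_set_handler.load() above (an added note): with
# nsems == 3 the semaphore values occupy size == 6 bytes and rounded == 8, so
# three u16 values are read from the stream and the remaining 2 padding bytes
# are skipped with f.seek().
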
class ipc_shm_handler:
    def load(self, f, pb):
        entry = pb2dict.pb2dict(pb)
        size = entry['size']
        data = f.read(size)
        rounded = round_up(size, sizeof_u32)
        f.seek(rounded - size, 1)
        return data.encode('base64')

    def dump(self, extra, f, pb):
        entry = pb2dict.pb2dict(pb)
        size = entry['size']
        data = extra.decode('base64')
        rounded = round_up(size, sizeof_u32)
        f.write(data[:size])
        f.write('\0' * (rounded - size))

handlers = {
    'INVENTORY' : entry_handler(inventory_entry),
    'CORE' : entry_handler(core_entry),
    'IDS' : entry_handler(task_kobj_ids_entry),
    'CREDS' : entry_handler(creds_entry),
    'UTSNS' : entry_handler(utsns_entry),
    'IPC_VAR' : entry_handler(ipc_var_entry),
    'FS' : entry_handler(fs_entry),
    'GHOST_FILE' : entry_handler(ghost_file_entry, ghost_file_extra_handler()),
    'MM' : entry_handler(mm_entry),
    'CGROUP' : entry_handler(cgroup_entry),
    'TCP_STREAM' : entry_handler(tcp_stream_entry, tcp_stream_extra_handler()),
    'STATS' : entry_handler(stats_entry),
    'PAGEMAP' : pagemap_handler(),  # Special one
    'PSTREE' : entry_handler(pstree_entry),
    'REG_FILES' : entry_handler(reg_file_entry),
    'NS_FILES' : entry_handler(ns_file_entry),
    'EVENTFD_FILE' : entry_handler(eventfd_file_entry),
    'EVENTPOLL_FILE' : entry_handler(eventpoll_file_entry),
    'EVENTPOLL_TFD' : entry_handler(eventpoll_tfd_entry),
    'SIGNALFD' : entry_handler(signalfd_entry),
    'TIMERFD' : entry_handler(timerfd_entry),
    'INOTIFY_FILE' : entry_handler(inotify_file_entry),
    'INOTIFY_WD' : entry_handler(inotify_wd_entry),
    'FANOTIFY_FILE' : entry_handler(fanotify_file_entry),
    'FANOTIFY_MARK' : entry_handler(fanotify_mark_entry),
    'VMAS' : entry_handler(vma_entry),
    'PIPES' : entry_handler(pipe_entry),
    'FIFO' : entry_handler(fifo_entry),
    'SIGACT' : entry_handler(sa_entry),
    'NETLINK_SK' : entry_handler(netlink_sk_entry),
    'REMAP_FPATH' : entry_handler(remap_file_path_entry),
    'MNTS' : entry_handler(mnt_entry),
    'TTY_FILES' : entry_handler(tty_file_entry),
    'TTY_INFO' : entry_handler(tty_info_entry),
    'RLIMIT' : entry_handler(rlimit_entry),
    'TUNFILE' : entry_handler(tunfile_entry),
    'EXT_FILES' : entry_handler(ext_file_entry),
    'IRMAP_CACHE' : entry_handler(irmap_cache_entry),
    'FILE_LOCKS' : entry_handler(file_lock_entry),
    'FDINFO' : entry_handler(fdinfo_entry),
    'UNIXSK' : entry_handler(unix_sk_entry),
    'INETSK' : entry_handler(inet_sk_entry),
    'PACKETSK' : entry_handler(packet_sock_entry),
    'ITIMERS' : entry_handler(itimer_entry),
    'POSIX_TIMERS' : entry_handler(posix_timer_entry),
    'NETDEV' : entry_handler(net_device_entry),
    'PIPES_DATA' : entry_handler(pipe_data_entry, pipes_data_extra_handler()),
    'FIFO_DATA' : entry_handler(pipe_data_entry, pipes_data_extra_handler()),
    'SK_QUEUES' : entry_handler(sk_packet_entry, sk_queues_extra_handler()),
    'IPCNS_SHM' : entry_handler(ipc_shm_entry, ipc_shm_handler()),
    'IPCNS_SEM' : entry_handler(ipc_sem_entry, ipc_sem_set_handler()),
    'IPCNS_MSG' : entry_handler(ipc_msg_entry),
    'NETNS' : entry_handler(netns_entry),
    'USERNS' : entry_handler(userns_entry),
    'SECCOMP' : entry_handler(seccomp_entry),
}

def __rhandler(f):
    # Images v1.1 NOTE: first read the "first" magic.
    img_magic, = struct.unpack('i', f.read(4))
    if img_magic in (magic.by_name['IMG_COMMON'], magic.by_name['IMG_SERVICE']):
        img_magic, = struct.unpack('i', f.read(4))

    try:
        m = magic.by_val[img_magic]
    except:
        raise MagicException(img_magic)

    try:
        handler = handlers[m]
    except:
        raise Exception("No handler found for image with magic " + m)

    return m, handler

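# An added illustration of the layouts __rhandler() accepts: most images
# (e.g. one of type FDINFO) start with the IMG_COMMON magic followed by their
# own type magic, STATS and IRMAP_CACHE start with IMG_SERVICE instead, and
# the INVENTORY image carries only its own magic (see dump() below for the
# writing side of this rule).
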
def load(f, pretty = False):
    """
    Convert criu image from binary format to dict(json).
    Takes a file-like object to read the criu image from.
    Returns the criu image in dict(json) format.
    """
    image = {}

    m, handler = __rhandler(f)

    image['magic'] = m
    image['entries'] = handler.load(f, pretty)

    return image

def info(f):
    res = {}

    m, handler = __rhandler(f)

    res['magic'] = m
    res['count'] = handler.count(f)

    return res

def loads(s, pretty = False):
    """
    Same as load(), but takes a string.
    """
    f = io.BytesIO(s)
    return load(f, pretty)

def dump(img, f):
    """
    Convert criu image from dict(json) format to binary.
    Takes an image in dict(json) format and a file-like
    object to write to.
    """
    m = img['magic']
    magic_val = magic.by_name[img['magic']]

    # Images v1.1 NOTE: use the "second" magic to identify what "first"
    # magic should be written.
    if m != 'INVENTORY':
        if m in ('STATS', 'IRMAP_CACHE'):
            f.write(struct.pack('i', magic.by_name['IMG_SERVICE']))
        else:
            f.write(struct.pack('i', magic.by_name['IMG_COMMON']))

    f.write(struct.pack('i', magic_val))

    try:
        handler = handlers[m]
    except:
        raise Exception("No handler found for image with such magic")

    handler.dump(img['entries'], f)

def dumps(img):
    """
    Same as dump(), but takes only an image and returns
    a string.
    """
    f = io.BytesIO('')
    dump(img, f)
    return f.getvalue()