2014-12-31 14:06:48 +02:00
|
|
|
# This file contains methods to deal with criu images.
|
|
|
|
#
|
|
|
|
# According to http://criu.org/Images, criu images can be described
|
|
|
|
# with such IOW:
|
2015-04-10 13:21:49 +03:00
|
|
|
#
|
2014-12-31 14:06:48 +02:00
|
|
|
# IMAGE_FILE ::= MAGIC { ENTRY }
|
|
|
|
# ENTRY ::= SIZE PAYLOAD [ EXTRA ]
|
|
|
|
# PAYLOAD ::= "message encoded in ProtocolBuffer format"
|
|
|
|
# EXTRA ::= "arbitrary blob, depends on the PAYLOAD contents"
|
|
|
|
#
|
|
|
|
# MAGIC ::= "32 bit integer"
|
|
|
|
# SIZE ::= "32 bit integer, equals the PAYLOAD length"
|
|
|
|
#
|
2015-04-10 13:21:49 +03:00
|
|
|
# Images v1.1 NOTE: MAGIC now consist of 2 32 bit integers, first one is
|
2019-06-28 20:17:35 +03:00
|
|
|
# MAGIC_COMMON or MAGIC_SERVICE and the second one is same as MAGIC
|
|
|
|
# in images V1.0. We don't keep "first" magic in json images.
|
2015-04-10 13:21:49 +03:00
|
|
|
#
|
2014-12-31 14:06:48 +02:00
|
|
|
# In order to convert images to human-readable format, we use dict(json).
|
|
|
|
# Using json not only allows us to easily read\write images, but also
|
|
|
|
# to use a great variety of tools out there to manipulate them.
|
|
|
|
# It also allows us to clearly describe criu images structure.
|
|
|
|
#
|
|
|
|
# Using dict(json) format, criu images can be described like:
|
|
|
|
#
|
|
|
|
# {
|
2019-06-28 20:17:35 +03:00
|
|
|
# 'magic' : 'FOO',
|
|
|
|
# 'entries' : [
|
|
|
|
# entry,
|
|
|
|
# ...
|
|
|
|
# ]
|
2014-12-31 14:06:48 +02:00
|
|
|
# }
|
|
|
|
#
|
|
|
|
# Entry, in its turn, could be described as:
|
|
|
|
#
|
|
|
|
# {
|
2019-06-28 20:17:35 +03:00
|
|
|
# pb_msg,
|
|
|
|
# 'extra' : extra_msg
|
2014-12-31 14:06:48 +02:00
|
|
|
# }
|
|
|
|
#
|
|
|
|
import io
|
2018-06-02 00:02:54 +03:00
|
|
|
import base64
|
2014-12-31 14:06:48 +02:00
|
|
|
import struct
|
|
|
|
import os
|
2015-12-10 13:07:02 +03:00
|
|
|
import array
|
2014-12-31 14:06:48 +02:00
|
|
|
|
2018-05-16 06:20:22 +00:00
|
|
|
from . import magic
|
2018-09-23 15:31:51 +01:00
|
|
|
from . import pb
|
|
|
|
from . import pb2dict
|
2014-12-31 14:06:48 +02:00
|
|
|
|
2018-06-02 00:02:54 +03:00
|
|
|
# Python 2 compatibility: the encodebytes/decodebytes names only exist
# on Python 3; alias them to the legacy *string functions so the rest
# of this module can use the modern names unconditionally.
if "encodebytes" not in dir(base64):
    base64.encodebytes = base64.encodestring
    base64.decodebytes = base64.decodestring
|
2018-06-02 00:02:54 +03:00
|
|
|
|
2015-12-10 13:07:02 +03:00
|
|
|
#
|
|
|
|
# Predefined hardcoded constants
|
|
|
|
sizeof_u16 = 2  # size of a 16-bit integer, in bytes
sizeof_u32 = 4  # size of a 32-bit integer, in bytes
sizeof_u64 = 8  # size of a 64-bit integer, in bytes
|
|
|
|
|
2019-09-07 15:46:22 +03:00
|
|
|
|
2015-12-10 13:07:02 +03:00
|
|
|
# A helper for rounding
|
2019-09-07 15:46:22 +03:00
|
|
|
def round_up(x, y):
    """Round x up to the next multiple of y.

    Uses the classic bit trick, so y must be a power of two
    (as all the sizeof_* constants in this module are).
    """
    mask = y - 1
    return ((x - 1) | mask) + 1
|
|
|
|
|
2015-12-10 13:07:02 +03:00
|
|
|
|
2015-05-29 16:01:00 +03:00
|
|
|
class MagicException(Exception):
    """Raised when an image carries a magic value we do not recognize.

    The offending 32-bit magic is available as ``self.magic``.
    """

    def __init__(self, magic):
        # BUGFIX: chain to Exception.__init__ so that args, str(e),
        # repr(e) and pickling carry the magic instead of being empty.
        # (py2/py3-compatible super call, matching the file's shim.)
        super(MagicException, self).__init__(magic)
        self.magic = magic
|
|
|
|
|
2015-05-29 16:01:00 +03:00
|
|
|
|
2023-02-24 05:32:44 +03:00
|
|
|
def decode_base64_data(data):
    """Decode a base64 text string into the raw bytes it encodes."""
    encoded = str.encode(data)
    return base64.decodebytes(encoded)
|
2023-02-24 05:32:44 +03:00
|
|
|
|
|
|
|
|
|
|
|
def write_base64_data(f, data):
    """Decode base64 text *data* and write the raw bytes to file *f*.

    BUGFIX (docs): the old docstring claimed this writes base64
    *encoded* data; it actually decodes and writes the raw payload.
    """
    f.write(base64.decodebytes(str.encode(data)))
|
2023-02-24 05:32:44 +03:00
|
|
|
|
|
|
|
|
2014-12-31 14:06:48 +02:00
|
|
|
# Generic class to handle loading/dumping criu images entries from/to bin
|
|
|
|
# format to/from dict(json).
|
|
|
|
class entry_handler:
    """
    Generic class to handle loading/dumping criu images
    entries from/to bin format to/from dict(json).
    """

    def __init__(self, payload, extra_handler=None):
        """
        Sets payload class and extra handler class.
        """
        self.payload = payload
        self.extra_handler = extra_handler

    def load(self, f, pretty=False, no_payload=False):
        """
        Convert criu image entries from binary format to dict(json).
        Takes a file-like object and returns a list with entries in
        dict(json) format.
        """
        entries = []

        while True:
            entry = {}

            # Read payload: 4-byte little "i" size, then the pb message.
            pbuff = self.payload()
            buf = f.read(4)
            if len(buf) == 0:
                break  # clean EOF: no more entries
            size, = struct.unpack('i', buf)
            pbuff.ParseFromString(f.read(size))
            entry = pb2dict.pb2dict(pbuff, pretty)

            # Read extra (arbitrary blob owned by the extra handler).
            if self.extra_handler:
                if no_payload:

                    def human_readable(num):
                        # Render a byte count with binary-prefix units.
                        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
                            if num < 1024.0:
                                if int(num) == num:
                                    return "%d%sB" % (num, unit)
                                else:
                                    return "%.1f%sB" % (num, unit)
                            num /= 1024.0
                        return "%.1fYB" % num

                    pl_size = self.extra_handler.skip(f, pbuff)
                    entry['extra'] = '... <%s>' % human_readable(pl_size)
                else:
                    entry['extra'] = self.extra_handler.load(f, pbuff)

            entries.append(entry)

        return entries

    def loads(self, s, pretty=False):
        """
        Same as load(), but takes a string as an argument.
        """
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        """
        Convert criu image entries from dict(json) format to binary.
        Takes a list of entries and a file-like object to write entries
        in binary format to.
        """
        for entry in entries:
            extra = entry.pop('extra', None)

            # Write payload: size prefix followed by the serialized pb.
            pbuff = self.payload()
            pb2dict.dict2pb(entry, pbuff)
            pb_str = pbuff.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            # Write extra
            if self.extra_handler and extra:
                self.extra_handler.dump(extra, f, pbuff)

    def dumps(self, entries):
        """
        Same as dump(), but doesn't take file-like object and just
        returns a string.

        BUGFIX: io.BytesIO('') raised TypeError on Python 3, and
        f.read() after dump() always returned b'' (the stream position
        was at EOF).  Use an empty buffer and getvalue() instead,
        matching the module-level dumps().
        """
        f = io.BytesIO()
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        """
        Counts the number of top-level object in the image file
        """
        entries = 0

        while True:
            buf = f.read(4)
            if len(buf) == 0:
                break
            size, = struct.unpack('i', buf)
            f.seek(size, 1)  # skip the payload without parsing it
            entries += 1

        return entries
|
2015-09-16 16:16:24 +03:00
|
|
|
|
|
|
|
|
2015-01-14 16:32:37 +02:00
|
|
|
# Special handler for pagemap.img
|
|
|
|
class pagemap_handler:
    """
    Special entry handler for pagemap.img, which is unique in a way
    that it has a header of pagemap_head type followed by entries
    of pagemap_entry type.
    """

    def load(self, f, pretty=False, no_payload=False):
        entries = []

        # First record is the pagemap_head header; every following
        # record is a pagemap_entry.
        pbuff = pb.pagemap_head()
        while True:
            buf = f.read(4)
            if len(buf) == 0:
                break
            size, = struct.unpack('i', buf)
            pbuff.ParseFromString(f.read(size))
            entries.append(pb2dict.pb2dict(pbuff, pretty))

            pbuff = pb.pagemap_entry()

        return entries

    def loads(self, s, pretty=False):
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        # Mirror of load(): first entry is the head, rest are entries.
        pbuff = pb.pagemap_head()
        for item in entries:
            pb2dict.dict2pb(item, pbuff)
            pb_str = pbuff.SerializeToString()
            size = len(pb_str)
            f.write(struct.pack('i', size))
            f.write(pb_str)

            pbuff = pb.pagemap_entry()

    def dumps(self, entries):
        # BUGFIX: io.BytesIO('') raised TypeError on Python 3 and
        # f.read() after dump() returned b''; use an empty buffer
        # and getvalue() instead.
        f = io.BytesIO()
        self.dump(entries, f)
        return f.getvalue()

    def count(self, f):
        # Subtract one for the pagemap_head header record.
        return entry_handler(None).count(f) - 1
|
2015-01-14 16:32:37 +02:00
|
|
|
|
2015-09-16 16:16:24 +03:00
|
|
|
|
2017-06-15 19:04:46 +03:00
|
|
|
# Special handler for ghost-file.img
|
|
|
|
class ghost_file_handler:
    """Special entry handler for ghost-file.img.

    The image starts with one ghost_file_entry; when it has chunks,
    a sequence of (ghost_chunk_entry, raw data) pairs follows,
    otherwise the rest of the file is the raw file contents.
    """

    def load(self, f, pretty=False, no_payload=False):
        entries = []

        gf = pb.ghost_file_entry()
        buf = f.read(4)
        size, = struct.unpack('i', buf)
        gf.ParseFromString(f.read(size))
        g_entry = pb2dict.pb2dict(gf, pretty)

        if gf.chunks:
            entries.append(g_entry)
            while True:
                gc = pb.ghost_chunk_entry()
                buf = f.read(4)
                if len(buf) == 0:
                    break
                size, = struct.unpack('i', buf)
                gc.ParseFromString(f.read(size))
                entry = pb2dict.pb2dict(gc, pretty)
                if no_payload:
                    f.seek(gc.len, os.SEEK_CUR)
                else:
                    entry['extra'] = base64.encodebytes(f.read(gc.len)).decode('utf-8')
                entries.append(entry)
        else:
            if no_payload:
                f.seek(0, os.SEEK_END)
            else:
                g_entry['extra'] = base64.encodebytes(f.read()).decode('utf-8')
            entries.append(g_entry)

        return entries

    def loads(self, s, pretty=False):
        f = io.BytesIO(s)
        return self.load(f, pretty)

    def dump(self, entries, f):
        # First entry is the ghost_file_entry header.
        pbuff = pb.ghost_file_entry()
        item = entries.pop(0)
        pb2dict.dict2pb(item, pbuff)
        pb_str = pbuff.SerializeToString()
        size = len(pb_str)
        f.write(struct.pack('i', size))
        f.write(pb_str)

        if pbuff.chunks:
            for item in entries:
                pbuff = pb.ghost_chunk_entry()
                pb2dict.dict2pb(item, pbuff)
                pb_str = pbuff.SerializeToString()
                size = len(pb_str)
                f.write(struct.pack('i', size))
                f.write(pb_str)
                write_base64_data(f, item['extra'])
        else:
            write_base64_data(f, item['extra'])

    def dumps(self, entries):
        # BUGFIX: io.BytesIO('') raised TypeError on Python 3 and
        # f.read() after dump() returned b''; use an empty buffer
        # and getvalue() instead.
        f = io.BytesIO()
        self.dump(entries, f)
        return f.getvalue()
|
2017-06-15 19:04:46 +03:00
|
|
|
|
2015-01-14 16:32:37 +02:00
|
|
|
|
|
|
|
# In following extra handlers we use base64 encoding
|
2014-12-31 14:06:48 +02:00
|
|
|
# to store binary data. Even though, the nature
|
|
|
|
# of base64 is that it increases the total size,
|
|
|
|
# it doesn't really matter, because our images
|
|
|
|
# do not store big amounts of binary data. They
|
|
|
|
# are negligible comparing to pages size.
|
|
|
|
class pipes_data_extra_handler:
    """Extra handler for pipe data: raw bytes exposed as base64 text."""

    def load(self, f, pload):
        """Read pload.bytes of raw pipe data; return it base64-encoded."""
        raw = f.read(pload.bytes)
        return base64.encodebytes(raw).decode('utf-8')

    def dump(self, extra, f, pload):
        """Decode the base64 extra payload and write the raw bytes out."""
        f.write(decode_base64_data(extra))

    def skip(self, f, pload):
        """Seek past the pipe data without reading it; return its length."""
        length = pload.bytes
        f.seek(length, os.SEEK_CUR)
        return length
|
2014-12-31 14:06:48 +02:00
|
|
|
|
2016-05-19 13:04:00 +03:00
|
|
|
|
2014-12-31 14:06:48 +02:00
|
|
|
class sk_queues_extra_handler:
    """Extra handler for socket queue packets, stored as base64 text."""

    def load(self, f, pload):
        """Read pload.length bytes of packet data; return base64 text."""
        raw = f.read(pload.length)
        return base64.encodebytes(raw).decode('utf-8')

    def dump(self, extra, f, _unused):
        """Decode the base64 extra payload and write the raw bytes out."""
        f.write(decode_base64_data(extra))

    def skip(self, f, pload):
        """Seek past the packet data; return the number of bytes skipped."""
        length = pload.length
        f.seek(length, os.SEEK_CUR)
        return length
|
2016-05-19 13:04:00 +03:00
|
|
|
|
|
|
|
|
2015-01-28 21:38:00 +03:00
|
|
|
class tcp_stream_extra_handler:
    """Extra handler for TCP stream images.

    The in-queue and out-queue follow the payload as two raw byte
    runs; both are exposed as base64 text under 'inq' and 'outq'.
    """

    def load(self, f, pbuff):
        """Read both queues; return them base64-encoded in a dict."""
        inq_raw = f.read(pbuff.inq_len)
        outq_raw = f.read(pbuff.outq_len)
        return {
            'inq': base64.encodebytes(inq_raw).decode('utf-8'),
            'outq': base64.encodebytes(outq_raw).decode('utf-8'),
        }

    def dump(self, extra, f, _unused):
        """Decode and write the input queue, then the output queue."""
        f.write(decode_base64_data(extra['inq']))
        f.write(decode_base64_data(extra['outq']))

    def skip(self, f, pbuff):
        """The queues are the last thing in the file: seek to EOF."""
        f.seek(0, os.SEEK_END)
        return pbuff.inq_len + pbuff.outq_len
|
2015-01-28 21:38:00 +03:00
|
|
|
|
2023-02-24 05:32:44 +03:00
|
|
|
|
2020-07-25 16:17:07 +05:30
|
|
|
class bpfmap_data_extra_handler:
    """Extra handler for BPF map data.

    Keys and values follow the payload as one raw blob of
    keys_bytes + values_bytes, exposed as base64 text.
    """

    def load(self, f, pload):
        """Read the keys+values blob; return it base64-encoded."""
        size = pload.keys_bytes + pload.values_bytes
        data = f.read(size)
        return base64.encodebytes(data).decode('utf-8')

    def dump(self, extra, f, pload):
        """Decode the base64 extra payload and write the raw blob back.

        BUGFIX: base64.decodebytes() requires bytes, but 'extra' is the
        str produced by load(); encode it first (same fix the other
        extra handlers received).
        """
        data = base64.decodebytes(str.encode(extra))
        f.write(data)

    def skip(self, f, pload):
        """Seek past the blob without reading it; return its length.

        BUGFIX: bpfmap_data has no 'bytes' field (copy-paste from the
        pipes handler); the blob length is keys_bytes + values_bytes,
        matching load().
        """
        size = pload.keys_bytes + pload.values_bytes
        f.seek(size, os.SEEK_CUR)
        return size
|
2016-05-19 13:04:00 +03:00
|
|
|
|
2023-02-24 05:32:44 +03:00
|
|
|
|
2015-12-10 13:07:02 +03:00
|
|
|
class ipc_sem_set_handler:
    """Extra handler for SysV semaphore sets.

    The 16-bit semaphore values follow the payload, padded up to a
    64-bit boundary, and are exposed as a plain list of ints.
    """

    def load(self, f, pbuff):
        """Read nsems 16-bit values (plus padding); return them as a list."""
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        rounded = round_up(size, sizeof_u64)
        s = self._get_sem_array()
        s.frombytes(f.read(size))
        f.seek(rounded - size, 1)  # skip the alignment padding
        return s.tolist()

    def dump(self, extra, f, pbuff):
        """Write the semaphore values back as 16-bit ints, with padding.

        Raises if the list length disagrees with the payload's nsems.
        """
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        rounded = round_up(size, sizeof_u64)
        s = self._get_sem_array()
        s.fromlist(extra)
        if len(s) != entry['nsems']:
            raise Exception("Number of semaphores mismatch")
        f.write(s.tobytes())
        f.write(b'\0' * (rounded - size))  # pad up to a 64-bit boundary

    def skip(self, f, pbuff):
        """Seek past the (padded) semaphore values; return their size."""
        entry = pb2dict.pb2dict(pbuff)
        size = sizeof_u16 * entry['nsems']
        f.seek(round_up(size, sizeof_u64), os.SEEK_CUR)
        return size

    def _get_sem_array(self):
        # 'H' is an unsigned short; sanity-check that it really is
        # 16 bits on this platform before using it to (de)serialize.
        s = array.array('H')
        if s.itemsize != sizeof_u16:
            raise Exception("Array size mismatch")
        return s
|
|
|
|
|
2016-05-19 13:04:00 +03:00
|
|
|
|
crit: Add handling of msgqueue
Typical output
| {
| "magic": "IPCNS_MSG",
| "entries": [
| {
| "desc": {
| "key": 2181112128,
| "uid": 18943,
| "gid": 58467,
| "cuid": 18943,
| "cgid": 58467,
| "mode": 438,
| "id": 0
| },
| "qbytes": 16384,
| "qnum": 2,
| "extra": [
| {
| "mtype": 1,
| "msize": 15
| },
| "VGVzdCBzeXN2NSBtc2cA\n",
| {
| "mtype": 26538,
| "msize": 27
| },
| "WWV0IGFub3RoZXIgdGVzdCBzeXN2NSBtc2cA\n"
| ]
| }
| ]
| }
Signed-off-by: Cyrill Gorcunov <gorcunov@openvz.org>
Signed-off-by: Pavel Emelyanov <xemul@parallels.com>
2015-12-10 13:07:05 +03:00
|
|
|
class ipc_msg_queue_handler:
    """Extra handler for SysV message queues.

    The extra data is a flat list alternating message descriptors
    (ipc_msg dicts) and their base64-encoded bodies.
    """

    def load(self, f, pbuff):
        """Read all queued messages; return the [desc, body, ...] list."""
        messages, _ = self._read_messages(f, pbuff)
        return messages

    def dump(self, extra, f, pbuff):
        """Write descriptor/body pairs back in binary form."""
        # extra holds (descriptor, base64 body) pairs, hence step 2.
        for i in range(0, len(extra), 2):
            msg = pb.ipc_msg()
            pb2dict.dict2pb(extra[i], msg)
            msg_str = msg.SerializeToString()
            size = len(msg_str)
            f.write(struct.pack('i', size))
            f.write(msg_str)
            rounded = round_up(msg.msize, sizeof_u64)
            data = decode_base64_data(extra[i + 1])
            f.write(data[:msg.msize])
            f.write(b'\0' * (rounded - msg.msize))  # pad to 64-bit boundary

    def skip(self, f, pbuff):
        """Seek past all messages; return the total payload length."""
        _, pl_len = self._read_messages(f, pbuff, skip_data=True)
        return pl_len

    def _read_messages(self, f, pbuff, skip_data=False):
        """Walk the queue's qnum messages; return (messages, total length).

        When skip_data is True the message bodies are seeked over and
        the returned messages list stays empty.
        """
        entry = pb2dict.pb2dict(pbuff)
        messages = []
        pl_len = 0
        for x in range(0, entry['qnum']):
            buf = f.read(4)
            if len(buf) == 0:
                break  # truncated image: stop early
            size, = struct.unpack('i', buf)
            msg = pb.ipc_msg()
            msg.ParseFromString(f.read(size))
            rounded = round_up(msg.msize, sizeof_u64)
            pl_len += size + msg.msize

            if skip_data:
                f.seek(rounded, os.SEEK_CUR)
            else:
                data = f.read(msg.msize)
                f.seek(rounded - msg.msize, 1)  # skip alignment padding
                messages.append(pb2dict.pb2dict(msg))
                messages.append(base64.encodebytes(data).decode('utf-8'))

        return messages, pl_len
|
2019-09-07 15:46:22 +03:00
|
|
|
|
2016-05-19 13:04:00 +03:00
|
|
|
|
2015-12-10 13:07:03 +03:00
|
|
|
class ipc_shm_handler:
    """Extra handler for SysV shared memory segments.

    The raw segment contents follow the payload, padded up to a
    32-bit boundary, and are exposed as base64 text.
    """

    def load(self, f, pbuff):
        """Read the (padded) segment data; return it base64-encoded."""
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        data = f.read(size)
        rounded = round_up(size, sizeof_u32)
        f.seek(rounded - size, 1)  # skip the alignment padding
        return base64.encodebytes(data).decode('utf-8')

    def dump(self, extra, f, pbuff):
        """Decode the base64 extra payload and write it back, padded.

        BUGFIX: base64.decodebytes() requires bytes, but 'extra' is the
        str produced by load(); encode it first (same fix the other
        extra handlers received in the decode_base64_data sweep).
        """
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        data = base64.decodebytes(str.encode(extra))
        rounded = round_up(size, sizeof_u32)
        f.write(data[:size])
        f.write(b'\0' * (rounded - size))  # pad up to a 32-bit boundary

    def skip(self, f, pbuff):
        """Seek past the padded segment data; return the payload size."""
        entry = pb2dict.pb2dict(pbuff)
        size = entry['size']
        rounded = round_up(size, sizeof_u32)
        f.seek(rounded, os.SEEK_CUR)
        return size
|
2016-05-19 13:04:00 +03:00
|
|
|
|
|
|
|
|
2014-12-31 14:06:48 +02:00
|
|
|
# Mapping from image magic name to the handler object that knows how
# to (de)serialize that image type's entries.  Most images are plain
# size-prefixed protobuf streams (entry_handler); some carry trailing
# binary blobs (entry_handler + extra handler) or have a special
# layout of their own (pagemap, ghost files).
handlers = {
    'INVENTORY': entry_handler(pb.inventory_entry),
    'CORE': entry_handler(pb.core_entry),
    'IDS': entry_handler(pb.task_kobj_ids_entry),
    'CREDS': entry_handler(pb.creds_entry),
    'UTSNS': entry_handler(pb.utsns_entry),
    'TIMENS': entry_handler(pb.timens_entry),
    'PIDNS': entry_handler(pb.pidns_entry),
    'IPC_VAR': entry_handler(pb.ipc_var_entry),
    'FS': entry_handler(pb.fs_entry),
    'GHOST_FILE': ghost_file_handler(),
    'MM': entry_handler(pb.mm_entry),
    'CGROUP': entry_handler(pb.cgroup_entry),
    'TCP_STREAM': entry_handler(pb.tcp_stream_entry,
                                tcp_stream_extra_handler()),
    'STATS': entry_handler(pb.stats_entry),
    'PAGEMAP': pagemap_handler(),  # Special one
    'PSTREE': entry_handler(pb.pstree_entry),
    'REG_FILES': entry_handler(pb.reg_file_entry),
    'NS_FILES': entry_handler(pb.ns_file_entry),
    'EVENTFD_FILE': entry_handler(pb.eventfd_file_entry),
    'EVENTPOLL_FILE': entry_handler(pb.eventpoll_file_entry),
    'EVENTPOLL_TFD': entry_handler(pb.eventpoll_tfd_entry),
    'SIGNALFD': entry_handler(pb.signalfd_entry),
    'TIMERFD': entry_handler(pb.timerfd_entry),
    'INOTIFY_FILE': entry_handler(pb.inotify_file_entry),
    'INOTIFY_WD': entry_handler(pb.inotify_wd_entry),
    'FANOTIFY_FILE': entry_handler(pb.fanotify_file_entry),
    'FANOTIFY_MARK': entry_handler(pb.fanotify_mark_entry),
    'VMAS': entry_handler(pb.vma_entry),
    'PIPES': entry_handler(pb.pipe_entry),
    'FIFO': entry_handler(pb.fifo_entry),
    'SIGACT': entry_handler(pb.sa_entry),
    'NETLINK_SK': entry_handler(pb.netlink_sk_entry),
    'REMAP_FPATH': entry_handler(pb.remap_file_path_entry),
    'MNTS': entry_handler(pb.mnt_entry),
    'TTY_FILES': entry_handler(pb.tty_file_entry),
    'TTY_INFO': entry_handler(pb.tty_info_entry),
    'TTY_DATA': entry_handler(pb.tty_data_entry),
    'RLIMIT': entry_handler(pb.rlimit_entry),
    'TUNFILE': entry_handler(pb.tunfile_entry),
    'EXT_FILES': entry_handler(pb.ext_file_entry),
    'IRMAP_CACHE': entry_handler(pb.irmap_cache_entry),
    'FILE_LOCKS': entry_handler(pb.file_lock_entry),
    'FDINFO': entry_handler(pb.fdinfo_entry),
    'UNIXSK': entry_handler(pb.unix_sk_entry),
    'INETSK': entry_handler(pb.inet_sk_entry),
    'PACKETSK': entry_handler(pb.packet_sock_entry),
    'ITIMERS': entry_handler(pb.itimer_entry),
    'POSIX_TIMERS': entry_handler(pb.posix_timer_entry),
    'NETDEV': entry_handler(pb.net_device_entry),
    'PIPES_DATA': entry_handler(pb.pipe_data_entry,
                                pipes_data_extra_handler()),
    'FIFO_DATA': entry_handler(pb.pipe_data_entry, pipes_data_extra_handler()),
    'SK_QUEUES': entry_handler(pb.sk_packet_entry, sk_queues_extra_handler()),
    'IPCNS_SHM': entry_handler(pb.ipc_shm_entry, ipc_shm_handler()),
    'IPCNS_SEM': entry_handler(pb.ipc_sem_entry, ipc_sem_set_handler()),
    'IPCNS_MSG': entry_handler(pb.ipc_msg_entry, ipc_msg_queue_handler()),
    'NETNS': entry_handler(pb.netns_entry),
    'USERNS': entry_handler(pb.userns_entry),
    'SECCOMP': entry_handler(pb.seccomp_entry),
    'AUTOFS': entry_handler(pb.autofs_entry),
    'FILES': entry_handler(pb.file_entry),
    'CPUINFO': entry_handler(pb.cpuinfo_entry),
    'MEMFD_FILE': entry_handler(pb.memfd_file_entry),
    'MEMFD_INODE': entry_handler(pb.memfd_inode_entry),
    'BPFMAP_FILE': entry_handler(pb.bpfmap_file_entry),
    'BPFMAP_DATA': entry_handler(pb.bpfmap_data_entry,
                                 bpfmap_data_extra_handler()),
    'APPARMOR': entry_handler(pb.apparmor_entry),
}
|
|
|
|
|
2014-12-31 14:06:48 +02:00
|
|
|
|
2015-09-16 16:16:24 +03:00
|
|
|
def __rhandler(f):
    """Read the image magic(s) from f and pick the matching handler.

    Returns a (magic_name, handler) pair.  Raises MagicException for
    an unknown magic value, and a plain Exception when the magic is
    known but no handler is registered for it.
    """
    # Images v1.1 NOTE: First read "first" magic.
    img_magic, = struct.unpack('i', f.read(4))
    first_magics = (magic.by_name['IMG_COMMON'], magic.by_name['IMG_SERVICE'])
    if img_magic in first_magics:
        # v1.1 layout: that was only the "first" magic; the real
        # image magic is the next 32-bit integer.
        img_magic, = struct.unpack('i', f.read(4))

    try:
        magic_name = magic.by_val[img_magic]
    except Exception:
        raise MagicException(img_magic)

    try:
        return magic_name, handlers[magic_name]
    except Exception:
        raise Exception("No handler found for image with magic " + magic_name)
|
2015-09-16 16:16:24 +03:00
|
|
|
|
2019-09-07 15:46:22 +03:00
|
|
|
|
|
|
|
def load(f, pretty=False, no_payload=False):
    """
    Convert criu image from binary format to dict(json).
    Takes a file-like object to read criu image from.
    Returns criu image in dict(json) format.
    """
    img_magic, handler = __rhandler(f)
    return {
        'magic': img_magic,
        'entries': handler.load(f, pretty, no_payload),
    }
|
2014-12-31 14:06:48 +02:00
|
|
|
|
|
|
|
|
2015-09-16 16:16:24 +03:00
|
|
|
def info(f):
    """Return a summary dict with the image's magic and entry count."""
    img_magic, handler = __rhandler(f)
    return {
        'magic': img_magic,
        'count': handler.count(f),
    }
|
2015-09-16 16:16:24 +03:00
|
|
|
|
2019-09-07 15:46:22 +03:00
|
|
|
|
|
|
|
def loads(s, pretty=False):
    """
    Same as load(), but takes a string.
    """
    return load(io.BytesIO(s), pretty)
|
|
|
|
|
2014-12-31 14:06:48 +02:00
|
|
|
|
|
|
|
def dump(img, f):
    """
    Convert criu image from dict(json) format to binary.
    Takes an image in dict(json) format and file-like
    object to write to.
    """
    m = img['magic']
    magic_val = magic.by_name[img['magic']]

    # Images v1.1 NOTE: use "second" magic to identify what "first"
    # should be written.
    if m != 'INVENTORY':
        # Service images (stats, irmap cache) get IMG_SERVICE as the
        # first magic; every other non-inventory image gets IMG_COMMON.
        if m in ('STATS', 'IRMAP_CACHE'):
            f.write(struct.pack('i', magic.by_name['IMG_SERVICE']))
        else:
            f.write(struct.pack('i', magic.by_name['IMG_COMMON']))

    f.write(struct.pack('i', magic_val))

    try:
        handler = handlers[m]
    except Exception:
        raise Exception("No handler found for image with such magic")

    handler.dump(img['entries'], f)
|
2014-12-31 14:06:48 +02:00
|
|
|
|
|
|
|
|
|
|
|
def dumps(img):
    """
    Same as dump(), but takes only an image and returns
    a string.
    """
    out = io.BytesIO()
    dump(img, out)
    return out.getvalue()
|