Mirror of https://github.com/openvswitch/ovs (synced 2025-10-27 15:18:06 +00:00)
python: Implement write support in Python IDL for OVSDB.
Until now, the Python bindings for OVSDB have not supported writing to the database. Instead, writes had to be done with "ovs-vsctl" subprocesses. This commit adds write support and brings the Python bindings in line with the C bindings.

This commit deletes the Python-specific IDL tests in favor of using the same tests as the C version of the IDL, which now pass with both implementations.

This commit also updates the two users of the Python IDL to use the new write support. I tested these updates only by writing unit tests for them, which appear in upcoming commits.
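For reference, the new write path looks roughly like the sketch below, which mirrors how the updated test-ovsdb.py and the two daemons use it. The schema path and the "mytable"/"name" column are hypothetical; Idl(remote, schema), Transaction, insert(), attribute assignment, and commit_block() are the interfaces added or reworked by this commit.

    import ovs.db.idl
    import ovs.db.schema
    import ovs.json
    import ovs.poller

    # Load the schema and connect the IDL to an OVSDB server.
    schema = ovs.db.schema.DbSchema.from_json(
        ovs.json.from_file("/usr/share/openvswitch/mydb.ovsschema"))
    idl = ovs.db.idl.Idl("unix:/var/run/openvswitch/db.sock", schema)

    # Wait until the IDL has a copy of the database contents.
    seqno = idl.change_seqno
    while idl.change_seqno == seqno and not idl.run():
        poller = ovs.poller.Poller()
        idl.wait(poller)
        poller.block()

    # Write through the IDL instead of spawning "ovs-vsctl".
    txn = ovs.db.idl.Transaction(idl)
    row = txn.insert(idl.tables["mytable"])
    row.name = "example"            # columns are plain attribute assignments
    status = txn.commit_block()     # blocks until the server replies
    print ovs.db.idl.Transaction.status_to_string(status)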
debian/ovs-monitor-ipsec (46 lines changed):
@@ -33,6 +33,7 @@ import socket
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import ovs.dirs
|
||||
from ovs.db import error
|
||||
from ovs.db import types
|
||||
import ovs.util
|
||||
@@ -376,7 +377,7 @@ def keep_table_columns(schema, table_name, column_types):
|
||||
table.columns = new_columns
|
||||
return table
|
||||
|
||||
def monitor_uuid_schema_cb(schema):
|
||||
def prune_schema(schema):
|
||||
string_type = types.Type(types.BaseType(types.StringType))
|
||||
optional_ssl_type = types.Type(types.BaseType(types.UuidType,
|
||||
ref_table_name='SSL'), None, 0, 1)
|
||||
@@ -425,18 +426,17 @@ def update_ipsec(ipsec, interfaces, new_interfaces):
|
||||
s_log.warning("skipping ipsec config for %s: %s" % (name, msg))
|
||||
|
||||
def get_ssl_cert(data):
|
||||
for ovs_rec in data["Open_vSwitch"].itervalues():
|
||||
if ovs_rec.ssl.as_list():
|
||||
ssl_rec = data["SSL"][ovs_rec.ssl.as_scalar()]
|
||||
return (ssl_rec.certificate.as_scalar(),
|
||||
ssl_rec.private_key.as_scalar())
|
||||
for ovs_rec in data["Open_vSwitch"].rows.itervalues():
|
||||
ssl = ovs_rec.ssl
|
||||
if ssl and ssl.certificate and ssl.private_key:
|
||||
return (ssl.certificate, ssl.private_key)
|
||||
|
||||
return None
|
||||
|
||||
def main(argv):
|
||||
try:
|
||||
options, args = getopt.gnu_getopt(
|
||||
argv[1:], 'h', ['help'] + ovs.daemon.LONG_OPTIONS)
|
||||
argv[1:], 'h', ['help', 'root-prefix='] + ovs.daemon.LONG_OPTIONS)
|
||||
except getopt.GetoptError, geo:
|
||||
sys.stderr.write("%s: %s\n" % (ovs.util.PROGRAM_NAME, geo.msg))
|
||||
sys.exit(1)
|
||||
@@ -444,6 +444,9 @@ def main(argv):
|
||||
for key, value in options:
|
||||
if key in ['-h', '--help']:
|
||||
usage()
|
||||
elif key == '--root-prefix':
|
||||
global root_prefix
|
||||
root_prefix = value
|
||||
elif not ovs.daemon.parse_opt(key, value):
|
||||
sys.stderr.write("%s: unhandled option %s\n"
|
||||
% (ovs.util.PROGRAM_NAME, key))
|
||||
@@ -455,7 +458,11 @@ def main(argv):
|
||||
sys.exit(1)
|
||||
|
||||
remote = args[0]
|
||||
idl = ovs.db.idl.Idl(remote, "Open_vSwitch", monitor_uuid_schema_cb)
|
||||
|
||||
schema_file = "%s/vswitch.ovsschema" % ovs.dirs.PKGDATADIR
|
||||
schema = ovs.db.schema.DbSchema.from_json(ovs.json.from_file(schema_file))
|
||||
prune_schema(schema)
|
||||
idl = ovs.db.idl.Idl(remote, schema)
|
||||
|
||||
ovs.daemon.daemonize()
|
||||
|
||||
@@ -469,20 +476,21 @@ def main(argv):
|
||||
poller.block()
|
||||
continue
|
||||
|
||||
ssl_cert = get_ssl_cert(idl.data)
|
||||
ssl_cert = get_ssl_cert(idl.tables)
|
||||
|
||||
new_interfaces = {}
|
||||
for rec in idl.data["Interface"].itervalues():
|
||||
if rec.type.as_scalar() == "ipsec_gre":
|
||||
name = rec.name.as_scalar()
|
||||
for rec in idl.tables["Interface"].rows.itervalues():
|
||||
if rec.type == "ipsec_gre":
|
||||
name = rec.name
|
||||
options = rec.options
|
||||
entry = {
|
||||
"remote_ip": rec.options.get("remote_ip"),
|
||||
"local_ip": rec.options.get("local_ip", "0.0.0.0/0"),
|
||||
"certificate": rec.options.get("certificate"),
|
||||
"private_key": rec.options.get("private_key"),
|
||||
"use_ssl_cert": rec.options.get("use_ssl_cert"),
|
||||
"peer_cert": rec.options.get("peer_cert"),
|
||||
"psk": rec.options.get("psk") }
|
||||
"remote_ip": options.get("remote_ip"),
|
||||
"local_ip": options.get("local_ip", "0.0.0.0/0"),
|
||||
"certificate": options.get("certificate"),
|
||||
"private_key": options.get("private_key"),
|
||||
"use_ssl_cert": options.get("use_ssl_cert"),
|
||||
"peer_cert": options.get("peer_cert"),
|
||||
"psk": options.get("psk") }
|
||||
|
||||
if entry["peer_cert"] and entry["psk"]:
|
||||
s_log.warning("both 'peer_cert' and 'psk' defined for %s"
|
||||
|
||||
@@ -67,7 +67,7 @@ EXTRA_DIST += \
        ovsdb/ovsdb-idlc.in \
        ovsdb/ovsdb-idlc.1
DISTCLEANFILES += ovsdb/ovsdb-idlc
SUFFIXES += .ovsidl
SUFFIXES += .ovsidl .ovsschema .py
OVSDB_IDLC = $(run_python) $(srcdir)/ovsdb/ovsdb-idlc.in
.ovsidl.c:
        $(OVSDB_IDLC) c-idl-source $< > $@.tmp
@@ -75,6 +75,9 @@ OVSDB_IDLC = $(run_python) $(srcdir)/ovsdb/ovsdb-idlc.in
.ovsidl.h:
        $(OVSDB_IDLC) c-idl-header $< > $@.tmp
        mv $@.tmp $@
.ovsschema.py:
        $(OVSDB_IDLC) python-module $< > $@.tmp
        mv $@.tmp $@

EXTRA_DIST += $(OVSIDL_BUILT)
BUILT_SOURCES += $(OVSIDL_BUILT)
@@ -548,6 +548,21 @@ void
    print " %s_columns_init();" % structName
    print "}"

def print_python_module(schema_file):
    schema = ovs.db.schema.DbSchema.from_json(ovs.json.from_file(schema_file))
    print """\
# Generated automatically -- do not modify! -*- buffer-read-only: t -*-

import ovs.db.schema
import ovs.json

__schema_json = \"\"\"
%s
\"\"\"

schema = ovs.db.schema.DbSchema.from_json(ovs.json.from_string(__schema_json))
""" % ovs.json.to_string(schema.to_json(), pretty=True)

def ovsdb_escape(string):
    def escape(match):
        c = match.group(0)
@@ -569,8 +584,6 @@ def ovsdb_escape(string):
            return '\\x%02x' % ord(c)
    return re.sub(r'["\\\000-\037]', escape, string)


def usage():
    print """\
%(argv0)s: ovsdb schema compiler
@@ -580,6 +593,7 @@ The following commands are supported:
  annotate SCHEMA ANNOTATIONS  print SCHEMA combined with ANNOTATIONS
  c-idl-header IDL             print C header file for IDL
  c-idl-source IDL             print C source file for IDL implementation
  python-module IDL            print Python module for IDL
  nroff IDL                    print schema documentation in nroff format

The following options are also available:
@@ -618,7 +632,8 @@ if __name__ == "__main__":

    commands = {"annotate": (annotateSchema, 2),
                "c-idl-header": (printCIDLHeader, 1),
                "c-idl-source": (printCIDLSource, 1)}
                "c-idl-source": (printCIDLSource, 1),
                "python-module": (print_python_module, 1)}

    if not args[0] in commands:
        sys.stderr.write("%s: unknown command \"%s\" "
@@ -81,6 +81,18 @@ class Atom(object):
|
||||
|
||||
@staticmethod
|
||||
def default(type_):
|
||||
"""Returns the default value for the given type_, which must be an
|
||||
instance of ovs.db.types.AtomicType.
|
||||
|
||||
The default value for each atomic type is;
|
||||
|
||||
- 0, for integer or real atoms.
|
||||
|
||||
- False, for a boolean atom.
|
||||
|
||||
- "", for a string atom.
|
||||
|
||||
- The all-zeros UUID, for a UUID atom."""
|
||||
return Atom(type_)
|
||||
|
||||
def is_default(self):
|
||||
@@ -102,12 +114,21 @@ class Atom(object):
|
||||
atom.check_constraints(base)
|
||||
return atom
|
||||
|
||||
@staticmethod
|
||||
def from_python(base, value):
|
||||
value = ovs.db.parser.float_to_int(value)
|
||||
if type(value) in base.type.python_types:
|
||||
atom = Atom(base.type, value)
|
||||
else:
|
||||
raise error.Error("expected %s, got %s" % (base.type, type(value)))
|
||||
atom.check_constraints(base)
|
||||
return atom
|
||||
|
||||
def check_constraints(self, base):
|
||||
"""Checks whether 'atom' meets the constraints (if any) defined in
|
||||
'base' and raises an ovs.db.error.Error if any constraint is violated.
|
||||
|
||||
'base' and 'atom' must have the same type.
|
||||
|
||||
Checking UUID constraints is deferred to transaction commit time, so
|
||||
this function does nothing for UUID constraints."""
|
||||
assert base.type == self.type
|
||||
@@ -363,6 +384,9 @@ class Datum(object):
|
||||
else:
|
||||
return [k.value for k in self.values.iterkeys()]
|
||||
|
||||
def as_dict(self):
|
||||
return dict(self.values)
|
||||
|
||||
def as_scalar(self):
|
||||
if len(self.values) == 1:
|
||||
if self.type.is_map():
|
||||
@@ -373,6 +397,97 @@ class Datum(object):
|
||||
else:
|
||||
return None
|
||||
|
||||
def to_python(self, uuid_to_row):
|
||||
"""Returns this datum's value converted into a natural Python
|
||||
representation of this datum's type, according to the following
|
||||
rules:
|
||||
|
||||
- If the type has exactly one value and it is not a map (that is,
|
||||
self.type.is_scalar() returns True), then the value is:
|
||||
|
||||
* An int or long, for an integer column.
|
||||
|
||||
* An int or long or float, for a real column.
|
||||
|
||||
* A bool, for a boolean column.
|
||||
|
||||
* A str or unicode object, for a string column.
|
||||
|
||||
* A uuid.UUID object, for a UUID column without a ref_table.
|
||||
|
||||
* An object represented the referenced row, for a UUID column with
|
||||
a ref_table. (For the Idl, this object will be an ovs.db.idl.Row
|
||||
object.)
|
||||
|
||||
If some error occurs (e.g. the database server's idea of the column
|
||||
is different from the IDL's idea), then the default value for the
|
||||
scalar type is used (see Atom.default()).
|
||||
|
||||
- Otherwise, if the type is not a map, then the value is a Python list
|
||||
whose elements have the types described above.
|
||||
|
||||
- Otherwise, the type is a map, and the value is a Python dict that
|
||||
maps from key to value, with key and value types determined as
|
||||
described above.
|
||||
|
||||
'uuid_to_row' must be a function that takes a value and an
|
||||
ovs.db.types.BaseType and translates UUIDs into row objects."""
|
||||
if self.type.is_scalar():
|
||||
value = uuid_to_row(self.as_scalar(), self.type.key)
|
||||
if value is None:
|
||||
return self.type.key.default()
|
||||
else:
|
||||
return value
|
||||
elif self.type.is_map():
|
||||
value = {}
|
||||
for k, v in self.values.iteritems():
|
||||
dk = uuid_to_row(k.value, self.type.key)
|
||||
dv = uuid_to_row(v.value, self.type.value)
|
||||
if dk is not None and dv is not None:
|
||||
value[dk] = dv
|
||||
return value
|
||||
else:
|
||||
s = set()
|
||||
for k in self.values:
|
||||
dk = uuid_to_row(k.value, self.type.key)
|
||||
if dk is not None:
|
||||
s.add(dk)
|
||||
return sorted(s)
|
||||
|
||||
@staticmethod
|
||||
def from_python(type_, value, row_to_uuid):
|
||||
"""Returns a new Datum with the given ovs.db.types.Type 'type_'. The
|
||||
new datum's value is taken from 'value', which must take the form
|
||||
described as a valid return value from Datum.to_python() for 'type'.
|
||||
|
||||
Each scalar value within 'value' is initally passed through
|
||||
'row_to_uuid', which should convert objects that represent rows (if
|
||||
any) into uuid.UUID objects and return other data unchanged.
|
||||
|
||||
Raises ovs.db.error.Error if 'value' is not in an appropriate form for
|
||||
'type_'."""
|
||||
d = {}
|
||||
if type(value) == dict:
|
||||
for k, v in value.iteritems():
|
||||
ka = Atom.from_python(type_.key, row_to_uuid(k))
|
||||
va = Atom.from_python(type_.value, row_to_uuid(v))
|
||||
d[ka] = va
|
||||
elif type(value) in (list, tuple):
|
||||
for k in value:
|
||||
ka = Atom.from_python(type_.key, row_to_uuid(k))
|
||||
d[ka] = None
|
||||
else:
|
||||
ka = Atom.from_python(type_.key, row_to_uuid(value))
|
||||
d[ka] = None
|
||||
|
||||
datum = Datum(type_, d)
|
||||
datum.check_constraints()
|
||||
if not datum.conforms_to_type():
|
||||
raise error.Error("%d values when type requires between %d and %d"
|
||||
% (len(d), type_.n_min, type_.n_max))
|
||||
|
||||
return datum
|
||||
|
||||
def __getitem__(self, key):
|
||||
if not isinstance(key, Atom):
|
||||
key = Atom.new(key)
|
||||
|
||||
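The Datum.to_python()/Datum.from_python() docstrings above describe the new conversion between OVSDB datums and native Python values. A minimal sketch of the round trip for a plain scalar string column (no ref_table, so the uuid/row translation callbacks can simply pass values through); the callback signatures follow the docstrings in this diff:

    from ovs.db import data, types

    # A required scalar string column type, as used elsewhere in this commit.
    string_type = types.Type(types.BaseType(types.StringType))

    # from_python() type-checks the value and builds a Datum; row_to_uuid only
    # matters for columns that reference other rows, so identity works here.
    datum = data.Datum.from_python(string_type, "hello", lambda value: value)

    # to_python() converts back; uuid_to_row is called as (value, base_type)
    # and can likewise return the value unchanged for non-reference columns.
    print datum.to_python(lambda value, base: value)    # -> hello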
python/ovs/db/idl.py (1006 lines changed): file diff suppressed because it is too large.
@@ -95,6 +95,9 @@ class DbSchema(object):
        json["version"] = self.version
        return json

    def copy(self):
        return DbSchema.from_json(self.to_json())

    def __follow_ref_table(self, column, base, base_name):
        if not base or base.type != types.UuidType or not base.ref_table_name:
            return
@@ -13,6 +13,7 @@
# limitations under the License.

import sys
import uuid

from ovs.db import error
import ovs.db.parser
@@ -20,9 +21,10 @@ import ovs.db.data
import ovs.ovsuuid

class AtomicType(object):
    def __init__(self, name, default):
    def __init__(self, name, default, python_types):
        self.name = name
        self.default = default
        self.python_types = python_types

    @staticmethod
    def from_string(s):
@@ -51,12 +53,12 @@ class AtomicType(object):
    def default_atom(self):
        return ovs.db.data.Atom(self, self.default)

VoidType = AtomicType("void", None)
IntegerType = AtomicType("integer", 0)
RealType = AtomicType("real", 0.0)
BooleanType = AtomicType("boolean", False)
StringType = AtomicType("string", "")
UuidType = AtomicType("uuid", ovs.ovsuuid.zero())
VoidType = AtomicType("void", None, ())
IntegerType = AtomicType("integer", 0, (int, long))
RealType = AtomicType("real", 0.0, (int, long, float))
BooleanType = AtomicType("boolean", False, (bool,))
StringType = AtomicType("string", "", (str, unicode))
UuidType = AtomicType("uuid", ovs.ovsuuid.zero(), (uuid.UUID,))

ATOMIC_TYPES = [VoidType, IntegerType, RealType, BooleanType, StringType,
                UuidType]
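With python_types attached to each AtomicType, Atom.from_python() in ovs/db/data.py (shown earlier in this diff) can check native Python values against a column's atomic type. A minimal illustration, assuming an unconstrained integer base type:

    from ovs.db import data, error, types

    base = types.BaseType(types.IntegerType)

    atom = data.Atom.from_python(base, 5)    # int is listed in IntegerType.python_types
    print atom.value                         # -> 5

    try:
        data.Atom.from_python(base, "five")  # str is not an accepted type for integers
    except error.Error:
        print "rejected, as expected"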
@@ -4,7 +4,7 @@ HAVE_PYTHON='@HAVE_PYTHON@'
PERL='@PERL@'
PYTHON='@PYTHON@'

PYTHONPATH=$PYTHONPATH:$abs_top_srcdir/python
PYTHONPATH=$PYTHONPATH:$abs_top_srcdir/python:$abs_top_builddir/tests
export PYTHONPATH

PYTHONIOENCODING=utf_8
@@ -50,7 +50,6 @@ TESTSUITE_AT = \
        tests/ovsdb-server.at \
        tests/ovsdb-monitor.at \
        tests/ovsdb-idl.at \
        tests/ovsdb-idl-py.at \
        tests/ovs-vsctl.at \
        tests/interface-reconfigure.at
TESTSUITE = $(srcdir)/tests/testsuite
@@ -279,15 +278,22 @@ EXTRA_DIST += tests/uuidfilt.pl tests/ovsdb-monitor-sort.pl
tests_test_ovsdb_LDADD = ovsdb/libovsdb.a lib/libopenvswitch.a $(SSL_LIBS)

# idltest schema and IDL
OVSIDL_BUILT += tests/idltest.c tests/idltest.h tests/idltest.ovsidl
OVSIDL_BUILT += \
        tests/idltest.c \
        tests/idltest.h \
        tests/idltest.ovsidl \
        tests/idltest.py
IDLTEST_IDL_FILES = tests/idltest.ovsschema tests/idltest.ann
EXTRA_DIST += $(IDLTEST_IDL_FILES)
CLEANFILES += tests/idltest.pyc tests/idltest.pyo
tests/idltest.ovsidl: $(IDLTEST_IDL_FILES)
        $(OVSDB_IDLC) -C $(srcdir) annotate $(IDLTEST_IDL_FILES) > $@.tmp
        mv $@.tmp $@

tests/idltest.c: tests/idltest.h

noinst_SCRIPTS += tests/idltest.py

noinst_PROGRAMS += tests/test-reconnect
tests_test_reconnect_SOURCES = tests/test-reconnect.c
tests_test_reconnect_LDADD = lib/libopenvswitch.a
@@ -1,149 +0,0 @@
|
||||
AT_BANNER([OVSDB -- interface description language (IDL) - Python])
|
||||
|
||||
# OVSDB_CHECK_IDL(TITLE, [PRE-IDL-TXN], TRANSACTIONS, OUTPUT, [KEYWORDS],
|
||||
# [FILTER])
|
||||
#
|
||||
# Creates a database with a schema derived from idltest.ovsidl, runs
|
||||
# each PRE-IDL-TXN (if any), starts an ovsdb-server on that database,
|
||||
# and runs "test-ovsdb idl" passing each of the TRANSACTIONS along.
|
||||
#
|
||||
# Checks that the overall output is OUTPUT. Before comparison, the
|
||||
# output is sorted (using "sort") and UUIDs in the output are replaced
|
||||
# by markers of the form <N> where N is a number. The first unique
|
||||
# UUID is replaced by <0>, the next by <1>, and so on. If a given
|
||||
# UUID appears more than once it is always replaced by the same
|
||||
# marker. If FILTER is supplied then the output is also filtered
|
||||
# through the specified program.
|
||||
#
|
||||
# TITLE is provided to AT_SETUP and KEYWORDS to AT_KEYWORDS.
|
||||
m4_define([OVSDB_CHECK_IDL_PY],
|
||||
[AT_SETUP([$1])
|
||||
AT_SKIP_IF([test $HAVE_PYTHON = no])
|
||||
AT_KEYWORDS([ovsdb server idl positive Python $5])
|
||||
AT_CHECK([ovsdb-tool create db $abs_srcdir/idltest.ovsschema],
|
||||
[0], [stdout], [ignore])
|
||||
AT_CHECK([ovsdb-server '-vPATTERN:console:ovsdb-server|%c|%m' --detach --pidfile=$PWD/pid --remote=punix:socket --unixctl=$PWD/unixctl db], [0], [ignore], [ignore])
|
||||
m4_if([$2], [], [],
|
||||
[AT_CHECK([ovsdb-client transact unix:socket $2], [0], [ignore], [ignore], [kill `cat pid`])])
|
||||
AT_CHECK([$PYTHON $srcdir/test-ovsdb.py -t10 idl unix:socket $3],
|
||||
[0], [stdout], [ignore], [kill `cat pid`])
|
||||
AT_CHECK([sort stdout | perl $srcdir/uuidfilt.pl]m4_if([$6],,, [[| $6]]),
|
||||
[0], [$4], [], [kill `cat pid`])
|
||||
OVSDB_SERVER_SHUTDOWN
|
||||
AT_CLEANUP])
|
||||
|
||||
OVSDB_CHECK_IDL_PY([simple idl, initially empty, no ops - Python],
|
||||
[],
|
||||
[],
|
||||
[000: empty
|
||||
001: done
|
||||
])
|
||||
|
||||
OVSDB_CHECK_IDL_PY([simple idl, initially empty, various ops - Python],
|
||||
[],
|
||||
[['["idltest",
|
||||
{"op": "insert",
|
||||
"table": "simple",
|
||||
"row": {"i": 1,
|
||||
"r": 2.0,
|
||||
"b": true,
|
||||
"s": "mystring",
|
||||
"u": ["uuid", "84f5c8f5-ac76-4dbc-a24f-8860eb407fc1"],
|
||||
"ia": ["set", [1, 2, 3]],
|
||||
"ra": ["set", [-0.5]],
|
||||
"ba": ["set", [true]],
|
||||
"sa": ["set", ["abc", "def"]],
|
||||
"ua": ["set", [["uuid", "69443985-7806-45e2-b35f-574a04e720f9"],
|
||||
["uuid", "aad11ef0-816a-4b01-93e6-03b8b4256b98"]]]}},
|
||||
{"op": "insert",
|
||||
"table": "simple",
|
||||
"row": {}}]' \
|
||||
'["idltest",
|
||||
{"op": "update",
|
||||
"table": "simple",
|
||||
"where": [],
|
||||
"row": {"b": true}}]' \
|
||||
'["idltest",
|
||||
{"op": "update",
|
||||
"table": "simple",
|
||||
"where": [],
|
||||
"row": {"r": 123.5}}]' \
|
||||
'["idltest",
|
||||
{"op": "insert",
|
||||
"table": "simple",
|
||||
"row": {"i": -1,
|
||||
"r": 125,
|
||||
"b": false,
|
||||
"s": "",
|
||||
"ia": ["set", [1]],
|
||||
"ra": ["set", [1.5]],
|
||||
"ba": ["set", [false]],
|
||||
"sa": ["set", []],
|
||||
"ua": ["set", []]}}]' \
|
||||
'["idltest",
|
||||
{"op": "update",
|
||||
"table": "simple",
|
||||
"where": [["i", "<", 1]],
|
||||
"row": {"s": "newstring"}}]' \
|
||||
'["idltest",
|
||||
{"op": "delete",
|
||||
"table": "simple",
|
||||
"where": [["i", "==", 0]]}]' \
|
||||
'reconnect']],
|
||||
[[000: empty
|
||||
001: {"error":null,"result":[{"uuid":["uuid","<0>"]},{"uuid":["uuid","<1>"]}]}
|
||||
002: i=0 r=0 b=false s= u=<2> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
002: i=1 r=2 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
003: {"error":null,"result":[{"count":2}]}
|
||||
004: i=0 r=0 b=true s= u=<2> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
004: i=1 r=2 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
005: {"error":null,"result":[{"count":2}]}
|
||||
006: i=0 r=123.5 b=true s= u=<2> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
006: i=1 r=123.5 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
007: {"error":null,"result":[{"uuid":["uuid","<6>"]}]}
|
||||
008: i=-1 r=125 b=false s= u=<2> ia=[1] ra=[1.5] ba=false sa=[] ua=[] uuid=<6>
|
||||
008: i=0 r=123.5 b=true s= u=<2> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
008: i=1 r=123.5 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
009: {"error":null,"result":[{"count":2}]}
|
||||
010: i=-1 r=125 b=false s=newstring u=<2> ia=[1] ra=[1.5] ba=false sa=[] ua=[] uuid=<6>
|
||||
010: i=0 r=123.5 b=true s=newstring u=<2> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
010: i=1 r=123.5 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
011: {"error":null,"result":[{"count":1}]}
|
||||
012: i=-1 r=125 b=false s=newstring u=<2> ia=[1] ra=[1.5] ba=false sa=[] ua=[] uuid=<6>
|
||||
012: i=1 r=123.5 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
013: reconnect
|
||||
014: i=-1 r=125 b=false s=newstring u=<2> ia=[1] ra=[1.5] ba=false sa=[] ua=[] uuid=<6>
|
||||
014: i=1 r=123.5 b=true s=mystring u=<3> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<4> <5>] uuid=<0>
|
||||
015: done
|
||||
]])
|
||||
|
||||
OVSDB_CHECK_IDL_PY([simple idl, initially populated - Python],
|
||||
[['["idltest",
|
||||
{"op": "insert",
|
||||
"table": "simple",
|
||||
"row": {"i": 1,
|
||||
"r": 2.0,
|
||||
"b": true,
|
||||
"s": "mystring",
|
||||
"u": ["uuid", "84f5c8f5-ac76-4dbc-a24f-8860eb407fc1"],
|
||||
"ia": ["set", [1, 2, 3]],
|
||||
"ra": ["set", [-0.5]],
|
||||
"ba": ["set", [true]],
|
||||
"sa": ["set", ["abc", "def"]],
|
||||
"ua": ["set", [["uuid", "69443985-7806-45e2-b35f-574a04e720f9"],
|
||||
["uuid", "aad11ef0-816a-4b01-93e6-03b8b4256b98"]]]}},
|
||||
{"op": "insert",
|
||||
"table": "simple",
|
||||
"row": {}}]']],
|
||||
[['["idltest",
|
||||
{"op": "update",
|
||||
"table": "simple",
|
||||
"where": [],
|
||||
"row": {"b": true}}]']],
|
||||
[[000: i=0 r=0 b=false s= u=<0> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
000: i=1 r=2 b=true s=mystring u=<2> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<3> <4>] uuid=<5>
|
||||
001: {"error":null,"result":[{"count":2}]}
|
||||
002: i=0 r=0 b=true s= u=<0> ia=[] ra=[] ba=[] sa=[] ua=[] uuid=<1>
|
||||
002: i=1 r=2 b=true s=mystring u=<2> ia=[1 2 3] ra=[-0.5] ba=true sa=[abc def] ua=[<3> <4>] uuid=<5>
|
||||
003: done
|
||||
]])
|
||||
@@ -1,7 +1,7 @@
AT_BANNER([OVSDB -- interface description language (IDL)])

# OVSDB_CHECK_IDL(TITLE, [PRE-IDL-TXN], TRANSACTIONS, OUTPUT, [KEYWORDS],
# [FILTER])
# OVSDB_CHECK_IDL_C(TITLE, [PRE-IDL-TXN], TRANSACTIONS, OUTPUT, [KEYWORDS],
# [FILTER])
#
# Creates a database with a schema derived from idltest.ovsidl, runs
# each PRE-IDL-TXN (if any), starts an ovsdb-server on that database,
@@ -16,8 +16,8 @@ AT_BANNER([OVSDB -- interface description language (IDL)])
# through the specified program.
#
# TITLE is provided to AT_SETUP and KEYWORDS to AT_KEYWORDS.
m4_define([OVSDB_CHECK_IDL],
  [AT_SETUP([$1])
m4_define([OVSDB_CHECK_IDL_C],
  [AT_SETUP([$1 - C])
   AT_KEYWORDS([ovsdb server idl positive $5])
   AT_CHECK([ovsdb-tool create db $abs_srcdir/idltest.ovsschema],
            [0], [stdout], [ignore])
@@ -31,6 +31,27 @@ m4_define([OVSDB_CHECK_IDL],
   OVSDB_SERVER_SHUTDOWN
   AT_CLEANUP])

# same as OVSDB_CHECK_IDL but uses the Python IDL implementation.
m4_define([OVSDB_CHECK_IDL_PY],
  [AT_SETUP([$1 - Python])
   AT_SKIP_IF([test $HAVE_PYTHON = no])
   AT_KEYWORDS([ovsdb server idl positive Python $5])
   AT_CHECK([ovsdb-tool create db $abs_srcdir/idltest.ovsschema],
            [0], [stdout], [ignore])
   AT_CHECK([ovsdb-server '-vPATTERN:console:ovsdb-server|%c|%m' --detach --pidfile=$PWD/pid --remote=punix:socket --unixctl=$PWD/unixctl db], [0], [ignore], [ignore])
   m4_if([$2], [], [],
     [AT_CHECK([ovsdb-client transact unix:socket $2], [0], [ignore], [ignore], [kill `cat pid`])])
   AT_CHECK([$PYTHON $srcdir/test-ovsdb.py -t10 idl $srcdir/idltest.ovsschema unix:socket $3],
            [0], [stdout], [ignore], [kill `cat pid`])
   AT_CHECK([sort stdout | perl $srcdir/uuidfilt.pl]m4_if([$6],,, [[| $6]]),
            [0], [$4], [], [kill `cat pid`])
   OVSDB_SERVER_SHUTDOWN
   AT_CLEANUP])

m4_define([OVSDB_CHECK_IDL],
  [OVSDB_CHECK_IDL_C($@)
   OVSDB_CHECK_IDL_PY($@)])

OVSDB_CHECK_IDL([simple idl, initially empty, no ops],
  [],
  [],
@@ -104,4 +104,3 @@ m4_include([tests/ovsdb-tool.at])
m4_include([tests/ovsdb-server.at])
m4_include([tests/ovsdb-monitor.at])
m4_include([tests/ovsdb-idl.at])
m4_include([tests/ovsdb-idl-py.at])
@@ -12,12 +12,12 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import codecs
|
||||
import getopt
|
||||
import re
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
from ovs.db import error
|
||||
import ovs.db.idl
|
||||
@@ -27,6 +27,7 @@ from ovs.db import types
|
||||
import ovs.ovsuuid
|
||||
import ovs.poller
|
||||
import ovs.util
|
||||
import idltest
|
||||
|
||||
def unbox_json(json):
|
||||
if type(json) == list and len(json) == 1:
|
||||
@@ -35,14 +36,14 @@ def unbox_json(json):
|
||||
return json
|
||||
|
||||
def do_default_atoms():
|
||||
for type in types.ATOMIC_TYPES:
|
||||
if type == types.VoidType:
|
||||
for type_ in types.ATOMIC_TYPES:
|
||||
if type_ == types.VoidType:
|
||||
continue
|
||||
|
||||
sys.stdout.write("%s: " % type.to_string())
|
||||
sys.stdout.write("%s: " % type_.to_string())
|
||||
|
||||
atom = data.Atom.default(type)
|
||||
if atom != data.Atom.default(type):
|
||||
atom = data.Atom.default(type_)
|
||||
if atom != data.Atom.default(type_):
|
||||
sys.stdout.write("wrong\n")
|
||||
sys.exit(1)
|
||||
|
||||
@@ -59,14 +60,14 @@ def do_default_data():
|
||||
valueBase = None
|
||||
else:
|
||||
valueBase = types.BaseType(value)
|
||||
type = types.Type(types.BaseType(key), valueBase, n_min, 1)
|
||||
assert type.is_valid()
|
||||
type_ = types.Type(types.BaseType(key), valueBase, n_min, 1)
|
||||
assert type_.is_valid()
|
||||
|
||||
sys.stdout.write("key %s, value %s, n_min %d: "
|
||||
% (key.to_string(), value.to_string(), n_min))
|
||||
|
||||
datum = data.Datum.default(type)
|
||||
if datum != data.Datum.default(type):
|
||||
datum = data.Datum.default(type_)
|
||||
if datum != data.Datum.default(type_):
|
||||
sys.stdout.write("wrong\n")
|
||||
any_errors = True
|
||||
else:
|
||||
@@ -86,8 +87,8 @@ def do_parse_base_type(type_string):
|
||||
|
||||
def do_parse_type(type_string):
|
||||
type_json = unbox_json(ovs.json.from_string(type_string))
|
||||
type = types.Type.from_json(type_json)
|
||||
print ovs.json.to_string(type.to_json(), sort_keys=True)
|
||||
type_ = types.Type.from_json(type_json)
|
||||
print ovs.json.to_string(type_.to_json(), sort_keys=True)
|
||||
|
||||
def do_parse_atoms(type_string, *atom_strings):
|
||||
type_json = unbox_json(ovs.json.from_string(type_string))
|
||||
@@ -102,10 +103,10 @@ def do_parse_atoms(type_string, *atom_strings):
|
||||
|
||||
def do_parse_data(type_string, *data_strings):
|
||||
type_json = unbox_json(ovs.json.from_string(type_string))
|
||||
type = types.Type.from_json(type_json)
|
||||
type_ = types.Type.from_json(type_json)
|
||||
for datum_string in data_strings:
|
||||
datum_json = unbox_json(ovs.json.from_string(datum_string))
|
||||
datum = data.Datum.from_json(type, datum_json)
|
||||
datum = data.Datum.from_json(type_, datum_json)
|
||||
print ovs.json.to_string(datum.to_json())
|
||||
|
||||
def do_sort_atoms(type_string, atom_strings):
|
||||
@@ -127,26 +128,54 @@ def do_parse_table(name, table_string, default_is_root_string='false'):
|
||||
table = ovs.db.schema.TableSchema.from_json(table_json, name)
|
||||
print ovs.json.to_string(table.to_json(default_is_root), sort_keys=True)
|
||||
|
||||
def do_parse_rows(table_string, *rows):
|
||||
table_json = unbox_json(ovs.json.from_string(table_string))
|
||||
table = ovs.db.schema.TableSchema.from_json(table_json, name)
|
||||
|
||||
def do_parse_schema(schema_string):
|
||||
schema_json = unbox_json(ovs.json.from_string(schema_string))
|
||||
schema = ovs.db.schema.DbSchema.from_json(schema_json)
|
||||
print ovs.json.to_string(schema.to_json(), sort_keys=True)
|
||||
|
||||
def print_idl(idl, step):
|
||||
simple = idl.tables["simple"].rows
|
||||
l1 = idl.tables["link1"].rows
|
||||
l2 = idl.tables["link2"].rows
|
||||
|
||||
n = 0
|
||||
for uuid, row in idl.data["simple"].iteritems():
|
||||
for row in simple.itervalues():
|
||||
s = ("%03d: i=%s r=%s b=%s s=%s u=%s "
|
||||
"ia=%s ra=%s ba=%s sa=%s ua=%s uuid=%s"
|
||||
% (step, row.i, row.r, row.b, row.s, row.u,
|
||||
row.ia, row.ra, row.ba, row.sa, row.ua, uuid))
|
||||
print(re.sub('""|,', "", s))
|
||||
row.ia, row.ra, row.ba, row.sa, row.ua, row.uuid))
|
||||
s = re.sub('""|,|u?\'', "", s)
|
||||
s = re.sub('UUID\(([^)]+)\)', r'\1', s)
|
||||
s = re.sub('False', 'false', s)
|
||||
s = re.sub('True', 'true', s)
|
||||
s = re.sub(r'(ba)=([^[][^ ]*) ', r'\1=[\2] ', s)
|
||||
print(s)
|
||||
n += 1
|
||||
|
||||
for row in l1.itervalues():
|
||||
s = ["%03d: i=%s k=" % (step, row.i)]
|
||||
if row.k:
|
||||
s.append(str(row.k.i))
|
||||
s.append(" ka=[")
|
||||
s.append(' '.join(sorted(str(ka.i) for ka in row.ka)))
|
||||
s.append("] l2=")
|
||||
if row.l2:
|
||||
s.append(str(row.l2[0].i))
|
||||
s.append(" uuid=%s" % row.uuid)
|
||||
print(''.join(s))
|
||||
n += 1
|
||||
|
||||
for row in l2.itervalues():
|
||||
s = ["%03d: i=%s l1=" % (step, row.i)]
|
||||
if row.l1:
|
||||
s.append(str(row.l1.i))
|
||||
s.append(" uuid=%s" % row.uuid)
|
||||
print(''.join(s))
|
||||
n += 1
|
||||
|
||||
if not n:
|
||||
print("%03d: empty" % step)
|
||||
sys.stdout.flush()
|
||||
|
||||
def substitute_uuids(json, symtab):
|
||||
if type(json) in [str, unicode]:
|
||||
@@ -174,8 +203,108 @@ def parse_uuids(json, symtab):
|
||||
for value in json.itervalues():
|
||||
parse_uuids(value, symtab)
|
||||
|
||||
def do_idl(remote, *commands):
|
||||
idl = ovs.db.idl.Idl(remote, "idltest")
|
||||
def idltest_find_simple(idl, i):
|
||||
for row in idl.tables["simple"].rows.itervalues():
|
||||
if row.i == i:
|
||||
return row
|
||||
return None
|
||||
|
||||
def idl_set(idl, commands, step):
|
||||
txn = ovs.db.idl.Transaction(idl)
|
||||
increment = False
|
||||
for command in commands.split(','):
|
||||
words = command.split()
|
||||
name = words[0]
|
||||
args = words[1:]
|
||||
|
||||
if name == "set":
|
||||
if len(args) != 3:
|
||||
sys.stderr.write('"set" command requires 3 arguments\n')
|
||||
sys.exit(1)
|
||||
|
||||
s = idltest_find_simple(idl, int(args[0]))
|
||||
if not s:
|
||||
sys.stderr.write('"set" command asks for nonexistent i=%d\n'
|
||||
% int(args[0]))
|
||||
sys.exit(1)
|
||||
|
||||
if args[1] == "b":
|
||||
s.b = args[2] == "1"
|
||||
elif args[1] == "s":
|
||||
s.s = args[2]
|
||||
elif args[1] == "u":
|
||||
s.u = uuid.UUID(args[2])
|
||||
elif args[1] == "r":
|
||||
s.r = float(args[2])
|
||||
else:
|
||||
sys.stderr.write('"set" comamnd asks for unknown column %s\n'
|
||||
% args[2])
|
||||
sys.stderr.exit(1)
|
||||
elif name == "insert":
|
||||
if len(args) != 1:
|
||||
sys.stderr.write('"set" command requires 1 argument\n')
|
||||
sys.exit(1)
|
||||
|
||||
s = txn.insert(idl.tables["simple"])
|
||||
s.i = int(args[0])
|
||||
elif name == "delete":
|
||||
if len(args) != 1:
|
||||
sys.stderr.write('"delete" command requires 1 argument\n')
|
||||
sys.exit(1)
|
||||
|
||||
s = idltest_find_simple(idl, int(args[0]))
|
||||
if not s:
|
||||
sys.stderr.write('"delete" command asks for nonexistent i=%d\n'
|
||||
% int(args[0]))
|
||||
sys.exit(1)
|
||||
s.delete()
|
||||
elif name == "verify":
|
||||
if len(args) != 2:
|
||||
sys.stderr.write('"verify" command requires 2 arguments\n')
|
||||
sys.exit(1)
|
||||
|
||||
s = idltest_find_simple(idl, int(args[0]))
|
||||
if not s:
|
||||
sys.stderr.write('"verify" command asks for nonexistent i=%d\n'
|
||||
% int(args[0]))
|
||||
sys.exit(1)
|
||||
|
||||
if args[1] in ("i", "b", "s", "u", "r"):
|
||||
s.verify(args[1])
|
||||
else:
|
||||
sys.stderr.write('"verify" command asks for unknown column '
|
||||
'"%s"\n' % args[1])
|
||||
sys.exit(1)
|
||||
elif name == "increment":
|
||||
if len(args) != 2:
|
||||
sys.stderr.write('"increment" command requires 2 arguments\n')
|
||||
sys.exit(1)
|
||||
|
||||
txn.increment(args[0], args[1], [])
|
||||
increment = True
|
||||
elif name == "abort":
|
||||
txn.abort()
|
||||
break
|
||||
elif name == "destroy":
|
||||
print "%03d: destroy" % step
|
||||
sys.stdout.flush()
|
||||
txn.abort()
|
||||
return
|
||||
else:
|
||||
sys.stderr.write("unknown command %s\n" % name)
|
||||
sys.exit(1)
|
||||
|
||||
status = txn.commit_block()
|
||||
sys.stdout.write("%03d: commit, status=%s"
|
||||
% (step, ovs.db.idl.Transaction.status_to_string(status)))
|
||||
if increment and status == ovs.db.idl.Transaction.SUCCESS:
|
||||
sys.stdout.write(", increment=%d" % txn.get_increment_new_value())
|
||||
sys.stdout.write("\n")
|
||||
sys.stdout.flush()
|
||||
|
||||
def do_idl(schema_file, remote, *commands):
|
||||
schema = ovs.db.schema.DbSchema.from_json(ovs.json.from_file(schema_file))
|
||||
idl = ovs.db.idl.Idl(remote, schema)
|
||||
|
||||
if commands:
|
||||
error, stream = ovs.stream.Stream.open_block(
|
||||
@@ -196,7 +325,7 @@ def do_idl(remote, *commands):
|
||||
command = command[1:]
|
||||
else:
|
||||
# Wait for update.
|
||||
while idl.get_seqno() == seqno and not idl.run():
|
||||
while idl.change_seqno == seqno and not idl.run():
|
||||
rpc.run()
|
||||
|
||||
poller = ovs.poller.Poller()
|
||||
@@ -207,10 +336,11 @@ def do_idl(remote, *commands):
|
||||
print_idl(idl, step)
|
||||
step += 1
|
||||
|
||||
seqno = idl.get_seqno()
|
||||
seqno = idl.change_seqno
|
||||
|
||||
if command == "reconnect":
|
||||
print("%03d: reconnect" % step)
|
||||
sys.stdout.flush()
|
||||
step += 1
|
||||
idl.force_reconnect()
|
||||
elif not command.startswith("["):
|
||||
@@ -235,10 +365,11 @@ def do_idl(remote, *commands):
|
||||
parse_uuids(reply.result, symtab)
|
||||
reply.id = None
|
||||
sys.stdout.write("%s\n" % ovs.json.to_string(reply.to_json()))
|
||||
sys.stdout.flush()
|
||||
|
||||
if rpc:
|
||||
rpc.close()
|
||||
while idl.get_seqno() == seqno and not idl.run():
|
||||
while idl.change_seqno == seqno and not idl.run():
|
||||
poller = ovs.poller.Poller()
|
||||
idl.wait(poller)
|
||||
poller.block()
|
||||
@@ -277,10 +408,10 @@ parse-table NAME OBJECT [DEFAULT-IS-ROOT]
|
||||
parse table NAME with info OBJECT
|
||||
parse-schema JSON
|
||||
parse JSON as an OVSDB schema, and re-serialize
|
||||
idl SERVER [TRANSACTION...]
|
||||
connect to SERVER and dump the contents of the database
|
||||
as seen initially by the IDL implementation and after
|
||||
executing each TRANSACTION. (Each TRANSACTION must modify
|
||||
idl SCHEMA SERVER [TRANSACTION...]
|
||||
connect to SERVER (which has the specified SCHEMA) and dump the
|
||||
contents of the database as seen initially by the IDL implementation
|
||||
and after executing each TRANSACTION. (Each TRANSACTION must modify
|
||||
the database or this command will hang.)
|
||||
|
||||
The following options are also available:
|
||||
@@ -313,8 +444,6 @@ def main(argv):
|
||||
else:
|
||||
sys.exit(0)
|
||||
|
||||
optKeys = [key for key, value in options]
|
||||
|
||||
if not args:
|
||||
sys.stderr.write("%s: missing command argument "
|
||||
"(use --help for help)\n" % ovs.util.PROGRAM_NAME)
|
||||
@@ -331,7 +460,7 @@ def main(argv):
|
||||
"parse-column": (do_parse_column, 2),
|
||||
"parse-table": (do_parse_table, (2, 3)),
|
||||
"parse-schema": (do_parse_schema, 1),
|
||||
"idl": (do_idl, (1,))}
|
||||
"idl": (do_idl, (2,))}
|
||||
|
||||
command_name = args[0]
|
||||
args = args[1:]
|
||||
|
||||
@@ -32,6 +32,7 @@ import time
|
||||
|
||||
import XenAPI
|
||||
|
||||
import ovs.dirs
|
||||
from ovs.db import error
|
||||
from ovs.db import types
|
||||
import ovs.util
|
||||
@@ -116,19 +117,26 @@ def call_vsctl(args):
|
||||
if exitcode != 0:
|
||||
s_log.warning("Couldn't call ovs-vsctl")
|
||||
|
||||
def set_external_id(table, record, key, value):
|
||||
if value:
|
||||
col = 'external-ids:"%s"="%s"' % (key, value)
|
||||
call_vsctl(["set", table, record, col])
|
||||
def set_or_delete(d, key, value):
|
||||
if value is None:
|
||||
if key in d:
|
||||
del d[key]
|
||||
return True
|
||||
else:
|
||||
call_vsctl(["remove", table, record, "external-ids", key])
|
||||
if d.get(key) != value:
|
||||
d[key] = value
|
||||
return True
|
||||
return False
|
||||
|
||||
def set_external_id(row, key, value):
|
||||
external_ids = row.external_ids
|
||||
if set_or_delete(external_ids, key, value):
|
||||
row.external_ids = external_ids
|
||||
|
||||
# XenServer does not call interface-reconfigure on internal networks,
|
||||
# which is where the fail-mode would normally be set.
|
||||
def update_fail_mode(name):
|
||||
rec = get_network_by_bridge(name)
|
||||
|
||||
def update_fail_mode(row):
|
||||
rec = get_network_by_bridge(row.name)
|
||||
if not rec:
|
||||
return
|
||||
|
||||
@@ -143,80 +151,57 @@ def update_fail_mode(name):
|
||||
if fail_mode not in ['standalone', 'secure']:
|
||||
fail_mode = 'standalone'
|
||||
|
||||
call_vsctl(["set", "bridge", name, "fail_mode=" + fail_mode])
|
||||
|
||||
def update_in_band_mgmt(name):
|
||||
rec = get_network_by_bridge(name)
|
||||
if row.fail_mode != fail_mode:
|
||||
row.fail_mode = fail_mode
|
||||
|
||||
def update_in_band_mgmt(row):
|
||||
rec = get_network_by_bridge(row.name)
|
||||
if not rec:
|
||||
return
|
||||
|
||||
dib = rec['other_config'].get('vswitch-disable-in-band')
|
||||
if not dib:
|
||||
call_vsctl(['remove', 'bridge', name, 'other_config',
|
||||
'disable-in-band'])
|
||||
elif dib in ['true', 'false']:
|
||||
call_vsctl(['set', 'bridge', name,
|
||||
'other_config:disable-in-band=' + dib])
|
||||
else:
|
||||
s_log.warning('"' + dib + '"'
|
||||
"isn't a valid setting for other_config:disable-in-band on " +
|
||||
name)
|
||||
|
||||
def update_bridge_id(name, ids):
|
||||
id = get_bridge_id(name, ids.get("xs-network-uuids"))
|
||||
other_config = row.other_config
|
||||
if dib and dib not in ['true', 'false']:
|
||||
s_log.warning('"%s" isn\'t a valid setting for '
|
||||
"other_config:disable-in-band on %s" % (dib, row.name))
|
||||
elif set_or_delete(other_config, 'disable-in-band', dib):
|
||||
row.other_config = other_config
|
||||
|
||||
if not id:
|
||||
def update_bridge_id(row):
|
||||
id_ = get_bridge_id(row.name, row.external_ids.get("xs-network-uuids"))
|
||||
if not id_:
|
||||
return
|
||||
|
||||
primary_id = id.split(";")[0]
|
||||
set_external_id(row, "bridge-id", id_.split(";")[0])
|
||||
|
||||
if ids.get("bridge-id") != primary_id:
|
||||
set_external_id("Bridge", name, "bridge-id", primary_id)
|
||||
ids["bridge-id"] = primary_id
|
||||
|
||||
def update_iface(name, ids):
|
||||
id = get_iface_id(name, ids.get("xs-vif-uuid"))
|
||||
if ids.get("iface-id") != id and id:
|
||||
set_external_id("Interface", name, "iface-id", id)
|
||||
ids["iface-id"] = id
|
||||
|
||||
status = ids.get("iface-status")
|
||||
if status:
|
||||
set_external_id("Interface", name, "iface-status", status)
|
||||
|
||||
def keep_table_columns(schema, table_name, column_types):
|
||||
def keep_table_columns(schema, table_name, columns):
|
||||
table = schema.tables.get(table_name)
|
||||
if not table:
|
||||
raise error.Error("schema has no %s table" % table_name)
|
||||
|
||||
new_columns = {}
|
||||
for column_name, column_type in column_types.iteritems():
|
||||
for column_name in columns:
|
||||
column = table.columns.get(column_name)
|
||||
if not column:
|
||||
raise error.Error("%s table schema lacks %s column"
|
||||
% (table_name, column_name))
|
||||
if column.type != column_type:
|
||||
raise error.Error("%s column in %s table has type \"%s\", "
|
||||
"expected type \"%s\""
|
||||
% (column_name, table_name,
|
||||
column.type.toEnglish(),
|
||||
column_type.toEnglish()))
|
||||
new_columns[column_name] = column
|
||||
table.columns = new_columns
|
||||
return table
|
||||
|
||||
def monitor_uuid_schema_cb(schema):
|
||||
def prune_schema(schema):
|
||||
string_type = types.Type(types.BaseType(types.StringType))
|
||||
string_map_type = types.Type(types.BaseType(types.StringType),
|
||||
types.BaseType(types.StringType),
|
||||
0, sys.maxint)
|
||||
|
||||
new_tables = {}
|
||||
for table_name in ("Bridge", "Interface"):
|
||||
new_tables[table_name] = keep_table_columns(
|
||||
schema, table_name, {"name": string_type,
|
||||
"external_ids": string_map_type})
|
||||
new_tables["Bridge"] = keep_table_columns(
|
||||
schema, "Bridge", ("name", "external_ids", "other_config",
|
||||
"fail_mode"))
|
||||
new_tables["Interface"] = keep_table_columns(
|
||||
schema, "Interface", ("name", "external_ids"))
|
||||
schema.tables = new_tables
|
||||
|
||||
def usage():
|
||||
@@ -227,32 +212,11 @@ def usage():
|
||||
print " -h, --help display this help message"
|
||||
sys.exit(0)
|
||||
|
||||
def handler(signum, frame):
|
||||
def handler(signum, _):
|
||||
global force_run
|
||||
if (signum == signal.SIGHUP):
|
||||
force_run = True
|
||||
|
||||
def update_tap_from_vif(idl, tap_name, vif_name):
|
||||
ifaces = idl.data["Interface"]
|
||||
tap = None
|
||||
vif = None
|
||||
|
||||
for i in ifaces.values():
|
||||
name = i.name.as_scalar().strip('"')
|
||||
if name == tap_name:
|
||||
tap = i
|
||||
elif name == vif_name:
|
||||
vif = i
|
||||
|
||||
if vif and tap:
|
||||
vxid = vif.external_ids
|
||||
txid = tap.external_ids
|
||||
|
||||
keys = ["attached-mac", "xs-network-uuid", "xs-vif-uuid", "xs-vm-uuid"]
|
||||
for k in keys:
|
||||
if vxid.get(k) != txid.get(k):
|
||||
set_external_id("Interface", tap_name, k, vxid.get(k))
|
||||
|
||||
def main(argv):
|
||||
global force_run
|
||||
|
||||
@@ -284,7 +248,10 @@ def main(argv):
|
||||
sys.exit(1)
|
||||
|
||||
remote = args[0]
|
||||
idl = ovs.db.idl.Idl(remote, "Open_vSwitch", monitor_uuid_schema_cb)
|
||||
schema_file = "%s/vswitch.ovsschema" % ovs.dirs.PKGDATADIR
|
||||
schema = ovs.db.schema.DbSchema.from_json(ovs.json.from_file(schema_file))
|
||||
prune_schema(schema)
|
||||
idl = ovs.db.idl.Idl(remote, schema)
|
||||
|
||||
ovs.daemon.daemonize()
|
||||
|
||||
@@ -295,8 +262,8 @@ def main(argv):
|
||||
|
||||
signal.signal(signal.SIGHUP, handler)
|
||||
|
||||
bridges = {}
|
||||
interfaces = {}
|
||||
bridges = {} # Map from bridge name to xs_network_uuids
|
||||
interfaces = {} # Map from interface name to
|
||||
while True:
|
||||
if not force_run and not idl.run():
|
||||
poller = ovs.poller.Poller()
|
||||
@@ -310,58 +277,77 @@ def main(argv):
|
||||
interfaces = {}
|
||||
force_run = False
|
||||
|
||||
txn = ovs.db.idl.Transaction(idl)
|
||||
|
||||
new_bridges = {}
|
||||
for rec in idl.data["Bridge"].itervalues():
|
||||
name = rec.name.as_scalar()
|
||||
xs_network_uuids = rec.external_ids.get("xs-network-uuids")
|
||||
bridge_id = rec.external_ids.get("bridge-id")
|
||||
new_bridges[name] = {"xs-network-uuids": xs_network_uuids,
|
||||
"bridge-id": bridge_id}
|
||||
for row in idl.tables["Bridge"].rows.itervalues():
|
||||
old_xnu = bridges.get(row.name)
|
||||
new_xnu = row.external_ids.get("xs-network-uuids", "")
|
||||
if old_xnu is None:
|
||||
# New bridge.
|
||||
update_fail_mode(row)
|
||||
update_in_band_mgmt(row)
|
||||
if new_xnu != old_xnu:
|
||||
# New bridge or bridge's xs-network-uuids has changed.
|
||||
update_bridge_id(row)
|
||||
new_bridges[row.name] = new_xnu
|
||||
bridges = new_bridges
|
||||
|
||||
iface_by_name = {}
|
||||
for row in idl.tables["Interface"].rows.itervalues():
|
||||
iface_by_name[row.name] = row
|
||||
|
||||
new_interfaces = {}
|
||||
for rec in idl.data["Interface"].itervalues():
|
||||
name = rec.name.as_scalar()
|
||||
xs_vif_uuid = rec.external_ids.get("xs-vif-uuid")
|
||||
iface_id = rec.external_ids.get("iface-id")
|
||||
new_interfaces[name] = {"xs-vif-uuid": xs_vif_uuid,
|
||||
"iface-id": iface_id}
|
||||
for row in idl.tables["Interface"].rows.itervalues():
|
||||
# Match up paired vif and tap devices.
|
||||
if row.name.startswith("vif"):
|
||||
vif = row
|
||||
tap = iface_by_name.get("tap%s" % row.name[3:])
|
||||
elif row.name.startswith("tap"):
|
||||
tap = row
|
||||
vif = iface_by_name.get("vif%s" % row.name[3:])
|
||||
else:
|
||||
tap = vif = None
|
||||
|
||||
if name.startswith("vif"):
|
||||
new_interfaces[name]["iface-status"] = "active"
|
||||
# Several tap external-ids need to be copied from the vif.
|
||||
if row == tap and vif:
|
||||
keys = ["attached-mac",
|
||||
"xs-network-uuid",
|
||||
"xs-vif-uuid",
|
||||
"xs-vm-uuid"]
|
||||
for k in keys:
|
||||
set_external_id(row, k, vif.external_ids.get(k))
|
||||
|
||||
#Tap devices take their xs-vif-uuid from their corresponding vif and
|
||||
#cause that vif to be labled inactive.
|
||||
for name in new_interfaces:
|
||||
if not name.startswith("tap"):
|
||||
continue
|
||||
# If it's a new interface or its xs-vif-uuid has changed, then
|
||||
# obtain the iface-id from XAPI.
|
||||
#
|
||||
# (A tap's xs-vif-uuid comes from its vif. That falls out
|
||||
# naturally from the copy loop above.)
|
||||
new_xvu = row.external_ids.get("xs-vif-uuid", "")
|
||||
old_xvu = interfaces.get(row.name)
|
||||
if old_xvu != new_xvu:
|
||||
iface_id = get_iface_id(row.name, new_xvu)
|
||||
if iface_id and row.external_ids.get("iface-id") != iface_id:
|
||||
set_external_id(row, "iface-id", iface_id)
|
||||
|
||||
vif = name.replace("tap", "vif", 1)
|
||||
# When there's a vif and a tap, the tap is active (used for
|
||||
# traffic). When there's just a vif, the vif is active.
|
||||
#
|
||||
# A tap on its own shouldn't happen, and we don't know
|
||||
# anything about other kinds of devices, so we don't use
|
||||
# an iface-status for those devices at all.
|
||||
if vif and tap:
|
||||
set_external_id(tap, "iface-status", "active")
|
||||
set_external_id(vif, "iface-status", "inactive")
|
||||
elif vif:
|
||||
set_external_id(vif, "iface-status", "active")
|
||||
else:
|
||||
set_external_id(row, "iface-status", None)
|
||||
|
||||
if vif in new_interfaces:
|
||||
xs_vif_uuid = new_interfaces[vif]["xs-vif-uuid"]
|
||||
new_interfaces[name]["xs-vif-uuid"] = xs_vif_uuid
|
||||
new_interfaces[row.name] = new_xvu
|
||||
interfaces = new_interfaces
|
||||
|
||||
new_interfaces[vif]["iface-status"] = "inactive"
|
||||
new_interfaces[name]["iface-status"] = "active"
|
||||
|
||||
update_tap_from_vif(idl, name, vif)
|
||||
|
||||
if bridges != new_bridges:
|
||||
for name,ids in new_bridges.items():
|
||||
if name not in bridges:
|
||||
update_fail_mode(name)
|
||||
update_in_band_mgmt(name)
|
||||
|
||||
if (name not in bridges) or (bridges[name] != ids):
|
||||
update_bridge_id(name, ids)
|
||||
|
||||
bridges = new_bridges
|
||||
|
||||
if interfaces != new_interfaces:
|
||||
for name,ids in new_interfaces.items():
|
||||
if (name not in interfaces) or (interfaces[name] != ids):
|
||||
update_iface(name, ids)
|
||||
interfaces = new_interfaces
|
||||
txn.commit_block()
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
|
||||