2014-12-29 12:46:16 +01:00
|
|
|
#
|
2015-01-19 18:03:23 +01:00
|
|
|
# The Qubes OS Project, https://www.qubes-os.org/
|
2014-12-29 12:46:16 +01:00
|
|
|
#
|
2015-01-19 18:03:23 +01:00
|
|
|
# Copyright (C) 2010-2015 Joanna Rutkowska <joanna@invisiblethingslab.com>
|
|
|
|
# Copyright (C) 2013-2015 Marek Marczykowski-Górecki
|
|
|
|
# <marmarek@invisiblethingslab.com>
|
|
|
|
# Copyright (C) 2014-2015 Wojtek Porczyk <woju@invisiblethingslab.com>
|
|
|
|
#
|
2017-10-12 00:11:50 +02:00
|
|
|
# This library is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2.1 of the License, or (at your option) any later version.
|
2014-12-29 12:46:16 +01:00
|
|
|
#
|
2017-10-12 00:11:50 +02:00
|
|
|
# This library is distributed in the hope that it will be useful,
|
2014-12-29 12:46:16 +01:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
2017-10-12 00:11:50 +02:00
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
2014-12-29 12:46:16 +01:00
|
|
|
#
|
2017-10-12 00:11:50 +02:00
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this library; if not, see <https://www.gnu.org/licenses/>.
|
2014-12-29 12:46:16 +01:00
|
|
|
#
|
|
|
|
|
2019-06-28 12:29:24 +02:00
|
|
|
import asyncio
|
2016-01-29 17:56:33 +01:00
|
|
|
import hashlib
|
2021-02-10 13:47:09 +01:00
|
|
|
import logging
|
2016-07-12 18:42:06 +02:00
|
|
|
import random
|
|
|
|
import string
|
2015-01-19 18:14:15 +01:00
|
|
|
import os
|
2021-02-10 13:47:09 +01:00
|
|
|
import os.path
|
2015-01-19 18:14:15 +01:00
|
|
|
import re
|
2017-02-02 13:03:08 +01:00
|
|
|
import socket
|
2015-01-19 18:14:15 +01:00
|
|
|
import subprocess
|
2021-02-10 13:47:09 +01:00
|
|
|
import tempfile
|
|
|
|
from contextlib import contextmanager, suppress
|
2015-01-19 18:14:15 +01:00
|
|
|
|
2016-07-13 20:38:46 +02:00
|
|
|
import pkg_resources
|
|
|
|
|
2015-01-23 18:37:40 +01:00
|
|
|
import docutils
|
2015-06-23 19:02:58 +02:00
|
|
|
import docutils.core
|
|
|
|
import docutils.io
|
2015-10-14 22:02:11 +02:00
|
|
|
import qubes.exc
|
2015-10-05 23:46:25 +02:00
|
|
|
|
2021-02-10 13:47:09 +01:00
|
|
|
# Module-wide logger; default for the durable file-manipulation helpers below.
LOGGER = logging.getLogger('qubes.utils')
|
|
|
|
|
2014-12-29 12:46:16 +01:00
|
|
|
|
2015-01-13 15:40:43 +01:00
|
|
|
def get_timezone():
    '''Determine the system timezone name (e.g. ``Europe/Warsaw``).

    Tries, in order: the ``/etc/localtime`` symlink (fc18+), the legacy
    ``/etc/sysconfig/clock`` file (<=fc17), and finally a hardlink scan of
    ``/usr/share/zoneinfo``.

    :returns: timezone name as :py:class:`str`, or ``None`` if it cannot
        be determined.  NOTE(review): the hardlink fallback returns
        :py:class:`bytes` — kept for backward compatibility, but callers
        should be checked.
    '''
    # fc18
    if os.path.islink('/etc/localtime'):
        # The original split('/')-then-join was a no-op; use the link
        # target directly.
        tz_path = os.readlink('/etc/localtime')
        return tz_path.split('zoneinfo/')[1]
    # <=fc17
    if os.path.exists('/etc/sysconfig/clock'):
        zone_re = re.compile(r'^ZONE="(.*)"')
        # 'with' ensures the file is closed even if parsing raises.
        with open('/etc/sysconfig/clock', "r") as clock_config:
            for line in clock_config:
                line_match = zone_re.match(line)
                if line_match:
                    return line_match.group(1)
        # fall through to the hardlink heuristic if no ZONE= line matched
    # last resort way, some applications makes /etc/localtime
    # hardlink instead of symlink...
    tz_info = os.stat('/etc/localtime')
    if tz_info.st_nlink > 1:
        # Find the zoneinfo file sharing the same inode; the context
        # manager waits for and reaps the child process.
        with subprocess.Popen(
                ['find', '/usr/share/zoneinfo',
                 '-inum', str(tz_info.st_ino), '-print', '-quit'],
                stdout=subprocess.PIPE) as p:
            tz_path = p.communicate()[0].strip()
        return tz_path.replace(b'/usr/share/zoneinfo/', b'')
    return None
|
|
|
|
|
2015-01-23 18:37:40 +01:00
|
|
|
|
|
|
|
def format_doc(docstring):
    '''Return parsed documentation string, stripping RST markup.
    '''
    if not docstring:
        return ''

    # Collapse all whitespace runs to single spaces before parsing.
    normalized = ' '.join(docstring.strip().split())

    # pylint: disable=unused-variable
    output, publisher = docutils.core.publish_programmatically(
        source_class=docutils.io.StringInput,
        source=normalized,
        source_path=None,
        destination_class=docutils.io.NullOutput,
        destination=None,
        destination_path=None,
        reader=None, reader_name='standalone',
        parser=None, parser_name='restructuredtext',
        writer=None, writer_name='null',
        settings=None, settings_spec=None,
        settings_overrides=None,
        config_section=None, enable_exit_status=None)
    return publisher.writer.document.astext()
|
2015-10-01 22:14:35 +02:00
|
|
|
|
|
|
|
def parse_size(size):
    '''Parse a human-readable size string (e.g. ``'100M'``) into bytes.

    Decimal (K/KB/M/MB/G/GB) and binary (Ki/KiB/Mi/MiB/Gi/GiB) suffixes
    are accepted case-insensitively; a bare number is returned as-is.
    '''
    suffixes = [
        ('K', 1000), ('KB', 1000),
        ('M', 1000 ** 2), ('MB', 1000 ** 2),
        ('G', 1000 ** 3), ('GB', 1000 ** 3),
        ('Ki', 1024), ('KiB', 1024),
        ('Mi', 1024 ** 2), ('MiB', 1024 ** 2),
        ('Gi', 1024 ** 3), ('GiB', 1024 ** 3),
    ]

    size = size.strip().upper()
    if size.isdigit():
        return int(size)

    for suffix, factor in suffixes:
        if size.endswith(suffix.upper()):
            number = size[:-len(suffix)].strip()
            return int(number) * factor

    raise qubes.exc.QubesException("Invalid size: {0}.".format(size))
|
2016-03-03 01:05:23 +01:00
|
|
|
|
2016-03-10 11:22:52 +01:00
|
|
|
def mbytes_to_kmg(size):
    '''Format a size given in MiB as a human-readable string.'''
    if size <= 1024:
        return "%d MiB" % size
    return "%d GiB" % (size / 1024)
|
2016-03-10 11:22:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def kbytes_to_kmg(size):
    '''Format a size given in KiB as a human-readable string.'''
    if size <= 1024:
        return "%d KiB" % size
    # Delegate larger values to the MiB formatter.
    return mbytes_to_kmg(size / 1024)
|
2016-03-10 11:22:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def bytes_to_kmg(size):
    '''Format a size given in bytes as a human-readable string.'''
    if size <= 1024:
        return "%d B" % size
    # Delegate larger values to the KiB formatter.
    return kbytes_to_kmg(size / 1024)
|
2016-03-10 11:22:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def size_to_human(size):
    """Humane readable size, with 1/10 precision"""
    kib = 1024.0
    if size < kib:
        return str(size)
    if size < kib ** 2:
        return str(round(size / kib, 1)) + ' KiB'
    if size < kib ** 3:
        return str(round(size / kib ** 2, 1)) + ' MiB'
    return str(round(size / kib ** 3, 1)) + ' GiB'
|
2016-03-10 11:22:52 +01:00
|
|
|
|
|
|
|
|
2016-03-03 01:05:23 +01:00
|
|
|
def urandom(size):
    '''Return a SHA-512 digest of *size* bytes of OS randomness.

    :raises IOError: if the OS random source returned nothing.
    '''
    data = os.urandom(size)
    if data is None:
        raise IOError('failed to read urandom')
    return hashlib.sha512(data).digest()
|
2016-04-28 16:00:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_entry_point_one(group, name):
    '''Load the unique entry point registered under *group*/*name*.

    :raises KeyError: if no entry point is registered.
    :raises TypeError: if more than one entry point matches.
    '''
    matches = tuple(pkg_resources.iter_entry_points(group, name))
    if not matches:
        raise KeyError(name)
    if len(matches) > 1:
        listing = ', '.join(
            '{}.{}'.format(ep.module_name, '.'.join(ep.attrs))
            for ep in matches)
        raise TypeError(
            'more than 1 implementation of {!r} found: {}'.format(
                name, listing))
    return matches[0].load()
|
2016-07-12 18:42:06 +02:00
|
|
|
|
|
|
|
|
|
|
|
def random_string(length=5):
    ''' Return random string consisting of ascii_leters and digits '''
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
|
2017-02-02 13:03:08 +01:00
|
|
|
|
|
|
|
def systemd_notify():
    '''Notify systemd that the service is ready.

    No-op when ``NOTIFY_SOCKET`` is unset (i.e. not running under
    systemd supervision).
    '''
    # Fixed local-variable typo: 'nofity_socket' -> 'notify_socket'.
    notify_socket = os.getenv('NOTIFY_SOCKET')
    if not notify_socket:
        return
    # A leading '@' denotes an abstract-namespace socket address.
    if notify_socket.startswith('@'):
        notify_socket = '\0' + notify_socket[1:]
    # Context manager closes the socket even if connect/sendall raises
    # (the original leaked the file descriptor on error).
    with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sock:
        sock.connect(notify_socket)
        sock.sendall(b'READY=1')
|
2017-07-20 03:07:46 +02:00
|
|
|
|
|
|
|
def match_vm_name_with_special(vm, name):
    '''Check if *vm* matches given name, which may be specified as @tag:...
    or @type:...'''
    tag_prefix = '@tag:'
    type_prefix = '@type:'
    if name.startswith(tag_prefix):
        return name[len(tag_prefix):] in vm.tags
    if name.startswith(type_prefix):
        return name[len(type_prefix):] == vm.__class__.__name__
    return name == vm.name
|
2019-06-28 12:29:24 +02:00
|
|
|
|
2021-02-10 13:47:09 +01:00
|
|
|
@contextmanager
def replace_file(dst, *, permissions, close_on_success=True,
                 logger=LOGGER, log_level=logging.DEBUG):
    ''' Yield a tempfile whose name starts with dst. If the block does
        not raise an exception, apply permissions and persist the
        tempfile to dst (which is allowed to already exist). Otherwise
        ensure that the tempfile is cleaned up.
    '''
    # Create the tempfile next to dst (name prefixed with 'dst~') so the
    # final rename stays on the same filesystem and is therefore atomic.
    tmp_dir, prefix = os.path.split(dst + '~')
    tmp = tempfile.NamedTemporaryFile(dir=tmp_dir, prefix=prefix, delete=False)
    try:
        yield tmp
        tmp.flush()
        os.fchmod(tmp.fileno(), permissions)
        # Push the data to disk before the rename makes it visible at dst.
        os.fsync(tmp.fileno())
        if close_on_success:
            tmp.close()
        rename_file(tmp.name, dst, logger=logger, log_level=log_level)
    except:
        # Bare except on purpose: also clean up on BaseException (e.g.
        # KeyboardInterrupt); the exception is always re-raised below.
        try:
            tmp.close()
        finally:
            remove_file(tmp.name, logger=logger, log_level=log_level)
        raise
|
|
|
|
|
|
|
|
def rename_file(src, dst, *, logger=LOGGER, log_level=logging.DEBUG):
    ''' Durably rename src to dst. '''
    os.rename(src, dst)
    # Persist the rename: sync the destination directory first, then the
    # source directory if the file moved between directories.
    dst_dir = os.path.dirname(dst)
    directories = [dst_dir]
    if os.path.dirname(src) != dst_dir:
        directories.append(os.path.dirname(src))
    for directory in directories:
        fsync_path(directory)
    logger.log(log_level, 'Renamed file: %r -> %r', src, dst)
|
|
|
|
|
|
|
|
def remove_file(path, *, logger=LOGGER, log_level=logging.DEBUG):
    ''' Durably remove the file at path, if it exists. Return whether
        we removed it. '''
    # NOTE(review): suppress() wraps the fsync and log calls too, so a
    # FileNotFoundError raised by fsync_path() (e.g. for a path with no
    # directory component) also yields False -- confirm this is intended.
    with suppress(FileNotFoundError):
        os.remove(path)
        # Flush the parent directory so the deletion is durable on disk.
        fsync_path(os.path.dirname(path))
        logger.log(log_level, 'Removed file: %r', path)
        return True
    return False
|
|
|
|
|
|
|
|
def fsync_path(path):
    '''Flush *path* (a regular file or a directory) to stable storage.'''
    # O_RDONLY is sufficient for fsync and also works for directories.
    descriptor = os.open(path, os.O_RDONLY)
    try:
        os.fsync(descriptor)
    finally:
        os.close(descriptor)
|
|
|
|
|
2019-06-28 12:29:24 +02:00
|
|
|
async def coro_maybe(value):
    '''Return *value*, first awaiting it if it is a coroutine object.

    Converted from the generator-based ``@asyncio.coroutine`` form, which
    was deprecated in Python 3.8 and removed in 3.11.
    '''
    if asyncio.iscoroutine(value):
        return await value
    return value
|
2019-06-28 12:29:25 +02:00
|
|
|
|
|
|
|
async def void_coros_maybe(values):
    ''' Ignore elements of the iterable values that are not coroutine
        objects. Run all coroutine objects to completion, in parallel
        to each other. If there were exceptions, re-raise the leftmost
        one (not necessarily chronologically first). Return nothing.
    '''
    # Converted from @asyncio.coroutine (removed in Python 3.11); also,
    # asyncio.wait() no longer accepts bare coroutine objects there, so
    # wrap each one in a task explicitly.
    coros = [val for val in values if asyncio.iscoroutine(val)]
    if coros:
        tasks = [asyncio.ensure_future(coro) for coro in coros]
        done, _ = await asyncio.wait(tasks)
        for task in done:
            task.result()  # re-raises exception if task failed
|