| #!/usr/bin/python |
| # |
| |
| # Copyright (C) 2007, 2008, 2009 Google Inc. |
| # |
| # This program is free software; you can redistribute it and/or modify |
| # it under the terms of the GNU General Public License as published by |
| # the Free Software Foundation; either version 2 of the License, or |
| # (at your option) any later version. |
| # |
| # This program is distributed in the hope that it will be useful, but |
| # WITHOUT ANY WARRANTY; without even the implied warranty of |
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| # General Public License for more details. |
| # |
| # You should have received a copy of the GNU General Public License |
| # along with this program; if not, write to the Free Software |
| # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA |
| # 02110-1301, USA. |
| |
| # pylint: disable=C0103,E1103 |
| |
| # C0103: invalid name NoDefault |
# E1103: Instance of 'foo' has no 'bar' member (but some types could
| # not be inferred) |
| |
| |
| """Tool to upgrade the configuration file. |
| |
| This code handles only the types supported by simplejson. As an |
| example, 'set' is a 'list'. |
| |
| @note: this has lots of duplicate content with C{cfgupgrade}. Ideally, it |
| should be merged. |
| |
| """ |
| |
| |
| import os |
| import os.path |
| import sys |
| import optparse |
| import logging |
| import errno |
| |
| from ganeti import constants |
| from ganeti import serializer |
| from ganeti import utils |
| from ganeti import cli |
| from ganeti import pathutils |
| |
| |
| options = None |
| args = None |
| |
# Unique sentinel object used to detect calls made without a default value
| NoDefault = object() |
| |
# Mapping of old 1.2 instance keys to new hypervisor parameter keys
| INST_HV_CHG = { |
| "hvm_pae": constants.HV_PAE, |
| "vnc_bind_address": constants.HV_VNC_BIND_ADDRESS, |
| "initrd_path": constants.HV_INITRD_PATH, |
| "hvm_nic_type": constants.HV_NIC_TYPE, |
| "kernel_path": constants.HV_KERNEL_PATH, |
| "hvm_acpi": constants.HV_ACPI, |
| "hvm_cdrom_image_path": constants.HV_CDROM_IMAGE_PATH, |
| "hvm_boot_order": constants.HV_BOOT_ORDER, |
| "hvm_disk_type": constants.HV_DISK_TYPE, |
| } |
| |
# Mapping of old 1.2 instance keys to new backend parameter (beparams) keys
| INST_BE_CHG = { |
| "vcpus": constants.BE_VCPUS, |
| "memory": constants.BE_MEMORY, |
| "auto_balance": constants.BE_AUTO_BALANCE, |
| } |
| |
| # Field names |
| F_SERIAL = "serial_no" |
| |
| |
| class Error(Exception): |
| """Generic exception""" |
| pass |
| |
| |
| def SsconfName(key): |
| """Returns the file name of an (old) ssconf key. |
| |
| """ |
| return "%s/ssconf_%s" % (options.data_dir, key) |
| |
| |
def ReadFile(file_name, default=NoDefault):
  """Reads a file.

  @param file_name: the file to read
  @param default: value to return if the file does not exist; when left as
      L{NoDefault}, the C{IOError} is propagated instead

  """
| logging.debug("Reading %s", file_name) |
| try: |
| fh = open(file_name, "r") |
| except IOError, err: |
| if default is not NoDefault and err.errno == errno.ENOENT: |
| return default |
| raise |
| |
| try: |
| return fh.read() |
| finally: |
| fh.close() |
| |
| |
| def WriteFile(file_name, data): |
| """Writes a configuration file. |
| |
| """ |
| logging.debug("Writing %s", file_name) |
| utils.WriteFile(file_name=file_name, data=data, mode=0600, |
| dry_run=options.dry_run, backup=True) |
| |
| |
| def GenerateSecret(all_secrets): |
| """Generate an unique DRBD secret. |
| |
| This is a copy from ConfigWriter. |
| |
| """ |
| retries = 64 |
| while retries > 0: |
| secret = utils.GenerateSecret() |
| if secret not in all_secrets: |
| break |
| retries -= 1 |
| else: |
| raise Error("Can't generate unique DRBD secret") |
  # Remember the secret so that later calls don't hand out a duplicate
  all_secrets.add(secret)
  return secret
| |
| |
| def SetupLogging(): |
| """Configures the logging module. |
| |
| """ |
| formatter = logging.Formatter("%(asctime)s: %(message)s") |
| |
| stderr_handler = logging.StreamHandler() |
| stderr_handler.setFormatter(formatter) |
| if options.debug: |
| stderr_handler.setLevel(logging.NOTSET) |
| elif options.verbose: |
| stderr_handler.setLevel(logging.INFO) |
| else: |
| stderr_handler.setLevel(logging.CRITICAL) |
| |
| root_logger = logging.getLogger("") |
| root_logger.setLevel(logging.NOTSET) |
| root_logger.addHandler(stderr_handler) |
| |
| |
| def Cluster12To20(cluster): |
| """Upgrades the cluster object from 1.2 to 2.0. |
| |
| """ |
| logging.info("Upgrading the cluster object") |
  # The configuration version moved to the top level in 2.0; drop the old
  # cluster-level key (the new one is set in main)
| if "config_version" in cluster: |
| del cluster["config_version"] |
| |
| # Add old ssconf keys back to config |
| logging.info(" - importing ssconf keys") |
| for key in ("master_node", "master_ip", "master_netdev", "cluster_name"): |
| if key not in cluster: |
| cluster[key] = ReadFile(SsconfName(key)).strip() |
| |
| if "default_hypervisor" not in cluster: |
| old_hyp = ReadFile(SsconfName("hypervisor")).strip() |
| if old_hyp == "xen-3.0": |
| hyp = "xen-pvm" |
| elif old_hyp == "xen-hvm-3.1": |
| hyp = "xen-hvm" |
| elif old_hyp == "fake": |
| hyp = "fake" |
| else: |
| raise Error("Unknown old hypervisor name '%s'" % old_hyp) |
| |
| logging.info("Setting the default and enabled hypervisor") |
| cluster["default_hypervisor"] = hyp |
| cluster["enabled_hypervisors"] = [hyp] |
| |
| # hv/be params |
| if "hvparams" not in cluster: |
| logging.info(" - adding hvparams") |
| cluster["hvparams"] = constants.HVC_DEFAULTS |
| if "beparams" not in cluster: |
| logging.info(" - adding beparams") |
| cluster["beparams"] = {constants.PP_DEFAULT: constants.BEC_DEFAULTS} |
| |
| # file storage |
| if "file_storage_dir" not in cluster: |
| cluster["file_storage_dir"] = pathutils.DEFAULT_FILE_STORAGE_DIR |
| |
| # candidate pool size |
| if "candidate_pool_size" not in cluster: |
| cluster["candidate_pool_size"] = constants.MASTER_POOL_SIZE_DEFAULT |
| |
| |
| def Node12To20(node): |
| """Upgrades a node from 1.2 to 2.0. |
| |
| """ |
| logging.info("Upgrading node %s", node['name']) |
| if F_SERIAL not in node: |
| node[F_SERIAL] = 1 |
| if "master_candidate" not in node: |
| node["master_candidate"] = True |
| for key in "offline", "drained": |
| if key not in node: |
| node[key] = False |
| |
| |
| def Instance12To20(drbd_minors, secrets, hypervisor, instance): |
| """Upgrades an instance from 1.2 to 2.0. |
| |
| """ |
| if F_SERIAL not in instance: |
| instance[F_SERIAL] = 1 |
| |
| if "hypervisor" not in instance: |
| instance["hypervisor"] = hypervisor |
| |
| # hvparams changes |
| if "hvparams" not in instance: |
| instance["hvparams"] = hvp = {} |
| for old, new in INST_HV_CHG.items(): |
| if old in instance: |
| if (instance[old] is not None and |
| instance[old] != constants.VALUE_DEFAULT and # no longer valid in 2.0 |
| new in constants.HVC_DEFAULTS[hypervisor]): |
| hvp[new] = instance[old] |
| del instance[old] |
| |
| # beparams changes |
| if "beparams" not in instance: |
| instance["beparams"] = bep = {} |
| for old, new in INST_BE_CHG.items(): |
| if old in instance: |
| if instance[old] is not None: |
| bep[new] = instance[old] |
| del instance[old] |
| |
| # disk changes |
| for disk in instance["disks"]: |
| Disk12To20(drbd_minors, secrets, disk) |
| |
| # other instance changes |
| if "status" in instance: |
| instance["admin_up"] = instance["status"] == "up" |
| del instance["status"] |
| |
| |
| def Disk12To20(drbd_minors, secrets, disk): |
| """Upgrades a disk from 1.2 to 2.0. |
| |
| """ |
| if "mode" not in disk: |
| disk["mode"] = constants.DISK_RDWR |
| if disk["dev_type"] == constants.DT_DRBD8: |
| old_lid = disk["logical_id"] |
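    # The first two logical_id entries are the two node names; the 2.0
    # format additionally carries one DRBD minor per node and a shared
    # secret, which are appended below.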
| for node in old_lid[:2]: |
| if node not in drbd_minors: |
| raise Error("Can't find node '%s' while upgrading disk" % node) |
| drbd_minors[node] += 1 |
| minor = drbd_minors[node] |
| old_lid.append(minor) |
| old_lid.append(GenerateSecret(secrets)) |
| del disk["physical_id"] |
| if disk["children"]: |
| for child in disk["children"]: |
| Disk12To20(drbd_minors, secrets, child) |
| |
| |
| def main(): |
| """Main program. |
| |
| """ |
| # pylint: disable=W0603 |
| global options, args |
| |
| program = os.path.basename(sys.argv[0]) |
| |
| # Option parsing |
  parser = optparse.OptionParser(usage="%prog [--debug|--verbose] [--force]"
                                       " [--dry-run] [--path=DIR]")
| parser.add_option("--dry-run", dest="dry_run", |
| action="store_true", |
| help="Try to do the conversion, but don't write" |
| " output file") |
| parser.add_option(cli.FORCE_OPT) |
| parser.add_option(cli.DEBUG_OPT) |
| parser.add_option(cli.VERBOSE_OPT) |
| parser.add_option("--path", help="Convert configuration in this" |
| " directory instead of '%s'" % pathutils.DATA_DIR, |
| default=pathutils.DATA_DIR, dest="data_dir") |
| (options, args) = parser.parse_args() |
| |
  # Build the file names locally because they might have been renamed
  # between versions.
| options.data_dir = os.path.abspath(options.data_dir) |
| options.CONFIG_DATA_PATH = options.data_dir + "/config.data" |
| options.SERVER_PEM_PATH = options.data_dir + "/server.pem" |
| options.KNOWN_HOSTS_PATH = options.data_dir + "/known_hosts" |
| options.RAPI_CERT_FILE = options.data_dir + "/rapi.pem" |
| |
| SetupLogging() |
| |
| # Option checking |
| if args: |
| raise Error("No arguments expected") |
| |
| if not options.force: |
| usertext = ("%s MUST be run on the master node. Is this the master" |
| " node and are ALL instances down?" % program) |
| if not cli.AskUser(usertext): |
| sys.exit(1) |
| |
| # Check whether it's a Ganeti configuration directory |
  if not ((os.path.isfile(options.CONFIG_DATA_PATH) and
           os.path.isfile(options.SERVER_PEM_PATH)) or
          os.path.isfile(options.KNOWN_HOSTS_PATH)):
| raise Error(("%s does not seem to be a known Ganeti configuration" |
| " directory") % options.data_dir) |
| |
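  # If the ssconf_config_version file is missing, assume a 1.2 configuration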
| config_version = ReadFile(SsconfName("config_version"), "1.2").strip() |
| logging.info("Found configuration version %s", config_version) |
| |
| config_data = serializer.LoadJson(ReadFile(options.CONFIG_DATA_PATH)) |
| |
| # Ganeti 1.2? |
| if config_version == "1.2": |
| logging.info("Found a Ganeti 1.2 configuration") |
| |
| cluster = config_data["cluster"] |
| |
| old_config_version = cluster.get("config_version", None) |
| logging.info("Found old configuration version %s", old_config_version) |
| if old_config_version not in (3, ): |
| raise Error("Unsupported configuration version: %s" % |
| old_config_version) |
| if "version" not in config_data: |
| config_data["version"] = constants.BuildVersion(2, 0, 0) |
| if F_SERIAL not in config_data: |
| config_data[F_SERIAL] = 1 |
| |
| # Make sure no instance uses remote_raid1 anymore |
| remote_raid1_instances = [] |
| for instance in config_data["instances"].values(): |
| if instance["disk_template"] == "remote_raid1": |
| remote_raid1_instances.append(instance["name"]) |
| if remote_raid1_instances: |
| for name in remote_raid1_instances: |
| logging.error("Instance %s still using remote_raid1 disk template", |
| name) |
| raise Error("Unable to convert configuration as long as there are" |
| " instances using remote_raid1 disk template") |
| |
| # Build content of new known_hosts file |
| cluster_name = ReadFile(SsconfName("cluster_name")).rstrip() |
| cluster_key = cluster["rsahostkeypub"] |
| known_hosts = "%s ssh-rsa %s\n" % (cluster_name, cluster_key) |
| |
| Cluster12To20(cluster) |
| |
| # Add node attributes |
| logging.info("Upgrading nodes") |
| # stable-sort the names to have repeatable runs |
| for node_name in utils.NiceSort(config_data["nodes"].keys()): |
| Node12To20(config_data["nodes"][node_name]) |
| |
| # Instance changes |
| logging.info("Upgrading instances") |
| drbd_minors = dict.fromkeys(config_data["nodes"], 0) |
| secrets = set() |
| # stable-sort the names to have repeatable runs |
| for instance_name in utils.NiceSort(config_data["instances"].keys()): |
| Instance12To20(drbd_minors, secrets, cluster["default_hypervisor"], |
| config_data["instances"][instance_name]) |
| |
| else: |
| logging.info("Found a Ganeti 2.0 configuration") |
| |
| if "config_version" in config_data["cluster"]: |
| raise Error("Inconsistent configuration: found config_data in" |
| " configuration file") |
| |
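    # Nothing else to generate; an existing 2.0 configuration keeps its
    # known_hosts file as-is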
| known_hosts = None |
| |
| try: |
| logging.info("Writing configuration file") |
| WriteFile(options.CONFIG_DATA_PATH, serializer.DumpJson(config_data)) |
| |
| if known_hosts is not None: |
| logging.info("Writing SSH known_hosts file (%s)", known_hosts.strip()) |
| WriteFile(options.KNOWN_HOSTS_PATH, known_hosts) |
| |
| if not options.dry_run: |
| if not os.path.exists(options.RAPI_CERT_FILE): |
| logging.debug("Writing RAPI certificate to %s", options.RAPI_CERT_FILE) |
| utils.GenerateSelfSignedSslCert(options.RAPI_CERT_FILE) |
| |
| except: |
| logging.critical("Writing configuration failed. It is probably in an" |
| " inconsistent state and needs manual intervention.") |
| raise |
| |
| logging.info("Configuration file updated.") |
| |
| |
| if __name__ == "__main__": |
| main() |