diff --git a/c3/experiment.py b/c3/experiment.py
index 47ab2c87..bd2482cb 100755
--- a/c3/experiment.py
+++ b/c3/experiment.py
@@ -80,9 +80,9 @@ def set_created_by(self, config):
         self.created_by = config
 
-    def quick_setup(self, filepath: str) -> None:
+    def load_quick_setup(self, filepath: str) -> None:
         """
-        Load a quick setup file and create all necessary components.
+        Load a quick setup file.
 
         Parameters
         ----------
@@ -92,7 +92,18 @@ def quick_setup(self, filepath: str) -> None:
         """
         with open(filepath, "r") as cfg_file:
             cfg = hjson.loads(cfg_file.read())
+        self.quick_setup(cfg)
 
+    def quick_setup(self, cfg) -> None:
+        """
+        Load a quick setup cfg and create all necessary components.
+
+        Parameters
+        ----------
+        cfg : Dict
+            Configuration options
+
+        """
         model = Model()
         model.read_config(cfg["model"])
         gen = Generator()
diff --git a/c3/main.py b/c3/main.py
index 25faaf1a..561fbc48 100755
--- a/c3/main.py
+++ b/c3/main.py
@@ -5,7 +5,6 @@
 import os
 import hjson
 import argparse
-import c3.utils.parsers as parsers
 import c3.utils.tf_utils as tf_utils
 import tensorflow as tf
 from c3.parametermap import ParameterMap
@@ -13,71 +12,75 @@
 from c3.system.model import Model
 from c3.generator.generator import Generator
 
-logging.getLogger("tensorflow").disabled = True
+from c3.optimizers.c1 import C1
+from c3.optimizers.c2 import C2
+from c3.optimizers.c3 import C3
+from c3.optimizers.sensitivity import SET
 
-# flake8: noqa: C901
-if __name__ == "__main__":
-
-    os.nice(5)  # keep responsiveness when we overcommit memory
-    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("master_config")
-    args = parser.parse_args()
-
-    opt_config = args.master_config
-    with open(opt_config, "r") as cfg_file:
-        try:
-            cfg = hjson.loads(cfg_file.read())
-        except hjson.decoder.HjsonDecodeError:
-            raise Exception(f"Config {opt_config} is invalid.")
+logging.getLogger("tensorflow").disabled = True
 
-    optim_type = cfg["optim_type"]
+# flake8: noqa: C901
+def run_cfg(cfg, opt_config_filename, debug=False):
+    """Execute an optimization problem described in the cfg file.
+
+    Parameters
+    ----------
+    cfg : Dict[str, Union[str, int, float]]
+        Configuration file containing optimization options and information needed to
+        completely set up the system and optimization problem.
+    opt_config_filename : str
+        Path of the original config file, recorded on the optimizer via set_created_by().
+    debug : bool, optional
+        Skip running the actual optimization, by default False
+
+    """
+    optim_type = cfg.pop("optim_type")
+    optim_lib = {
+        "C1": C1,
+        "C2": C2,
+        "C3": C3,
+        "C3_confirm": C3,
+        "confirm": C3,
+        "SET": SET,
+    }
+    if optim_type not in optim_lib:
+        raise Exception("C3:ERROR:Unknown optimization type specified.")
 
     tf_utils.tf_setup()
     with tf.device("/CPU:0"):
         model = None
         gen = None
+        exp = None
         if "model" in cfg:
             model = Model()
-            model.read_config(cfg["model"])
+            model.read_config(cfg.pop("model"))
         if "generator" in cfg:
             gen = Generator()
-            gen.read_config(cfg["generator"])
+            gen.read_config(cfg.pop("generator"))
         if "instructions" in cfg:
             pmap = ParameterMap(model=model, generator=gen)
-            pmap.read_config(cfg["instructions"])
+            pmap.read_config(cfg.pop("instructions"))
             exp = Experiment(pmap)
         if "exp_cfg" in cfg:
             exp = Experiment()
-            exp.read_config(cfg["exp_cfg"])
-        else:
+            exp.read_config(cfg.pop("exp_cfg"))
+        if exp is None:
             print("C3:STATUS: No instructions specified. Performing quick setup.")
             exp = Experiment()
-            exp.quick_setup(opt_config)
-
-        if optim_type == "C1":
-            opt = parsers.create_c1_opt(opt_config, exp)
-            if cfg.pop("include_model", False):
-                opt.include_model()
-        elif optim_type == "C2":
-            eval_func = cfg["eval_func"]
-            opt = parsers.create_c2_opt(opt_config, eval_func)
-        elif optim_type == "C3" or optim_type == "C3_confirm":
-            print("C3:STATUS: creating c3 opt ...")
-            opt = parsers.create_c3_opt(opt_config)
-        elif optim_type == "SET":
-            print("C3:STATUS: creating set obj")
-            opt = parsers.create_sensitivity(opt_config)
-        elif optim_type == "confirm":
-            print("C3:STATUS: creating c3 opt ...")
-            opt = parsers.create_c3_opt(opt_config)
-            opt.inverse = True
-        else:
-            raise Exception("C3:ERROR:Unknown optimization type specified.")
+            exp.quick_setup(cfg)
+
+        exp.set_opt_gates(cfg.pop("opt_gates", None))
+        if "gateset_opt_map" in cfg:
+            exp.pmap.set_opt_map(
+                [[tuple(par) for par in pset] for pset in cfg.pop("gateset_opt_map")]
+            )
+        if "exp_opt_map" in cfg:
+            exp.pmap.set_opt_map(
+                [[tuple(par) for par in pset] for pset in cfg.pop("exp_opt_map")]
+            )
+
+        opt = optim_lib[optim_type](**cfg, pmap=exp.pmap)
         opt.set_exp(exp)
-        opt.set_created_by(opt_config)
+        opt.set_created_by(opt_config_filename)
 
         if "initial_point" in cfg:
             initial_points = cfg["initial_point"]
@@ -94,18 +97,12 @@
                         "C3:STATUS:Loading initial point from : "
                         f"{os.path.abspath(init_point)}"
                     )
-                    init_dir = os.path.basename(
-                        os.path.normpath(os.path.dirname(init_point))
-                    )
                 except FileNotFoundError as fnfe:
                     raise Exception(
                         f"C3:ERROR:No initial point found at "
                         f"{os.path.abspath(init_point)}. "
                     ) from fnfe
 
-        if "real_params" in cfg:
-            real_params = cfg["real_params"]
-
         if optim_type == "C1":
             if "adjust_exp" in cfg:
                 try:
@@ -121,23 +118,24 @@
                         f"{os.path.abspath(adjust_exp)} "
                         "Continuing with default."
                     ) from fnfe
-            opt.optimize_controls()
-        elif optim_type == "C2":
-            opt.optimize_controls()
+        if not debug:
+            opt.run()
 
-        elif optim_type == "C3" or optim_type == "confirm":
-            opt.read_data(cfg["datafile"])
-            opt.learn_model()
-        elif optim_type == "C3_confirm":
-            opt.read_data(cfg["datafile"])
-            opt.log_setup()
-            opt.confirm()
 
+if __name__ == "__main__":
+    os.nice(5)  # keep responsiveness when we overcommit memory
-        elif optim_type == "SET":
-            opt.read_data(cfg["datafile"])
-            opt.log_setup()
+    tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
 
-            print("sensitivity test ...")
-            opt.sensitivity()
+    parser = argparse.ArgumentParser()
+    parser.add_argument("master_config")
+    args = parser.parse_args()
+
+    opt_config = args.master_config
+    with open(opt_config, "r") as cfg_file:
+        try:
+            cfg = hjson.load(cfg_file)
+        except hjson.decoder.HjsonDecodeError:
+            raise Exception(f"Config {opt_config} is invalid.")
+    run_cfg(cfg, opt_config)
diff --git a/c3/optimizers/c1.py b/c3/optimizers/c1.py
index c036c222..34e3633a 100755
--- a/c3/optimizers/c1.py
+++ b/c3/optimizers/c1.py
@@ -4,9 +4,14 @@
 import shutil
 import time
 import tensorflow as tf
+from typing import Callable, List
+
 from c3.optimizers.optimizer import Optimizer
 from c3.utils.utils import log_setup
 
+from c3.libraries.algorithms import algorithms
+from c3.libraries.fidelities import fidelities
+
 
 class C1(Optimizer):
     """
@@ -39,43 +44,79 @@ class C1(Optimizer):
 
     def __init__(
         self,
-        dir_path,
         fid_func,
         fid_subspace,
         pmap,
-        callback_fids=[],
+        dir_path=None,
+        callback_fids=None,
         algorithm=None,
         store_unitaries=False,
         options={},
         run_name=None,
         interactive=True,
+        include_model=False,
         logger=None,
         fid_func_kwargs={},
     ) -> None:
+        if type(algorithm) is str:
+            algorithm = algorithms[algorithm]
         super().__init__(
             pmap=pmap,
             algorithm=algorithm,
             store_unitaries=store_unitaries,
             logger=logger,
         )
-        self.fid_func = fid_func
+        self.set_fid_func(fid_func)
+        self.callback_fids: List[Callable] = []
+        if callback_fids:
+            self.set_callback_fids(callback_fids)
         self.fid_subspace = fid_subspace
-        self.callback_fids = callback_fids
         self.options = options
         self.__dir_path = dir_path
         self.__run_name = run_name
         self.interactive = interactive
+        self.update_model = include_model
         self.fid_func_kwargs = fid_func_kwargs
+        self.run = (
+            self.optimize_controls
+        )  # Alias the legacy name for the method running the
+        # optimization
+
+    def set_fid_func(self, fid_func) -> None:
+        if type(fid_func) is str:
+            if self.pmap.model.lindbladian:
+                fid = "lindbladian_" + fid_func
+            else:
+                fid = fid_func
+            try:
+                self.fid_func = fidelities[fid]
+            except KeyError:
+                raise Exception(f"C3:ERROR:Unknown goal function: {fid} ")
+            print(f"C3:STATUS:Found {fid} in libraries.")
+        else:
+            self.fid_func = fid_func
+
+    def set_callback_fids(self, callback_fids) -> None:
+        if self.pmap.model.lindbladian:
+            cb_fids = ["lindbladian_" + f for f in callback_fids]
+        else:
+            cb_fids = callback_fids
+        for cb_fid in cb_fids:
+            try:
+                cb_fid_func = fidelities[cb_fid]
+            except KeyError:
+                raise Exception(f"C3:ERROR:Unknown goal function: {cb_fid}")
+            print(f"C3:STATUS:Found {cb_fid} in libraries.")
+            self.callback_fids.append(cb_fid_func)
 
     def log_setup(self) -> None:
         """
         Create the folders to store data.
""" - dir_path = os.path.abspath(self.__dir_path) run_name = self.__run_name if run_name is None: run_name = "c1_" + self.fid_func.__name__ + "_" + self.algorithm.__name__ - self.logdir = log_setup(dir_path, run_name) + self.logdir = log_setup(self.__dir_path, run_name) self.logname = "open_loop.log" if isinstance(self.exp.created_by, str): shutil.copy2(self.exp.created_by, self.logdir) diff --git a/c3/optimizers/c2.py b/c3/optimizers/c2.py index cd5c6712..d297da89 100755 --- a/c3/optimizers/c2.py +++ b/c3/optimizers/c2.py @@ -31,22 +31,23 @@ class C2(Optimizer): def __init__( self, - dir_path, eval_func, pmap, algorithm, + dir_path=None, + exp_type=None, exp_right=None, options={}, run_name=None, ): super().__init__(pmap=pmap, algorithm=algorithm) - self.eval_func = eval_func + self.set_eval_func(eval_func, exp_type) self.options = options self.exp_right = exp_right self.__dir_path = dir_path self.__run_name = run_name - def set_eval_func(self, eval_func): + def set_eval_func(self, eval_func, exp_type): """ Setter for the eval function. @@ -56,6 +57,7 @@ def set_eval_func(self, eval_func): Function to be evaluated """ + # TODO: Implement shell for experiment communication self.eval_func = eval_func def log_setup(self) -> None: @@ -70,11 +72,10 @@ def log_setup(self) -> None: User specified name for the run """ - dir_path = os.path.abspath(self.__dir_path) run_name = self.__run_name if run_name is None: run_name = self.eval_func.__name__ + self.algorithm.__name__ - self.logdir = log_setup(dir_path, run_name) + self.logdir = log_setup(self.__dir_path, run_name) self.logname = "calibration.log" # We create a copy of the source code of the evaluation function in the log diff --git a/c3/optimizers/c3.py b/c3/optimizers/c3.py index 66116d80..e80e12e0 100755 --- a/c3/optimizers/c3.py +++ b/c3/optimizers/c3.py @@ -4,12 +4,15 @@ import time import hjson import pickle -import itertools import numpy as np import tensorflow as tf from typing import List, Dict from c3.optimizers.optimizer import Optimizer from c3.utils.utils import log_setup + +from c3.libraries.algorithms import algorithms as alg_lib +from c3.libraries.estimators import estimators as est_lib +from c3.libraries.sampling import sampling as samp_lib from c3.libraries.estimators import ( dv_g_LL_prime, g_LL_prime_combined, @@ -48,10 +51,12 @@ class C3(Optimizer): def __init__( self, - dir_path, sampling, batch_sizes, pmap, + datafiles, + dir_path=None, + estimator=None, seqs_per_point=None, state_labels=None, callback_foms=[], @@ -60,15 +65,53 @@ def __init__( options={}, ): """Initiliase.""" + # Consistency checks + + if estimator: + raise Exception( + "C3:ERROR: Setting estimators is currently not supported." + "Only the standard logarithmic likelihood can be used at the moment." + "Please remove this setting." 
+ ) + if type(algorithm) is str: + try: + algorithm = alg_lib[algorithm] + except KeyError: + raise KeyError("C3:ERROR:Unknown algorithm.") + if type(sampling) is str: + try: + sampling = samp_lib[sampling] + except KeyError: + raise KeyError("C3:ERROR:Unknown sampling method.") + super().__init__(pmap=pmap, algorithm=algorithm) - self.sampling = sampling - self.batch_sizes = batch_sizes - self.seqs_per_point = seqs_per_point - self.state_labels = state_labels - self.callback_foms = callback_foms + + self.state_labels = {"all": None} + for target, labels in state_labels.items(): + self.state_labels[target] = [tuple(lab) for lab in labels] + + self.callback_foms = [] + for cb_fom in callback_foms: + if type(cb_fom) is str: + try: + self.callback_foms.append(est_lib[cb_fom]) + except KeyError: + print( + f"C3:WARNING: No estimator named '{cb_fom}' found." + " Skipping this callback estimator." + ) + else: + self.callback_foms.append(cb_fom) + self.inverse = False self.options = options + self.learn_data = {} + self.read_data(datafiles) + self.sampling = sampling + self.batch_sizes = batch_sizes + self.seqs_per_point = seqs_per_point + self.fom = g_LL_prime_combined self.__dir_path = dir_path self.__run_name = run_name @@ -85,13 +128,12 @@ def log_setup(self) -> None: User specified name for the run """ - dir_path = os.path.abspath(self.__dir_path) run_name = self.__run_name if run_name is None: run_name = "-".join( [self.algorithm.__name__, self.sampling.__name__, self.fom.__name__] ) - self.logdir = log_setup(dir_path, run_name) + self.logdir = log_setup(self.__dir_path, run_name) self.logname = "model_learn.log" # shutil.copy2(self.__real_model_folder, self.logdir) @@ -189,7 +231,7 @@ def _one_par_sim_vals( self.pmap.set_parameters(gateset_params, self.gateset_opt_map) # We find the unique gates used in the sequence and compute # only those. 
-        self.exp.opt_gates = list(set(itertools.chain.from_iterable(sequences)))
+        self.exp.set_opt_gates_seq(sequences)
         self.exp.compute_propagators()
         pops = self.exp.evaluate(sequences)
         sim_vals, pops = self.exp.process(
diff --git a/c3/optimizers/sensitivity.py b/c3/optimizers/sensitivity.py
index 1503f90b..acc3e83a 100755
--- a/c3/optimizers/sensitivity.py
+++ b/c3/optimizers/sensitivity.py
@@ -1,13 +1,16 @@
 """Object that deals with the sensitivity test."""
 import os
-import shutil
 import pickle
 import itertools
 import numpy as np
 import tensorflow as tf
 
 from c3.optimizers.optimizer import Optimizer
 from c3.utils.utils import log_setup
+
+from c3.libraries.algorithms import algorithms as alg_lib
+from c3.libraries.estimators import estimators as est_lib
+from c3.libraries.sampling import sampling as samp_lib
 from c3.libraries.estimators import (
     g_LL_prime_combined,
     g_LL_prime,
@@ -44,12 +47,13 @@ class SET(Optimizer):
 
     def __init__(
         self,
-        dir_path,
-        fom,
+        estimator,
         estimator_list,
         sampling,
         batch_sizes,
         pmap,
+        datafiles,
+        dir_path=None,
         state_labels=None,
         sweep_map=None,
         sweep_bounds=None,
@@ -59,18 +63,51 @@ def __init__(
         options={},
     ):
         """Initiliase."""
+        if type(algorithm) is str:
+            try:
+                algorithm = alg_lib[algorithm]
+            except KeyError:
+                raise KeyError("C3:ERROR:Unknown algorithm.")
+        if type(sampling) is str:
+            try:
+                sampling = samp_lib[sampling]
+            except KeyError:
+                raise KeyError("C3:ERROR:Unknown sampling method.")
         super().__init__(pmap=pmap, algorithm=algorithm)
-        self.fom = fom
-        self.estimator_list = estimator_list
+
+        if type(estimator) is str:
+            try:
+                estimator = est_lib[estimator]
+            except KeyError:
+                raise KeyError("C3:ERROR:Unknown estimator.")
+        self.fom = estimator
+
+        self.estimator_list = []  # create the list before populating it from estimator_list below
+        for est in estimator_list:
+            if type(est) is str:
+                try:
+                    self.estimator_list.append(est_lib[est])
+                except KeyError:
+                    print(
+                        f"C3:WARNING: No estimator named '{est}' found."
+                        " Skipping this estimator."
+                    )
+            else:
+                self.estimator_list.append(est)
+
+        self.learn_data = {}
+        self.read_data(datafiles)
         self.sampling = sampling
         self.batch_sizes = batch_sizes
-        self.state_labels = state_labels
+
+        self.state_labels = {"all": None}
+        for target, labels in state_labels.items():
+            self.state_labels[target] = [tuple(lab) for lab in labels]
+
         self.sweep_map = sweep_map
         self.opt_map = [sweep_map[0]]
         self.sweep_bounds = sweep_bounds
         self.options = options
         self.inverse = False
-        self.learn_data = {}
         self.same_dyn = same_dyn
         self.__dir_path = dir_path
         self.__run_name = run_name
@@ -87,7 +124,6 @@ def log_setup(self, dir_path, run_name) -> None:
         User specified name for the run
 
         """
-        dir_path = os.path.abspath(self.__dir_path)
         run_name = self.__run_name
         if run_name is None:
             run_name = "-".join(
@@ -98,9 +134,8 @@
                     self.fom.__name__,
                 ]
             )
-        self.logdir = log_setup(dir_path, run_name)
+        self.logdir = log_setup(self.__dir_path, run_name)
         self.logname = "sensitivity.log"
-        shutil.copy2(self.__real_model_folder, self.logdir)
 
     def read_data(self, datafiles):
         # TODO move common methods of sensitivity and c3 to super class
@@ -112,7 +147,6 @@ def read_data(self, datafiles):
         datafiles : list of str
             List of paths for files that contain learning data.
""" - self.__real_model_folder = os.path.dirname(datafiles.values()[0]) for target, datafile in datafiles.items(): with open(datafile, "rb+") as file: self.learn_data[target] = pickle.load(file) diff --git a/c3/qiskit/c3_backend.py b/c3/qiskit/c3_backend.py index c05500cf..0a205e2e 100644 --- a/c3/qiskit/c3_backend.py +++ b/c3/qiskit/c3_backend.py @@ -368,7 +368,7 @@ def run_experiment(self, experiment: QasmQobjExperiment) -> Dict[str, Any]: # setup C3 Experiment exp = Experiment() - exp.quick_setup(self._device_config) + exp.load_quick_setup(self._device_config) pmap = exp.pmap instructions = pmap.instructions diff --git a/c3/test_transmon_expanded.py b/c3/test_transmon_expanded.py deleted file mode 100644 index e69de29b..00000000 diff --git a/c3/utils/parsers.py b/c3/utils/parsers.py deleted file mode 100755 index e5139fc4..00000000 --- a/c3/utils/parsers.py +++ /dev/null @@ -1,370 +0,0 @@ -"""Parsers to read in config files and construct the corresponding objects.""" - -import hjson -import time -import random -from c3.libraries.algorithms import algorithms -from c3.libraries.estimators import estimators -from c3.libraries.fidelities import fidelities -from c3.libraries.sampling import sampling -from c3.optimizers.c1 import C1 -from c3.optimizers.c2 import C2 -from c3.optimizers.c3 import C3 -from c3.optimizers.sensitivity import SET - -# flake8: noqa: C901 - - -def create_c1_opt(optimizer_config, exp): - """ - Create an object for C1 optimal control. - - Parameters - ---------- - optimizer_config : str - File path to a hjson file containing the C1 configuration. - lindblad : boolean - Include lindbladian dynamics. - Returns - ------- - C1 - Open loop optimizer object - - """ - parameter_map = exp.pmap - lindblad = parameter_map.model.lindbladian - - with open(optimizer_config, "r") as cfg_file: - cfg = hjson.loads(cfg_file.read()) - - if lindblad: - fid = "lindbladian_" + cfg["fid_func"] - else: - fid = cfg["fid_func"] - - callback_fids = [] - if "callback_fids" in cfg: - if lindblad: - cb_fids = ["lindbladian_" + f for f in cfg["callback_fids"]] - else: - cb_fids = cfg["callback_fids"] - for cb_fid in cb_fids: - try: - cb_fid_func = fidelities[cb_fid] - except KeyError: - raise Exception(f"C3:ERROR:Unkown goal function: {cb_fid}") - print(f"C3:STATUS:Found {cb_fid} in libraries.") - callback_fids.append(cb_fid_func) - - try: - fid_func = fidelities[fid] - except KeyError: - raise Exception(f"C3:ERROR:Unkown goal function: {fid} ") - print(f"C3:STATUS:Found {fid} in libraries.") - - exp.set_opt_gates(cfg["opt_gates"]) - gateset_opt_map = [[tuple(par) for par in pset] for pset in cfg["gateset_opt_map"]] - parameter_map.set_opt_map(gateset_opt_map) - - algorithm = algorithms[cfg["algorithm"]] - options = {} - if "options" in cfg: - options = cfg["options"] - if "plot_dynamics" in cfg: - if cfg["plot_dynamics"] == "False": - plot_dynamics = False - elif cfg["plot_dynamics"] == "True": - plot_dynamics = True - else: - raise (Exception("Couldn't resolve setting of 'plot_dynamics'")) - else: - plot_dynamics = False - if "plot_pulses" in cfg: - if cfg["plot_pulses"] == "False": - plot_pulses = False - elif cfg["plot_pulses"] == "True": - plot_pulses = True - else: - raise (Exception("Couldn't resolve setting of 'plot_pulses'")) - else: - plot_pulses = False - if "store_unitaries" in cfg: - if cfg["store_unitaries"] == "False": - store_unitaries = False - elif cfg["store_unitaries"] == "True": - store_unitaries = True - else: - raise (Exception("Couldn't resolve setting of 'plot_dynamics'")) - 
else: - store_unitaries = False - run_name = None - if "run_name" in cfg: - run_name = cfg["run_name"] - opt = C1( - dir_path=cfg["dir_path"], - fid_func=fid_func, - fid_subspace=cfg["fid_subspace"], - pmap=parameter_map, - callback_fids=callback_fids, - algorithm=algorithm, - store_unitaries=store_unitaries, - options=options, - run_name=run_name, - ) - return opt - - -def create_c2_opt(optimizer_config, eval_func_path): - """ - Create a C2 Calibration object. Can be used to simulate the calibration process, if - the eval_func_path contains a ''real'' experiment. - - Parameters - ---------- - optimizer_config : str - File path to a hjson configuration file. - eval_func_path : str - File path to a python script, containing the functions used perform an - experiment. - - Returns - ------- - C2, Experiment - The C2 optimizer and, in the case of simulated calibration, the ''real'' - experiment object. - - """ - with open(optimizer_config, "r") as cfg_file: - try: - cfg = hjson.loads(cfg_file.read()) - except hjson.decoder.HjsonDecodeError as hjerr: - raise Exception(f"Config {optimizer_config} is invalid.") from hjerr - - exp_eval_namespace = run_path(eval_func_path) - - try: - exp_type = cfg["exp_type"] - except KeyError: - raise Exception("C3:ERROR:No experiment type found in " f"{optimizer_config}") - try: - eval_func = exp_eval_namespace[exp_type] - except KeyError as kerr: - raise Exception(f"C3:ERROR:Unkown experiment type: {cfg['exp_type']}") from kerr - - run_name = None - if "run_name" in cfg: - run_name = cfg["run_name"] - - gateset_opt_map = [[tuple(par) for par in pset] for pset in cfg["gateset_opt_map"]] - state_labels = None - if "state_labels" in cfg: - state_labels = cfg["state_labels"] - logdir = ( - cfg["dir_path"] - + "RB_c2_" - + time.strftime("%Y_%m_%d_T_%H_%M_%S/", time.localtime()) - ) - # if not os.path.isdir(logdir): - # os.makedirs(logdir) - if "exp" in exp_eval_namespace: - exp = exp_eval_namespace["exp"] - - def eval(p): - return eval_func(p, exp, gateset_opt_map, state_labels, logdir) - - else: - eval = eval_func - algorithm = algorithms[cfg["algorithm"]] - options = {} - if "options" in cfg: - options = cfg["options"] - opt = C2( - dir_path=cfg["dir_path"], - run_name=run_name, - eval_func=eval, - gateset_opt_map=gateset_opt_map, - algorithm=algorithm, - exp_right=exp, - options=options, - ) - return opt - - -def create_c3_opt(optimizer_config): - """ - The optimizer object for C3 model learning, or characterization. - - Parameters - ---------- - optimizer_config : str - Path to the hjson configuration file. - - """ - with open(optimizer_config, "r") as cfg_file: - cfg = hjson.loads(cfg_file.read()) - - state_labels = {"all": None} - if "state_labels" in cfg: - for target, labels in cfg["state_labels"].items(): - state_labels[target] = [tuple(l) for l in labels] - - if "estimator" in cfg: - raise Exception( - f"C3:ERROR: Setting estimators is currently not supported." - "Only the standard logarithmic likelihood can be used at the moment." - "Please remove this setting." - ) - - try: - cb_foms = cfg["callback_est"] - except KeyError: - print("C3:WARNING: Unknown callback estimators given.") - cb_foms = [] - - callback_foms = [] - for cb_fom in cb_foms: - try: - callback_foms.append(estimators[cb_fom]) - except KeyError: - print( - f"C3:WARNING: No estimator named '{cb_fom}' found." - " Skipping this callback estimator." 
- ) - - exp_opt_map = [tuple(a) for a in cfg["exp_opt_map"]] - - try: - algorithm = algorithms[cfg["algorithm"]] - except KeyError: - raise KeyError("C3:ERROR:Unkown Algorithm.") - - try: - sampling_func = sampling[cfg["sampling"]] - except KeyError: - raise KeyError("C3:ERROR:Unkown sampling method.") - - options = {} - if "options" in cfg: - options = cfg["options"] - - batch_sizes = cfg["batch_size"] - - if "seqs_per_point" in cfg: - seqs_per_point = cfg["seqs_per_point"] - else: - seqs_per_point = None - - run_name = None - if "run_name" in cfg: - run_name = cfg["run_name"] - opt = C3( - dir_path=cfg["dir_path"], - sampling=sampling_func, - batch_sizes=batch_sizes, - seqs_per_point=seqs_per_point, - opt_map=exp_opt_map, - state_labels=state_labels, - callback_foms=callback_foms, - callback_figs=callback_figs, - algorithm=algorithm, - options=options, - run_name=run_name, - ) - return opt - - -def create_sensitivity(task_config): - """ - Create the object to perform a sensitivity analysis. - - Parameters - ---------- - task_config : str - File path to the hjson configuration file. - - Returns - ------- - Sensitivity object. - - """ - with open(task_config, "r") as cfg_file: - cfg = hjson.loads(cfg_file.read()) - - sweep_map = [tuple(a) for a in cfg["sweep_map"]] - - state_labels = {"all": None} - if "state_labels" in cfg: - for target, labels in cfg["state_labels"].items(): - state_labels[target] = [tuple(l) for l in labels] - - try: - estimator = cfg["estimator"] - except KeyError: - print( - "C3:WARNING: No estimator given." " Using default estimator RMS distance." - ) - estimator = "rms_dist" - try: - fom = estimators[estimator] - except KeyError: - print( - f"C3:WARNING: No estimator named '{estimator}' found." - " Using default estimator RMS distance." - ) - fom = estimators["rms_dist"] - - try: - algorithm = algorithms[cfg["algorithm"]] - except KeyError: - raise KeyError("C3:ERROR:Unkown Algorithm.") - - try: - sampling_func = sampling[cfg["sampling"]] - except KeyError: - raise KeyError("C3:ERROR:Unkown sampling method.") - - try: - est_list = cfg["estimator_list"] - except KeyError: - print("C3:WARNING: No estimators given. Using RMS") - est_list = ["rms_dist"] - - estimator_list = [] - for est in est_list: - try: - estimator_list.append(estimators[est]) - except KeyError: - print( - f"C3:WARNING: No estimator named '{est}' found." - " Skipping this estimator." - ) - - batch_sizes = cfg["batch_size"] - - options = {} - if "options" in cfg: - options = cfg["options"] - - sweep_bounds = [] - for a in cfg["sweep_bounds"]: - sweep_bounds.append([eval(a[0]), eval(a[1])]) - - if "same_dyn" in cfg: - same_dyn = bool(cfg["same_dyn"]) - else: - same_dyn = False - - set = SET( - dir_path=cfg["dir_path"], - fom=fom, - estimator_list=estimator_list, - sampling=sampling_func, - batch_sizes=batch_sizes, - state_labels=state_labels, - sweep_map=sweep_map, - sweep_bounds=sweep_bounds, - algorithm=algorithm, - same_dyn=same_dyn, - options=options, - ) - return set diff --git a/c3/utils/utils.py b/c3/utils/utils.py index 3b07343a..d322d399 100755 --- a/c3/utils/utils.py +++ b/c3/utils/utils.py @@ -1,6 +1,7 @@ """Miscellaneous, general utilities.""" import time import os +import tempfile import numpy as np from tensorflow.python.framework import ops from typing import Tuple @@ -8,7 +9,7 @@ # SYSTEM AND SETUP -def log_setup(data_path: str, run_name: str = "run") -> str: +def log_setup(data_path: str = None, run_name: str = "run") -> str: """ Make sure the file path to save data exists. 
Create an appropriately named folder with date and time. Also creates a symlink "recent" to the folder. @@ -26,6 +27,10 @@ def log_setup(data_path: str, run_name: str = "run") -> str: The file path to store new data. """ + if data_path: + data_path = os.path.abspath(data_path) + else: + data_path = os.path.join(tempfile.TemporaryDirectory().name, "c3logs") if not os.path.isdir(data_path): os.makedirs(data_path) diff --git a/test/c1.cfg b/test/c1.cfg new file mode 100644 index 00000000..ea828a0c --- /dev/null +++ b/test/c1.cfg @@ -0,0 +1,34 @@ +{ + "optim_type": "C1", + "run_name": "01_tuneup", + "exp_cfg" : "test/one_qubit.hjson", + "include_model": false, + "fid_func": "average_infid_set", + "fid_subspace": ["Q1"], + "algorithm" : "lbfgs", + "options" : { + "maxfun": 2 + } + "opt_gates" : + [ + "rx90p" + ], + "gateset_opt_map" : + [ + [ + ["rx90p","Q1","cosine","amp"] + ], + [ + ["rx90p","Q1","cosine","delta"] + ], + [ + ["rx90p","Q1","cosine","freq_offset"] + ], + [ + ["rx90p","Q1","cosine","xy_angle"] + ], + [ + ["rx90p","Q1","carrier","framechange"] + ] + ] +} \ No newline at end of file diff --git a/test/c2.cfg b/test/c2.cfg new file mode 100755 index 00000000..d322cfeb --- /dev/null +++ b/test/c2.cfg @@ -0,0 +1,35 @@ +{ + "optim_type": "C2", + "eval_func" : "call_experiment.py", + "exp_type" : "ORBIT", + "algorithm" : "cmaes", + "options" : { + "popsize" : 25, + "maxfevals" : 450, + "init_point" : "True", + "tolfun" : 0.01, + "spread" : 0.01 + }, + "instructions": "test/instructions.cfg", + "gateset_opt_map" : + [ + [ + ["rx90p","d1", "gaussian", "amp"], + ["ry90p","d1", "gaussian", "amp"], + ["rx90m","d1", "gaussian", "amp"], + ["ry90m","d1", "gaussian", "amp"] + ], + [ + ["rx90p","d1", "gaussian", "delta"], + ["ry90p","d1", "gaussian", "delta"], + ["rx90m","d1", "gaussian", "delta"], + ["ry90m","d1", "gaussian", "delta"] + ], + [ + ["rx90p","d1", "gaussian", "freq_offset"], + ["ry90p","d1", "gaussian", "freq_offset"], + ["rx90m","d1", "gaussian", "freq_offset"], + ["ry90m","d1", "gaussian", "freq_offset"] + ] + ] +} diff --git a/test/c3.cfg b/test/c3.cfg new file mode 100644 index 00000000..1dd11e54 --- /dev/null +++ b/test/c3.cfg @@ -0,0 +1,41 @@ +{ + "run_name": "c3", + "optim_type": "C3", + "exp_cfg" : "test/one_qubit.hjson", + "datafiles" : + { + "orbit" : "examples/data/calibration/dataset.pickle", + }, + "algorithm" : "cma_pre_lbfgs", + "options" : + { + "cmaes": + { + "popsize" : 12, + "init_point" : "True", + "stop_at_convergence" : 10, + "ftarget" : 4, + "spread" : 0.05, + "stop_at_sigma" : 0.01 + }, + "lbfgs":{} + }, + "sampling" : "high_std", + "batch_sizes" : + { + "orbit" : 8 + }, + "state_labels" : + { + "orbit" : [[1, 0], [1, 1]] + }, + "exp_opt_map" : + [ + [ + ["Q1", "anhar"], + ], + [ + ["Q1", "freq"], + ] + ] +} diff --git a/test/one_qubit.hjson b/test/one_qubit.hjson new file mode 100644 index 00000000..1998e2f0 --- /dev/null +++ b/test/one_qubit.hjson @@ -0,0 +1,213 @@ +{ + instructions: + { + "rx90p":{ + name: "rx90p", + targets: [0], + gate_length: 8e-9, + drive_channels:{ + Q1:{ + cosine:{ + c3type: Envelope + shape: cosine + params: { + amp: { + value:0.05, + min_val:0.01, + max_val:0.6, + unit:V + }, + delta: { + value:0, + min_val:-0.1, + max_val:0.6, + unit:"" + }, + freq_offset:{ + value : 0e6, + unit : "Hz 2pi", + min_val : -50e6, + max_val : 50e6 + } + t_final:{ + value : 8e-9, + unit : s, + min_val : 1e-9, + max_val : 10e-9 + } + } + }, + carrier: { + c3type: Carrier + params: { + freq: { + value : 3.81966664926965e9, + unit : "Hz 2pi", + min_val : 
2.5e9, + max_val : 5.5e9 + }, + framechange: + { + value: 0 + min_val: -3 + max_val: 3 + unit: pi + symbol: \alpha + } + } + } + } + } + } + } + model: + { + Qubits: + { + Q1: + { + c3type: Qubit + params: + { + freq: + { + value: 3.82e9 + min_val: 3e9 + max_val: 8e9 + unit: "Hz 2pi" + symbol: \omega_1 + } + anhar: + { + value: -229e6 + min_val: -380000000.0 + max_val: -120000000.00000003 + unit: "Hz 2pi" + symbol: \alpha_1 + } + t1: + { + value: 77e-6 + min_val: 5e-06 + max_val: 9e-05 + unit: s + symbol: T_1 + } + t2star: + { + value: 49e-6 + min_val: 1e-05 + max_val: 9e-05 + unit: s + symbol: \T_2^* + } + temp: + { + value: 0.06 + min_val: 0.0 + max_val: 0.12 + unit: K + symbol: \alpha + } + } + hilbert_dim: 3 + } + } + Couplings: + { + Q1: + { + c3type: Drive + params: {} + hamiltonian_func: x_drive + connected: + [ + Q1 + ] + } + } + } + generator: + { + Devices: + { + lo: + { + c3type: LO + inputs: 0 + outputs: 1 + resolution: 100000000000.0 + } + awg: + { + c3type: AWG + inputs: 0 + outputs: 1 + resolution: 2400000000.0 + options: "drag_2" + } + mixer: + { + c3type: Mixer + inputs: 2 + outputs: 1 + resolution: 0 + } + dac: + { + c3type: DigitalToAnalog + inputs: 1 + outputs: 1 + resolution: 100000000000.0 + } + resp: + { + c3type: Response + inputs: 1 + outputs: 1 + params: + { + rise_time: + { + value: 3e-10 + min_val: 5e-11 + max_val: 6e-10 + unit: s + symbol: \alpha + } + } + resolution: 100000000000.0 + } + v2hz: + { + c3type: VoltsToHertz + inputs: 1 + outputs: 1 + params: + { + V_to_Hz: + { + value: 1000000000.0 + min_val: 900000000.0 + max_val: 1100000000.0 + unit: "Hz 2pi/V" + symbol: \alpha + } + } + resolution: 0 + } + } + Chains: + { + Q1: + [ + lo + awg + dac + resp + mixer + v2hz + ] + } + } +} diff --git a/test/sensitivity.cfg b/test/sensitivity.cfg new file mode 100644 index 00000000..cfce88e8 --- /dev/null +++ b/test/sensitivity.cfg @@ -0,0 +1,40 @@ +{ + "optim_type": "SET", + "exp_cfg" : "test/one_qubit.hjson", + "datafiles" : + { + "orbit" : "examples/data/calibration/dataset.pickle", + }, + "dir_path" : "/tmp", + "algorithm" : "sweep", + "options" : + { + "init_point" : "True", + "points" : 50 + }, + "sampling" : "high_std", + "batch_sizes" : + { + "orbit" : 8 + }, + "state_labels" : + { + "orbit" : [[1, 0], [1, 1]] + }, + "estimator" : "g_LL_prime", + "estimator_list" : [], + "sweep_map" : + [ + [ + ["Q1", "anhar"], + ], + [ + ["Q1", "freq"], + ] + ] + "sweep_bounds" : + [ + [4.9985e9, 5.0015e9], + [-215e6, -205e6] + ] +} diff --git a/test/test_noise.py b/test/test_noise.py index 8016c387..a742675d 100644 --- a/test/test_noise.py +++ b/test/test_noise.py @@ -1,3 +1,5 @@ +import os +import tempfile import pytest import pickle import numpy as np @@ -24,6 +26,8 @@ from c3.optimizers.c1_robust import C1_robust +logdir = os.path.join(tempfile.TemporaryDirectory().name, "c3logs") + qubit_lvls = 3 freq_q1 = 5e9 anhar_q1 = -210e6 @@ -231,7 +235,7 @@ def test_c1_robust(): noise_map = [[np.linspace(-0.1, 0.1, 5), [("dc_offset", "offset_amp")]]] opt = C1_robust( - dir_path="/tmp/c3log/", + dir_path=logdir, fid_func=fidelities.average_infid_set, fid_subspace=["Q1"], pmap=pmap, diff --git a/test/test_openqasm.py b/test/test_openqasm.py index 9a8eb205..8fc607d9 100644 --- a/test/test_openqasm.py +++ b/test/test_openqasm.py @@ -6,7 +6,7 @@ from c3.experiment import Experiment exp = Experiment() -exp.quick_setup("test/quickstart.hjson") +exp.load_quick_setup("test/quickstart.hjson") exp.enable_qasm() sequence = [ diff --git a/test/test_optim_init.py b/test/test_optim_init.py new 
file mode 100644
index 00000000..a58add23
--- /dev/null
+++ b/test/test_optim_init.py
@@ -0,0 +1,44 @@
+"""Test module to check if optimizer classes are initialized correctly by the main file.
+"""
+
+import hjson
+
+from c3.optimizers.c1 import C1
+from c3.experiment import Experiment
+from c3.main import run_cfg
+
+
+def test_main_c1() -> None:
+    with open("test/c1.cfg", "r") as cfg_file:
+        cfg = hjson.load(cfg_file)
+    run_cfg(cfg, "test/c1.cfg", debug=True)
+
+
+def test_main_c2() -> None:
+    with open("test/c2.cfg", "r") as cfg_file:
+        cfg = hjson.load(cfg_file)
+    run_cfg(cfg, "test/c2.cfg", debug=True)
+
+
+def test_main_c3() -> None:
+    with open("test/c3.cfg", "r") as cfg_file:
+        cfg = hjson.load(cfg_file)
+    run_cfg(cfg, "test/c3.cfg", debug=True)
+
+
+def test_main_sens() -> None:
+    with open("test/sensitivity.cfg", "r") as cfg_file:
+        cfg = hjson.load(cfg_file)
+    run_cfg(cfg, "test/sensitivity.cfg", debug=True)
+
+
+def test_create_c1() -> None:
+    with open("test/c1.cfg", "r") as cfg_file:
+        cfg = hjson.load(cfg_file)
+    cfg.pop("optim_type")
+    cfg.pop("gateset_opt_map")
+    cfg.pop("opt_gates")
+
+    exp = Experiment()
+    exp.read_config(cfg.pop("exp_cfg"))
+    C1(**cfg, pmap=exp.pmap)
diff --git a/test/test_quick_setup.py b/test/test_quick_setup.py
index cf1a76fe..af072645 100644
--- a/test/test_quick_setup.py
+++ b/test/test_quick_setup.py
@@ -6,7 +6,7 @@
 from c3.experiment import Experiment
 
 exp = Experiment()
-exp.quick_setup("test/quickstart.hjson")
+exp.load_quick_setup("test/quickstart.hjson")
 pmap = exp.pmap
 model = pmap.model
 generator = pmap.generator
diff --git a/test/test_two_qubits.py b/test/test_two_qubits.py
index 7b89c060..61e431f1 100644
--- a/test/test_two_qubits.py
+++ b/test/test_two_qubits.py
@@ -2,6 +2,8 @@
 integration testing module for C1 optimization through two-qubits example
 """
+import os
+import tempfile
 import copy
 import pickle
 import numpy as np
@@ -30,6 +32,7 @@
 from c3.optimizers.c1 import C1
 
+logdir = os.path.join(tempfile.TemporaryDirectory().name, "c3logs")
 
 qubit_lvls = 3
 freq_q1 = 5e9
@@ -307,7 +310,7 @@
 pmap.set_opt_map(gateset_opt_map)
 
 opt = C1(
-    dir_path="/tmp/c3log/",
+    dir_path=logdir,
     fid_func=fidelities.average_infid_set,
     fid_subspace=["Q1", "Q2"],
     pmap=pmap,
@@ -362,7 +365,7 @@ def test_optim_tf_sgd() -> None:
 
 @pytest.mark.integration
 def test_optim_lbfgs() -> None:
     lbfgs_opt = C1(
-        dir_path="/tmp/c3log/",
+        dir_path=logdir,
         fid_func=fidelities.average_infid_set,
         fid_subspace=["Q1", "Q2"],
         pmap=pmap,
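The new tests above double as usage notes for the reworked constructors. A condensed sketch of the two patterns they exercise, using the config files added in this diff (file paths are test fixtures, not a fixed API):

    import hjson

    from c3.experiment import Experiment
    from c3.optimizers.c1 import C1

    # Renamed method: load_quick_setup() reads a file, quick_setup() now takes the parsed dict.
    exp = Experiment()
    exp.load_quick_setup("test/quickstart.hjson")

    # C1 now resolves string names for fid_func and algorithm itself, so the remaining
    # hjson options can be passed straight to the constructor (mirrors test_create_c1).
    with open("test/c1.cfg", "r") as cfg_file:
        cfg = hjson.load(cfg_file)
    cfg.pop("optim_type")
    cfg.pop("gateset_opt_map")
    cfg.pop("opt_gates")

    exp_c1 = Experiment()
    exp_c1.read_config(cfg.pop("exp_cfg"))
    opt = C1(**cfg, pmap=exp_c1.pmap)
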