
Batch

Filter

filter_metadata

filter_metadata(__match_any__=False, **metadata)

Create a Batch object that has tasks filtered based on the values of metadata.

Parameters:

__match_any__ (bool): If True, a task is included if it matches any of the metadata filters; if False, a task is included only if it matches all of them. Defaults to False.

**metadata (MetadataFilterType): The metadata to filter on. The keys are the metadata names and the values are collections of values to filter on. The elements can be Real, Decimal, List[Real], or List[Decimal]. Defaults to {}.
Return

type(self): a Batch object with the filtered tasks, either LocalBatch or RemoteBatch depending on the type of self
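Example

A minimal sketch of filtering a batch. The metadata keys "detuning" and "rabi_amplitude" are hypothetical; real keys come from the parameters used when building the batch:

# keep tasks whose "detuning" metadata is 10.0 or 20.0 (hypothetical key)
filtered = batch.filter_metadata(detuning=[10.0, 20.0])

# with __match_any__=True, a task is kept if it matches at least one filter
either = batch.filter_metadata(
    __match_any__=True, detuning=[10.0], rabi_amplitude=[15.0]
)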

Source code in src/bloqade/analog/task/batch.py
@beartype
def filter_metadata(
    self, __match_any__: bool = False, **metadata: MetadataFilterType
) -> Union["LocalBatch", "RemoteBatch"]:
    """Create a Batch object that has tasks filtered based on the
    values of metadata.

    Args:
        __match_any__: if True, then a task will be included if it
            matches any of the metadata filters. If False, then a
            task will be included only if it matches all of the
            metadata filters. Defaults to False.

        **metadata: the metadata to filter on. The keys are the metadata
            names and the values (as a set) are the values to filter on.
            The elements in the set can be Real, Decimal, List[Real], or
            List[Decimal].

    Return:
        type(self): a Batch object with the filtered tasks, either
            LocalBatch or RemoteBatch depending on the type of self

    """

    def convert_to_decimal(element):
        if isinstance(element, list):
            return list(map(convert_to_decimal, element))
        elif isinstance(element, (Real, Decimal)):
            return Decimal(str(element))
        else:
            raise ValueError(
                f"Invalid value {element} for metadata filter. "
                "Only Real, Decimal, List[Real], and List[Decimal] "
                "are supported."
            )

    def metadata_match_all(task):
        return all(
            task.metadata.get(key) in value for key, value in metadata.items()
        )

    def metadata_match_any(task):
        return any(
            task.metadata.get(key) in value for key, value in metadata.items()
        )

    metadata = {k: list(map(convert_to_decimal, v)) for k, v in metadata.items()}

    metadata_filter = metadata_match_any if __match_any__ else metadata_match_all

    new_tasks = OrderedDict(
        [(k, v) for k, v in self.tasks.items() if metadata_filter(v)]
    )

    kw = dict(self.__dict__)
    kw["tasks"] = new_tasks

    return self.__class__(**kw)

LocalBatch dataclass

LocalBatch(source, tasks, name=None)

Bases: Serializable, Filter

_run

_run(multiprocessing=False, num_workers=None, **kwargs)

Private method to run tasks in the batch.

Parameters:

multiprocessing (bool): If True, tasks are run in parallel using multiple processes. If False, tasks are run sequentially in a single process. Defaults to False.

num_workers (Optional[int]): The maximum number of processes used to execute the given calls when multiprocessing is True. If None, the number of workers equals the number of processors on the machine. Defaults to None.

**kwargs: Arbitrary keyword arguments passed to each task's run method. Defaults to {}.

Raises:

ValueError: If num_workers is not None and multiprocessing is False.

Returns:

self: The instance of the batch with its tasks run.
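Example

Since _run is private, the usual entry point is rerun, which forwards these arguments; a minimal sketch:

# run tasks sequentially in the current process
batch = batch.rerun()

# run tasks in a pool of 4 worker processes; passing num_workers
# without multiprocessing=True raises ValueError
batch = batch.rerun(multiprocessing=True, num_workers=4)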

Source code in src/bloqade/analog/task/batch.py
def _run(
    self, multiprocessing: bool = False, num_workers: Optional[int] = None, **kwargs
):
    """
    Private method to run tasks in the batch.

    Args:
        multiprocessing (bool, optional): If True, tasks are run in parallel using multiple processes.
            If False, tasks are run sequentially in a single process. Defaults to False.
        num_workers (Optional[int], optional): The maximum number of processes that can be used to
            execute the given calls if multiprocessing is True. If None, the number of workers will be the number of processors on the machine.
        **kwargs: Arbitrary keyword arguments passed to the task's run method.

    Raises:
        ValueError: If num_workers is not None and multiprocessing is False.

    Returns:
        self: The instance of the batch with tasks run.
    """
    if multiprocessing:
        from concurrent.futures import ProcessPoolExecutor as Pool

        with Pool(max_workers=num_workers) as pool:
            futures = OrderedDict()
            for task_number, task in enumerate(self.tasks.values()):
                futures[task_number] = pool.submit(task.run, **kwargs)

            for task_number, future in futures.items():
                self.tasks[task_number] = future.result()

    else:
        if num_workers is not None:
            raise ValueError(
                "num_workers is only used when multiprocessing is enabled."
            )
        for task in self.tasks.values():
            task.run(**kwargs)

    return self

report

report()

Generate an analysis report based on the currently completed tasks in the LocalBatch.

Return

Report
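Example

A minimal sketch, assuming batch is a LocalBatch whose tasks have already been run; the Report accessors shown are assumptions about the Report class defined elsewhere in bloqade.analog:

report = batch.report()
df = report.dataframe                   # assumed accessor: raw measurements
densities = report.rydberg_densities()  # assumed accessor: per-site densities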

Source code in src/bloqade/analog/task/batch.py
def report(self) -> Report:
    """
    Generate an analysis report based on the
    currently completed tasks in the LocalBatch.

    Return:
        Report

    """

    ## this can potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df = df.sort_index(axis="index")

    name = "Local" if self.name is None else self.name

    return Report(df, metas, geos, name)

rerun

rerun(multiprocessing=False, num_workers=None, **kwargs)

Rerun all the tasks in the LocalBatch.

Return

self: the LocalBatch instance with its tasks re-run

Source code in src/bloqade/analog/task/batch.py
@beartype
def rerun(
    self, multiprocessing: bool = False, num_workers: Optional[int] = None, **kwargs
):
    """
    Rerun all the tasks in the LocalBatch.

    Return:
        self: the LocalBatch instance with its tasks re-run

    """

    return self._run(
        multiprocessing=multiprocessing, num_workers=num_workers, **kwargs
    )

RemoteBatch dataclass

RemoteBatch(source, tasks, name=None)

Bases: Serializable, Filter

total_nshots property

total_nshots

The total number of shots across all tasks in the RemoteBatch.

Return

number of shots

_submit

_submit(
    shuffle_submit_order=True,
    ignore_submission_error=False,
    **kwargs
)

Private method to submit tasks in the RemoteBatch.

Parameters:

shuffle_submit_order (bool): If True, tasks are submitted in a random order. If False, tasks are submitted in the order they were added to the batch. Defaults to True.

ignore_submission_error (bool): If True, submission errors are ignored and the method continues submitting the remaining tasks. If False, the method stops at the first submission error. Defaults to False.

**kwargs: Arbitrary keyword arguments. Defaults to {}.

Returns:

RemoteBatch: The RemoteBatch instance with tasks submitted.
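Example

_submit is private and normally driven by the public submission path, but its error handling can be sketched directly; with ignore_submission_error=True, failures are written to JSON files in the working directory and a RuntimeWarning is emitted instead of raising SubmissionException:

# hedged sketch: submit in shuffled order, tolerating partial failure
batch._submit(shuffle_submit_order=True, ignore_submission_error=True)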

Source code in src/bloqade/analog/task/batch.py
def _submit(
    self, shuffle_submit_order: bool = True, ignore_submission_error=False, **kwargs
) -> "RemoteBatch":
    """
    Private method to submit tasks in the RemoteBatch.

    Args:
        shuffle_submit_order (bool, optional): If True, tasks are submitted in a random order.
            If False, tasks are submitted in the order they were added to the batch. Defaults to True.
        ignore_submission_error (bool, optional): If True, submission errors are ignored and the method continues to submit the remaining tasks.
            If False, the method stops at the first submission error. Defaults to False.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        RemoteBatch: The RemoteBatch instance with tasks submitted.
    """
    from bloqade.analog import save

    # online, non-blocking
    if shuffle_submit_order:
        submission_order = np.random.permutation(list(self.tasks.keys()))
    else:
        submission_order = list(self.tasks.keys())

    # submit tasks in the (possibly shuffled) order and
    # store them back in that same order.

    ## submit() should validate for both backends
    ## and raise errors on failure.
    errors = BatchErrors()
    shuffled_tasks = OrderedDict()
    for task_index in submission_order:
        task = self.tasks[task_index]
        shuffled_tasks[task_index] = task
        try:
            task.submit(**kwargs)
        except BaseException as error:
            # record the error in the error dict
            errors.task_errors[int(task_index)] = TaskError(
                exception_type=error.__class__.__name__,
                stack_trace=traceback.format_exc(),
            )

            task.task_result_ir = QuEraTaskResults(
                task_status=QuEraTaskStatusCode.Unaccepted
            )

    self.tasks = shuffled_tasks  # store tasks in their submission order

    if len(errors.task_errors) > 0:
        time_stamp = datetime.datetime.now()

        if "win" in sys.platform:
            time_stamp = str(time_stamp).replace(":", "~")

        if self.name:
            future_file = f"{self.name}-partial-batch-future-{time_stamp}.json"
            error_file = f"{self.name}-partial-batch-errors-{time_stamp}.json"
        else:
            future_file = f"partial-batch-future-{time_stamp}.json"
            error_file = f"partial-batch-errors-{time_stamp}.json"

        cwd = os.getcwd()

        save(errors, error_file)
        save(self, future_file)

        if ignore_submission_error:
            warnings.warn(
                "One or more error(s) occured during submission, please see "
                "the following files for more information:\n"
                f"  - {os.path.join(cwd, future_file)}\n"
                f"  - {os.path.join(cwd, error_file)}\n",
                RuntimeWarning,
            )
        else:
            raise RemoteBatch.SubmissionException(
                str(errors)
                + "\n"
                + "One or more error(s) occured during submission, please see "
                "the following files for more information:\n"
                f"  - {os.path.join(cwd, future_file)}\n"
                f"  - {os.path.join(cwd, error_file)}\n"
            )

    else:
        # TODO: think about if we should automatically save successful submissions
        #       as well.
        pass

    return self

cancel

cancel()

Cancel all the tasks in the Batch.

Return

self

Source code in src/bloqade/analog/task/batch.py
def cancel(self) -> "RemoteBatch":
    """
    Cancel all the tasks in the Batch.

    Return:
        self

    """
    # cancel all jobs
    for task in self.tasks.values():
        task.cancel()

    return self

fetch

fetch()

Fetch the tasks in the Batch.

Note

Fetching will update the status of tasks, and only pull the results for those tasks that have completed.

Return

self
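Example

A minimal polling sketch built from fetch and the status filters below; the 30-second interval is an arbitrary choice:

import time

batch.fetch()  # non-blocking status refresh
while len(batch.get_finished_tasks().tasks) < len(batch.tasks):
    time.sleep(30)
    batch.fetch()

report = batch.get_completed_tasks().report()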

Source code in src/bloqade/analog/task/batch.py
def fetch(self) -> "RemoteBatch":
    """
    Fetch the tasks in the Batch.

    Note:
        Fetching will update the status of tasks,
        and only pull the results for those tasks
        that have completed.

    Return:
        self

    """
    # online, non-blocking
    # pull the results only when its ready
    for task in self.tasks.values():
        task.fetch()

    return self

get_completed_tasks

get_completed_tasks()

Create a RemoteBatch object containing the completed tasks from the current Batch.

Tasks are considered completed if they have one of the following status codes:

  1. Completed
  2. Partial
Return

RemoteBatch

Source code in src/bloqade/analog/task/batch.py
def get_completed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object containing the
    completed tasks from the current Batch.

    Tasks are considered completed if they have one of the following status codes:

    1. Completed
    2. Partial

    Return:
        RemoteBatch

    """
    statuses = [
        "Completed",
        "Partial",
    ]
    return self.get_tasks(*statuses)

get_failed_tasks

get_failed_tasks()

Create a RemoteBatch object containing the failed tasks from the current Batch.

Tasks are considered failed if they have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/analog/task/batch.py
def get_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object containing the
    failed tasks from the current Batch.

    Tasks are considered failed if they have one of the following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses that are in a state that are
    # completed because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.get_tasks(*statuses)

get_finished_tasks

get_finished_tasks()

Create a RemoteBatch object containing the finished tasks from the current Batch.

Tasks are considered finished if they have one of the following status codes:

  1. Failed
  2. Unaccepted
  3. Completed
  4. Partial
  5. Cancelled
Return

RemoteBatch

Source code in src/bloqade/analog/task/batch.py
def get_finished_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object containing the
    finished tasks from the current Batch.

    Tasks are considered finished if they have one of the following status codes:

    1. Failed
    2. Unaccepted
    3. Completed
    4. Partial
    5. Cancelled

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward for any reason
    statuses = ["Completed", "Failed", "Unaccepted", "Partial", "Cancelled"]
    return self.get_tasks(*statuses)

get_tasks

get_tasks(*status_codes)

Get the tasks with the specified status codes.

Return

RemoteBatch
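Example

A minimal sketch; the strings must be valid QuEraTaskStatusCode values:

in_flight = batch.get_tasks("Enqueued", "Executing", "Running")
done = batch.get_tasks("Completed", "Partial")  # same as get_completed_tasks()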

Source code in src/bloqade/analog/task/batch.py
@beartype
def get_tasks(self, *status_codes: str) -> "RemoteBatch":
    """
    Get the tasks with the specified status codes.

    Return:
        RemoteBatch

    """
    # offline:
    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_task_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            new_task_results[task_number] = task

    return RemoteBatch(self.source, new_task_results, name=self.name)

pull

pull()

Pull results of the tasks in the Batch.

Note

Pulling retrieves the results of the tasks. If a task has not yet completed, this call waits until it finishes.

Return

self
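Example

Because pull blocks until every task finishes, it is the simplest way to wait for remote results before analysis; a minimal sketch:

batch.pull()             # blocks until all tasks complete
report = batch.report()  # analyze the completed results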

Source code in src/bloqade/analog/task/batch.py
def pull(self) -> "RemoteBatch":
    """
    Pull results of the tasks in the Batch.

    Note:
        Pulling retrieves the results of the tasks.
        If a task has not yet completed, this call
        waits until it finishes.

    Return:
        self
    """
    # online, blocking
    # pull the results; if a result is not ready, this blocks
    for task in self.tasks.values():
        task.pull()

    return self

remove_failed_tasks

remove_failed_tasks()

Create a RemoteBatch object containing the tasks from the current Batch, with failed tasks removed.

Tasks are considered failed if they have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/analog/task/batch.py
def remove_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object containing the
    tasks from the current Batch, with failed
    tasks removed.

    Tasks are considered failed if they have one of the following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.remove_tasks(*statuses)

remove_invalid_tasks

remove_invalid_tasks()

Create a RemoteBatch object containing the tasks from the current Batch, with all Unaccepted tasks removed.

Return

RemoteBatch

Source code in src/bloqade/analog/task/batch.py
def remove_invalid_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object containing the
    tasks from the current Batch, with all
    Unaccepted tasks removed.

    Return:
        RemoteBatch

    """
    return self.remove_tasks("Unaccepted")

remove_tasks

remove_tasks(*status_codes)

Remove the tasks with the specified status codes.

Return

RemoteBatch
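Example

A minimal sketch; remove_tasks is the complement of get_tasks, and the status strings are validated against the Literal type below:

pruned = batch.remove_tasks("Failed", "Unaccepted")  # same as remove_failed_tasks()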

Source code in src/bloqade/analog/task/batch.py
@beartype
def remove_tasks(
    self,
    *status_codes: Literal[
        "Created",
        "Running",
        "Completed",
        "Failed",
        "Cancelled",
        "Executing",
        "Enqueued",
        "Accepted",
        "Unaccepted",
        "Partial",
        "Unsubmitted",
    ],
) -> "RemoteBatch":
    """
    Remove the tasks with the specified status codes.

    Return:
        RemoteBatch

    """
    # offline:

    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            continue

        new_results[task_number] = task

    return RemoteBatch(self.source, new_results, self.name)

report

report()

Generate an analysis report based on the currently completed tasks in the RemoteBatch.

Return

Report

Source code in src/bloqade/analog/task/batch.py
def report(self) -> "Report":
    """
    Generate an analysis report based on the
    currently completed tasks in the RemoteBatch.

    Return:
        Report

    """
    ## this can potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        ## filter out tasks whose results do not exist:
        if (task.task_id is None) or (not task._result_exists()):
            continue

        ## filter out tasks that have a result but did not complete correctly.
        if task.task_result_ir.task_status not in [
            QuEraTaskStatusCode.Completed,
            QuEraTaskStatusCode.Partial,
        ]:
            continue

        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df = df.sort_index(axis="index")

    name = "Remote" if self.name is None else self.name

    return Report(df, metas, geos, name)

resubmit

resubmit(shuffle_submit_order=True)

Resubmit all the tasks in the RemoteBatch.

Return

self

Source code in src/bloqade/analog/task/batch.py
@beartype
def resubmit(self, shuffle_submit_order: bool = True) -> "RemoteBatch":
    """
    Resubmit all the tasks in the RemoteBatch.

    Return:
        self

    """
    # online, non-blocking
    self._submit(shuffle_submit_order, force=True)
    return self

retrieve

retrieve()

Retrieve missing task results.

Note

Retrieve will update the status of tasks, and only pull the results for those tasks that have completed.

Return

self

Source code in src/bloqade/analog/task/batch.py
def retrieve(self) -> "RemoteBatch":
    """Retrieve missing task results.

    Note:
        Retrieve will update the status of tasks,
        and only pull the results for those tasks
        that have completed.

    Return:
        self

    """
    # partially online, sometimes blocking
    # pull the results for tasks that have
    # not been pulled already.
    for task in self.tasks.values():
        if not task._result_exists():
            task.pull()

    return self

tasks_metric

tasks_metric()

Get the current task status metrics.

Return

a DataFrame with columns ["task ID", "status", "shots"]
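Example

A minimal sketch of inspecting the returned DataFrame:

metrics = batch.tasks_metric()
print(metrics[metrics["status"] == "Completed"])  # completed tasks only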

Source code in src/bloqade/analog/task/batch.py
def tasks_metric(self) -> pd.DataFrame:
    """
    Get the current task status metrics.

    Return:
        a DataFrame with columns ["task ID", "status", "shots"]

    """
    # [TODO] more info on current status
    # offline, non-blocking
    tid = []
    data = []
    for task_number, task in self.tasks.items():
        tid.append(task_number)

        dat = [None, None, None]
        dat[0] = task.task_id
        if task.task_result_ir is not None:
            dat[1] = task.task_result_ir.task_status.name
        dat[2] = task.task_ir.nshots
        data.append(dat)

    return pd.DataFrame(data, index=tid, columns=["task ID", "status", "shots"])

Serializable

json

json(**options)

Serialize the object to a JSON string.

Return

JSON string
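Example

A minimal round-trip sketch; loads is assumed to be the counterpart of dumps in bloqade.analog:

from bloqade.analog import loads

payload = batch.json(indent=2)  # options are forwarded to dumps
restored = loads(payload)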

Source code in src/bloqade/analog/task/batch.py
def json(self, **options) -> str:
    """
    Serialize the object to a JSON string.

    Return:
        JSON string

    """
    from bloqade.analog import dumps

    return dumps(self, **options)