Batch

Filter

filter_metadata

filter_metadata(__match_any__=False, **metadata)

Create a Batch object that has tasks filtered based on the values of metadata.

Parameters:

__match_any__ (bool, default False)
    If True, a task is included if it matches any of the metadata filters.
    If False, a task is included only if it matches all of the metadata filters.

**metadata (MetadataFilterType, default {})
    The metadata to filter on. The keys are the metadata names and the values
    (as a set) are the values to filter on. The elements in the set can be
    Real, Decimal, Tuple[Real], or Tuple[Decimal].
Return

type(self): a Batch object with the filtered tasks, either LocalBatch or RemoteBatch depending on the type of self
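
For illustration, a minimal usage sketch; the `batch` variable and the metadata keys "detuning" and "amplitude" are hypothetical stand-ins for whatever parameters were attached to the tasks when the batch was built:

# keep only tasks whose "detuning" metadata is 0.0 or 1.5 (match-all semantics)
narrowed = batch.filter_metadata(detuning={0.0, 1.5})

# keep tasks that match at least one of the filters
either = batch.filter_metadata(__match_any__=True, detuning={0.0}, amplitude={15.0})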

Source code in src/bloqade/task/batch.py
@beartype
def filter_metadata(
    self, __match_any__: bool = False, **metadata: MetadataFilterType
) -> Union["LocalBatch", "RemoteBatch"]:
    """Create a Batch object that has tasks filtered based on the
    values of metadata.

    Args:
        __match_any__: if True, then a task will be included if it
            matches any of the metadata filters. If False, then a
            task will be included only if it matches all of the
            metadata filters. Defaults to False.

        **metadata: the metadata to filter on. The keys are the metadata
            names and the values (as a set) are the values to filter on.
            The elements in the set can be Real, Decimal, Tuple[Real], or
            Tuple[Decimal].

    Return:
        type(self): a Batch object with the filtered tasks, either
            LocalBatch or RemoteBatch depending on the type of self

    """

    def convert_to_decimal(element):
        if isinstance(element, list):
            return list(map(convert_to_decimal, element))
        elif isinstance(element, (Real, Decimal)):
            return Decimal(str(element))
        else:
            raise ValueError(
                f"Invalid value {element} for metadata filter. "
                "Only Real, Decimal, List[Real], and List[Decimal] "
                "are supported."
            )

    def metadata_match_all(task):
        return all(
            task.metadata.get(key) in value for key, value in metadata.items()
        )

    def metadata_match_any(task):
        return any(
            task.metadata.get(key) in value for key, value in metadata.items()
        )

    metadata = {k: list(map(convert_to_decimal, v)) for k, v in metadata.items()}

    metadata_filter = metadata_match_any if __match_any__ else metadata_match_all

    new_tasks = OrderedDict(
        [(k, v) for k, v in self.tasks.items() if metadata_filter(v)]
    )

    kw = dict(self.__dict__)
    kw["tasks"] = new_tasks

    return self.__class__(**kw)

LocalBatch dataclass

Bases: Serializable, Filter

report

report()

Generate an analysis report based on the currently completed tasks in the LocalBatch.

Return

Report
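
A brief sketch of how the report is typically consumed, assuming `local_batch` holds tasks that have already been executed locally:

report = local_batch.report()
# the report aggregates each completed shot into a pandas DataFrame indexed by
# (task_number, cluster, perfect_sorting, pre_sequence), one row per shot and cluster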

Source code in src/bloqade/task/batch.py
def report(self) -> Report:
    """
    Generate an analysis report based on the currently
    completed tasks in the LocalBatch.

    Return:
        Report

    """

    ## this could potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df = df.sort_index(axis="index")

    rept = None
    if self.name is None:
        rept = Report(df, metas, geos, "Local")
    else:
        rept = Report(df, metas, geos, self.name)

    return rept

rerun

rerun(multiprocessing=False, num_workers=None, **kwargs)

Rerun all the tasks in the LocalBatch.

Return

Report
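
A short sketch, assuming `local_batch` is an existing LocalBatch; `multiprocessing` and `num_workers` are forwarded to the underlying run:

# re-execute every task, optionally spreading work across worker processes
local_batch.rerun(multiprocessing=True, num_workers=4)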

Source code in src/bloqade/task/batch.py
@beartype
def rerun(
    self, multiprocessing: bool = False, num_workers: Optional[int] = None, **kwargs
):
    """
    Rerun all the tasks in the LocalBatch.

    Return:
        Report

    """

    return self._run(
        multiprocessing=multiprocessing, num_workers=num_workers, **kwargs
    )

RemoteBatch dataclass

Bases: Serializable, Filter

total_nshots property

total_nshots

Total number of shots of all tasks in the RemoteBatch

Return

number of shots

cancel

cancel()

Cancel all the tasks in the Batch.

Return

self

Source code in src/bloqade/task/batch.py
def cancel(self) -> "RemoteBatch":
    """
    Cancel all the tasks in the Batch.

    Return:
        self

    """
    # cancel all jobs
    for task in self.tasks.values():
        task.cancel()

    return self

fetch

fetch()

Fetch the tasks in the Batch.

Note

Fetching will update the status of tasks, and only pull the results for those tasks that have completed.

Return

self
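
A non-blocking polling sketch, with `remote_batch` assumed to be a previously submitted RemoteBatch:

remote_batch = remote_batch.fetch()   # refresh statuses; pull only finished results
print(remote_batch.tasks_metric())    # see which tasks have completed so far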

Source code in src/bloqade/task/batch.py
def fetch(self) -> "RemoteBatch":
    """
    Fetch the tasks in the Batch.

    Note:
        Fetching will update the status of tasks,
        and only pull the results for those tasks
        that have completed.

    Return:
        self

    """
    # online, non-blocking
    # pull the results only when its ready
    for task in self.tasks.values():
        task.fetch()

    return self

get_completed_tasks

get_completed_tasks()

Create a RemoteBatch object that contains the completed tasks from the current Batch.

Tasks are considered completed if they have one of the following status codes:

  1. Completed
  2. Partial
Return

RemoteBatch
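
For example (with `remote_batch` a previously submitted batch, used here purely for illustration), one can refresh statuses and report only on what has finished so far:

completed = remote_batch.fetch().get_completed_tasks()
report = completed.report()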

Source code in src/bloqade/task/batch.py
def get_completed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the completed tasks from the current Batch.

    Tasks are considered completed if they have one of the
    following status codes:

    1. Completed
    2. Partial

    Return:
        RemoteBatch

    """
    statuses = [
        "Completed",
        "Partial",
    ]
    return self.get_tasks(*statuses)

get_failed_tasks

get_failed_tasks()

Create a RemoteBatch object that contains the failed tasks from the current Batch.

Failed tasks have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def get_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the failed tasks from the current Batch.

    Failed tasks have one of the following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses indicating the task finished
    # because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.get_tasks(*statuses)

get_finished_tasks

get_finished_tasks()

Create a RemoteBatch object that contains the finished tasks from the current Batch.

Tasks are considered finished if they have one of the following status codes:

  1. Failed
  2. Unaccepted
  3. Completed
  4. Partial
  5. Cancelled
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def get_finished_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the finished tasks from the current Batch.

    Tasks are considered finished if they have one of the
    following status codes:

    1. Failed
    2. Unaccepted
    3. Completed
    4. Partial
    5. Cancelled

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward for any reason
    statuses = ["Completed", "Failed", "Unaccepted", "Partial", "Cancelled"]
    return self.get_tasks(*statuses)

get_tasks

get_tasks(*status_codes)

Get tasks with the specified status codes.

Return

RemoteBatch
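
The status strings must be valid QuEraTaskStatusCode values, such as those used by the helper methods above. A hypothetical sketch:

# select only tasks that failed or were cancelled
problem_tasks = remote_batch.get_tasks("Failed", "Cancelled")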

Source code in src/bloqade/task/batch.py
@beartype
def get_tasks(self, *status_codes: str) -> "RemoteBatch":
    """
    Get tasks with the specified status codes.

    Return:
        RemoteBatch

    """
    # offline:
    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_task_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            new_task_results[task_number] = task

    return RemoteBatch(self.source, new_task_results, name=self.name)

pull

pull()

Pull results of the tasks in the Batch.

Note

Pulling retrieves the results for all tasks. If a given task has not completed yet, this call blocks until it finishes.

Return

self
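
A blocking sketch, assuming `remote_batch` has been submitted and you are willing to wait for every task:

report = remote_batch.pull().report()   # blocks until all tasks have finished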

Source code in src/bloqade/task/batch.py
def pull(self) -> "RemoteBatch":
    """
    Pull results of the tasks in the Batch.

    Note:
        Pulling retrieves the results for all tasks.
        If a given task has not completed yet, this
        call blocks until it finishes.

    Return:
        self
    """
    # online, blocking
    # pull the results. if its not ready, hanging
    for task in self.tasks.values():
        task.pull()

    return self

remove_failed_tasks

remove_failed_tasks()

Create a RemoteBatch object that contains the tasks from the current Batch, with failed tasks removed.

Failed tasks have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def remove_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the tasks from the current Batch,
    with failed tasks removed.

    Failed tasks have one of the following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.remove_tasks(*statuses)

remove_invalid_tasks

remove_invalid_tasks()

Create a RemoteBatch object that contains the tasks from the current Batch, with all Unaccepted tasks removed.

Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def remove_invalid_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the tasks from the current Batch,
    with all Unaccepted tasks removed.

    Return:
        RemoteBatch

    """
    return self.remove_tasks("Unaccepted")

remove_tasks

remove_tasks(*status_codes)

Remove tasks with the specified status codes.

Return

RemoteBatch
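
A sketch of pruning a batch before reporting; the particular status codes chosen here are illustrative:

cleaned = remote_batch.remove_tasks("Failed", "Unaccepted", "Cancelled")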

Source code in src/bloqade/task/batch.py
@beartype
def remove_tasks(self, *status_codes: str) -> "RemoteBatch":
    """
    Remove tasks with the specified status codes.

    Return:
        RemoteBatch

    """
    # offline:

    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            continue

        new_results[task_number] = task

    return RemoteBatch(self.source, new_results, self.name)

report

report()

Generate an analysis report based on the currently completed tasks in the RemoteBatch.

Return

Report

Source code in src/bloqade/task/batch.py
def report(self) -> "Report":
    """
    Generate an analysis report based on the currently
    completed tasks in the RemoteBatch.

    Return:
        Report

    """
    ## this could potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        ## filter out tasks that have no existing results:
        if (task.task_id is None) or (not task._result_exists()):
            continue

        ## filter out tasks whose results exist but did not complete correctly.
        if task.task_result_ir.task_status not in [
            QuEraTaskStatusCode.Completed,
            QuEraTaskStatusCode.Partial,
        ]:
            continue

        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df = df.sort_index(axis="index")

    rept = None
    if self.name is None:
        rept = Report(df, metas, geos, "Remote")
    else:
        rept = Report(df, metas, geos, self.name)

    return rept

resubmit

resubmit(shuffle_submit_order=True)

Resubmit all the tasks in the RemoteBatch

Return

self
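
A sketch, assuming `remote_batch` was previously submitted to the remote backend:

remote_batch = remote_batch.resubmit(shuffle_submit_order=False)  # non-blocking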

Source code in src/bloqade/task/batch.py
@beartype
def resubmit(self, shuffle_submit_order: bool = True) -> "RemoteBatch":
    """
    Resubmit all the tasks in the RemoteBatch

    Return:
        self

    """
    # online, non-blocking
    self._submit(shuffle_submit_order, force=True)
    return self

retrieve

retrieve()

Retrieve missing task results.

Note

Retrieve will update the status of tasks, and only pull the results for those tasks that have completed.

Return

self
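
A sketch of filling in only the missing results, again with `remote_batch` assumed to be a previously submitted batch:

remote_batch = remote_batch.retrieve()   # blocks only for tasks whose results are missing
report = remote_batch.report()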

Source code in src/bloqade/task/batch.py
def retrieve(self) -> "RemoteBatch":
    """Retrieve missing task results.

    Note:
        Retrieve will update the status of tasks,
        and only pull the results for those tasks
        that have completed.

    Return:
        self

    """
    # partially online, sometimes blocking
    # pull the results for tasks that have
    # not been pulled already.
    for task in self.tasks.values():
        if not task._result_exists():
            task.pull()

    return self

tasks_metric

tasks_metric()

Get the current task status metrics.

Return

a pandas DataFrame indexed by task number with columns ["task ID", "status", "shots"]
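
For example, to inspect progress without blocking (`remote_batch` is assumed to be a submitted batch):

metrics = remote_batch.tasks_metric()
print(metrics[metrics["status"] == "Completed"])   # rows for tasks that have completed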

Source code in src/bloqade/task/batch.py
def tasks_metric(self) -> pd.DataFrame:
    """
    Get the current task status metrics.

    Return:
        a pandas DataFrame indexed by task number with
        columns ["task ID", "status", "shots"]

    """
    # [TODO] more info on current status
    # offline, non-blocking
    tid = []
    data = []
    for task_number, task in self.tasks.items():
        tid.append(task_number)

        dat = [None, None, None]
        dat[0] = task.task_id
        if task.task_result_ir is not None:
            dat[1] = task.task_result_ir.task_status.name
        dat[2] = task.task_ir.nshots
        data.append(dat)

    return pd.DataFrame(data, index=tid, columns=["task ID", "status", "shots"])

Serializable

json

json(**options)

Serialize the object to a JSON string.

Return

JSON string
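
A sketch of persisting a batch to disk; whether extra keyword options are forwarded to the underlying encoder depends on bloqade.dumps, so none are passed here, and the file name is illustrative:

with open("batch.json", "w") as f:
    f.write(remote_batch.json())
# deserialization via a matching loader (e.g. bloqade.loads) is assumed to be available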

Source code in src/bloqade/task/batch.py
def json(self, **options) -> str:
    """
    Serialize the object to a JSON string.

    Return:
        JSON string

    """
    from bloqade import dumps

    return dumps(self, **options)