##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##

"""
Mock Azure Quantum REST client used to back a real Workspace
without making network calls. Returns real SDK models and ItemPaged.
"""

from typing import List, Optional
from datetime import datetime, UTC, timedelta
from types import SimpleNamespace

from azure.core.paging import ItemPaged
from azure.quantum.workspace import Workspace
from azure.quantum._client import ServicesClient
from azure.quantum._client.models import JobDetails, SessionDetails, ItemDetails


def _paged(items: List, page_size: int = 100) -> ItemPaged:
    """Create an ItemPaged that conforms to azure-core's contract.

    - get_next(token) returns a response payload
    - extract_data(response) returns (continuation_token, items_iterable)
    """

    def get_next(token):
        # The continuation token is simply the next start index, as a string.
        start = int(token) if token is not None else 0
        end = start + page_size
        page = items[start:end]
        next_link = str(end) if end < len(items) else None
        # Return a dict-like payload as expected by extract_data
        return {"items": page, "next_link": next_link}

    def extract_data(response):
        if response is None:
            return None, []
        items_iter = response.get("items") or []
        next_link = response.get("next_link")
        # azure.core.paging expects (continuation_token, items)
        return next_link, items_iter

    return ItemPaged(get_next, extract_data)


def _apply_filter(items: List, filter_expr: Optional[str]) -> List:
    """Apply a minimal OData-like filter generated by Workspace._create_filter.

    Supports:
      - startswith(Name, 'prefix')
      - Property eq 'value' (with or groups inside parentheses)
      - CreationTime ge/le YYYY-MM-DD
    Properties: Name, ItemType, JobType, ProviderId, Target, State, CreationTime
    """
    if not filter_expr:
        return items

    def matches(item) -> bool:
        expr = filter_expr
        # Conditions emitted by Workspace._create_filter are AND-joined at the
        # top level; OR groups only appear inside parentheses.
        conds = [c.strip() for c in expr.split(" and ")]

        def eval_simple(condition: str) -> bool:
            # startswith(Name, 'x') (case-sensitive to match Workspace._create_filter)
            if condition.startswith("startswith("):
                try:
                    inside = condition[len("startswith(") : -1]
                    prop, value = inside.split(",", 1)
                    prop = prop.strip()
                    value = value.strip().strip("'")
                    name = getattr(item, "name", None)
                    return isinstance(name, str) and name.startswith(value)
                except Exception:
                    return False
            # Parenthesized OR: (A or B or C)
            if condition.startswith("(") and condition.endswith(")"):
                inner = condition[1:-1]
                parts = [p.strip() for p in inner.split(" or ")]
                return any(eval_simple(p) for p in parts)
            # Equality: Prop eq 'value'
            if " eq " in condition:
                try:
                    left, right = condition.split(" eq ", 1)
                    prop = left.strip()
                    val = right.strip().strip("'")
                    # Map property names to model attributes
                    mapping = {
                        "Name": "name",
                        "ItemType": "item_type",
                        "JobType": "job_type",
                        "ProviderId": "provider_id",
                        "Target": "target",
                        "State": "status",
                    }
                    attr = mapping.get(prop)
                    if not attr:
                        return False
                    item_val = getattr(item, attr, None)
                    return item_val == val
                except Exception:
                    return False
            # CreationTime ge/le YYYY-MM-DD (compared at date granularity)
            if "CreationTime ge " in condition or "CreationTime le " in condition:
                try:
                    if " ge " in condition:
                        _, date_str = condition.split(" ge ", 1)
                        cmp_date = datetime.fromisoformat(date_str.strip())
                        ct = getattr(item, "creation_time", None)
                        return bool(ct) and ct.date() >= cmp_date.date()
                    if " le " in condition:
                        _, date_str = condition.split(" le ", 1)
                        cmp_date = datetime.fromisoformat(date_str.strip())
                        ct = getattr(item, "creation_time", None)
                        return bool(ct) and ct.date() <= cmp_date.date()
                except Exception:
                    return False
            return False

        return all(eval_simple(c) for c in conds)

    return [it for it in items if matches(it)]


class JobsOperations:
    """In-memory stand-in for the service's jobs operation group."""

    def __init__(self, store: List[JobDetails]) -> None:
        self._store = store

    def create_or_replace(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        job_id: str,
        job_details: JobDetails,
    ) -> JobDetails:
        """Upsert a job by id, defaulting status/creation_time when absent."""
        # Preserve provided status; default only if missing
        if getattr(job_details, "status", None) is None:
            job_details.status = "Submitted"
        # Ensure creation_time present (timezone-aware, like the service)
        if not getattr(job_details, "creation_time", None):
            job_details.creation_time = datetime.now(UTC)
        # Upsert by id
        for i, jd in enumerate(self._store):
            if jd.id == job_id:
                self._store[i] = job_details
                break
        else:
            self._store.append(job_details)
        return job_details

    def get(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        job_id: str,
    ) -> JobDetails:
        """Return the stored job or raise KeyError if unknown."""
        for jd in self._store:
            if jd.id == job_id:
                return jd
        raise KeyError(job_id)

    def list(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        filter: Optional[str] = None,
        orderby: Optional[str] = None,
        top: int = 100,
        skip: int = 0,
    ) -> ItemPaged[JobDetails]:
        """List jobs with minimal filter/orderby support (CreationTime only)."""
        items = list(self._store)
        # Apply filter
        items = _apply_filter(items, filter)
        # Only basic orderby support for CreationTime asc/desc
        if orderby:
            try:
                prop, direction = orderby.split()
                if prop == "CreationTime":
                    items.sort(
                        key=lambda j: getattr(j, "creation_time", datetime.now(UTC)),
                        reverse=(direction == "desc"),
                    )
            except Exception:
                pass
        return _paged(items[skip : skip + top], page_size=top)


class SessionsOperations:
    """In-memory stand-in for the service's sessions operation group."""

    def __init__(
        self, store: List[SessionDetails], jobs_store: List[JobDetails]
    ) -> None:
        self._store = store
        self._jobs_store = jobs_store

    def create_or_replace(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        session_id: str,
        session_details: SessionDetails,
    ) -> SessionDetails:
        """Upsert a session by id, defaulting status/creation_time when absent."""
        if getattr(session_details, "status", None) is None:
            session_details.status = "WAITING"
        if not getattr(session_details, "creation_time", None):
            # BUGFIX: was datetime.utcnow(), which is deprecated and returns a
            # NAIVE datetime; mixing it with the aware datetimes used by
            # JobsOperations and the seeders makes CreationTime sorts raise
            # TypeError. Use an aware UTC timestamp for consistency.
            session_details.creation_time = datetime.now(UTC)
        for i, sd in enumerate(self._store):
            if sd.id == session_id:
                self._store[i] = session_details
                break
        else:
            self._store.append(session_details)
        return session_details

    def close(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        session_id: str,
    ) -> SessionDetails:
        """Mark the session as finished, mirroring a successful close."""
        sd = self.get(subscription_id, resource_group_name, workspace_name, session_id)
        sd.status = "SUCCEEDED"
        return sd

    def get(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        session_id: str,
    ) -> SessionDetails:
        """Return the stored session or raise KeyError if unknown."""
        for sd in self._store:
            if sd.id == session_id:
                return sd
        raise KeyError(session_id)

    def list(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        filter: Optional[str] = None,
        orderby: Optional[str] = None,
        skip: int = 0,
        top: int = 100,
    ) -> ItemPaged[SessionDetails]:
        """List sessions with minimal filter/orderby support."""
        items = list(self._store)
        items = _apply_filter(items, filter)
        if orderby:
            try:
                prop, direction = orderby.split()
                if prop == "CreationTime":
                    items.sort(
                        key=lambda s: getattr(s, "creation_time", datetime.now(UTC)),
                        reverse=(direction == "desc"),
                    )
            except Exception:
                pass
        return _paged(items[skip : skip + top], page_size=top)

    def jobs_list(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        session_id: str,
        filter: Optional[str] = None,
        orderby: Optional[str] = None,
        skip: int = 0,
        top: int = 100,
    ) -> ItemPaged[JobDetails]:
        """List the jobs belonging to a given session."""
        jobs = [
            j for j in self._jobs_store if getattr(j, "session_id", None) == session_id
        ]
        jobs = _apply_filter(jobs, filter)
        if orderby:
            try:
                prop, direction = orderby.split()
                if prop == "CreationTime":
                    jobs.sort(
                        key=lambda j: getattr(j, "creation_time", datetime.now(UTC)),
                        reverse=(direction == "desc"),
                    )
            except Exception:
                pass
        return _paged(jobs[skip : skip + top], page_size=top)


class TopLevelItemsOperations:
    """In-memory stand-in for the top-level items operation group."""

    def __init__(
        self, jobs_store: List[JobDetails], sessions_store: List[SessionDetails]
    ) -> None:
        self._jobs_store = jobs_store
        self._sessions_store = sessions_store

    def list(
        self,
        subscription_id: str,
        resource_group_name: str,
        workspace_name: str,
        filter: Optional[str] = None,
        orderby: Optional[str] = None,
        top: int = 100,
        skip: int = 0,
    ) -> ItemPaged[ItemDetails]:
        """Return jobs and sessions as ItemDetails-shaped models."""
        items: List[ItemDetails] = []
        # Build JobDetails and SessionDetails via mapping-based init to mimic server responses
        for j in self._jobs_store:
            job_mapping = {
                "id": j.id,
                "itemType": "Job",
                "name": getattr(j, "name", j.id),
                "providerId": getattr(j, "provider_id", None),
                "target": getattr(j, "target", None),
                "creationTime": getattr(j, "creation_time", datetime.now(UTC)),
                "jobType": getattr(j, "job_type", None),
                # Status is read-only but present in service responses; include if available
                "status": getattr(j, "status", None),
            }
            items.append(JobDetails(job_mapping))
        for s in self._sessions_store:
            session_mapping = {
                "id": s.id,
                "itemType": "Session",
                "name": getattr(s, "name", s.id),
                "providerId": getattr(s, "provider_id", None),
                "target": getattr(s, "target", None),
                "creationTime": getattr(s, "creation_time", datetime.now(UTC)),
                # Required in model; set a sensible default for mock responses
                "jobFailurePolicy": getattr(s, "job_failure_policy", "Abort"),
                "status": getattr(s, "status", None),
            }
            items.append(SessionDetails(session_mapping))
        # Apply filter across heterogeneous items
        items = _apply_filter(items, filter)
        if orderby:
            try:
                prop, direction = orderby.split()
                if prop == "CreationTime":
                    items.sort(
                        key=lambda i: getattr(i, "creation_time", datetime.now(UTC)),
                        reverse=(direction == "desc"),
                    )
            except Exception:
                pass
        return _paged(items[skip : skip + top], page_size=top)


class MockServicesClient(ServicesClient):
    """ServicesClient double backed by in-memory stores.

    Deliberately does NOT call super().__init__ — that would build real
    pipelines/transports; only the attributes the SDK touches are provided.
    """

    def __init__(self, authentication_policy: Optional[object] = None) -> None:
        # in-memory stores
        self._jobs_store: List[JobDetails] = []
        self._sessions_store: List[SessionDetails] = []
        # operations
        self.jobs = JobsOperations(self._jobs_store)
        self.sessions = SessionsOperations(self._sessions_store, self._jobs_store)
        self.top_level_items = TopLevelItemsOperations(
            self._jobs_store, self._sessions_store
        )
        # Mimic ServicesClient config shape for tests that inspect policy
        self._config = SimpleNamespace(authentication_policy=authentication_policy)


class WorkspaceMock(Workspace):
    """Workspace whose REST client is the in-memory mock above."""

    def _create_client(self) -> ServicesClient:  # type: ignore[override]
        # Pass through the Workspace's auth policy to the mock client
        auth_policy = self._connection_params.get_auth_policy()
        return MockServicesClient(authentication_policy=auth_policy)


def seed_jobs(ws: WorkspaceMock) -> None:
    """Populate the mock with a fixed set of jobs covering filter cases."""
    base = datetime.now(UTC) - timedelta(days=10)
    samples = [
        JobDetails(
            id="j-ionq-1",
            name="ionqJobA",
            provider_id="ionq",
            target="ionq.simulator",
            status="Succeeded",
            creation_time=base + timedelta(days=1),
            session_id="s-ionq-1",
            job_type="QuantumComputing",
        ),
        JobDetails(
            id="j-ionq-2",
            name="ionqJobB",
            provider_id="ionq",
            target="ionq.simulator",
            status="Failed",
            creation_time=base + timedelta(days=2),
            session_id="s-ionq-1",
        ),
        JobDetails(
            id="j-qh-1",
            name="qhJobA",
            provider_id="quantinuum",
            target="quantinuum.sim",
            status="Cancelled",
            creation_time=base + timedelta(days=3),
            session_id="s-ionq-2",
            job_type="QuantumChemistry",
        ),
        JobDetails(
            id="j-ms-1",
            name="msJobA",
            provider_id="microsoft",
            target="microsoft.estimator",
            status="Succeeded",
            creation_time=base + timedelta(days=4),
        ),
        JobDetails(
            id="j-ionq-ms-qc",
            name="ionqMsQC",
            provider_id="ionq",
            target="microsoft.estimator",
            status="Succeeded",
            creation_time=base + timedelta(days=5),
            job_type="QuantumComputing",
        ),
        # No creation_time on purpose: exercises the upsert default path.
        JobDetails(
            id="j-rig-1",
            name="rigJobA",
            provider_id="rigetti",
            target="rigetti.qpu",
            status="Succeeded",
        ),
    ]
    for d in samples:
        ws._client.jobs.create_or_replace(
            ws.subscription_id, ws.resource_group, ws.name, job_id=d.id, job_details=d
        )


def seed_sessions(ws: WorkspaceMock) -> None:
    """Populate the mock with a fixed set of sessions."""
    base = datetime.now(UTC) - timedelta(days=5)
    samples = [
        SessionDetails(
            id="s-ionq-1",
            name="sessionA",
            provider_id="ionq",
            target="ionq.simulator",
            status="Succeeded",
            creation_time=base + timedelta(days=1),
        ),
        SessionDetails(
            id="s-ionq-2",
            name="sessionB",
            provider_id="ionq",
            target="ionq.test",
            status="Succeeded",
            creation_time=base + timedelta(days=2),
        ),
    ]
    for s in samples:
        ws._client.sessions.create_or_replace(
            ws.subscription_id,
            ws.resource_group,
            ws.name,
            session_id=s.id,
            session_details=s,
        )


def create_default_workspace() -> WorkspaceMock:
    """Build a WorkspaceMock pre-seeded with sample jobs and sessions."""
    ws = WorkspaceMock(
        subscription_id="sub", resource_group="rg", name="ws", location="westus"
    )
    seed_jobs(ws)
    seed_sessions(ws)
    return ws
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##

"""Unit tests for Job result parsing, using a fully mocked Job."""

from unittest.mock import Mock

from azure.quantum import Job, JobDetails


def _mock_job(output_data_format: str, results_as_json_str: str) -> Job:
    """Build a completed Job whose downloaded output decodes to the given JSON."""
    job_details = JobDetails(
        id="",
        name="",
        provider_id="",
        target="",
        container_uri="",
        input_data_format="",
        output_data_format=output_data_format,
    )
    job_details.status = "Succeeded"
    job = Job(workspace=None, job_details=job_details)

    job.has_completed = Mock(return_value=True)
    job.wait_until_completed = Mock()

    # job.download_data(url) must return an object exposing .decode() -> str,
    # mirroring the bytes payload of the real blob download.
    # (Replaces a malformed throwaway class whose decode() was immediately
    # shadowed by a Mock anyway.)
    download_data = Mock()
    download_data.decode = Mock(return_value=results_as_json_str)
    job.download_data = Mock(return_value=download_data)

    return job


def _get_job_results(output_data_format: str, results_as_json_str: str):
    """Run Job.get_results() against a mocked download payload."""
    job = _mock_job(output_data_format, results_as_json_str)
    return job.get_results()


def _get_job_results_histogram(output_data_format: str, results_as_json_str: str):
    """Run Job.get_results_histogram() against a mocked download payload."""
    job = _mock_job(output_data_format, results_as_json_str)
    return job.get_results_histogram()


def _get_job_results_shots(output_data_format: str, results_as_json_str: str):
    """Run Job.get_results_shots() against a mocked download payload."""
    job = _mock_job(output_data_format, results_as_json_str)
    return job.get_results_shots()


def test_job_success():
    job_results = _get_job_results(
        "test_output_data_format",
        '{"Histogram": ["[0]", 0.50, "[1]", 0.50]}',
    )
    assert len(job_results["Histogram"]) == 4


def test_job_for_microsoft_quantum_results_v1_success():
    job_results = _get_job_results(
        "microsoft.quantum-results.v1",
        '{"Histogram": ["[0]", 0.50, "[1]", 0.50]}',
    )
    assert len(job_results.keys()) == 2
    assert job_results["[0]"] == 0.50
    assert job_results["[1]"] == 0.50


def test_job_for_microsoft_quantum_results_v1_no_histogram_returns_raw_result():
    job_result_raw = '{"NotHistogramProperty": ["[0]", 0.50, "[1]", 0.50]}'
    job_result = _get_job_results("microsoft.quantum-results.v1", job_result_raw)
    assert job_result == job_result_raw


def test_job_for_microsoft_quantum_results_v1_invalid_histogram_returns_raw_result():
    job_result_raw = '{"NotHistogramProperty": ["[0]", 0.50, "[1]"]}'
    job_result = _get_job_results("microsoft.quantum-results.v1", job_result_raw)
    assert job_result == job_result_raw


def test_job_for_microsoft_quantum_results_v2_success():
    job_results = _get_job_results(
        "microsoft.quantum-results.v2",
        '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}',
    )
    assert len(job_results.keys()) == 2
    assert job_results["[0]"] == 0.50
    assert job_results["[1]"] == 0.50


def test_job_for_microsoft_quantum_results_v2_wrong_type_returns_raw():
    job_result_raw = '{"DataFormat": "microsoft.quantum-results.v1", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}'
    job_result = _get_job_results("microsoft.quantum-results.v2", job_result_raw)
    assert job_result == job_result_raw


def test_job_for_microsoft_quantum_results_v2_invalid_histogram_returns_raw_result():
    job_result_raw = '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]"}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}'
    job_result = _get_job_results("microsoft.quantum-results.v2", job_result_raw)
    assert job_result == job_result_raw


def test_job_for_microsoft_quantum_results_histogram_v2_success():
    job_results = _get_job_results_histogram(
        "microsoft.quantum-results.v2",
        '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}',
    )
    assert len(job_results.keys()) == 2
    assert job_results["[0]"]["count"] == 2
    assert job_results["[1]"]["count"] == 2
    assert job_results["[0]"]["outcome"] == [0]
    assert job_results["[1]"]["outcome"] == [1]


def test_job_for_microsoft_quantum_results_histogram_batch_v2_success():
    job_results = _get_job_results_histogram(
        "microsoft.quantum-results.v2",
        '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}, {"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}, {"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}',
    )
    assert len(job_results) == 3
    for result in job_results:
        assert len(result.keys()) == 2
        assert result["[0]"]["count"] == 2
        assert result["[1]"]["count"] == 2
        assert result["[0]"]["outcome"] == [0]
        assert result["[1]"]["outcome"] == [1]


def test_job_for_microsoft_quantum_results_histogram_v2_wrong_type_raises_exception():
    try:
        _get_job_results_histogram(
            "microsoft.quantum-results.v2",
            '{"Histogram": ["[0]", 0.50, "[1]", 0.50]}',
        )
        assert False
    except Exception:
        assert True


def test_job_for_microsoft_quantum_results_shots_v2_success():
    job_results = _get_job_results_shots(
        "microsoft.quantum-results.v2",
        '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}',
    )
    assert len(job_results) == 4
    assert job_results[0] == [0]
    assert job_results[1] == [1]
    assert job_results[2] == [1]
    assert job_results[3] == [0]


def test_job_for_microsoft_quantum_results_shots_batch_v2_success():
    job_results = _get_job_results_shots(
        "microsoft.quantum-results.v2",
        '{"DataFormat": "microsoft.quantum-results.v2", "Results": [{"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}, {"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}, {"Histogram": [{"Outcome": [0], "Display": "[0]", "Count": 2}, {"Outcome": [1], "Display": "[1]", "Count": 2}], "Shots": [[0], [1], [1], [0]]}]}',
    )
    assert len(job_results) == 3
    for i in range(3):
        assert len(job_results[i]) == 4
        assert job_results[i][0] == [0]
        assert job_results[i][1] == [1]
        assert job_results[i][2] == [1]
        assert job_results[i][3] == [0]


def test_job_for_microsoft_quantum_results_histogram_v2_tuple_success():
    output = """{
    \"DataFormat\": \"microsoft.quantum-results.v2\",
    \"Results\": [
        {
            \"Histogram\": [
                {
                    \"Outcome\": {
                        \"Item1\": [1, 0],
                        \"Item2\": {
                            \"Item1\": -2.71,
                            \"Item2\": 67
                        },
                        \"Item3\": [
                            {
                                \"Item1\": 6,
                                \"Item2\": true
                            },
                            {
                                \"Item1\": 12,
                                \"Item2\": false
                            }
                        ]
                    },
                    \"Display\": \"([1, 0], (-2.71, 67), [(6, true), (12, false)])\",
                    \"Count\": 1
                },
                {
                    \"Outcome\": [1, 0],
                    \"Display\": \"[1, 0]\",
                    \"Count\": 1
                },
                {
                    \"Outcome\": [1],
                    \"Display\": \"[1]\",
                    \"Count\": 1
                }
            ],
            \"Shots\": [
                {
                    \"Item1\": [1, 0],
                    \"Item2\": {
                        \"Item1\": -2.71,
                        \"Item2\": 67
                    },
                    \"Item3\": [
                        {
                            \"Item1\": 6,
                            \"Item2\": true
                        },
                        {
                            \"Item1\": 12,
                            \"Item2\": false
                        }
                    ]
                },
                [1, 0],
                [1]
            ]
        }
    ]
}"""
    job_results = _get_job_results_histogram("microsoft.quantum-results.v2", output)
    assert len(job_results.keys()) == 3
    assert job_results["[1, 0]"]["count"] == 1
    assert job_results["[1]"]["count"] == 1
    assert job_results["([1, 0], (-2.71, 67), [(6, true), (12, false)])"]["count"] == 1
    assert job_results["([1, 0], (-2.71, 67), [(6, true), (12, false)])"][
        "outcome"
    ] == ([1, 0], (-2.71, 67), [(6, True), (12, False)])
    assert job_results["[1]"]["outcome"] == [1]
    assert job_results["[1, 0]"]["outcome"] == [1, 0]


def test_job_for_microsoft_quantum_results_shots_v2_tuple_success():
    output = """{
    \"DataFormat\": \"microsoft.quantum-results.v2\",
    \"Results\": [
        {
            \"Histogram\": [
                {
                    \"Outcome\": {
                        \"Item1\": [
                            1,
                            0
                        ],
                        \"Item2\": {
                            \"Item1\": -2.71,
                            \"Item2\": 67
                        }
                    },
                    \"Display\": \"([1, 0], (-2.71, 67))\",
                    \"Count\": 1
                },
                {
                    \"Outcome\": [1, 0],
                    \"Display\": \"[1, 0]\",
                    \"Count\": 1
                },
                {
                    \"Outcome\": [1],
                    \"Display\": \"[1]\",
                    \"Count\": 1
                }
            ],
            \"Shots\": [
                {
                    \"Item1\": [
                        1,
                        0
                    ],
                    \"Item2\": {
                        \"Item1\": -2.71,
                        \"Item2\": 67
                    }
                },
                [1, 0],
                [1]
            ]
        }
    ]
    }"""
    job_results = _get_job_results_shots("microsoft.quantum-results.v2", output)
    assert len(job_results) == 3
    assert job_results[0] == ([1, 0], (-2.71, 67))
    assert job_results[1] == [1, 0]
    assert job_results[2] == [1]


def test_job_for_microsoft_quantum_results_shots_v2_wrong_type_raises_exception():
    try:
        _get_job_results_shots(
            "microsoft.quantum-results.v2",
            '{"Histogram": ["[0]", 0.50, "[1]", 0.50]}',
        )
        assert False
    except Exception:
        assert True
##
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
##

"""Tests for Workspace list_* pagination, filtering, and ordering against the
in-memory mock client (no network calls)."""

from datetime import datetime, UTC, timedelta

from mock_client import create_default_workspace


def test_list_jobs_basic():
    ws = create_default_workspace()
    jobs = list(ws.list_jobs())
    assert all(j.item_type == "Job" for j in jobs)
    assert len(jobs) >= 4


def test_list_jobs_filters():
    ws = create_default_workspace()
    # name prefix
    jobs = list(ws.list_jobs(name_match="ionq"))
    assert jobs and all(j.details.name.startswith("ionq") for j in jobs)
    # provider
    jobs = list(ws.list_jobs(provider=["ionq"]))
    assert jobs and all(j.details.provider_id == "ionq" for j in jobs)
    # target
    jobs = list(ws.list_jobs(target=["microsoft.estimator", "microsoft.dft"]))
    assert all(
        j.details.target in {"microsoft.estimator", "microsoft.dft"} for j in jobs
    )
    # status
    jobs = list(ws.list_jobs(status=["Failed", "Cancelled"]))
    assert all(j.details.status in {"Failed", "Cancelled"} for j in jobs)


def test_list_jobs_created_window_and_ordering():
    ws = create_default_workspace()
    after = datetime.now(UTC) - timedelta(days=9)
    before = datetime.now(UTC) + timedelta(days=1)
    # asc
    asc = list(
        ws.list_jobs(
            created_after=after,
            created_before=before,
            orderby_property="CreationTime",
            is_asc=True,
        )
    )
    assert all(
        j.details.creation_time.date() >= after.date()
        and j.details.creation_time.date() <= before.date()
        for j in asc
    )
    for a, b in zip(asc, asc[1:]):
        assert a.details.creation_time <= b.details.creation_time
    # desc
    desc = list(
        ws.list_jobs(
            created_after=after,
            created_before=before,
            orderby_property="CreationTime",
            is_asc=False,
        )
    )
    for a, b in zip(desc, desc[1:]):
        assert a.details.creation_time >= b.details.creation_time
    # missing creation_time default handling ensures item is included and sortable
    all_jobs = list(ws.list_jobs(orderby_property="CreationTime", is_asc=True))
    assert any(j.details.id == "j-rig-1" for j in all_jobs)


def test_list_jobs_paging_basic():
    ws = create_default_workspace()
    jobs = ws.list_jobs(orderby_property="CreationTime", is_asc=True)
    # Ensure iterable and ordered
    jobs_list = list(jobs)
    assert len(jobs_list) >= 1
    for a, b in zip(jobs_list, jobs_list[1:]):
        assert a.details.creation_time <= b.details.creation_time


def test_list_sessions_basic_and_filters():
    ws = create_default_workspace()
    sessions = list(ws.list_sessions())
    assert all(s.item_type == "Session" for s in sessions)
    # provider filter
    f = list(ws.list_sessions(provider=["ionq"]))
    assert f and all(s._details.provider_id == "ionq" for s in f)
    # target filter
    t = list(ws.list_sessions(target=["ionq.test", "ionq.simulator"]))
    assert t and all(s._details.target in {"ionq.test", "ionq.simulator"} for s in t)
    # status filter
    st = list(ws.list_sessions(status=["Succeeded"]))
    assert st and all(s._details.status == "Succeeded" for s in st)
    # multi-value ORs
    prov_or = ws.list_sessions(provider=["ionq", "quantinuum"])
    assert prov_or and all(
        s._details.provider_id in {"ionq", "quantinuum"} for s in prov_or
    )
    st_or = ws.list_sessions(status=["Succeeded", "WAITING"])
    assert st_or and all(s._details.status in {"Succeeded", "WAITING"} for s in st_or)


def test_list_sessions_created_ordering():
    ws = create_default_workspace()
    before = datetime.now(UTC) + timedelta(days=1)
    asc = list(
        ws.list_sessions(
            created_before=before, orderby_property="CreationTime", is_asc=True
        )
    )
    for a, b in zip(asc, asc[1:]):
        assert a.details.creation_time <= b.details.creation_time
    desc = list(
        ws.list_sessions(
            created_before=before, orderby_property="CreationTime", is_asc=False
        )
    )
    for a, b in zip(desc, desc[1:]):
        assert a.details.creation_time >= b.details.creation_time


def test_list_session_jobs_filters_and_order():
    ws = create_default_workspace()
    sessions = list(ws.list_sessions())
    assert sessions
    sid = sessions[0].id
    jobs = list(ws.list_session_jobs(session_id=sid))
    assert jobs and all(
        j.item_type == "Job" and j._details.session_id == sid for j in jobs
    )
    jn = list(ws.list_session_jobs(session_id=sid, name_match="ionqJob"))
    assert all(j.details.name.startswith("ionqJob") for j in jn)
    js = list(ws.list_session_jobs(session_id=sid, status=["Succeeded"]))
    assert all(j.details.status == "Succeeded" for j in js)
    asc = list(
        ws.list_session_jobs(
            session_id=sid, orderby_property="CreationTime", is_asc=True
        )
    )
    for a, b in zip(asc, asc[1:]):
        assert a.details.creation_time <= b.details.creation_time
    desc = list(
        ws.list_session_jobs(
            session_id=sid, orderby_property="CreationTime", is_asc=False
        )
    )
    for a, b in zip(desc, desc[1:]):
        assert a.details.creation_time >= b.details.creation_time


def test_list_top_level_items_basic_and_filters():
    ws = create_default_workspace()
    items = list(ws.list_top_level_items())
    assert all(i.workspace.subscription_id == ws.subscription_id for i in items)
    # name filters
    i1 = list(ws.list_top_level_items(name_match="ionq"))
    assert all(it.details.name.startswith("ionq") for it in i1)
    # exact-case only; mixed-case not supported per API
    # provider
    # combined provider AND status AND window
    before = datetime.now(UTC) + timedelta(days=1)
    combo = list(
        ws.list_sessions(provider=["ionq"], status=["Succeeded"], created_before=before)
    )
    assert combo and all(
        s._details.provider_id == "ionq" and s._details.status == "Succeeded"
        for s in combo
    )
    prov = list(ws.list_top_level_items(provider=["ionq"]))
    assert prov and all(it.details.provider_id == "ionq" for it in prov)
    # target
    tgt = list(ws.list_top_level_items(target=["microsoft.estimator", "microsoft.dft"]))
    assert all(
        it.details.target in {"microsoft.estimator", "microsoft.dft"} for it in tgt
    )
    # status
    st = list(ws.list_top_level_items(status=["Failed", "Cancelled"]))
    assert all(it.details.status in {"Failed", "Cancelled"} for it in st)
    # combined filters: provider AND target; with seeded AND-match expect results
    combo = list(
        ws.list_top_level_items(provider=["ionq"], target=["microsoft.estimator"])
    )
    assert combo and all(
        it.details.provider_id == "ionq" and it.details.target == "microsoft.estimator"
        for it in combo
    )

    # case sensitivity: lower-case item_type should return empty
    combo_case = list(ws.list_top_level_items(item_type=["job"]))
    assert len(combo_case) == 0

    # multi-value OR grouping for item_type should return both types
    both_types = list(ws.list_top_level_items(item_type=["Job", "Session"]))
    assert (
        both_types
        and any(it.item_type == "Job" for it in both_types)
        and any(it.item_type == "Session" for it in both_types)
    )

    # multi-value OR grouping for job_type should include both QuantumComputing and QuantumChemistry
    jt_multi = list(
        ws.list_top_level_items(job_type=["QuantumComputing", "QuantumChemistry"])
    )
    assert (
        jt_multi
        and any(
            getattr(it.details, "job_type", None) == "QuantumComputing"
            for it in jt_multi
        )
        and any(
            getattr(it.details, "job_type", None) == "QuantumChemistry"
            for it in jt_multi
        )
    )

    # date boundary tests: created_after/on boundary includes items; created_before/on boundary includes items
    # choose a boundary based on a known seeded item creation_time
    boundary_date = next(
        it.details.creation_time.date() for it in items if it.details.name == "msJobA"
    )
    after_inclusive = list(
        ws.list_top_level_items(
            created_after=datetime.combine(
                boundary_date, datetime.min.time(), tzinfo=UTC
            )
        )
    )
    assert any(
        it.details.creation_time.date() >= boundary_date for it in after_inclusive
    )
    before_inclusive = list(
        ws.list_top_level_items(
            created_before=datetime.combine(
                boundary_date, datetime.min.time(), tzinfo=UTC
            )
        )
    )
    assert any(
        it.details.creation_time.date() <= boundary_date for it in before_inclusive
    )
    # job_type + provider + target (AND semantics); with seeded combo expect non-empty
    jt_combo = list(
        ws.list_top_level_items(
            job_type=["QuantumComputing"],
            provider=["ionq"],
            target=["microsoft.estimator"],
        )
    )
    assert jt_combo and all(
        getattr(it.details, "job_type", None) == "QuantumComputing"
        and it.details.provider_id == "ionq"
        and it.details.target == "microsoft.estimator"
        for it in jt_combo
    )
    # negative test: no match
    none_items = list(ws.list_top_level_items(provider=["no-provider"]))
    assert len(none_items) == 0


def test_list_top_level_items_created_ordering():
    ws = create_default_workspace()
    after = datetime.now(UTC) - timedelta(days=15)
    asc = list(
        ws.list_top_level_items(
            created_after=after, orderby_property="CreationTime", is_asc=True
        )
    )
    for a, b in zip(asc, asc[1:]):
        assert a.details.creation_time <= b.details.creation_time
    desc = list(
        ws.list_top_level_items(
            created_after=after, orderby_property="CreationTime", is_asc=False
        )
    )
    for a, b in zip(desc, desc[1:]):
        assert a.details.creation_time >= b.details.creation_time
    # Ascending with created_after boundary
    start = datetime.now(UTC) - timedelta(days=365)
    items_after = list(
        ws.list_top_level_items(
            created_after=start, orderby_property="CreationTime", is_asc=True
        )
    )
    assert items_after
    prev = None
    for it in items_after:
        assert it.details.creation_time.date() >= start.date()
        if prev is None:
            prev = it.details.creation_time
        else:
            assert it.details.creation_time >= prev
            prev = it.details.creation_time


def test_filter_string_emission():
    ws = create_default_workspace()
    # pylint: disable=protected-access
    filter_string = ws._create_filter(
        job_name="name",
        item_type=["Session", "Job"],
        job_type=["Regular", "Chemistry"],
        provider_ids=["ionq", "quantinuum"],
        target=["ionq.sim", "quantinuum,sim"],
        status=["Completed", "Failed"],
        created_after=datetime(2024, 10, 1),
        created_before=datetime(2024, 11, 1),
    )
    # pylint: enable=protected-access
    expected = (
        "startswith(Name, 'name') and (ItemType eq 'Session' or ItemType eq 'Job') and "
        "(JobType eq 'Regular' or JobType eq 'Chemistry') and (ProviderId eq 'ionq' or ProviderId eq 'quantinuum') and "
        "(Target eq 'ionq.sim' or Target eq 'quantinuum,sim') and (State eq 'Completed' or State eq 'Failed') and "
        "CreationTime ge 2024-10-01 and CreationTime le 2024-11-01"
    )
    assert filter_string == expected


def test_orderby_emission_and_validation():
    ws = create_default_workspace()
    props = [
        "Name",
        "ItemType",
        "JobType",
        "ProviderId",
        "Target",
        "State",
        "CreationTime",
    ]
    # pylint: disable=protected-access
    for p in props:
        assert ws._create_orderby(p, True) == f"{p} asc"
        assert ws._create_orderby(p, False) == f"{p} desc"
    try:
        ws._create_orderby("test", True)
        assert False, "Expected ValueError for invalid property"
    except ValueError:
        pass
    # pylint: enable=protected-access
+## + +from mock_client import create_default_workspace + + +def test_list_top_level_items_includes_jobs_and_sessions(): + ws = create_default_workspace() + items = list(ws.list_top_level_items()) + assert items + item_types = {type(it).__name__ for it in items} + assert "Job" in item_types + assert "Session" in item_types + + +def test_list_sessions_basic(): + ws = create_default_workspace() + sessions = list(ws.list_sessions()) + assert sessions + assert all(type(s).__name__ == "Session" for s in sessions) + + +def test_get_session_returns_matching_details_and_jobs(): + ws = create_default_workspace() + # Choose a known session from the seeded data + sessions = list(ws.list_sessions()) + assert sessions + sid = sessions[0].id + + s = ws.get_session(session_id=sid) + assert s + assert s.id == sid + assert s.details.id == sid + + # Verify session-scoped jobs are returned and have matching session_id + jobs = list(s.list_jobs()) + assert jobs + assert all(j.item_type == "Job" for j in jobs) + assert all(getattr(j._details, "session_id", None) == sid for j in jobs) diff --git a/azure-quantum/tests/unit/local/test_workspace.py b/azure-quantum/tests/unit/local/test_workspace.py new file mode 100644 index 00000000..574e86bf --- /dev/null +++ b/azure-quantum/tests/unit/local/test_workspace.py @@ -0,0 +1,338 @@ +## +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
##

"""Workspace construction/configuration tests backed by the local mock client."""

import os
from unittest import mock
from azure.quantum._constants import EnvironmentVariables, ConnectionConstants
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline.policies import AzureKeyCredentialPolicy
from azure.identity import EnvironmentCredential

from mock_client import WorkspaceMock
from common import (
    SUBSCRIPTION_ID,
    RESOURCE_GROUP,
    WORKSPACE,
    LOCATION,
    STORAGE,
    API_KEY,
)

# Canonical ARM resource id for the mocked workspace.
SIMPLE_RESOURCE_ID = ConnectionConstants.VALID_RESOURCE_ID(
    subscription_id=SUBSCRIPTION_ID,
    resource_group=RESOURCE_GROUP,
    workspace_name=WORKSPACE,
)

# Well-formed connection string pointing at the same workspace.
SIMPLE_CONNECTION_STRING = ConnectionConstants.VALID_CONNECTION_STRING(
    subscription_id=SUBSCRIPTION_ID,
    resource_group=RESOURCE_GROUP,
    workspace_name=WORKSPACE,
    api_key=API_KEY,
    quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(LOCATION),
)


def test_create_workspace_instance_valid():
    """Workspace params are accepted directly or parsed from a resource id."""
    ws = WorkspaceMock(
        subscription_id=SUBSCRIPTION_ID,
        resource_group=RESOURCE_GROUP,
        name=WORKSPACE,
        location=LOCATION,
    )
    assert ws.subscription_id == SUBSCRIPTION_ID
    assert ws.resource_group == RESOURCE_GROUP
    assert ws.name == WORKSPACE
    assert ws.location == LOCATION

    ws = WorkspaceMock(
        subscription_id=SUBSCRIPTION_ID,
        resource_group=RESOURCE_GROUP,
        name=WORKSPACE,
        location=LOCATION,
        storage=STORAGE,
    )
    assert ws.storage == STORAGE

    # A resource id alone should populate subscription/group/name.
    ws = WorkspaceMock(resource_id=SIMPLE_RESOURCE_ID, location=LOCATION)
    assert ws.subscription_id == SUBSCRIPTION_ID
    assert ws.resource_group == RESOURCE_GROUP
    assert ws.name == WORKSPACE
    assert ws.location == LOCATION

    ws = WorkspaceMock(
        resource_id=SIMPLE_RESOURCE_ID, storage=STORAGE, location=LOCATION
    )
    assert ws.storage == STORAGE


def test_create_workspace_locations():
    """User-supplied location strings are normalized (lower-cased, no spaces)."""
    # User-provided location name should be normalized
    ws = WorkspaceMock(
        subscription_id=SUBSCRIPTION_ID,
        resource_group=RESOURCE_GROUP,
        name=WORKSPACE,
        location="East US",
    )
    assert ws.location == "eastus"


def test_env_connection_string():
    """A connection string from the environment fully configures the workspace."""
    with mock.patch.dict(os.environ):
        # Clear env vars then set connection string
        os.environ.clear()
        os.environ[EnvironmentVariables.CONNECTION_STRING] = SIMPLE_CONNECTION_STRING

        workspace = WorkspaceMock()
        assert workspace.location == LOCATION
        assert workspace.subscription_id == SUBSCRIPTION_ID
        assert workspace.name == WORKSPACE
        assert workspace.resource_group == RESOURCE_GROUP
        assert isinstance(workspace.credential, AzureKeyCredential)
        assert workspace.credential.key == API_KEY
        # pylint: disable=protected-access
        assert isinstance(
            workspace._client._config.authentication_policy, AzureKeyCredentialPolicy
        )
        auth_policy = workspace._client._config.authentication_policy
        assert auth_policy._name == ConnectionConstants.QUANTUM_API_KEY_HEADER
        # Identity check: the policy must hold the very same credential object.
        assert auth_policy._credential is workspace.credential


def test_workspace_from_connection_string():
    """from_connection_string wins over (or falls back to) environment settings."""
    with mock.patch.dict(os.environ):
        os.environ.clear()
        workspace = WorkspaceMock.from_connection_string(SIMPLE_CONNECTION_STRING)
        assert workspace.location == LOCATION
        assert isinstance(workspace.credential, AzureKeyCredential)
        assert workspace.credential.key == API_KEY
        # pylint: disable=protected-access
        assert isinstance(
            workspace._client._config.authentication_policy, AzureKeyCredentialPolicy
        )
        auth_policy = workspace._client._config.authentication_policy
        assert auth_policy._name == ConnectionConstants.QUANTUM_API_KEY_HEADER
        # Identity check: the policy must hold the very same credential object.
        assert auth_policy._credential is workspace.credential

    # Ensure env var overrides behave as original tests expect
    with mock.patch.dict(os.environ):
        os.environ.clear()

        wrong_subscription_id = "00000000-2BAD-2BAD-2BAD-000000000000"
        wrong_resource_group = "wrongrg"
        wrong_workspace = "wrong-workspace"
        wrong_location = "wrong-location"

        wrong_connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
            subscription_id=wrong_subscription_id,
            resource_group=wrong_resource_group,
            workspace_name=wrong_workspace,
            api_key=API_KEY,
            quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(
                wrong_location
            ),
        )

        # Explicit env vars take precedence over the connection-string values.
        os.environ[EnvironmentVariables.CONNECTION_STRING] = wrong_connection_string
        os.environ[EnvironmentVariables.LOCATION] = LOCATION
        os.environ[EnvironmentVariables.SUBSCRIPTION_ID] = SUBSCRIPTION_ID
        os.environ[EnvironmentVariables.RESOURCE_GROUP] = RESOURCE_GROUP
        os.environ[EnvironmentVariables.WORKSPACE_NAME] = WORKSPACE

        workspace = WorkspaceMock()
        assert workspace.location == LOCATION
        assert workspace.subscription_id == SUBSCRIPTION_ID
        assert workspace.resource_group == RESOURCE_GROUP
        assert workspace.name == WORKSPACE
        assert isinstance(workspace.credential, AzureKeyCredential)

        # If a credential is passed, it should be used
        workspace = WorkspaceMock(credential=EnvironmentCredential())
        assert isinstance(workspace.credential, EnvironmentCredential)

        # Parameter connection string should override env var
        os.environ.clear()
        os.environ[EnvironmentVariables.CONNECTION_STRING] = wrong_connection_string
        connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            workspace_name=WORKSPACE,
            api_key=API_KEY,
            quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(
                LOCATION
            ),
        )
        workspace = WorkspaceMock.from_connection_string(connection_string)
        assert workspace.location == LOCATION
        assert workspace.subscription_id == SUBSCRIPTION_ID
        assert workspace.resource_group == RESOURCE_GROUP
        assert workspace.name == WORKSPACE

        # Bad env var connection string should not be parsed if not needed
        os.environ.clear()
        os.environ[EnvironmentVariables.CONNECTION_STRING] = "bad-connection-string"
        connection_string = ConnectionConstants.VALID_CONNECTION_STRING(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            workspace_name=WORKSPACE,
            api_key=API_KEY,
            quantum_endpoint=ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(
                LOCATION
            ),
        )
        workspace = WorkspaceMock.from_connection_string(connection_string)
        assert workspace.location == LOCATION
        assert workspace.subscription_id == SUBSCRIPTION_ID
        assert workspace.resource_group == RESOURCE_GROUP
        assert workspace.name == WORKSPACE


def test_create_workspace_instance_invalid():
    """Incomplete or malformed workspace parameters raise ValueError."""

    def assert_value_error(exception: Exception):
        # All "not fully specified" failures share this message prefix.
        assert "Azure Quantum workspace not fully specified." in exception.args[0]

    with mock.patch.dict(os.environ):
        os.environ.clear()

        # missing location
        try:
            WorkspaceMock(
                location=None,  # type: ignore[arg-type]
                subscription_id=SUBSCRIPTION_ID,
                resource_group=RESOURCE_GROUP,
                name=WORKSPACE,
            )
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # missing location with resource id
        try:
            WorkspaceMock(resource_id=SIMPLE_RESOURCE_ID)
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # missing subscription id
        try:
            WorkspaceMock(
                location=LOCATION,
                subscription_id=None,  # type: ignore[arg-type]
                resource_group=RESOURCE_GROUP,
                name=WORKSPACE,
            )
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # missing resource group
        try:
            WorkspaceMock(
                location=LOCATION,
                subscription_id=SUBSCRIPTION_ID,
                resource_group=None,  # type: ignore[arg-type]
                name=WORKSPACE,
            )
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # missing workspace name
        try:
            WorkspaceMock(
                location=LOCATION,
                subscription_id=SUBSCRIPTION_ID,
                resource_group=RESOURCE_GROUP,
                name=None,  # type: ignore[arg-type]
            )
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # missing everything
        try:
            WorkspaceMock()
            assert False, "Expected ValueError"
        except ValueError as e:
            assert_value_error(e)

        # invalid resource id
        try:
            WorkspaceMock(location=LOCATION, resource_id="invalid/resource/id")
            assert False, "Expected ValueError"
        except ValueError as e:
            assert "Invalid resource id" in e.args[0]


def test_workspace_user_agent_appid():
    """user_agent combines the env-var app id with parameter/appended values."""
    app_id = "MyEnvVarAppId"
    user_agent = "MyUserAgent"
    with mock.patch.dict(os.environ):
        os.environ.clear()

        # no UserAgent parameter and no EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
        )
        assert ws.user_agent is None

        # with UserAgent parameter and no EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
            user_agent=user_agent,
        )
        assert ws.user_agent == user_agent

        # append with no UserAgent parameter and no EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
        )
        ws.append_user_agent("featurex")
        assert ws.user_agent == "featurex"

        # set EnvVar AppId for remaining cases
        os.environ[EnvironmentVariables.USER_AGENT_APPID] = app_id

        # no UserAgent parameter and with EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
        )
        assert ws.user_agent == app_id

        # with UserAgent parameter and EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
            user_agent=user_agent,
        )
        assert ws.user_agent == f"{app_id} {user_agent}"

        # append with UserAgent parameter and with EnvVar AppId
        ws = WorkspaceMock(
            subscription_id=SUBSCRIPTION_ID,
            resource_group=RESOURCE_GROUP,
            name=WORKSPACE,
            location=LOCATION,
            user_agent=user_agent,
        )
        ws.append_user_agent("featurex")
        assert ws.user_agent == f"{app_id} {user_agent}-featurex"

ws.append_user_agent(None) + assert ws.user_agent == app_id diff --git a/azure-quantum/tests/unit/test_job_results.py b/azure-quantum/tests/unit/test_job_results.py index 11ea4aab..ce9a22b6 100644 --- a/azure-quantum/tests/unit/test_job_results.py +++ b/azure-quantum/tests/unit/test_job_results.py @@ -5,11 +5,8 @@ import re import unittest -from unittest.mock import Mock import pytest from common import QuantumTestBase, RegexScrubbingPatterns -from azure.quantum import Job, JobDetails -from azure.quantum.target import Target class TestJobResults(QuantumTestBase): @@ -18,15 +15,11 @@ class TestJobResults(QuantumTestBase): Tests the azure.quantum.job module. """ - def test_job_success(self): - job_results = self._get_job_results("test_output_data_format","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}") - self.assertTrue(len(job_results["Histogram"]) == 4) - @pytest.mark.live_test @pytest.mark.xdist_group(name="echo-output") def test_job_get_results_with_expired_sas_token(self): """ - Get existing result blob url and replace its sas token with expired one, + Get existing result blob url and replace its sas token with expired one, so we can test its ability to refresh it. 
""" target = self.create_echo_target() @@ -38,268 +31,12 @@ def test_job_get_results_with_expired_sas_token(self): job.details.output_data_uri = re.sub( pattern=RegexScrubbingPatterns.URL_QUERY_SAS_KEY_EXPIRATION, repl="se=2024-01-01T00%3A00%3A00Z&", - string=job.details.output_data_uri) + string=job.details.output_data_uri, + ) job_results = job.get_results() self.assertEqual(job_results, input_data) - def test_job_for_microsoft_quantum_results_v1_success(self): - job_results = self._get_job_results("microsoft.quantum-results.v1","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}") - self.assertTrue(len(job_results.keys()) == 2) - self.assertEqual(job_results["[0]"], 0.50) - self.assertEqual(job_results["[1]"], 0.50) - - - def test_job_for_microsoft_quantum_results_v1_no_histogram_returns_raw_result(self): - job_result_raw = "{\"NotHistogramProperty\": [\"[0]\", 0.50, \"[1]\", 0.50]}" - job_result = self._get_job_results("microsoft.quantum-results.v1", job_result_raw) - self.assertEqual(job_result, job_result_raw) - - - def test_job_for_microsoft_quantum_results_v1_invalid_histogram_returns_raw_result(self): - job_result_raw = "{\"NotHistogramProperty\": [\"[0]\", 0.50, \"[1]\"]}" - job_result = self._get_job_results("microsoft.quantum-results.v1", job_result_raw) - self.assertEqual(job_result, job_result_raw) - - def test_job_for_microsoft_quantum_results_v2_success(self): - job_results = self._get_job_results("microsoft.quantum-results.v2","{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}") - self.assertTrue(len(job_results.keys()) == 2) - self.assertEqual(job_results["[0]"], 0.50) - self.assertEqual(job_results["[1]"], 0.50) - - def test_job_for_microsoft_quantum_results_v2_wrong_type_raises_exception(self): - job_result_raw = "{\"DataFormat\": \"microsoft.quantum-results.v1\", 
\"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}" - job_result = self._get_job_results("microsoft.quantum-results.v2", job_result_raw) - self.assertEqual(job_result, job_result_raw) - - - def test_job_for_microsoft_quantum_results_v2_invalid_histogram_returns_raw_result(self): - job_result_raw = "{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\"}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}" - job_result = self._get_job_results("microsoft.quantum-results.v2", job_result_raw) - self.assertEqual(job_result, job_result_raw) - - def test_job_for_microsoft_quantum_results_histogram_v2_success(self): - job_results = self._get_job_results_histogram("microsoft.quantum-results.v2","{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}") - self.assertTrue(len(job_results.keys()) == 2) - self.assertEqual(job_results["[0]"]["count"], 2) - self.assertEqual(job_results["[1]"]["count"], 2) - self.assertEqual(job_results["[0]"]["outcome"], [0]) - self.assertEqual(job_results["[1]"]["outcome"], [1]) - - def test_job_for_microsoft_quantum_results_histogram_batch_v2_success(self): - job_results = self._get_job_results_histogram("microsoft.quantum-results.v2","{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}, {\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}, {\"Histogram\": 
[{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}") - self.assertTrue(len(job_results) == 3) - for result in job_results: - self.assertTrue(len(result.keys()) == 2) - self.assertEqual(result["[0]"]["count"], 2) - self.assertEqual(result["[1]"]["count"], 2) - self.assertEqual(result["[0]"]["outcome"], [0]) - self.assertEqual(result["[1]"]["outcome"], [1]) - - def test_job_for_microsoft_quantum_results_histogram_v2_wrong_type_raises_exception(self): - try: - job_results = self._get_job_results_histogram("microsoft.quantum-results.v2","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}") - # Fail test because we didn't get the error - self.assertTrue(False) - except: - self.assertTrue(True) - - def test_job_for_microsoft_quantum_results_shots_v2_success(self): - job_results = self._get_job_results_shots("microsoft.quantum-results.v2","{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}") - self.assertTrue(len(job_results) == 4) - self.assertEqual(job_results[0], [0]) - self.assertEqual(job_results[1], [1]) - self.assertEqual(job_results[2], [1]) - self.assertEqual(job_results[3], [0]) - - def test_job_for_microsoft_quantum_results_shots_batch_v2_success(self): - job_results = self._get_job_results_shots("microsoft.quantum-results.v2","{\"DataFormat\": \"microsoft.quantum-results.v2\", \"Results\": [{\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}, {\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, {\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}, {\"Histogram\": [{\"Outcome\": [0], \"Display\": \"[0]\", \"Count\": 2}, 
{\"Outcome\": [1], \"Display\": \"[1]\", \"Count\": 2}], \"Shots\": [[0], [1], [1], [0]]}]}") - self.assertTrue(len(job_results) == 3) - for i in range(3): - self.assertTrue(len(job_results[i]) == 4) - self.assertEqual(job_results[i][0], [0]) - self.assertEqual(job_results[i][1], [1]) - self.assertEqual(job_results[i][2], [1]) - self.assertEqual(job_results[i][3], [0]) - - def test_job_for_microsoft_quantum_results_histogram_v2_tuple_success(self): - output = '''{ - \"DataFormat\": \"microsoft.quantum-results.v2\", - \"Results\": [ - { - \"Histogram\": [ - { - \"Outcome\": { - \"Item1\": [1, 0], - \"Item2\": { - \"Item1\": -2.71, - \"Item2\": 67 - }, - \"Item3\": [ - { - \"Item1\": 6, - \"Item2\": true - }, - { - \"Item1\": 12, - \"Item2\": false - } - ] - }, - \"Display\": \"([1, 0], (-2.71, 67), [(6, true), (12, false)])\", - \"Count\": 1 - }, - { - \"Outcome\": [1, 0], - \"Display\": \"[1, 0]\", - \"Count\": 1 - }, - { - \"Outcome\": [1], - \"Display\": \"[1]\", - \"Count\": 1 - } - ], - \"Shots\": [ - { - \"Item1\": [1, 0], - \"Item2\": { - \"Item1\": -2.71, - \"Item2\": 67 - }, - \"Item3\": [ - { - \"Item1\": 6, - \"Item2\": true - }, - { - \"Item1\": 12, - \"Item2\": false - } - ] - }, - [1, 0], - [1] - ] - } - ] -}''' - job_results = self._get_job_results_histogram("microsoft.quantum-results.v2", output) - - self.assertTrue(len(job_results.keys()) == 3) - self.assertEqual(job_results["[1, 0]"]["count"], 1) - self.assertEqual(job_results["[1]"]["count"], 1) - self.assertEqual(job_results["([1, 0], (-2.71, 67), [(6, true), (12, false)])"]["count"], 1) - self.assertEqual(job_results["([1, 0], (-2.71, 67), [(6, true), (12, false)])"]["outcome"], ([1, 0], (-2.71, 67), [(6, True), (12, False)])) - self.assertEqual(job_results["[1]"]["outcome"], [1]) - self.assertEqual(job_results["[1, 0]"]["outcome"], [1, 0]) - - def test_job_for_microsoft_quantum_results_shots_v2_tuple_success(self): - output = '''{ - \"DataFormat\": \"microsoft.quantum-results.v2\", - 
\"Results\": [ - { - \"Histogram\": [ - { - \"Outcome\": { - \"Item1\": [ - 1, - 0 - ], - \"Item2\": { - \"Item1\": -2.71, - \"Item2\": 67 - } - }, - \"Display\": \"([1, 0], (-2.71, 67))\", - \"Count\": 1 - }, - { - \"Outcome\": [1, 0], - \"Display\": \"[1, 0]\", - \"Count\": 1 - }, - { - \"Outcome\": [1], - \"Display\": \"[1]\", - \"Count\": 1 - } - ], - \"Shots\": [ - { - \"Item1\": [ - 1, - 0 - ], - \"Item2\": { - \"Item1\": -2.71, - \"Item2\": 67 - } - }, - [1, 0], - [1] - ] - } - ] - }''' - job_results = self._get_job_results_shots("microsoft.quantum-results.v2", output) - - self.assertTrue(len(job_results) == 3) - self.assertEqual(job_results[0], ([1, 0], (-2.71, 67))) - self.assertEqual(job_results[1], [1, 0]) - self.assertEqual(job_results[2], [1]) - - def test_job_for_microsoft_quantum_results_shots_v2_wrong_type_raises_exception(self): - try: - job_results = self._get_job_results_shots("microsoft.quantum-results.v2","{\"Histogram\": [\"[0]\", 0.50, \"[1]\", 0.50]}") - # Fail test because we didn't get the error - self.assertTrue(False) - except: - self.assertTrue(True) - - def _get_job_results(self, output_data_format, results_as_json_str): - job = self._mock_job(output_data_format, results_as_json_str) - - return job.get_results() - - def _get_job_results_histogram(self, output_data_format, results_as_json_str): - job = self._mock_job(output_data_format, results_as_json_str) - - return job.get_results_histogram() - - def _get_job_results_shots(self, output_data_format, results_as_json_str): - job = self._mock_job(output_data_format, results_as_json_str) - - return job.get_results_shots() - - def _mock_job(self, output_data_format, results_as_json_str): - job_details = JobDetails( - id= "", - name= "", - provider_id="", - target="", - container_uri="", - input_data_format="", - output_data_format = output_data_format) - job_details.status = "Succeeded" - job = Job( - workspace=None, - job_details=job_details) - - job.has_completed = 
Mock(return_value=True) - job.wait_until_completed = Mock() - - class DowloadDataMock(object): - def decode(): str - pass - - download_data = DowloadDataMock() - download_data.decode = Mock(return_value=results_as_json_str) - job.download_data = Mock(return_value=download_data) - - return job - - if __name__ == "__main__": unittest.main()