From 09a4f466fdd611e2de6f68741728ea7d25dbd9e3 Mon Sep 17 00:00:00 2001 From: swathipil <76007337+swathipil@users.noreply.github.com> Date: Thu, 26 Sep 2024 10:37:15 -0700 Subject: [PATCH 01/22] Revert "[EventHub] Raise error when primary key is rotated (#36245)" This reverts commit 3b2dfa4e000fe160bada9ddcd16b53c20537956d. --- sdk/eventhub/azure-eventhub/CHANGELOG.md | 2 -- sdk/eventhub/azure-eventhub/azure/eventhub/_consumer.py | 5 ----- .../azure-eventhub/azure/eventhub/_transport/_base.py | 5 ----- .../azure/eventhub/_transport/_pyamqp_transport.py | 3 --- .../azure/eventhub/_transport/_uamqp_transport.py | 3 --- .../azure/eventhub/aio/_connection_manager_async.py | 2 +- .../azure/eventhub/aio/_transport/_base_async.py | 1 + .../azure/eventhub/aio/_transport/_pyamqp_transport_async.py | 3 --- .../azure/eventhub/aio/_transport/_uamqp_transport_async.py | 3 --- 9 files changed, 2 insertions(+), 25 deletions(-) diff --git a/sdk/eventhub/azure-eventhub/CHANGELOG.md b/sdk/eventhub/azure-eventhub/CHANGELOG.md index 8bd41497be88..7a2a6f6d990e 100644 --- a/sdk/eventhub/azure-eventhub/CHANGELOG.md +++ b/sdk/eventhub/azure-eventhub/CHANGELOG.md @@ -8,8 +8,6 @@ ### Bugs Fixed -- Fixed a bug where the consumer waited indefinitely when the primary key was rotated while receiving, rather than raising an authentication error. ([#33926](https://github.com/Azure/azure-sdk-for-python/issues/33926)) - ### Other Changes ## 5.12.1 (2024-06-11) diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/_consumer.py b/sdk/eventhub/azure-eventhub/azure/eventhub/_consumer.py index d59cd811e918..49a992caab56 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/_consumer.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/_consumer.py @@ -236,11 +236,6 @@ def receive(self, batch=False, max_batch_size=300, max_wait_time=None): # If optional dependency is not installed, do not retry. if isinstance(exception, ImportError): raise exception - - # If authentication exception, do not retry. 
- if isinstance(exception, self._amqp_transport.AUTHENTICATION_EXCEPTION): - raise self._handle_exception(exception, is_consumer=True) - self._amqp_transport.check_link_stolen(self, exception) # TODO: below block hangs when retry_total > 0 # need to remove/refactor, issue #27137 diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_base.py b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_base.py index 0fe3ab70c67c..9a0dcdc9aa23 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_base.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_base.py @@ -30,7 +30,6 @@ ReceiveClient as uamqp_ReceiveClient, ) from uamqp.authentication import JWTTokenAuth as uamqp_JWTTokenAuth - from uamqp.errors import AuthenticationException as uamqp_AuthenticationException except ImportError: pass @@ -49,7 +48,6 @@ from .._pyamqp.constants import ( ConnectionState as pyamqp_ConnectionState ) - from .._pyamqp.error import AuthenticationException as pyamqp_AuthenticationException class AmqpTransport(ABC): # pylint: disable=too-many-public-methods """ @@ -78,9 +76,6 @@ class AmqpTransport(ABC): # pylint: disable=too-many-public-methods USER_AGENT_SYMBOL: Union[uamqp_Types_AMQPSymbol, str, bytes] PROP_PARTITION_KEY_AMQP_SYMBOL: Union[uamqp_Types_AMQPSymbol, str, bytes] - # exceptions - AUTHENTICATION_EXCEPTION: Union["uamqp_AuthenticationException", "pyamqp_AuthenticationException"] - @staticmethod @abstractmethod def build_message(**kwargs: Any) -> Union["uamqp_Message", "pyamqp_Message"]: diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_pyamqp_transport.py b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_pyamqp_transport.py index 68db8c361a9f..457a711ec79a 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_pyamqp_transport.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_pyamqp_transport.py @@ -72,9 +72,6 @@ class PyamqpTransport(AmqpTransport): # pylint: disable=too-many-public-method ERROR_CONDITIONS = [condition.value for condition in errors.ErrorCondition] - # define exceptions - AUTHENTICATION_EXCEPTION = errors.AuthenticationException - @staticmethod def build_message(**kwargs): """ diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_uamqp_transport.py b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_uamqp_transport.py index de543ccaed8e..81798a10c4fc 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_uamqp_transport.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/_transport/_uamqp_transport.py @@ -122,9 +122,6 @@ class UamqpTransport(AmqpTransport): # pylint: disable=too-many-public-method USER_AGENT_SYMBOL = types.AMQPSymbol("user-agent") PROP_PARTITION_KEY_AMQP_SYMBOL = types.AMQPSymbol(PROP_PARTITION_KEY) - # define exceptions - AUTHENTICATION_EXCEPTION = errors.AuthenticationException - @staticmethod def build_message(**kwargs): """ diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_connection_manager_async.py b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_connection_manager_async.py index a8b7dc9fb181..c4da39b918f3 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_connection_manager_async.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_connection_manager_async.py @@ -62,7 +62,7 @@ def __init__( **kwargs: Any ) -> None: self._loop = kwargs.get("loop") - self._lock = Lock(loop=self._loop) # pylint: disable=unexpected-keyword-arg + self._lock = Lock(loop=self._loop) self._conn: Optional[Union[uamqp_ConnectionAsync, 
ConnectionAsync]] = None self._container_id = container_id diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_base_async.py b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_base_async.py index cd1ce5e4f702..8d115f027b0a 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_base_async.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_base_async.py @@ -79,6 +79,7 @@ class AmqpTransportAsync(ABC): # pylint: disable=too-many-public-methods USER_AGENT_SYMBOL: Union[uamqp_Types_AMQPSymbol, Literal["user-agent"]] PROP_PARTITION_KEY_AMQP_SYMBOL: Union[uamqp_Types_AMQPSymbol, Literal[b'x-opt-partition-key']] + @staticmethod @abstractmethod def build_message(**kwargs: Any) -> Union["uamqp_Message", "pyamqp_Message"]: diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_pyamqp_transport_async.py b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_pyamqp_transport_async.py index 106636640bff..c7cf800abb34 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_pyamqp_transport_async.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_pyamqp_transport_async.py @@ -278,9 +278,6 @@ async def _receive_task(consumer, max_batch_size): # If optional dependency is not installed, do not retry. if isinstance(exception, ImportError): raise exception - # If authentication exception, do not retry. - if isinstance(exception, errors.AuthenticationException): - raise await consumer._handle_exception(exception) if ( isinstance(exception, errors.AMQPLinkError) and exception.condition == errors.ErrorCondition.LinkStolen # pylint: disable=no-member diff --git a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_uamqp_transport_async.py b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_uamqp_transport_async.py index f2f9609047c9..652c176c28d6 100644 --- a/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_uamqp_transport_async.py +++ b/sdk/eventhub/azure-eventhub/azure/eventhub/aio/_transport/_uamqp_transport_async.py @@ -283,9 +283,6 @@ async def receive_messages_async( except asyncio.CancelledError: # pylint: disable=try-except-raise raise except Exception as exception: # pylint: disable=broad-except - # If authentication exception, do not retry. 
- if isinstance(exception, errors.AuthenticationException): - raise await consumer._handle_exception(exception) if ( isinstance(exception, errors.LinkDetach) and exception.condition == constants.ErrorCodes.LinkStolen # pylint: disable=no-member From 2baa02aea791b0079e7f6d8fd529e967bd358031 Mon Sep 17 00:00:00 2001 From: Neehar Duvvuri <40341266+needuv@users.noreply.github.com> Date: Thu, 26 Sep 2024 14:33:53 -0400 Subject: [PATCH 02/22] Nest column_mapping in evaluator_config (#37551) * evaluator config * add a change log entry * fix changelog wording * fix another thing in changelog * fix typing issue * add to __init__ * fix some tests * fix a spell check issue * fix tests again * fix last failing test * clean up changelog --- .../azure-ai-evaluation/CHANGELOG.md | 33 +++++++ .../azure/ai/evaluation/__init__.py | 8 +- .../ai/evaluation/_evaluate/_evaluate.py | 85 ++++++++++--------- .../ai/evaluation/_model_configurations.py | 9 +- .../tests/e2etests/test_evaluate.py | 28 +++--- .../tests/unittests/test_evaluate.py | 4 +- 6 files changed, 114 insertions(+), 53 deletions(-) diff --git a/sdk/evaluation/azure-ai-evaluation/CHANGELOG.md b/sdk/evaluation/azure-ai-evaluation/CHANGELOG.md index 8e46afb4374f..331bdb5d0c81 100644 --- a/sdk/evaluation/azure-ai-evaluation/CHANGELOG.md +++ b/sdk/evaluation/azure-ai-evaluation/CHANGELOG.md @@ -8,6 +8,39 @@ ### Breaking Changes +- The `evaluator_config` parameter of `evaluate` now maps in evaluator name to a dictionary `EvaluatorConfig`, which is a `TypedDict`. The +`column_mapping` between `data` or `target` and evaluator field names should now be specified inside this new dictionary: + +Before: +```python +evaluate( + ..., + evaluator_config={ + "hate_unfairness": { + "query": "${data.question}", + "response": "${data.answer}", + } + }, + ... +) +``` + +After +```python +evaluate( + ..., + evaluator_config={ + "hate_unfairness": { + "column_mapping": { + "query": "${data.question}", + "response": "${data.answer}", + } + } + }, + ... 
+) +``` + ### Bugs Fixed ### Other Changes diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/__init__.py b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/__init__.py index 4cd0c11e38b3..36a725042534 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/__init__.py +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/__init__.py @@ -25,7 +25,12 @@ from ._evaluators._rouge import RougeScoreEvaluator, RougeType from ._evaluators._similarity import SimilarityEvaluator from ._evaluators._xpia import IndirectAttackEvaluator -from ._model_configurations import AzureAIProject, AzureOpenAIModelConfiguration, OpenAIModelConfiguration +from ._model_configurations import ( + AzureAIProject, + AzureOpenAIModelConfiguration, + OpenAIModelConfiguration, + EvaluatorConfig, +) __all__ = [ "evaluate", @@ -53,4 +58,5 @@ "AzureAIProject", "AzureOpenAIModelConfiguration", "OpenAIModelConfiguration", + "EvaluatorConfig", ] diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_evaluate/_evaluate.py b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_evaluate/_evaluate.py index be6b2c0d1ef8..d8d85effa756 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_evaluate/_evaluate.py +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_evaluate/_evaluate.py @@ -19,7 +19,7 @@ Prefixes, _InternalEvaluationMetrics, ) -from .._model_configurations import AzureAIProject +from .._model_configurations import AzureAIProject, EvaluatorConfig from .._user_agent import USER_AGENT from ._batch_run_client import BatchRunContext, CodeClient, ProxyClient from ._utils import ( @@ -273,7 +273,7 @@ def _validate_columns( df: pd.DataFrame, evaluators: Dict[str, Any], target: Optional[Callable], - evaluator_config: Dict[str, Dict[str, str]], + column_mapping: Dict[str, Dict[str, str]], ) -> None: """ Check that all columns needed by evaluator or target function are present. @@ -284,8 +284,8 @@ def _validate_columns( :type evaluators: Dict[str, Any] :param target: The callable to be applied to data set. :type target: Optional[Callable] - :param evaluator_config: The configuration for evaluators. - :type evaluator_config: Dict[str, Dict[str, str]] + :param column_mapping: Dictionary mapping evaluator name to evaluator column mapping + :type column_mapping: Dict[str, Dict[str, str]] :raises EvaluationException: If column starts from "__outputs." while target is defined. """ if target: @@ -306,7 +306,7 @@ def _validate_columns( else: for evaluator_name, evaluator in evaluators.items(): # Apply column mapping - mapping_config = evaluator_config.get(evaluator_name, evaluator_config.get("default", None)) + mapping_config = column_mapping.get(evaluator_name, column_mapping.get("default", None)) new_df = _apply_column_mapping(df, mapping_config) # Validate input data for evaluator @@ -372,11 +372,11 @@ def _apply_target_to_data( return target_output, generated_columns, run -def _process_evaluator_config(evaluator_config: Dict[str, Dict[str, str]]) -> Dict[str, Dict[str, str]]: - """Process evaluator_config to replace ${target.} with ${data.} +def _process_column_mappings(column_mapping: Dict[str, Dict[str, str]]) -> Dict[str, Dict[str, str]]: + """Process column_mapping to replace ${target.} with ${data.} - :param evaluator_config: The configuration for evaluators. - :type evaluator_config: Dict[str, Dict[str, str]] + :param column_mapping: The configuration for evaluators. 
+ :type column_mapping: Dict[str, Dict[str, str]] :return: The processed configuration. :rtype: Dict[str, Dict[str, str]] """ @@ -385,15 +385,15 @@ def _process_evaluator_config(evaluator_config: Dict[str, Dict[str, str]]) -> Di unexpected_references = re.compile(r"\${(?!target\.|data\.).+?}") - if evaluator_config: - for evaluator, mapping_config in evaluator_config.items(): + if column_mapping: + for evaluator, mapping_config in column_mapping.items(): if isinstance(mapping_config, dict): processed_config[evaluator] = {} for map_to_key, map_value in mapping_config.items(): # Check if there's any unexpected reference other than ${target.} or ${data.} if unexpected_references.search(map_value): - msg = "Unexpected references detected in 'evaluator_config'. Ensure only ${target.} and ${data.} are used." + msg = "Unexpected references detected in 'column_mapping'. Ensure only ${target.} and ${data.} are used." raise EvaluationException( message=msg, internal_message=msg, @@ -439,7 +439,7 @@ def evaluate( evaluators: Dict[str, Callable], evaluation_name: Optional[str] = None, target: Optional[Callable] = None, - evaluator_config: Optional[Dict[str, Dict[str, str]]] = None, + evaluator_config: Optional[Dict[str, EvaluatorConfig]] = None, azure_ai_project: Optional[AzureAIProject] = None, output_path: Optional[str] = None, **kwargs, @@ -458,10 +458,10 @@ def evaluate( :keyword target: Target to be evaluated. `target` and `data` both cannot be None :paramtype target: Optional[Callable] :keyword evaluator_config: Configuration for evaluators. The configuration should be a dictionary with evaluator - names as keys and a dictionary of column mappings as values. The column mappings should be a dictionary with - keys as the column names in the evaluator input and values as the column names in the input data or data - generated by target. - :paramtype evaluator_config: Optional[Dict[str, Dict[str, str]] + names as keys and a values that are dictionaries containing the column mappings. The column mappings should + be a dictionary with keys as the column names in the evaluator input and values as the column names in the + input data or data generated by target. + :paramtype evaluator_config: Optional[Dict[str, ~azure.ai.evaluation.EvaluatorConfig]] :keyword output_path: The local folder or file path to save evaluation results to if set. If folder path is provided the results will be saved to a file named `evaluation_results.json` in the folder. 
:paramtype output_path: Optional[str] @@ -482,7 +482,7 @@ def evaluate( model_config = { "azure_endpoint": os.environ.get("AZURE_OPENAI_ENDPOINT"), "api_key": os.environ.get("AZURE_OPENAI_KEY"), - "azure_deployment": os.environ.get("AZURE_OPENAI_DEPLOYMENT") + "azure_deployment": os.environ.get("AZURE_OPENAI_DEPLOYMENT"), } coherence_eval = CoherenceEvaluator(model_config=model_config) @@ -497,15 +497,19 @@ def evaluate( }, evaluator_config={ "coherence": { - "response": "${data.response}", - "query": "${data.query}" + "column_mapping": { + "response": "${data.response}", + "query": "${data.query}", + }, }, "relevance": { - "response": "${data.response}", - "context": "${data.context}", - "query": "${data.query}" - } - } + "column_mapping": { + "response": "${data.response}", + "context": "${data.context}", + "query": "${data.query}", + }, + }, + }, ) """ @@ -544,13 +548,13 @@ def evaluate( raise e -def _evaluate( # pylint: disable=too-many-locals +def _evaluate( # pylint: disable=too-many-locals,too-many-statements *, evaluation_name: Optional[str] = None, target: Optional[Callable] = None, data: Optional[str] = None, evaluators: Optional[Dict[str, Callable]] = None, - evaluator_config: Optional[Dict[str, Dict[str, str]]] = None, + evaluator_config: Optional[Dict[str, EvaluatorConfig]] = None, azure_ai_project: Optional[AzureAIProject] = None, output_path: Optional[str] = None, **kwargs, @@ -560,8 +564,13 @@ def _evaluate( # pylint: disable=too-many-locals # Process evaluator config to replace ${target.} with ${data.} if evaluator_config is None: evaluator_config = {} - evaluator_config = _process_evaluator_config(evaluator_config) - _validate_columns(input_data_df, evaluators, target, evaluator_config) + # extract column mapping dicts into dictionary mapping evaluator name to column mapping + column_mapping = { + evaluator_name: evaluator_configuration.get("column_mapping", None) + for evaluator_name, evaluator_configuration in evaluator_config.items() + } + column_mapping = _process_column_mappings(column_mapping) + _validate_columns(input_data_df, evaluators, target, column_mapping) # Target Run pf_client = PFClient( @@ -577,8 +586,8 @@ def _evaluate( # pylint: disable=too-many-locals # Create default configuration for evaluators that directly maps # input data names to keyword inputs of the same name in the evaluators. - evaluator_config = evaluator_config or {} - evaluator_config.setdefault("default", {}) + column_mapping = column_mapping or {} + column_mapping.setdefault("default", {}) # If target is set, apply 1-1 column mapping from target outputs to evaluator inputs if data is not None and target is not None: @@ -586,21 +595,21 @@ def _evaluate( # pylint: disable=too-many-locals target, data, pf_client, input_data_df, evaluation_name, _run_name=kwargs.get("_run_name") ) - for evaluator_name, mapping in evaluator_config.items(): + for evaluator_name, mapping in column_mapping.items(): mapped_to_values = set(mapping.values()) for col in target_generated_columns: # If user defined mapping differently, do not change it. # If it was mapped to target, we have already changed it - # in _process_evaluator_config + # in _process_column_mappings run_output = f"${{run.outputs.{col}}}" # We will add our mapping only if # customer did not mapped target output. 
if col not in mapping and run_output not in mapped_to_values: - evaluator_config[evaluator_name][col] = run_output # pylint: disable=unnecessary-dict-index-lookup + column_mapping[evaluator_name][col] = run_output # pylint: disable=unnecessary-dict-index-lookup # After we have generated all columns we can check if we have # everything we need for evaluators. - _validate_columns(input_data_df, evaluators, target=None, evaluator_config=evaluator_config) + _validate_columns(input_data_df, evaluators, target=None, column_mapping=column_mapping) # Apply 1-1 mapping from input data to evaluator inputs, excluding values already assigned # via target mapping. @@ -610,8 +619,8 @@ def _evaluate( # pylint: disable=too-many-locals for col in input_data_df.columns: # Ignore columns added by target mapping. These are formatted as "__outputs." # Also ignore columns that are already in config, since they've been covered by target mapping. - if not col.startswith(Prefixes.TSG_OUTPUTS) and col not in evaluator_config["default"].keys(): - evaluator_config["default"][col] = f"${{data.{col}}}" + if not col.startswith(Prefixes.TSG_OUTPUTS) and col not in column_mapping["default"].keys(): + column_mapping["default"][col] = f"${{data.{col}}}" # Batch Run evaluators_info = {} use_pf_client = kwargs.get("_use_pf_client", True) @@ -632,7 +641,7 @@ def _evaluate( # pylint: disable=too-many-locals flow=evaluator, run=target_run, evaluator_name=evaluator_name, - column_mapping=evaluator_config.get(evaluator_name, evaluator_config.get("default", None)), + column_mapping=column_mapping.get(evaluator_name, column_mapping.get("default", None)), data=data, stream=True, name=kwargs.get("_run_name"), diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_model_configurations.py b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_model_configurations.py index ecc8e8101570..e0885d4cbc19 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_model_configurations.py +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/_model_configurations.py @@ -2,7 +2,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -from typing import Literal, TypedDict +from typing import Dict, Literal, TypedDict from typing_extensions import NotRequired @@ -46,3 +46,10 @@ class AzureAIProject(TypedDict): """Azure resource group name of the project""" project_name: str """Azure project name""" + + +class EvaluatorConfig(TypedDict, total=False): + """Configuration for an evaluator""" + + column_mapping: Dict[str, str] + """Dictionary mapping evaluator input name to column in data""" diff --git a/sdk/evaluation/azure-ai-evaluation/tests/e2etests/test_evaluate.py b/sdk/evaluation/azure-ai-evaluation/tests/e2etests/test_evaluate.py index 1f2c321d922b..1ee4e022250e 100644 --- a/sdk/evaluation/azure-ai-evaluation/tests/e2etests/test_evaluate.py +++ b/sdk/evaluation/azure-ai-evaluation/tests/e2etests/test_evaluate.py @@ -292,12 +292,12 @@ def test_evaluate_with_target(self, questions_file): None, {"default": {}}, {"default": {}, "question_ev": {}}, - {"default": {"query": "${target.query}"}}, - {"default": {"query": "${data.query}"}}, - {"default": {}, "question_ev": {"query": "${data.query}"}}, - {"default": {}, "question_ev": {"query": "${target.query}"}}, - {"default": {}, "question_ev": {"another_question": "${target.query}"}}, - {"default": {"another_question": "${target.query}"}}, + {"default": {"column_mapping": {"query": "${target.query}"}}}, + {"default": {"column_mapping": {"query": "${data.query}"}}}, + {"default": {}, "question_ev": {"column_mapping": {"query": "${data.query}"}}}, + {"default": {}, "question_ev": {"column_mapping": {"query": "${target.query}"}}}, + {"default": {}, "question_ev": {"column_mapping": {"another_question": "${target.query}"}}}, + {"default": {"column_mapping": {"another_question": "${target.query}"}}}, ], ) def test_evaluate_another_questions(self, questions_file, evaluation_config): @@ -334,19 +334,25 @@ def test_evaluate_another_questions(self, questions_file, evaluation_config): ( { "f1_score": { - "response": "${data.context}", - "ground_truth": "${data.ground_truth}", + "column_mapping": { + "response": "${data.context}", + "ground_truth": "${data.ground_truth}", + } }, "answer": { - "response": "${target.response}", + "column_mapping": { + "response": "${target.response}", + } }, } ), ( { "default": { - "response": "${target.response}", - "ground_truth": "${data.ground_truth}", + "column_mapping": { + "response": "${target.response}", + "ground_truth": "${data.ground_truth}", + } }, } ), diff --git a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_evaluate.py b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_evaluate.py index 3d3cfe22d196..3bf086cbd27e 100644 --- a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_evaluate.py +++ b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_evaluate.py @@ -330,11 +330,11 @@ def test_evaluate_invalid_evaluator_config(self, mock_model_config, evaluate_tes evaluate( data=evaluate_test_data_jsonl_file, evaluators={"g": GroundednessEvaluator(model_config=mock_model_config)}, - evaluator_config={"g": {"query": "${foo.query}"}}, + evaluator_config={"g": {"column_mapping": {"query": "${foo.query}"}}}, ) assert ( - "Unexpected references detected in 'evaluator_config'. Ensure only ${target.} and ${data.} are used." + "Unexpected references detected in 'column_mapping'. Ensure only ${target.} and ${data.} are used." 
in exc_info.value.args[0] ) From 0fa6eee36d10b98c8744d32956e9193b9e014b41 Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:06:10 -0700 Subject: [PATCH 03/22] Repair livetest (#37590) * fix livetest targeting * remove extra parameter --- .../steps/resolve-package-targeting.yml | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/eng/pipelines/templates/steps/resolve-package-targeting.yml b/eng/pipelines/templates/steps/resolve-package-targeting.yml index 2efb4b425b48..8ce66b7e2cac 100644 --- a/eng/pipelines/templates/steps/resolve-package-targeting.yml +++ b/eng/pipelines/templates/steps/resolve-package-targeting.yml @@ -21,17 +21,19 @@ steps: $setting = "$(BuildTargetingString)" } else { - if ("${{ parameters.IncludeIndirect }}" -eq "true") { - $packageProperties = Get-ChildItem -Recurse -Force "${{ parameters.PackagePropertiesFolder }}/*.json" ` + if (Test-Path "${{ parameters.PackagePropertiesFolder }}") { + if ("${{ parameters.IncludeIndirect }}" -eq "true") { + $packageProperties = Get-ChildItem -Recurse -Force "${{ parameters.PackagePropertiesFolder }}/*.json" ` + | ForEach-Object { $_.Name.Replace(".json", "") } + } + else { + $packageProperties = Get-ChildItem -Recurse -Force "${{ parameters.PackagePropertiesFolder }}/*.json" ` + | Where-Object { (Get-Content -Raw $_ | ConvertFrom-Json).IncludedForValidation -eq $false } ` | ForEach-Object { $_.Name.Replace(".json", "") } - } - else { - $packageProperties = Get-ChildItem -Recurse -Force "${{ parameters.PackagePropertiesFolder }}/*.json" ` - | Where-Object { (Get-Content -Raw $_ | ConvertFrom-Json).IncludedForValidation -eq $false } ` - | ForEach-Object { $_.Name.Replace(".json", "") } - } + } - $setting = $packageProperties -join "," + $setting = $packageProperties -join "," + } } Write-Host "##vso[task.setvariable variable=TargetingString;]$setting" From 6e3961b7f555a1c0c5d73e823947a0c7d91f9524 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:19:30 -0700 Subject: [PATCH 04/22] Update CodeownersLinter to 1.0.0-dev.20240926.2 (#37589) Co-authored-by: James Suplizio --- eng/common/pipelines/codeowners-linter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eng/common/pipelines/codeowners-linter.yml b/eng/common/pipelines/codeowners-linter.yml index f815c4944fff..821b0ea8b5a7 100644 --- a/eng/common/pipelines/codeowners-linter.yml +++ b/eng/common/pipelines/codeowners-linter.yml @@ -31,7 +31,7 @@ stages: vmImage: ubuntu-22.04 variables: - CodeownersLinterVersion: '1.0.0-dev.20240917.2' + CodeownersLinterVersion: '1.0.0-dev.20240926.2' DotNetDevOpsFeed: "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-net/nuget/v3/index.json" RepoLabelUri: "https://azuresdkartifacts.blob.core.windows.net/azure-sdk-write-teams/repository-labels-blob" TeamUserUri: "https://azuresdkartifacts.blob.core.windows.net/azure-sdk-write-teams/azure-sdk-write-teams-blob" From a03b84078b95c0e2278f8da920e13c0347d15085 Mon Sep 17 00:00:00 2001 From: Jeroen Overschie Date: Thu, 26 Sep 2024 22:36:34 +0200 Subject: [PATCH 05/22] Fix for docstrings for Command.command setter and getter (#36520) Setter and getter docstrings were reversed. 
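
For reference, the convention being restored is that a property getter's docstring describes the value itself, while the setter's docstring begins with "Sets ...". Below is a minimal, self-contained sketch of that layout; the `CommandLike` class and its members are hypothetical stand-ins, not the actual azure-ai-ml `Command` entity.

```python
from typing import Optional


class CommandLike:
    """Hypothetical stand-in for an entity exposing a ``command`` property."""

    def __init__(self, command: Optional[str] = None) -> None:
        self._command = command

    @property
    def command(self) -> Optional[str]:
        """The command to be executed.

        :rtype: Optional[str]
        """
        return self._command

    @command.setter
    def command(self, value: str) -> None:
        """Sets the command to be executed.

        :param value: The command to be executed.
        :type value: str
        """
        self._command = value


if __name__ == "__main__":
    job = CommandLike()
    job.command = "echo hello"   # setter docstring: "Sets the command to be executed."
    print(job.command)           # getter docstring: "The command to be executed."
```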
--- sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/command.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/command.py b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/command.py index 15d652e5234e..1a310a971e82 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/command.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/entities/_builders/command.py @@ -394,7 +394,7 @@ def component(self) -> Union[str, CommandComponent]: @property def command(self) -> Optional[str]: - """Sets the command to be executed. + """The command to be executed. :rtype: Optional[str] """ @@ -408,7 +408,7 @@ def command(self) -> Optional[str]: @command.setter def command(self, value: str) -> None: - """The command to be executed. + """Sets the command to be executed. :param value: The command to be executed. :type value: str From 9e1164a61d6297c4e39c883c102d3a709153cb06 Mon Sep 17 00:00:00 2001 From: Nagkumar Arkalgud Date: Thu, 26 Sep 2024 13:59:49 -0700 Subject: [PATCH 06/22] Bugfix for query/response based simulator (#37575) * Update prompty * use a non preview version of API * Remove odb * Remove prints * Update tests and fix the type conversion of response * Reformmated with tox run -e black -c ../../../eng/tox/tox.ini -- . * Update test --------- Co-authored-by: Nagkumar Arkalgud --- .../_prompty/task_query_response.prompty | 3 ++- .../azure/ai/evaluation/simulator/_simulator.py | 16 +++++++++++----- .../tests/unittests/test_non_adv_simulator.py | 4 ++-- .../tests/unittests/test_simulator.py | 4 ++-- 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_prompty/task_query_response.prompty b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_prompty/task_query_response.prompty index a8922edf130e..881d00493ff8 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_prompty/task_query_response.prompty +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_prompty/task_query_response.prompty @@ -33,7 +33,8 @@ Answer must not be more than 5 words Answer must be picked from Text as is Question should be as descriptive as possible and must include as much context as possible from Text Output must always have the provided number of QnAs -Output must be in JSON format +Output must be in JSON format. +Output must have {{num_queries}} objects in the format specified below. Any other count is unacceptable. Text: <|text_start|> On January 24, 1984, former Apple CEO Steve Jobs introduced the first Macintosh. In late 2003, Apple had 2.06 percent of the desktop share in the United States. 
diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py index 659f758914df..db9aec3b90fe 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py @@ -41,7 +41,7 @@ def __init__(self, azure_ai_project: Dict[str, Any], credential: Optional[Any] = """ self._validate_project_config(azure_ai_project) self.azure_ai_project = azure_ai_project - self.azure_ai_project["api_version"] = "2024-02-15-preview" + self.azure_ai_project["api_version"] = "2024-06-01" self.credential = credential @staticmethod @@ -129,7 +129,6 @@ async def __call__( max_conversation_turns *= 2 # account for both user and assistant turns prompty_model_config = self._build_prompty_model_config() - if conversation_turns: return await self._simulate_with_predefined_turns( target=target, @@ -234,8 +233,16 @@ async def _simulate_with_predefined_turns( target=target, progress_bar=progress_bar, ) - - simulated_conversations.append(current_simulation.to_list()) + simulated_conversations.append( + JsonLineChatProtocol( + { + "messages": current_simulation.to_list(), + "finish_reason": ["stop"], + "context": {}, + "$schema": "http://azureml/sdk-2-0/ChatConversation.json", + } + ) + ) progress_bar.close() return simulated_conversations @@ -398,7 +405,6 @@ async def _generate_query_responses( prompty_model_config=prompty_model_config, query_response_generating_prompty_kwargs=query_response_generating_prompty_kwargs, ) - try: query_responses = query_flow(text=text, num_queries=num_queries) if isinstance(query_responses, dict): diff --git a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_non_adv_simulator.py b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_non_adv_simulator.py index 3277e6229cff..14bf9c1fdcba 100644 --- a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_non_adv_simulator.py +++ b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_non_adv_simulator.py @@ -40,7 +40,7 @@ class TestNonAdvSimulator: def test_init_valid_project(self, valid_project): simulator = Simulator(azure_ai_project=valid_project) assert simulator.azure_ai_project["subscription_id"] == "test_subscription" - assert simulator.azure_ai_project["api_version"] == "2024-02-15-preview" + assert simulator.azure_ai_project["api_version"] == "2024-06-01" def test_init_invalid_project(self, invalid_project): with pytest.raises(ValueError): @@ -91,7 +91,7 @@ async def test_simulate_with_predefined_turns( ) assert len(result) == 1 - assert isinstance(result[0], list) + assert isinstance(result[0], JsonLineChatProtocol) @pytest.mark.asyncio @patch("azure.ai.evaluation.simulator.Simulator._complete_conversation", new_callable=AsyncMock) diff --git a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_simulator.py b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_simulator.py index b6e343094fd4..7d835aec6eb7 100644 --- a/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_simulator.py +++ b/sdk/evaluation/azure-ai-evaluation/tests/unittests/test_simulator.py @@ -58,7 +58,7 @@ def test_initialization_with_all_valid_scenarios( for scenario in available_scenarios: simulator = AdversarialSimulator(azure_ai_project=azure_ai_project) assert callable(simulator) - simulator(scenario=scenario, max_conversation_turns=1, max_simulation_results=3, target=async_callback) + # simulator(scenario=scenario, 
max_conversation_turns=1, max_simulation_results=3, target=async_callback) @patch("azure.ai.evaluation.simulator._model_tools._rai_client.RAIClient._get_service_discovery_url") @patch( @@ -121,4 +121,4 @@ def test_initialization_parity_with_evals( for scenario in available_scenarios: simulator = AdversarialSimulator(azure_ai_project=azure_ai_project, credential="test_credential") assert callable(simulator) - simulator(scenario=scenario, max_conversation_turns=1, max_simulation_results=3, target=async_callback) + # simulator(scenario=scenario, max_conversation_turns=1, max_simulation_results=3, target=async_callback) From f322ad3a5d518c53f1218be924f3ce0855b7fb7d Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Thu, 26 Sep 2024 14:53:34 -0700 Subject: [PATCH 07/22] Fix `azure-sdk-tools` tests bitrot, add tools gh action check (#37580) * encode the fact that azure-eventhub does not support pypy machines * add azure-sdk-tools test workflow * update cspell to account for new workflow * fix matrix generation when no direct packages are present * fix a test relying on folder ordering to NOT depend on that folder ordering --- .github/workflows/azure-sdk-tools.yml | 31 +++++++++++++++++++ .vscode/cspell.json | 3 +- eng/pipelines/templates/jobs/ci.yml | 4 +-- eng/scripts/distribute-packages-to-matrix.ps1 | 8 +++-- scripts/devops_tasks/tox_harness.py | 3 +- tools/azure-sdk-tools/.env | 1 + tools/azure-sdk-tools/ci_tools/functions.py | 3 +- tools/azure-sdk-tools/setup.py | 1 + .../integration/test_package_discovery.py | 19 +++++++----- .../tests/test_optional_functionality.py | 15 +++++---- .../tests/test_parse_functionality.py | 2 -- .../tests/test_requirements_parse.py | 14 ++++----- .../tests/test_whl_discovery.py | 10 +++--- 13 files changed, 76 insertions(+), 38 deletions(-) create mode 100644 .github/workflows/azure-sdk-tools.yml create mode 100644 tools/azure-sdk-tools/.env diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml new file mode 100644 index 000000000000..5d7f4c30fa0b --- /dev/null +++ b/.github/workflows/azure-sdk-tools.yml @@ -0,0 +1,31 @@ +name: Test Azure SDK Tools + +on: + workflow_dispatch: + pull_request: + branches: [ main ] + +jobs: + build-and-test: + runs-on: ubuntu-latest + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + # todo before checkin: multiplex to include running tests on py38 -> 312 + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: 3.11 + + - name: Install azure-sdk-tools + run: | + python -m pip install -e tools/azure-sdk-tools[build,ghtools] + python -m pip freeze + shell: bash + + - name: Run tests + run: | + pytest ./tests + shell: bash + working-directory: tools/azure-sdk-tools diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 41c38a103a21..60864c739443 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -135,7 +135,8 @@ "conda/conda-recipes/msal-extensions/meta.yaml", "conda/conda-recipes/msrest/meta.yaml", "conda/conda-recipes/uamqp/meta.yaml", - "conda/conda-releaselogs/azure-mgmt.md" + "conda/conda-releaselogs/azure-mgmt.md", + ".github/workflows/azure-sdk-tools.yml" ], "words": [ "msedge", diff --git a/eng/pipelines/templates/jobs/ci.yml b/eng/pipelines/templates/jobs/ci.yml index ef7191fff84d..8d4b2438e53b 100644 --- a/eng/pipelines/templates/jobs/ci.yml +++ b/eng/pipelines/templates/jobs/ci.yml @@ -138,7 +138,7 @@ jobs: parameters: 
BuildTargetingString: ${{ parameters.BuildTargetingString }} PackagePropertiesFolder: $(Build.ArtifactStagingDirectory)/PackageInfo - IncludeIndirect: false + IncludeIndirect: true - template: /eng/pipelines/templates/steps/build-extended-artifacts.yml parameters: @@ -170,7 +170,7 @@ jobs: parameters: BuildTargetingString: ${{ parameters.BuildTargetingString }} PackagePropertiesFolder: $(Build.ArtifactStagingDirectory)/PackageInfo - IncludeIndirect: false + IncludeIndirect: true - template: ../steps/build-extended-artifacts.yml parameters: diff --git a/eng/scripts/distribute-packages-to-matrix.ps1 b/eng/scripts/distribute-packages-to-matrix.ps1 index 1e16d3c5c1a9..36416e9958c3 100644 --- a/eng/scripts/distribute-packages-to-matrix.ps1 +++ b/eng/scripts/distribute-packages-to-matrix.ps1 @@ -151,7 +151,7 @@ function Update-Matrix { # we need to ensure the presence of TargetingString in the matrix object if ($matrixUpdate) { - if ($directBatches) { + if ($directBatches -or $indirectBatches) { if (-not $Matrix.matrix.PSObject.Properties["TargetingString"]) { $Matrix.matrix | Add-Member -Force -MemberType NoteProperty -Name TargetingString -Value @() } @@ -217,7 +217,11 @@ $indirectIncludedPackages = $packageProperties | Where-Object { $_.IncludedForVa # I will assign all the direct included packages first. our goal is to get full coverage of the direct included packages # then, for the indirect packages, we will add them as sparse TargetingString bundles to the matrix -$directBatches = Split-ArrayIntoBatches -InputArray $directIncludedPackages -BatchSize $BATCHSIZE +$directBatches = @() +if ($directIncludedPackages) { + $directBatches = Split-ArrayIntoBatches -InputArray $directIncludedPackages -BatchSize $BATCHSIZE +} + $indirectBatches = @() if ($indirectIncludedPackages) { $indirectBatches = Split-ArrayIntoBatches -InputArray $indirectIncludedPackages -BatchSize $BATCHSIZE diff --git a/scripts/devops_tasks/tox_harness.py b/scripts/devops_tasks/tox_harness.py index 9e13bb9d6fb5..137cb1222e82 100644 --- a/scripts/devops_tasks/tox_harness.py +++ b/scripts/devops_tasks/tox_harness.py @@ -318,7 +318,7 @@ def prep_and_run_tox(targeted_packages: List[str], parsed_args: Namespace) -> No if check not in skipped_tox_checks: skipped_tox_checks[check] = [] - skipped_tox_checks[check].append(parsed_package) + skipped_tox_checks[check].append(parsed_package) if not filtered_tox_environment_set: logging.info( @@ -330,7 +330,6 @@ def prep_and_run_tox(targeted_packages: List[str], parsed_args: Namespace) -> No tox_execution_array.extend(["-e", filtered_tox_environment_set]) - if parsed_args.tox_env == "apistub": local_options_array = [] if parsed_args.dest_dir: diff --git a/tools/azure-sdk-tools/.env b/tools/azure-sdk-tools/.env new file mode 100644 index 000000000000..5734dba1ae63 --- /dev/null +++ b/tools/azure-sdk-tools/.env @@ -0,0 +1 @@ +PROXY_URL=http://localhost:5000 \ No newline at end of file diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py index e906360c6c7a..1a4c009b53f7 100644 --- a/tools/azure-sdk-tools/ci_tools/functions.py +++ b/tools/azure-sdk-tools/ci_tools/functions.py @@ -43,7 +43,8 @@ TEST_COMPATIBILITY_MAP = {} TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = { - "azure-storage-blob": "pypy" + "azure-storage-blob": "pypy", + "azure-eventhub": "pypy" } omit_regression = ( diff --git a/tools/azure-sdk-tools/setup.py b/tools/azure-sdk-tools/setup.py index 82ef03bd524f..b18cb0567215 100644 --- a/tools/azure-sdk-tools/setup.py +++ 
b/tools/azure-sdk-tools/setup.py @@ -17,6 +17,7 @@ "PyYAML", "urllib3", "tomli-w==1.0.0", + "azure-core", # Perf/Build "ConfigArgParse>=0.12.0", ] diff --git a/tools/azure-sdk-tools/tests/integration/test_package_discovery.py b/tools/azure-sdk-tools/tests/integration/test_package_discovery.py index 33b6b8c6e5f2..248667f97634 100644 --- a/tools/azure-sdk-tools/tests/integration/test_package_discovery.py +++ b/tools/azure-sdk-tools/tests/integration/test_package_discovery.py @@ -12,7 +12,7 @@ def test_discovery(): results = discover_targeted_packages("azure*", core_service_root) # if in a set, this should be empty - non_empty_results = discover_targeted_packages("azure-servicemanagement-legacy", core_service_root) + non_empty_results = discover_targeted_packages("azure-core", core_service_root) assert len(results) > 1 assert len(non_empty_results) == 1 @@ -23,13 +23,14 @@ def test_discovery_omit_mgmt(): assert [os.path.basename(result) for result in results] == [ "azure-storage-blob", "azure-storage-blob-changefeed", + "azure-storage-extensions", "azure-storage-file-datalake", "azure-storage-file-share", "azure-storage-queue" ] def test_discovery_omit_build(): - results = discover_targeted_packages("azure*", core_service_root, filter_type="Build") + results = discover_targeted_packages("*", core_service_root, filter_type="Build") assert [os.path.basename(result) for result in results] == [ "azure-core", @@ -37,23 +38,25 @@ def test_discovery_omit_build(): "azure-core-tracing-opencensus", "azure-core-tracing-opentelemetry", "azure-mgmt-core", + "corehttp" ] def test_discovery_single_package(): - results = discover_targeted_packages("azure-servicemanagement-legacy", core_service_root, filter_type="Build") + results = discover_targeted_packages("azure-core", core_service_root, filter_type="Build") assert [os.path.basename(result) for result in results] == [ - "azure-servicemanagement-legacy", + "azure-core", ] def test_discovery_omit_regression(): - results = discover_targeted_packages("azure*", core_service_root, filter_type="Regression") + results = discover_targeted_packages("*", core_service_root, filter_type="Regression") assert [os.path.basename(result) for result in results] == [ "azure-core", "azure-core-experimental", "azure-core-tracing-opencensus", - "azure-core-tracing-opentelemetry" + "azure-core-tracing-opentelemetry", + "corehttp" ] storage_results = discover_targeted_packages("azure*", storage_service_root, filter_type="Regression") @@ -61,6 +64,7 @@ def test_discovery_omit_regression(): assert [os.path.basename(result) for result in storage_results] == [ "azure-storage-blob", "azure-storage-blob-changefeed", + "azure-storage-extensions", "azure-storage-file-datalake", "azure-storage-file-share", "azure-storage-queue" @@ -68,7 +72,7 @@ def test_discovery_omit_regression(): def test_discovery_honors_contains_filter(): - + storage_results = discover_targeted_packages("azure*", storage_service_root, "file", filter_type="Regression") assert [os.path.basename(result) for result in storage_results] == [ @@ -92,4 +96,3 @@ def test_discovery_honors_override(): "azure-core-tracing-opentelemetry", "azure-mgmt-core", ] - \ No newline at end of file diff --git a/tools/azure-sdk-tools/tests/test_optional_functionality.py b/tools/azure-sdk-tools/tests/test_optional_functionality.py index 95b19c2fbc5b..3168b5e0a570 100644 --- a/tools/azure-sdk-tools/tests/test_optional_functionality.py +++ b/tools/azure-sdk-tools/tests/test_optional_functionality.py @@ -3,7 +3,6 @@ from ci_tools.parsing import 
ParsedSetup from ci_tools.functions import get_config_setting -from ci_tools.scenario.generation import create_scenario_file integration_folder = os.path.join(os.path.dirname(__file__), 'integration') @@ -27,9 +26,9 @@ def test_toml_result(): 'additional_pytest_args': ['-k', '*_async.py'] }, { - 'name': 'no_aiohttp', - 'install': [], - 'uninstall': ['aiohttp'], + 'name': 'no_aiohttp', + 'install': [], + 'uninstall': ['aiohttp'], 'additional_pytest_args': ['-k', 'not *_async.py'] } ] @@ -37,7 +36,7 @@ def test_toml_result(): assert actual == expected - + def test_optional_specific_get(): package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_two_options') actual = get_config_setting(package_with_toml, 'optional') @@ -49,9 +48,9 @@ def test_optional_specific_get(): 'additional_pytest_args': ['-k', '*_async.py'] }, { - 'name': 'no_aiohttp', - 'install': [], - 'uninstall': ['aiohttp'], + 'name': 'no_aiohttp', + 'install': [], + 'uninstall': ['aiohttp'], 'additional_pytest_args': ['-k', 'not *_async.py'] } ] diff --git a/tools/azure-sdk-tools/tests/test_parse_functionality.py b/tools/azure-sdk-tools/tests/test_parse_functionality.py index 6e3c45ec3583..8765520d5118 100644 --- a/tools/azure-sdk-tools/tests/test_parse_functionality.py +++ b/tools/azure-sdk-tools/tests/test_parse_functionality.py @@ -111,7 +111,6 @@ def test_sdk_sample_setup(test_patch): assert result.requires == ["requests>=2.18.4", "six>=1.11.0", "typing-extensions>=4.0.1"] assert result.is_new_sdk == True assert result.setup_filename == os.path.join(package_root, "setup.py") - assert result.namespace == "ci_tools" assert "pytyped" in result.package_data assert result.include_package_data == True assert result.folder == package_root @@ -191,7 +190,6 @@ def test_parse_recognizes_extensions(test_patch): assert result.requires == ["requests>=2.18.4", "six>=1.11.0", "typing-extensions>=4.0.1"] # todo resolve this conflict assert result.is_new_sdk == True assert result.setup_filename == os.path.join(package_root, "setup.py") - assert result.namespace == "ci_tools" assert "pytyped" in result.package_data assert result.include_package_data == True assert result.folder == package_root diff --git a/tools/azure-sdk-tools/tests/test_requirements_parse.py b/tools/azure-sdk-tools/tests/test_requirements_parse.py index d49aaf1d0328..bef7f9981953 100644 --- a/tools/azure-sdk-tools/tests/test_requirements_parse.py +++ b/tools/azure-sdk-tools/tests/test_requirements_parse.py @@ -45,7 +45,7 @@ def test_replace_dev_reqs_specifiers(tmp_directory_create): requirements_file = create_temporary_scenario(tmp_directory_create, target_file) requirements_before = get_requirements_from_file(requirements_file) - replace_dev_reqs(requirements_file, core_location) + replace_dev_reqs(requirements_file, core_location, None) requirements_after = get_requirements_from_file(requirements_file) assert requirements_before == requirements_after @@ -63,18 +63,18 @@ def test_replace_dev_reqs_relative(tmp_directory_create): expected_results = [ os.path.join(expected_output_folder, "coretestserver-1.0.0b1-py3-none-any.whl"), os.path.join(expected_output_folder, "coretestserver-1.0.0b1-py3-none-any.whl"), - os.path.join(expected_output_folder, "azure_identity-1.16.0b3-py3-none-any.whl"), - os.path.join(expected_output_folder, "azure_identity-1.16.0b3-py3-none-any.whl"), + os.path.join(expected_output_folder, "azure_identity-1.18.1-py3-none-any.whl"), + os.path.join(expected_output_folder, "azure_identity-1.18.1-py3-none-any.whl"), 
os.path.join(expected_output_folder, "azure_mgmt_core-1.4.0-py3-none-any.whl"), os.path.join(expected_output_folder, "azure_mgmt_core-1.4.0-py3-none-any.whl"), os.path.join(expected_output_folder, "azure_sdk_tools-0.0.0-py3-none-any.whl[build]"), os.path.join(expected_output_folder, "azure_sdk_tools-0.0.0-py3-none-any.whl[build]"), - os.path.join(expected_output_folder, "azure_core-1.30.2-py3-none-any.whl"), - os.path.join(expected_output_folder, "azure_core-1.30.2-py3-none-any.whl"), + os.path.join(expected_output_folder, "azure_core-1.31.1-py3-none-any.whl"), + os.path.join(expected_output_folder, "azure_core-1.31.1-py3-none-any.whl"), ] requirements_before = get_requirements_from_file(requirements_file) - replace_dev_reqs(requirements_file, core_location) + replace_dev_reqs(requirements_file, core_location, None) requirements_after = get_requirements_from_file(requirements_file) assert requirements_before != requirements_after @@ -93,7 +93,7 @@ def test_replace_dev_reqs_remote(tmp_directory_create): requirements_file = create_temporary_scenario(tmp_directory_create, target_file) requirements_before = get_requirements_from_file(requirements_file) - replace_dev_reqs(requirements_file, core_location) + replace_dev_reqs(requirements_file, core_location, None) requirements_after = get_requirements_from_file(requirements_file) assert requirements_before == requirements_after diff --git a/tools/azure-sdk-tools/tests/test_whl_discovery.py b/tools/azure-sdk-tools/tests/test_whl_discovery.py index 3385e5830f72..46259c87a747 100644 --- a/tools/azure-sdk-tools/tests/test_whl_discovery.py +++ b/tools/azure-sdk-tools/tests/test_whl_discovery.py @@ -12,7 +12,7 @@ tags_folder = os.path.join(integration_folder, "scenarios", "sample_interpreter_tags") -def create_basic_temp_dir(tmp_directory_create) -> TemporaryDirectory: +def create_basic_temp_dir(tmp_directory_create) -> str: tmp_dir = tmp_directory_create( [ os.path.join("azure-common", "azure_common-1.1.29-py3-none-any.whl"), @@ -37,13 +37,13 @@ def test_find_discovers_standard_whls(test_patch, tmp_directory_create): test_patch.return_value = ["py3-none-any"] # basic positive cases - found_core = find_whl(tmp_dir.name, "azure-core", "1.26.5") - found_legacy = find_whl(tmp_dir.name, "azure-servicemanagement-legacy", "0.20.7") + found_core = find_whl(tmp_dir, "azure-core", "1.26.5") + found_legacy = find_whl(tmp_dir, "azure-servicemanagement-legacy", "0.20.7") assert found_core is not None assert found_legacy is not None # basic negative cases - not_found_core = find_whl(tmp_dir.name, "azure-core", "1.26.4") + not_found_core = find_whl(tmp_dir, "azure-core", "1.26.4") assert not_found_core is None @@ -52,7 +52,7 @@ def test_find_whl_fails_on_incompatible_interpreter(test_patch, tmp_directory_cr tmp_dir = create_basic_temp_dir(tmp_directory_create) test_patch.return_value = [] - found = find_whl(tmp_dir.name, "azure-core", "1.26.5") + found = find_whl(tmp_dir, "azure-core", "1.26.5") assert found is None From 57b1576b8ed0ff044bf7a0d22eb27793a364a74e Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Thu, 26 Sep 2024 15:02:04 -0700 Subject: [PATCH 08/22] Docs onboarding: ValidatePackageForOnboarding2 -> PackageIsValidForDocsOnboarding (#37588) Co-authored-by: Daniel Jurek --- eng/common/scripts/Update-DocsMsPackages.ps1 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/common/scripts/Update-DocsMsPackages.ps1 b/eng/common/scripts/Update-DocsMsPackages.ps1 index 
00ab4b458d43..fc422872b095 100644 --- a/eng/common/scripts/Update-DocsMsPackages.ps1 +++ b/eng/common/scripts/Update-DocsMsPackages.ps1 @@ -54,7 +54,7 @@ function GetMetadata($moniker) { return $metadata } -function ValidatePackageForOnboarding2($package) { +function PackageIsValidForDocsOnboarding($package) { if (!(Test-Path "Function:$ValidateDocsMsPackagesFn")) { return $true } @@ -88,7 +88,7 @@ foreach ($moniker in $MONIKERS) { if ($package.ContainsKey('_SkipDocsValidation') -and $true -eq $package['_SkipDocsValidation']) { Write-Host "Skip validation for package: $($packageIdentity)" } - elseif (!(ValidatePackageForOnboarding2 $package)) { + elseif (!(PackageIsValidForDocsOnboarding $package)) { LogWarning "Skip adding package that did not pass validation: $($packageIdentity)" continue } @@ -101,7 +101,7 @@ foreach ($moniker in $MONIKERS) { $oldPackage = $alreadyOnboardedPackages[$packageIdentity] if ($oldPackage.Version -ne $package.Version) { - if (!(ValidatePackageForOnboarding2 $package)) { + if (!(PackageIsValidForDocsOnboarding $package)) { LogWarning "Omitting package that failed validation: $($packageIdentity)@$($package.Version)" continue } From c7f2cc0c12fda2324cafca4b58a740ad2243cc52 Mon Sep 17 00:00:00 2001 From: Kyle Jackson Date: Thu, 26 Sep 2024 16:38:31 -0700 Subject: [PATCH 09/22] Azure-AI-Evaluator: Pass user simulator kwargs during flow invocation (#37592) --- .../azure/ai/evaluation/simulator/_simulator.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py index db9aec3b90fe..b4f2ae4081f7 100644 --- a/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py +++ b/sdk/evaluation/azure-ai-evaluation/azure/ai/evaluation/simulator/_simulator.py @@ -287,7 +287,9 @@ async def _extend_conversation_with_simulator( while len(current_simulation) < max_conversation_turns: user_response_content = user_flow( - task="Continue the conversation", conversation_history=current_simulation.to_list() + task="Continue the conversation", + conversation_history=current_simulation.to_list(), + **user_simulator_prompty_kwargs ) user_response = self._parse_prompty_response(response=user_response_content) user_turn = Turn(role=ConversationRole.USER, content=user_response["content"]) @@ -618,9 +620,12 @@ async def _build_user_simulation_response( prompty_model_config=self._build_prompty_model_config(), user_simulator_prompty_kwargs=user_simulator_prompty_kwargs, ) - try: - response_content = user_flow(task=task, conversation_history=conversation_history) + response_content = user_flow( + task=task, + conversation_history=conversation_history, + **user_simulator_prompty_kwargs + ) user_response = self._parse_prompty_response(response=response_content) return user_response["content"] except Exception as e: From 2bfcd9f9c82bab1f1c260aad1019d02f3eeb22c4 Mon Sep 17 00:00:00 2001 From: Yuchao Yan Date: Fri, 27 Sep 2024 10:13:05 +0800 Subject: [PATCH 10/22] Fix statistics for released MGMT SDK (#37603) * fix statistics * remove useless code * remove useless env var * remove useless code --- scripts/release_sdk_status/main.py | 97 +------------------ .../release_sdk_status/release_sdk_status.yml | 5 - scripts/release_sdk_status/util.py | 13 --- 3 files changed, 2 insertions(+), 113 deletions(-) delete mode 100644 scripts/release_sdk_status/util.py diff --git a/scripts/release_sdk_status/main.py 
b/scripts/release_sdk_status/main.py index e900922a60dd..031f448b9369 100644 --- a/scripts/release_sdk_status/main.py +++ b/scripts/release_sdk_status/main.py @@ -13,8 +13,6 @@ from packaging.version import parse from pathlib import Path -from util import add_certificate - MAIN_REPO_SWAGGER = 'https://github.com/Azure/azure-rest-api-specs/tree/main' PR_URL = 'https://github.com/Azure/azure-rest-api-specs/pull/' FAILED_RESULT = [] @@ -54,10 +52,6 @@ def print_call(cmd): sp.call(cmd, shell=True) -def start_test_proxy(): - print_check('pwsh {}/eng/common/testproxy/docker-start-proxy.ps1 \"start\"'.format(os.getenv('SDK_REPO'))) - - def version_sort(versions: List[str]) -> List[str]: versions_package = [parse(version) for version in versions] versions_package.sort() @@ -231,67 +225,8 @@ def bot_analysis(self): self.bot_warning += 'Need to add track2 config.' -def sdk_code_path(service_name, sdk_name) -> str: - return str(Path(os.getenv('SDK_REPO') + f'/sdk/{service_name}/{sdk_name}')) - - -@return_origin_path -def install_package_locally(service_name, sdk_name): - os.chdir(sdk_code_path(service_name, sdk_name)) - print_check('pip install -e .') - print_exec('pip install -r dev_requirements.txt') - - -@return_origin_path -def run_test_proc(sdk_name, service_name, sdk_folder): - # run test - if os.getenv('SKIP_COVERAGE') in ('true', 'yes'): - return SKIP_TEXT - - coverage_path = ''.join([os.getenv('SDK_REPO'), '/sdk/', sdk_folder]) - service_path = coverage_path.split('/azure/mgmt')[0] - os.chdir(sdk_code_path(service_name, sdk_name)) - try: - print_check(f'pytest --collect-only') - except: - print('live test run done, do not find any test !!!') - return '-, -, -, -\n' - if os.path.exists(coverage_path + '/operations') and os.path.exists(coverage_path + '/models'): - operations_path = coverage_path + '/operations' - models_path = coverage_path + '/models' - try: - start_time = int(time.time()) - print_check(f'pytest tests -s --cov={operations_path} --cov={models_path} >result.txt', path=service_path) - cost_time = int(time.time()) - start_time - my_print(f'{service_name} play_back cost {cost_time} seconds({cost_time // 60} minutes)') - except Exception as e: - print(f'{service_name} test ERROR') - return '-, 0, 0, 0\n' - else: - try: - print_check(f'pytest tests -s >result.txt', path=service_path) - except Exception as e: - return '-, 0, 0, 0\n' - if os.path.exists(service_path + '/result.txt'): - return get_test_result(service_path + '/result.txt') - - -def run_test(sdk_name, service_name, sdk_folder): - install_package_locally(service_name, sdk_name) - test_result = run_test_proc(sdk_name, service_name, sdk_folder) - return test_result - - -def clean_test_env(): - for item in ("SSL_CERT_DIR", "REQUESTS_CA_BUNDLE"): - if os.getenv(item): - os.environ.pop(item) - - def sdk_info_from_pypi(sdk_info: List[Dict[str, str]], cli_dependency): all_sdk_status = [] - add_certificate() - start_test_proxy() for package in sdk_info: sdk_name = package['package_name'] if sdk_name in cli_dependency.keys(): @@ -306,41 +241,13 @@ def sdk_info_from_pypi(sdk_info: List[Dict[str, str]], cli_dependency): readme_link=readme_link, rm_link=rm_link, cli_version=cli_version, multi_api=multi_api) sdk_folder = package['sdk_folder'] text_to_write = pypi_ins.write_to_list(sdk_folder) - service_name = package['service_name'] if pypi_ins.pypi_link != 'NA': - test_result = SKIP_TEXT - try: - test_result = run_test(sdk_name, service_name, sdk_folder) - except: - print(f'[Error] fail to play back test recordings: {sdk_name}') - 
text_to_write += test_result - all_sdk_status.append(text_to_write) - - clean_test_env() + all_sdk_status.append(text_to_write + SKIP_TEXT) + my_print(f'total pypi package kinds: {len(all_sdk_status)}') return all_sdk_status -def get_test_result(txt_path): - with open(txt_path, 'r+') as f: - coverage = ' - ' - for line in f.readlines(): - if 'TOTAL' in line: - coverage = line.split()[3] - if '=====' in line and ('passed' in line or 'failed' in line or 'skipped' in line): - # print(line) - passed, failed, skipped = 0, 0, 0 - if 'passed' in line: - passed = re.findall('(\d{1,2}) passed', line)[0] - if 'failed' in line: - failed = re.findall('(\d{1,2}) failed', line)[0] - if 'skipped' in line: - skipped = re.findall('(\d{1,2}) skipped', line)[0] - # print(f'{passed} {failed} {skipped}') - - return f'{coverage}, {passed}, {failed}, {skipped}\n' - - def write_to_csv(sdk_status_list, csv_name): with open(csv_name, 'w') as file_out: file_out.write('foler/package name,' diff --git a/scripts/release_sdk_status/release_sdk_status.yml b/scripts/release_sdk_status/release_sdk_status.yml index a0228cb56d4c..1a1b1dc2eb5f 100644 --- a/scripts/release_sdk_status/release_sdk_status.yml +++ b/scripts/release_sdk_status/release_sdk_status.yml @@ -52,15 +52,10 @@ jobs: git clone ${FILE_REPO:0:8}$(USR_NAME):$(Yuchao-GitToken)@${FILE_REPO:8} $(pwd)/file-storage mkdir azure-rest-api-specs git clone https://github.com/Azure/azure-rest-api-specs.git $(pwd)/azure-rest-api-specs - mkdir sdk-repo - git clone https://github.com/Azure/azure-sdk-for-python.git $(pwd)/sdk-repo # import env variable export TOKEN=$(Yuchao-GitToken) export SWAGGER_REPO=$(pwd)/azure-rest-api-specs - export SDK_REPO=$(pwd)/sdk-repo - export SKIP_COVERAGE=$(SKIP_COVERAGE) - export AZURE_TEST_RUN_LIVE=false # create virtual env python -m venv venv-sdk diff --git a/scripts/release_sdk_status/util.py b/scripts/release_sdk_status/util.py deleted file mode 100644 index d2abe4b4204b..000000000000 --- a/scripts/release_sdk_status/util.py +++ /dev/null @@ -1,13 +0,0 @@ -import os -from pathlib import Path -from subprocess import getoutput - - -def add_certificate(): - # Set the following certificate paths: - # SSL_CERT_DIR=C:\\azure-sdk-for-python\.certificate - # REQUESTS_CA_BUNDLE=C:\\azure-sdk-for-python\.certificate\dotnet-devcert.pem - result = getoutput(f"python {Path('scripts/devops_tasks/trust_proxy_cert.py')}").split("\n") - for item in result[1:]: - name, value = item.strip().split("=", 1) - os.environ[name] = value From a61273f855bd2f3de24906a392bed20374f2c770 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Fri, 27 Sep 2024 00:12:48 -0700 Subject: [PATCH 11/22] [AutoRelease] t2-mongocluster-2024-09-25-24276(can only be merged by SDK owner) (#37563) * code and test * update-testcase * Update CHANGELOG.md * Update CHANGELOG.md --------- Co-authored-by: azure-sdk Co-authored-by: ChenxiJiang333 Co-authored-by: ChenxiJiang333 <119990644+ChenxiJiang333@users.noreply.github.com> Co-authored-by: Yuchao Yan --- .../azure-mgmt-mongocluster/CHANGELOG.md | 37 ++ .../azure-mgmt-mongocluster/_meta.json | 2 +- .../azure/mgmt/mongocluster/_client.py | 5 +- .../azure/mgmt/mongocluster/_configuration.py | 7 +- .../azure/mgmt/mongocluster/_version.py | 2 +- .../azure/mgmt/mongocluster/aio/_client.py | 5 +- .../mgmt/mongocluster/aio/_configuration.py | 7 +- .../mgmt/mongocluster/models/__init__.py | 18 +- .../azure/mgmt/mongocluster/models/_enums.py | 21 +- .../azure/mgmt/mongocluster/models/_models.py | 319 
+++++++++++++----- .../mongocluster/operations/_operations.py | 40 +-- .../mongo_clusters_create.py | 12 +- .../mongo_clusters_create_geo_replica.py | 2 +- .../mongo_clusters_create_pitr.py | 2 +- .../mongo_clusters_delete.py | 2 +- .../mongo_clusters_firewall_rule_create.py | 2 +- .../mongo_clusters_firewall_rule_delete.py | 2 +- .../mongo_clusters_firewall_rule_get.py | 2 +- .../mongo_clusters_firewall_rule_list.py | 2 +- .../mongo_clusters_force_promote_replica.py | 2 +- .../generated_samples/mongo_clusters_get.py | 2 +- .../generated_samples/mongo_clusters_list.py | 2 +- .../mongo_clusters_list_by_resource_group.py | 2 +- .../mongo_clusters_list_connection_strings.py | 2 +- .../mongo_clusters_name_availability.py | 2 +- ...usters_name_availability_already_exists.py | 2 +- .../mongo_clusters_patch_disk_size.py | 4 +- ...o_clusters_patch_private_network_access.py | 2 +- ...ters_private_endpoint_connection_delete.py | 2 +- ...lusters_private_endpoint_connection_get.py | 2 +- ...usters_private_endpoint_connection_list.py | 2 +- ...lusters_private_endpoint_connection_put.py | 2 +- ...ngo_clusters_private_link_resource_list.py | 2 +- .../mongo_clusters_replica_list.py | 2 +- .../mongo_clusters_reset_password.py | 4 +- .../mongo_clusters_update.py | 13 +- .../generated_samples/operations_list.py | 2 +- ..._cluster_mgmt_mongo_clusters_operations.py | 23 +- ...er_mgmt_mongo_clusters_operations_async.py | 23 +- .../sdk_packaging.toml | 2 +- .../azure-mgmt-mongocluster/setup.py | 2 +- .../azure-mgmt-mongocluster/tests/conftest.py | 56 ++- .../test_cli_mgmt_mongocluster_lro_test.py | 28 +- ...ter_mgmt_mongo_clusters_operations_test.py | 1 - ...ongo_cluster_mgmt_operations_async_test.py | 1 - ...test_mongo_cluster_mgmt_operations_test.py | 1 - .../azure-mgmt-mongocluster/tsp-location.yaml | 2 +- 47 files changed, 427 insertions(+), 252 deletions(-) diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/CHANGELOG.md b/sdk/mongocluster/azure-mgmt-mongocluster/CHANGELOG.md index a29eb4b85fe6..a8b2eb1dbc81 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/CHANGELOG.md +++ b/sdk/mongocluster/azure-mgmt-mongocluster/CHANGELOG.md @@ -1,5 +1,42 @@ # Release History +## 1.0.0 (2024-09-26) + +### Features Added + + - Model `ConnectionString` added property `name` + - Model `MongoClusterProperties` added property `administrator` + - Model `MongoClusterProperties` added property `high_availability` + - Model `MongoClusterProperties` added property `storage` + - Model `MongoClusterProperties` added property `sharding` + - Model `MongoClusterProperties` added property `compute` + - Model `MongoClusterProperties` added property `backup` + - Model `MongoClusterUpdateProperties` added property `administrator` + - Model `MongoClusterUpdateProperties` added property `high_availability` + - Model `MongoClusterUpdateProperties` added property `storage` + - Model `MongoClusterUpdateProperties` added property `sharding` + - Model `MongoClusterUpdateProperties` added property `compute` + - Model `MongoClusterUpdateProperties` added property `backup` + - Added model `AdministratorProperties` + - Added model `BackupProperties` + - Added model `ComputeProperties` + - Added enum `HighAvailabilityMode` + - Added model `HighAvailabilityProperties` + - Added model `ShardingProperties` + - Added model `StorageProperties` + +### Breaking Changes + + - Model `MongoClusterProperties` deleted or renamed its instance variable `administrator_login` + - Model `MongoClusterProperties` deleted or renamed its instance variable 
`administrator_login_password` + - Model `MongoClusterProperties` deleted or renamed its instance variable `earliest_restore_time` + - Model `MongoClusterProperties` deleted or renamed its instance variable `node_group_specs` + - Model `MongoClusterUpdateProperties` deleted or renamed its instance variable `administrator_login` + - Model `MongoClusterUpdateProperties` deleted or renamed its instance variable `administrator_login_password` + - Model `MongoClusterUpdateProperties` deleted or renamed its instance variable `node_group_specs` + - Deleted or renamed model `NodeGroupSpec` + - Deleted or renamed model `NodeKind` + ## 1.0.0b2 (2024-09-23) ### Features Added diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/_meta.json b/sdk/mongocluster/azure-mgmt-mongocluster/_meta.json index 0fd55258a046..c03f8a84580e 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/_meta.json +++ b/sdk/mongocluster/azure-mgmt-mongocluster/_meta.json @@ -1,5 +1,5 @@ { - "commit": "465d34792335e02d9bf8de153734420d769adac1", + "commit": "f93850874010501f216ceadf19459402efd16d9e", "repository_url": "https://github.com/Azure/azure-rest-api-specs", "typespec_src": "specification/mongocluster/DocumentDB.MongoCluster.Management", "@azure-tools/typespec-python": "0.33.0" diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_client.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_client.py index aeed9457d267..93fcfbef2c23 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_client.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_client.py @@ -54,9 +54,8 @@ class MongoClusterMgmtClient: # pylint: disable=client-accepts-api-version-keyw :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-06-01-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "2024-07-01". + Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_configuration.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_configuration.py index fcb324b0901b..870c54669cad 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_configuration.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_configuration.py @@ -30,9 +30,8 @@ class MongoClusterMgmtClientConfiguration: # pylint: disable=too-many-instance- :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-06-01-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "2024-07-01". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ @@ -43,7 +42,7 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2024-06-01-preview") + api_version: str = kwargs.pop("api_version", "2024-07-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_version.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_version.py index bbcd28b4aa67..0ec13ea52bbf 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_version.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0b2" +VERSION = "1.0.0" diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_client.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_client.py index 2a2f5cd5647f..837664327c15 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_client.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_client.py @@ -54,9 +54,8 @@ class MongoClusterMgmtClient: # pylint: disable=client-accepts-api-version-keyw :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-06-01-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "2024-07-01". + Note that overriding this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_configuration.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_configuration.py index f25d6d53957a..fefa880a1456 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_configuration.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/aio/_configuration.py @@ -30,9 +30,8 @@ class MongoClusterMgmtClientConfiguration: # pylint: disable=too-many-instance- :type subscription_id: str :param base_url: Service host. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-06-01-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Default value is "2024-07-01". + Note that overriding this default value may result in unsupported behavior. 
:paramtype api_version: str """ @@ -43,7 +42,7 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2024-06-01-preview") + api_version: str = kwargs.pop("api_version", "2024-07-01") if credential is None: raise ValueError("Parameter 'credential' must not be None.") diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/__init__.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/__init__.py index bfe59d753613..a352c3476837 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/__init__.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/__init__.py @@ -6,14 +6,18 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from ._models import AdministratorProperties +from ._models import BackupProperties from ._models import CheckNameAvailabilityRequest from ._models import CheckNameAvailabilityResponse +from ._models import ComputeProperties from ._models import ConnectionString from ._models import ErrorAdditionalInfo from ._models import ErrorDetail from ._models import ErrorResponse from ._models import FirewallRule from ._models import FirewallRuleProperties +from ._models import HighAvailabilityProperties from ._models import ListConnectionStringsResult from ._models import MongoCluster from ._models import MongoClusterProperties @@ -21,7 +25,6 @@ from ._models import MongoClusterRestoreParameters from ._models import MongoClusterUpdate from ._models import MongoClusterUpdateProperties -from ._models import NodeGroupSpec from ._models import Operation from ._models import OperationDisplay from ._models import PrivateEndpoint @@ -36,6 +39,8 @@ from ._models import Replica from ._models import ReplicationProperties from ._models import Resource +from ._models import ShardingProperties +from ._models import StorageProperties from ._models import SystemData from ._models import TrackedResource @@ -43,8 +48,8 @@ from ._enums import CheckNameAvailabilityReason from ._enums import CreateMode from ._enums import CreatedByType +from ._enums import HighAvailabilityMode from ._enums import MongoClusterStatus -from ._enums import NodeKind from ._enums import Origin from ._enums import PreviewFeature from ._enums import PrivateEndpointConnectionProvisioningState @@ -60,14 +65,18 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ + "AdministratorProperties", + "BackupProperties", "CheckNameAvailabilityRequest", "CheckNameAvailabilityResponse", + "ComputeProperties", "ConnectionString", "ErrorAdditionalInfo", "ErrorDetail", "ErrorResponse", "FirewallRule", "FirewallRuleProperties", + "HighAvailabilityProperties", "ListConnectionStringsResult", "MongoCluster", "MongoClusterProperties", @@ -75,7 +84,6 @@ "MongoClusterRestoreParameters", "MongoClusterUpdate", "MongoClusterUpdateProperties", - "NodeGroupSpec", "Operation", "OperationDisplay", "PrivateEndpoint", @@ -90,14 +98,16 @@ "Replica", "ReplicationProperties", "Resource", + "ShardingProperties", + "StorageProperties", "SystemData", "TrackedResource", "ActionType", "CheckNameAvailabilityReason", "CreateMode", "CreatedByType", + "HighAvailabilityMode", "MongoClusterStatus", - "NodeKind", "Origin", "PreviewFeature", "PrivateEndpointConnectionProvisioningState", diff --git 
a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_enums.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_enums.py index a621138cad6a..850548c02ab8 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_enums.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_enums.py @@ -54,6 +54,20 @@ class CreateMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Create a replica cluster in the same geographic region as the source cluster.""" +class HighAvailabilityMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The high availability modes for a cluster.""" + + DISABLED = "Disabled" + """High availability mode is disabled. This mode is can see availability impact during faults or + maintenance and is not recommended for production.""" + SAME_ZONE = "SameZone" + """High availability mode is enabled, where each server in a shard is placed in the same + availability zone.""" + ZONE_REDUNDANT_PREFERRED = "ZoneRedundantPreferred" + """High availability mode is enabled and preferences ZoneRedundant if availability zones capacity + is available in the region, otherwise falls-back to provisioning with SameZone.""" + + class MongoClusterStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of the Mongo cluster resource.""" @@ -73,13 +87,6 @@ class MongoClusterStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The mongo cluster resource is being dropped.""" -class NodeKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The kind of the node on the cluster.""" - - SHARD = "Shard" - """The node is a shard kind.""" - - class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit logs UX. Default value is "user,system". diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_models.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_models.py index 02d2275f5bd1..6558dd01704b 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_models.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/models/_models.py @@ -18,6 +18,53 @@ from .. import models as _models +class AdministratorProperties(_model_base.Model): + """The local administrator login properties. + + :ivar user_name: The administrator user name. + :vartype user_name: str + :ivar password: The administrator password. + :vartype password: str + """ + + user_name: Optional[str] = rest_field(name="userName", visibility=["read", "create", "update"]) + """The administrator user name.""" + password: Optional[str] = rest_field(visibility=["create", "update"]) + """The administrator password.""" + + @overload + def __init__( + self, + *, + user_name: Optional[str] = None, + password: Optional[str] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class BackupProperties(_model_base.Model): + """The backup properties of the cluster. This includes the earliest restore time and retention + settings. + + Readonly variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar earliest_restore_time: Earliest restore timestamp in UTC ISO8601 format. + :vartype earliest_restore_time: str + """ + + earliest_restore_time: Optional[str] = rest_field(name="earliestRestoreTime", visibility=["read"]) + """Earliest restore timestamp in UTC ISO8601 format.""" + + class CheckNameAvailabilityRequest(_model_base.Model): """The check availability request body. @@ -91,6 +138,37 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useles super().__init__(*args, **kwargs) +class ComputeProperties(_model_base.Model): + """The compute properties of the cluster. This includes the virtual-cores/memory and scaling + options applied to servers in the cluster. + + :ivar tier: The compute tier to assign to the cluster, where each tier maps to a virtual-core + and memory size. Example values: 'M30', 'M40'. + :vartype tier: str + """ + + tier: Optional[str] = rest_field() + """The compute tier to assign to the cluster, where each tier maps to a virtual-core and memory + size. Example values: 'M30', 'M40'.""" + + @overload + def __init__( + self, + *, + tier: Optional[str] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + class ConnectionString(_model_base.Model): """Connection string for the mongo cluster. @@ -100,12 +178,16 @@ class ConnectionString(_model_base.Model): :vartype connection_string: str :ivar description: Description of the connection string. :vartype description: str + :ivar name: Name of the connection string. + :vartype name: str """ connection_string: Optional[str] = rest_field(name="connectionString", visibility=["read"]) """Value of the connection string.""" description: Optional[str] = rest_field(visibility=["read"]) """Description of the connection string.""" + name: Optional[str] = rest_field(visibility=["read"]) + """Name of the connection string.""" class ErrorAdditionalInfo(_model_base.Model): @@ -322,6 +404,36 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useles super().__init__(*args, **kwargs) +class HighAvailabilityProperties(_model_base.Model): + """The high availability properties of the cluster. + + :ivar target_mode: The target high availability mode requested for the cluster. Known values + are: "Disabled", "SameZone", and "ZoneRedundantPreferred". + :vartype target_mode: str or ~azure.mgmt.mongocluster.models.HighAvailabilityMode + """ + + target_mode: Optional[Union[str, "_models.HighAvailabilityMode"]] = rest_field(name="targetMode") + """The target high availability mode requested for the cluster. Known values are: \"Disabled\", + \"SameZone\", and \"ZoneRedundantPreferred\".""" + + @overload + def __init__( + self, + *, + target_mode: Optional[Union[str, "_models.HighAvailabilityMode"]] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + class ListConnectionStringsResult(_model_base.Model): """The connection strings for the given mongo cluster. 
@@ -445,17 +557,13 @@ class MongoClusterProperties(_model_base.Model): # pylint: disable=too-many-ins :vartype restore_parameters: ~azure.mgmt.mongocluster.models.MongoClusterRestoreParameters :ivar replica_parameters: The parameters to create a replica mongo cluster. :vartype replica_parameters: ~azure.mgmt.mongocluster.models.MongoClusterReplicaParameters - :ivar administrator_login: The administrator's login for the mongo cluster. - :vartype administrator_login: str - :ivar administrator_login_password: The password of the administrator login. - :vartype administrator_login_password: str + :ivar administrator: The local administrator properties for the mongo cluster. + :vartype administrator: ~azure.mgmt.mongocluster.models.AdministratorProperties :ivar server_version: The Mongo DB server version. Defaults to the latest available version if not specified. :vartype server_version: str :ivar connection_string: The default mongo connection string for the cluster. :vartype connection_string: str - :ivar earliest_restore_time: Earliest restore timestamp in UTC ISO8601 format. - :vartype earliest_restore_time: str :ivar provisioning_state: The provisioning state of the mongo cluster. Known values are: "Succeeded", "Failed", "Canceled", "InProgress", "Updating", and "Dropping". :vartype provisioning_state: str or ~azure.mgmt.mongocluster.models.ProvisioningState @@ -465,8 +573,16 @@ class MongoClusterProperties(_model_base.Model): # pylint: disable=too-many-ins :ivar public_network_access: Whether or not public endpoint access is allowed for this mongo cluster. Known values are: "Enabled" and "Disabled". :vartype public_network_access: str or ~azure.mgmt.mongocluster.models.PublicNetworkAccess - :ivar node_group_specs: The list of node group specs in the cluster. - :vartype node_group_specs: list[~azure.mgmt.mongocluster.models.NodeGroupSpec] + :ivar high_availability: The high availability properties of the mongo cluster. + :vartype high_availability: ~azure.mgmt.mongocluster.models.HighAvailabilityProperties + :ivar storage: The storage properties of the mongo cluster. + :vartype storage: ~azure.mgmt.mongocluster.models.StorageProperties + :ivar sharding: The sharding properties of the mongo cluster. + :vartype sharding: ~azure.mgmt.mongocluster.models.ShardingProperties + :ivar compute: The compute properties of the mongo cluster. + :vartype compute: ~azure.mgmt.mongocluster.models.ComputeProperties + :ivar backup: The backup properties of the mongo cluster. + :vartype backup: ~azure.mgmt.mongocluster.models.BackupProperties :ivar private_endpoint_connections: List of private endpoint connections. :vartype private_endpoint_connections: list[~azure.mgmt.mongocluster.models.PrivateEndpointConnection] @@ -489,18 +605,12 @@ class MongoClusterProperties(_model_base.Model): # pylint: disable=too-many-ins name="replicaParameters", visibility=["create"] ) """The parameters to create a replica mongo cluster.""" - administrator_login: Optional[str] = rest_field(name="administratorLogin", visibility=["read", "create", "update"]) - """The administrator's login for the mongo cluster.""" - administrator_login_password: Optional[str] = rest_field( - name="administratorLoginPassword", visibility=["create", "update"] - ) - """The password of the administrator login.""" + administrator: Optional["_models.AdministratorProperties"] = rest_field() + """The local administrator properties for the mongo cluster.""" server_version: Optional[str] = rest_field(name="serverVersion") """The Mongo DB server version. 
Defaults to the latest available version if not specified.""" connection_string: Optional[str] = rest_field(name="connectionString", visibility=["read"]) """The default mongo connection string for the cluster.""" - earliest_restore_time: Optional[str] = rest_field(name="earliestRestoreTime", visibility=["read"]) - """Earliest restore timestamp in UTC ISO8601 format.""" provisioning_state: Optional[Union[str, "_models.ProvisioningState"]] = rest_field( name="provisioningState", visibility=["read"] ) @@ -514,8 +624,16 @@ class MongoClusterProperties(_model_base.Model): # pylint: disable=too-many-ins public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(name="publicNetworkAccess") """Whether or not public endpoint access is allowed for this mongo cluster. Known values are: \"Enabled\" and \"Disabled\".""" - node_group_specs: Optional[List["_models.NodeGroupSpec"]] = rest_field(name="nodeGroupSpecs") - """The list of node group specs in the cluster.""" + high_availability: Optional["_models.HighAvailabilityProperties"] = rest_field(name="highAvailability") + """The high availability properties of the mongo cluster.""" + storage: Optional["_models.StorageProperties"] = rest_field() + """The storage properties of the mongo cluster.""" + sharding: Optional["_models.ShardingProperties"] = rest_field() + """The sharding properties of the mongo cluster.""" + compute: Optional["_models.ComputeProperties"] = rest_field() + """The compute properties of the mongo cluster.""" + backup: Optional["_models.BackupProperties"] = rest_field() + """The backup properties of the mongo cluster.""" private_endpoint_connections: Optional[List["_models.PrivateEndpointConnection"]] = rest_field( name="privateEndpointConnections", visibility=["read"] ) @@ -534,11 +652,14 @@ def __init__( create_mode: Optional[Union[str, "_models.CreateMode"]] = None, restore_parameters: Optional["_models.MongoClusterRestoreParameters"] = None, replica_parameters: Optional["_models.MongoClusterReplicaParameters"] = None, - administrator_login: Optional[str] = None, - administrator_login_password: Optional[str] = None, + administrator: Optional["_models.AdministratorProperties"] = None, server_version: Optional[str] = None, public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - node_group_specs: Optional[List["_models.NodeGroupSpec"]] = None, + high_availability: Optional["_models.HighAvailabilityProperties"] = None, + storage: Optional["_models.StorageProperties"] = None, + sharding: Optional["_models.ShardingProperties"] = None, + compute: Optional["_models.ComputeProperties"] = None, + backup: Optional["_models.BackupProperties"] = None, preview_features: Optional[List[Union[str, "_models.PreviewFeature"]]] = None, ): ... @@ -656,35 +777,45 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useles class MongoClusterUpdateProperties(_model_base.Model): """The updatable properties of the MongoCluster. - :ivar administrator_login: The administrator's login for the mongo cluster. - :vartype administrator_login: str - :ivar administrator_login_password: The password of the administrator login. - :vartype administrator_login_password: str + :ivar administrator: The local administrator properties for the mongo cluster. + :vartype administrator: ~azure.mgmt.mongocluster.models.AdministratorProperties :ivar server_version: The Mongo DB server version. Defaults to the latest available version if not specified. 
:vartype server_version: str :ivar public_network_access: Whether or not public endpoint access is allowed for this mongo cluster. Known values are: "Enabled" and "Disabled". :vartype public_network_access: str or ~azure.mgmt.mongocluster.models.PublicNetworkAccess - :ivar node_group_specs: The list of node group specs in the cluster. - :vartype node_group_specs: list[~azure.mgmt.mongocluster.models.NodeGroupSpec] + :ivar high_availability: The high availability properties of the mongo cluster. + :vartype high_availability: ~azure.mgmt.mongocluster.models.HighAvailabilityProperties + :ivar storage: The storage properties of the mongo cluster. + :vartype storage: ~azure.mgmt.mongocluster.models.StorageProperties + :ivar sharding: The sharding properties of the mongo cluster. + :vartype sharding: ~azure.mgmt.mongocluster.models.ShardingProperties + :ivar compute: The compute properties of the mongo cluster. + :vartype compute: ~azure.mgmt.mongocluster.models.ComputeProperties + :ivar backup: The backup properties of the mongo cluster. + :vartype backup: ~azure.mgmt.mongocluster.models.BackupProperties :ivar preview_features: List of private endpoint connections. :vartype preview_features: list[str or ~azure.mgmt.mongocluster.models.PreviewFeature] """ - administrator_login: Optional[str] = rest_field(name="administratorLogin", visibility=["read", "create", "update"]) - """The administrator's login for the mongo cluster.""" - administrator_login_password: Optional[str] = rest_field( - name="administratorLoginPassword", visibility=["create", "update"] - ) - """The password of the administrator login.""" + administrator: Optional["_models.AdministratorProperties"] = rest_field() + """The local administrator properties for the mongo cluster.""" server_version: Optional[str] = rest_field(name="serverVersion") """The Mongo DB server version. Defaults to the latest available version if not specified.""" public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field(name="publicNetworkAccess") """Whether or not public endpoint access is allowed for this mongo cluster. 
Known values are: \"Enabled\" and \"Disabled\".""" - node_group_specs: Optional[List["_models.NodeGroupSpec"]] = rest_field(name="nodeGroupSpecs") - """The list of node group specs in the cluster.""" + high_availability: Optional["_models.HighAvailabilityProperties"] = rest_field(name="highAvailability") + """The high availability properties of the mongo cluster.""" + storage: Optional["_models.StorageProperties"] = rest_field() + """The storage properties of the mongo cluster.""" + sharding: Optional["_models.ShardingProperties"] = rest_field() + """The sharding properties of the mongo cluster.""" + compute: Optional["_models.ComputeProperties"] = rest_field() + """The compute properties of the mongo cluster.""" + backup: Optional["_models.BackupProperties"] = rest_field() + """The backup properties of the mongo cluster.""" preview_features: Optional[List[Union[str, "_models.PreviewFeature"]]] = rest_field(name="previewFeatures") """List of private endpoint connections.""" @@ -692,11 +823,14 @@ class MongoClusterUpdateProperties(_model_base.Model): def __init__( self, *, - administrator_login: Optional[str] = None, - administrator_login_password: Optional[str] = None, + administrator: Optional["_models.AdministratorProperties"] = None, server_version: Optional[str] = None, public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, - node_group_specs: Optional[List["_models.NodeGroupSpec"]] = None, + high_availability: Optional["_models.HighAvailabilityProperties"] = None, + storage: Optional["_models.StorageProperties"] = None, + sharding: Optional["_models.ShardingProperties"] = None, + compute: Optional["_models.ComputeProperties"] = None, + backup: Optional["_models.BackupProperties"] = None, preview_features: Optional[List[Union[str, "_models.PreviewFeature"]]] = None, ): ... @@ -711,57 +845,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useles super().__init__(*args, **kwargs) -class NodeGroupSpec(_model_base.Model): - """Specification for a node group. - - :ivar sku: The resource sku for the node group. This defines the size of CPU and memory that is - provisioned for each node. Example values: 'M30', 'M40'. - :vartype sku: str - :ivar disk_size_gb: The disk storage size for the node group in GB. Example values: 128, 256, - 512, 1024. - :vartype disk_size_gb: int - :ivar enable_ha: Whether high availability is enabled on the node group. - :vartype enable_ha: bool - :ivar kind: The node type deployed in the node group. "Shard" - :vartype kind: str or ~azure.mgmt.mongocluster.models.NodeKind - :ivar node_count: The number of nodes in the node group. - :vartype node_count: int - """ - - sku: Optional[str] = rest_field() - """The resource sku for the node group. This defines the size of CPU and memory that is - provisioned for each node. Example values: 'M30', 'M40'.""" - disk_size_gb: Optional[int] = rest_field(name="diskSizeGB") - """The disk storage size for the node group in GB. Example values: 128, 256, 512, 1024.""" - enable_ha: Optional[bool] = rest_field(name="enableHa") - """Whether high availability is enabled on the node group.""" - kind: Optional[Union[str, "_models.NodeKind"]] = rest_field() - """The node type deployed in the node group. 
\"Shard\"""" - node_count: Optional[int] = rest_field(name="nodeCount") - """The number of nodes in the node group.""" - - @overload - def __init__( - self, - *, - sku: Optional[str] = None, - disk_size_gb: Optional[int] = None, - enable_ha: Optional[bool] = None, - kind: Optional[Union[str, "_models.NodeKind"]] = None, - node_count: Optional[int] = None, - ): ... - - @overload - def __init__(self, mapping: Mapping[str, Any]): - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation - super().__init__(*args, **kwargs) - - class Operation(_model_base.Model): """Details of a REST API operation, returned from the Resource Provider Operations API. @@ -1229,6 +1312,64 @@ class ReplicationProperties(_model_base.Model): \"Provisioning\", \"Updating\", \"Broken\", and \"Reconfiguring\".""" +class ShardingProperties(_model_base.Model): + """The sharding properties of the cluster. This includes the shard count and scaling options for + the cluster. + + :ivar shard_count: Number of shards to provision on the cluster. + :vartype shard_count: int + """ + + shard_count: Optional[int] = rest_field(name="shardCount") + """Number of shards to provision on the cluster.""" + + @overload + def __init__( + self, + *, + shard_count: Optional[int] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + +class StorageProperties(_model_base.Model): + """The storage properties of the cluster. This includes the data storage size and scaling applied + to servers in the cluster. + + :ivar size_gb: The size of the data disk assigned to each server. + :vartype size_gb: int + """ + + size_gb: Optional[int] = rest_field(name="sizeGb") + """The size of the data disk assigned to each server.""" + + @overload + def __init__( + self, + *, + size_gb: Optional[int] = None, + ): ... + + @overload + def __init__(self, mapping: Mapping[str, Any]): + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=useless-super-delegation + super().__init__(*args, **kwargs) + + class SystemData(_model_base.Model): """Metadata pertaining to creation and last modification of the resource. 
diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/operations/_operations.py b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/operations/_operations.py index 195e54176416..d5b197cabcde 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/operations/_operations.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/azure/mgmt/mongocluster/operations/_operations.py @@ -52,7 +52,7 @@ def build_operations_list_request(**kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -73,7 +73,7 @@ def build_mongo_clusters_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -102,7 +102,7 @@ def build_mongo_clusters_create_or_update_request( # pylint: disable=name-too-l _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -133,7 +133,7 @@ def build_mongo_clusters_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -163,7 +163,7 @@ def build_mongo_clusters_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -191,7 +191,7 @@ def build_mongo_clusters_list_by_resource_group_request( # pylint: disable=name _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -216,7 +216,7 @@ def build_mongo_clusters_list_request(subscription_id: str, **kwargs: Any) -> Ht _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -242,7 +242,7 @@ def build_mongo_clusters_list_connection_strings_request( # pylint: disable=nam _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -271,7 +271,7 @@ def build_mongo_clusters_check_name_availability_request( # pylint: disable=nam _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -301,7 +301,7 @@ def build_mongo_clusters_promote_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -331,7 +331,7 @@ def build_firewall_rules_get_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -361,7 +361,7 @@ def build_firewall_rules_create_or_update_request( # pylint: disable=name-too-l _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -392,7 +392,7 @@ def build_firewall_rules_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -421,7 +421,7 @@ def build_firewall_rules_list_by_mongo_cluster_request( # pylint: disable=name- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -449,7 +449,7 @@ def 
build_private_endpoint_connections_list_by_mongo_cluster_request( # pylint: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -481,7 +481,7 @@ def build_private_endpoint_connections_get_request( # pylint: disable=name-too- _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -517,7 +517,7 @@ def build_private_endpoint_connections_create_request( # pylint: disable=name-t _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -554,7 +554,7 @@ def build_private_endpoint_connections_delete_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -585,7 +585,7 @@ def build_private_links_list_by_mongo_cluster_request( # pylint: disable=name-t _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -613,7 +613,7 @@ def build_replicas_list_by_parent_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-06-01-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01")) accept = _headers.pop("Accept", "application/json") # Construct URL diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create.py index 38d6a5969575..0c9ca9ac7a68 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create.py @@ -36,18 +36,18 @@ def main(): resource={ "location": "westus2", "properties": { - "administratorLogin": "mongoAdmin", - "administratorLoginPassword": "password", - "nodeGroupSpecs": [ - {"diskSizeGB": 128, "enableHa": True, "kind": "Shard", "nodeCount": 1, "sku": "M30"} - ], + 
"administrator": {"password": "password", "userName": "mongoAdmin"}, + "compute": {"tier": "M30"}, + "highAvailability": {"targetMode": "SameZone"}, "serverVersion": "5.0", + "sharding": {"shardCount": 1}, + "storage": {"sizeGb": 128}, }, }, ).result() print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_Create.json +# x-ms-original-file: 2024-07-01/MongoClusters_Create.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_geo_replica.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_geo_replica.py index 4d185bdcd549..478375d13e19 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_geo_replica.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_geo_replica.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_CreateGeoReplica.json +# x-ms-original-file: 2024-07-01/MongoClusters_CreateGeoReplica.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_pitr.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_pitr.py index 69aeab90f3ae..bbc19a418909 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_pitr.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_create_pitr.py @@ -47,6 +47,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_CreatePITR.json +# x-ms-original-file: 2024-07-01/MongoClusters_CreatePITR.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_delete.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_delete.py index 0028b70b08b0..8c9dedf1d90f 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_delete.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_delete.py @@ -36,6 +36,6 @@ def main(): ).result() -# x-ms-original-file: 2024-06-01-preview/MongoClusters_Delete.json +# x-ms-original-file: 2024-07-01/MongoClusters_Delete.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_create.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_create.py index cf75ac2e252d..8c37ec59ae62 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_create.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_create.py @@ -39,6 +39,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_FirewallRuleCreate.json +# x-ms-original-file: 2024-07-01/MongoClusters_FirewallRuleCreate.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_delete.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_delete.py index d23efc0885f5..8a01ba9d6a91 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_delete.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# 
x-ms-original-file: 2024-06-01-preview/MongoClusters_FirewallRuleDelete.json +# x-ms-original-file: 2024-07-01/MongoClusters_FirewallRuleDelete.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_get.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_get.py index 135a9e131a62..e1fa3973c747 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_get.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_FirewallRuleGet.json +# x-ms-original-file: 2024-07-01/MongoClusters_FirewallRuleGet.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_list.py index 2f347895ec77..2f8d653da357 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_firewall_rule_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_FirewallRuleList.json +# x-ms-original-file: 2024-07-01/MongoClusters_FirewallRuleList.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_force_promote_replica.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_force_promote_replica.py index 5f983225b7dc..18d52b74fefa 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_force_promote_replica.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_force_promote_replica.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2024-06-01-preview/MongoClusters_ForcePromoteReplica.json +# x-ms-original-file: 2024-07-01/MongoClusters_ForcePromoteReplica.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_get.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_get.py index 6c2232842816..0aadca7b500e 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_get.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_get.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_Get.json +# x-ms-original-file: 2024-07-01/MongoClusters_Get.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list.py index 15ea44be65dd..7a526e9e56a1 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_List.json +# x-ms-original-file: 2024-07-01/MongoClusters_List.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_by_resource_group.py 
b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_by_resource_group.py index a34e5eb636a4..fd3668444f5a 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_by_resource_group.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_by_resource_group.py @@ -37,6 +37,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_ListByResourceGroup.json +# x-ms-original-file: 2024-07-01/MongoClusters_ListByResourceGroup.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_connection_strings.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_connection_strings.py index f646f554ca4e..7fcbf7399c6d 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_connection_strings.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_list_connection_strings.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_ListConnectionStrings.json +# x-ms-original-file: 2024-07-01/MongoClusters_ListConnectionStrings.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability.py index bc3862eece9b..b346f7ce5939 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_NameAvailability.json +# x-ms-original-file: 2024-07-01/MongoClusters_NameAvailability.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability_already_exists.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability_already_exists.py index 2ee5ab42c446..d55107e72039 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability_already_exists.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_name_availability_already_exists.py @@ -37,6 +37,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_NameAvailability_AlreadyExists.json +# x-ms-original-file: 2024-07-01/MongoClusters_NameAvailability_AlreadyExists.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_disk_size.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_disk_size.py index ec2355d18af5..d5b050f071af 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_disk_size.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_disk_size.py @@ -33,11 +33,11 @@ def main(): response = client.mongo_clusters.begin_update( resource_group_name="TestResourceGroup", mongo_cluster_name="myMongoCluster", - properties={"properties": {"nodeGroupSpecs": [{"diskSizeGB": 256, "kind": "Shard"}]}}, + properties={"properties": {"storage": {"sizeGb": 256}}}, ).result() print(response) -# x-ms-original-file: 
2024-06-01-preview/MongoClusters_PatchDiskSize.json +# x-ms-original-file: 2024-07-01/MongoClusters_PatchDiskSize.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_private_network_access.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_private_network_access.py index a4227298b5ef..1a3e65678149 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_private_network_access.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_patch_private_network_access.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PatchPrivateNetworkAccess.json +# x-ms-original-file: 2024-07-01/MongoClusters_PatchPrivateNetworkAccess.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_delete.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_delete.py index eed468025c9e..088f48dc338a 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_delete.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_delete.py @@ -37,6 +37,6 @@ def main(): ).result() -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PrivateEndpointConnectionDelete.json +# x-ms-original-file: 2024-07-01/MongoClusters_PrivateEndpointConnectionDelete.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_get.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_get.py index 7c2e8a9ef32e..4b25e0b76332 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_get.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_get.py @@ -38,6 +38,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PrivateEndpointConnectionGet.json +# x-ms-original-file: 2024-07-01/MongoClusters_PrivateEndpointConnectionGet.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_list.py index 1f5fcbc7ea37..875ed7c6d7f5 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PrivateEndpointConnectionList.json +# x-ms-original-file: 2024-07-01/MongoClusters_PrivateEndpointConnectionList.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_put.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_put.py index a6eb19f58f3d..07f5a9a2bd78 100644 --- 
a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_put.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_endpoint_connection_put.py @@ -41,6 +41,6 @@ def main(): print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PrivateEndpointConnectionPut.json +# x-ms-original-file: 2024-07-01/MongoClusters_PrivateEndpointConnectionPut.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_link_resource_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_link_resource_list.py index 5291df343f6f..2026fb4bfffe 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_link_resource_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_private_link_resource_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_PrivateLinkResourceList.json +# x-ms-original-file: 2024-07-01/MongoClusters_PrivateLinkResourceList.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_replica_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_replica_list.py index 3d76cb459702..f22f11480bd7 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_replica_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_replica_list.py @@ -38,6 +38,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_ReplicaList.json +# x-ms-original-file: 2024-07-01/MongoClusters_ReplicaList.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_reset_password.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_reset_password.py index 87ef7782536e..d5eaa01866e2 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_reset_password.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_reset_password.py @@ -33,11 +33,11 @@ def main(): response = client.mongo_clusters.begin_update( resource_group_name="TestResourceGroup", mongo_cluster_name="myMongoCluster", - properties={"properties": {"administratorLogin": "mongoAdmin", "administratorLoginPassword": "password"}}, + properties={"properties": {"administrator": {"password": "password", "userName": "mongoAdmin"}}}, ).result() print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_ResetPassword.json +# x-ms-original-file: 2024-07-01/MongoClusters_ResetPassword.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_update.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_update.py index 5118d23507d2..16bbc5422c0c 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_update.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/mongo_clusters_update.py @@ -35,19 +35,20 @@ def main(): mongo_cluster_name="myMongoCluster", properties={ "properties": { - "administratorLogin": "mongoAdmin", - "administratorLoginPassword": "password", - "nodeGroupSpecs": [ - {"diskSizeGB": 256, "enableHa": True, "kind": "Shard", "nodeCount": 1, "sku": "M50"} - ], + 
"administrator": {"userName": "mongoAdmin"}, + "compute": {"tier": "M50"}, + "highAvailability": {"targetMode": "SameZone"}, + "previewFeatures": [], "publicNetworkAccess": "Enabled", "serverVersion": "5.0", + "sharding": {"shardCount": 4}, + "storage": {"sizeGb": 256}, } }, ).result() print(response) -# x-ms-original-file: 2024-06-01-preview/MongoClusters_Update.json +# x-ms-original-file: 2024-07-01/MongoClusters_Update.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/operations_list.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/operations_list.py index ff0b7606af9e..062d83ab92c3 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/operations_list.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_samples/operations_list.py @@ -35,6 +35,6 @@ def main(): print(item) -# x-ms-original-file: 2024-06-01-preview/Operations_List.json +# x-ms-original-file: 2024-07-01/Operations_List.json if __name__ == "__main__": main() diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations.py index b25e70541922..a70bf65845e6 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations.py @@ -40,16 +40,14 @@ def test_mongo_clusters_begin_create_or_update(self, resource_group): "id": "str", "name": "str", "properties": { - "administratorLogin": "str", - "administratorLoginPassword": "str", + "administrator": {"password": "str", "userName": "str"}, + "backup": {"earliestRestoreTime": "str"}, "clusterStatus": "str", + "compute": {"tier": "str"}, "connectionString": "str", "createMode": "str", - "earliestRestoreTime": "str", + "highAvailability": {"targetMode": "str"}, "infrastructureVersion": "str", - "nodeGroupSpecs": [ - {"diskSizeGB": 0, "enableHa": bool, "kind": "str", "nodeCount": 0, "sku": "str"} - ], "previewFeatures": ["str"], "privateEndpointConnections": [ { @@ -82,6 +80,8 @@ def test_mongo_clusters_begin_create_or_update(self, resource_group): "replicaParameters": {"sourceLocation": "str", "sourceResourceId": "str"}, "restoreParameters": {"pointInTimeUTC": "2020-02-20 00:00:00", "sourceResourceId": "str"}, "serverVersion": "str", + "sharding": {"shardCount": 0}, + "storage": {"sizeGb": 0}, }, "systemData": { "createdAt": "2020-02-20 00:00:00", @@ -107,14 +107,15 @@ def test_mongo_clusters_begin_update(self, resource_group): mongo_cluster_name="str", properties={ "properties": { - "administratorLogin": "str", - "administratorLoginPassword": "str", - "nodeGroupSpecs": [ - {"diskSizeGB": 0, "enableHa": bool, "kind": "str", "nodeCount": 0, "sku": "str"} - ], + "administrator": {"password": "str", "userName": "str"}, + "backup": {"earliestRestoreTime": "str"}, + "compute": {"tier": "str"}, + "highAvailability": {"targetMode": "str"}, "previewFeatures": ["str"], "publicNetworkAccess": "str", "serverVersion": "str", + "sharding": {"shardCount": 0}, + "storage": {"sizeGb": 0}, }, "tags": {"str": "str"}, }, diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations_async.py b/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations_async.py index 
c1246795a8f2..854e0d8a44d1 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations_async.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/generated_tests/test_mongo_cluster_mgmt_mongo_clusters_operations_async.py @@ -42,16 +42,14 @@ async def test_mongo_clusters_begin_create_or_update(self, resource_group): "id": "str", "name": "str", "properties": { - "administratorLogin": "str", - "administratorLoginPassword": "str", + "administrator": {"password": "str", "userName": "str"}, + "backup": {"earliestRestoreTime": "str"}, "clusterStatus": "str", + "compute": {"tier": "str"}, "connectionString": "str", "createMode": "str", - "earliestRestoreTime": "str", + "highAvailability": {"targetMode": "str"}, "infrastructureVersion": "str", - "nodeGroupSpecs": [ - {"diskSizeGB": 0, "enableHa": bool, "kind": "str", "nodeCount": 0, "sku": "str"} - ], "previewFeatures": ["str"], "privateEndpointConnections": [ { @@ -84,6 +82,8 @@ async def test_mongo_clusters_begin_create_or_update(self, resource_group): "replicaParameters": {"sourceLocation": "str", "sourceResourceId": "str"}, "restoreParameters": {"pointInTimeUTC": "2020-02-20 00:00:00", "sourceResourceId": "str"}, "serverVersion": "str", + "sharding": {"shardCount": 0}, + "storage": {"sizeGb": 0}, }, "systemData": { "createdAt": "2020-02-20 00:00:00", @@ -111,14 +111,15 @@ async def test_mongo_clusters_begin_update(self, resource_group): mongo_cluster_name="str", properties={ "properties": { - "administratorLogin": "str", - "administratorLoginPassword": "str", - "nodeGroupSpecs": [ - {"diskSizeGB": 0, "enableHa": bool, "kind": "str", "nodeCount": 0, "sku": "str"} - ], + "administrator": {"password": "str", "userName": "str"}, + "backup": {"earliestRestoreTime": "str"}, + "compute": {"tier": "str"}, + "highAvailability": {"targetMode": "str"}, "previewFeatures": ["str"], "publicNetworkAccess": "str", "serverVersion": "str", + "sharding": {"shardCount": 0}, + "storage": {"sizeGb": 0}, }, "tags": {"str": "str"}, }, diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/sdk_packaging.toml b/sdk/mongocluster/azure-mgmt-mongocluster/sdk_packaging.toml index 7ddab9afcdde..fe93999c88c1 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/sdk_packaging.toml +++ b/sdk/mongocluster/azure-mgmt-mongocluster/sdk_packaging.toml @@ -3,7 +3,7 @@ package_name = "azure-mgmt-mongocluster" package_nspkg = "azure-mgmt-nspkg" package_pprint_name = "Mongocluster Management" package_doc_id = "" -is_stable = false +is_stable = true is_arm = true need_msrestazure = false need_azuremgmtcore = true diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/setup.py b/sdk/mongocluster/azure-mgmt-mongocluster/setup.py index 31ba7938cc56..e977f9f289c3 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/setup.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/setup.py @@ -49,7 +49,7 @@ url="https://github.com/Azure/azure-sdk-for-python", keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product classifiers=[ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/tests/conftest.py b/sdk/mongocluster/azure-mgmt-mongocluster/tests/conftest.py index 85d1adc3301c..592a96fc2da3 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tests/conftest.py +++ 
b/sdk/mongocluster/azure-mgmt-mongocluster/tests/conftest.py @@ -1,51 +1,35 @@ +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import os -import platform import pytest -import sys - from dotenv import load_dotenv - -from devtools_testutils import test_proxy, add_general_regex_sanitizer -from devtools_testutils import add_header_regex_sanitizer, add_body_key_sanitizer +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) load_dotenv() +# For security, please avoid record sensitive identity information in recordings @pytest.fixture(scope="session", autouse=True) def add_sanitizers(test_proxy): - subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") - tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") - client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") - client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=subscription_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=tenant_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=client_id, value="00000000-0000-0000-0000-000000000000") - add_general_regex_sanitizer(regex=client_secret, value="00000000-0000-0000-0000-000000000000") + mongoclustermgmt_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + mongoclustermgmt_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + mongoclustermgmt_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + mongoclustermgmt_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=mongoclustermgmt_subscription_id, value="00000000-0000-0000-0000-000000000000") + 
add_general_regex_sanitizer(regex=mongoclustermgmt_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=mongoclustermgmt_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=mongoclustermgmt_client_secret, value="00000000-0000-0000-0000-000000000000") + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") add_header_regex_sanitizer(key="Cookie", value="cookie;") add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_cli_mgmt_mongocluster_lro_test.py b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_cli_mgmt_mongocluster_lro_test.py index c971da9e6e33..24e1c5fa4afe 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_cli_mgmt_mongocluster_lro_test.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_cli_mgmt_mongocluster_lro_test.py @@ -1,9 +1,9 @@ -import pytest from azure.mgmt.mongocluster import MongoClusterMgmtClient from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy +import pytest AZURE_LOCATION = "westus2" -Mongo_Cluster_Name = "pythontestmongocluster" +Mongo_Cluster_Name = "pythonmongoclustertest" @pytest.mark.live_test_only class TestMgmtMongoCluster(AzureMgmtRecordedTestCase): @@ -20,12 +20,12 @@ def test_mongo_clusters_mgmt(self, resource_group): resource={ "location": AZURE_LOCATION, "properties": { - "administratorLogin": "myMongoCluster", - "administratorLoginPassword": "myMongoCluster333", + "administrator": {"password": "mongoAdmin3", "userName": "mongoAdmin"}, + "compute": {"tier": "M30"}, + "highAvailability": {"targetMode": "Disabled"}, "serverVersion": "5.0", - "nodeGroupSpecs": [ - {"diskSizeGB": 128, "enableHa": True, "kind": "Shard", "nodeCount": 1, "sku": "M30"} - ], + "sharding": {"shardCount": 1}, + "storage": {"sizeGb": 128}, }, }, ).result() @@ -37,13 +37,13 @@ def test_mongo_clusters_mgmt(self, resource_group): properties={ "location": AZURE_LOCATION, "properties": { - "administratorLogin": "myMongoCluster", - "administratorLoginPassword": "myMongoCluster333", - "serverVersion": "5.0", - "nodeGroupSpecs": [ - {"kind": "Shard", "sku": "M50", "diskSizeGB": 256, "enableHa": True, "nodeCount": 1} - ], + "administrator": {"userName": "mongoAdmin"}, + "compute": {"tier": "M50"}, + "highAvailability": {"targetMode": "Disabled"}, + "previewFeatures": [], "publicNetworkAccess": "Enabled", + "serverVersion": "5.0", + "storage": {"sizeGb": 256}, }, }, ).result() @@ -63,4 +63,4 @@ def test_mongo_clusters_mgmt(self, resource_group): response = self.client.mongo_clusters.begin_delete( resource_group_name=resource_group.name, mongo_cluster_name=Mongo_Cluster_Name, - ).result() + ).result() \ No newline at end of file diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_mongo_clusters_operations_test.py b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_mongo_clusters_operations_test.py index 10a4f05a5bd1..daf0de490908 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_mongo_clusters_operations_test.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_mongo_clusters_operations_test.py @@ -26,4 +26,3 @@ def test_mongo_clusters_list_by_resource_group(self, resource_group): ) result = [r for r in response] assert result == [] - \ No newline at end of file diff --git 
a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_async_test.py b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_async_test.py index 983f2b35658d..b3f84df306ce 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_async_test.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_async_test.py @@ -25,4 +25,3 @@ async def test_operations_list(self, resource_group): response = self.client.operations.list() result = [r async for r in response] assert result - diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_test.py b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_test.py index d18d4230cec4..07be16fffa7d 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_test.py +++ b/sdk/mongocluster/azure-mgmt-mongocluster/tests/test_mongo_cluster_mgmt_operations_test.py @@ -24,4 +24,3 @@ def test_operations_list(self, resource_group): response = self.client.operations.list() result = [r for r in response] assert result - diff --git a/sdk/mongocluster/azure-mgmt-mongocluster/tsp-location.yaml b/sdk/mongocluster/azure-mgmt-mongocluster/tsp-location.yaml index 520e0bf76147..13cd4360ebd3 100644 --- a/sdk/mongocluster/azure-mgmt-mongocluster/tsp-location.yaml +++ b/sdk/mongocluster/azure-mgmt-mongocluster/tsp-location.yaml @@ -1,4 +1,4 @@ directory: specification/mongocluster/DocumentDB.MongoCluster.Management -commit: 465d34792335e02d9bf8de153734420d769adac1 +commit: f93850874010501f216ceadf19459402efd16d9e repo: Azure/azure-rest-api-specs additionalDirectories: From 5f5ab590bb420d18e8f5b7e9af2216f663bf2246 Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Fri, 27 Sep 2024 10:27:24 -0700 Subject: [PATCH 12/22] Handle when no "direct" packages are changed (#37599) * ensure the default isn't "scan the entire repo" --- eng/pipelines/templates/jobs/ci.yml | 4 ++-- eng/pipelines/templates/steps/resolve-package-targeting.yml | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/eng/pipelines/templates/jobs/ci.yml b/eng/pipelines/templates/jobs/ci.yml index 8d4b2438e53b..ef7191fff84d 100644 --- a/eng/pipelines/templates/jobs/ci.yml +++ b/eng/pipelines/templates/jobs/ci.yml @@ -138,7 +138,7 @@ jobs: parameters: BuildTargetingString: ${{ parameters.BuildTargetingString }} PackagePropertiesFolder: $(Build.ArtifactStagingDirectory)/PackageInfo - IncludeIndirect: true + IncludeIndirect: false - template: /eng/pipelines/templates/steps/build-extended-artifacts.yml parameters: @@ -170,7 +170,7 @@ jobs: parameters: BuildTargetingString: ${{ parameters.BuildTargetingString }} PackagePropertiesFolder: $(Build.ArtifactStagingDirectory)/PackageInfo - IncludeIndirect: true + IncludeIndirect: false - template: ../steps/build-extended-artifacts.yml parameters: diff --git a/eng/pipelines/templates/steps/resolve-package-targeting.yml b/eng/pipelines/templates/steps/resolve-package-targeting.yml index 8ce66b7e2cac..f107489ba000 100644 --- a/eng/pipelines/templates/steps/resolve-package-targeting.yml +++ b/eng/pipelines/templates/steps/resolve-package-targeting.yml @@ -33,6 +33,12 @@ steps: } $setting = $packageProperties -join "," + + # in case we don't expect any packages, we should set the variable to null, which will match NO packages and cause whatever the check + # is to skip with exit 0 (which is what 
we want!) + if (-not $setting) { + $setting = "null" + } } } From 703aa823bbe593f40617d61de22fa6f37698190a Mon Sep 17 00:00:00 2001 From: Paul Van Eck Date: Fri, 27 Sep 2024 11:19:24 -0700 Subject: [PATCH 13/22] [Identity] Update README regarding user-assigned MI (#37595) Examples were added to demonstrate how to authenticate with a user-assigned managed identity using an object ID and resource ID. Signed-off-by: Paul Van Eck --- sdk/identity/azure-identity/README.md | 31 ++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/sdk/identity/azure-identity/README.md b/sdk/identity/azure-identity/README.md index c0db7c1c4e84..b2b398223f43 100644 --- a/sdk/identity/azure-identity/README.md +++ b/sdk/identity/azure-identity/README.md @@ -162,13 +162,42 @@ client = SecretClient("https://my-vault.vault.azure.net", default_credential) ### Examples +These examples demonstrate authenticating `SecretClient` from the [`azure-keyvault-secrets`](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/keyvault/azure-keyvault-secrets) library with `ManagedIdentityCredential`. + + #### Authenticate with a user-assigned managed identity +To authenticate with a user-assigned managed identity, you must specify one of the following IDs for the managed identity. + +##### Client ID + +```python +from azure.identity import ManagedIdentityCredential +from azure.keyvault.secrets import SecretClient + +credential = ManagedIdentityCredential(client_id="managed_identity_client_id") +client = SecretClient("https://my-vault.vault.azure.net", credential) +``` + +##### Resource ID + +```python +from azure.identity import ManagedIdentityCredential +from azure.keyvault.secrets import SecretClient + +resource_id = "/subscriptions//resourceGroups//providers/Microsoft.ManagedIdentity/userAssignedIdentities/" + +credential = ManagedIdentityCredential(identity_config={"resource_id": resource_id}) +client = SecretClient("https://my-vault.vault.azure.net", credential) +``` + +##### Object ID + ```python from azure.identity import ManagedIdentityCredential from azure.keyvault.secrets import SecretClient -credential = ManagedIdentityCredential(client_id=managed_identity_client_id) +credential = ManagedIdentityCredential(identity_config={"object_id": "managed_identity_object_id"}) client = SecretClient("https://my-vault.vault.azure.net", credential) ``` From 689bac08962828622a4262fbfecbbf5b429cb481 Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:59:35 -0700 Subject: [PATCH 14/22] Remove unnecessary variable defaults (#37604) * apply black and prepare for parsing the ci.yml so that we can understand if testproxy is enabled or disabled * The language specific cert trust in Language-Settings has been updated to no longer globally set environment variables SSL_CERT_DIR, SSL_CERT_FILE, and REQUESTS_CA_BUNDLE.
These are dynamically set in proxy_startup.py --- .vscode/cspell.json | 3 +- eng/pipelines/templates/variables/globals.yml | 1 + eng/scripts/Language-Settings.ps1 | 7 +- scripts/devops_tasks/tox_harness.py | 4 +- tools/azure-sdk-tools/ci_tools/build.py | 5 +- .../ci_tools/conda/conda_functions.py | 59 ++++++++++---- .../ci_tools/dependency_analysis.py | 1 + tools/azure-sdk-tools/ci_tools/functions.py | 15 ++-- tools/azure-sdk-tools/ci_tools/git_tools.py | 1 + .../azure-sdk-tools/ci_tools/github_tools.py | 1 + .../ci_tools/parsing/__init__.py | 8 +- .../ci_tools/parsing/parse_functions.py | 74 +++++++++++------- tools/azure-sdk-tools/ci_tools/variables.py | 7 +- .../devtools_testutils/__init__.py | 2 +- .../azure_recorded_testcase.py | 9 ++- .../devtools_testutils/cert.py | 23 +++--- .../devtools_testutils/envvariable_loader.py | 6 +- .../devtools_testutils/fake_credentials.py | 11 ++- .../perfstress_tests/__init__.py | 2 +- .../perfstress_tests/_async_random_stream.py | 3 +- .../perfstress_tests/_batch_perf_test.py | 14 ++-- .../perfstress_tests/_perf_stress_base.py | 13 +--- .../perfstress_tests/_perf_stress_proc.py | 6 +- .../perfstress_tests/_perf_stress_runner.py | 56 +++++++------ .../perfstress_tests/_policies.py | 5 +- .../perfstress_tests/_repeated_timer.py | 3 +- .../system_perfstress/log_test.py | 9 ++- .../pipeline_client_get_test.py | 9 ++- .../system_perfstress/sample_batch_test.py | 18 ++--- .../system_perfstress/sample_event_test.py | 26 ++++--- .../system_perfstress/sleep_test.py | 30 +++++-- .../devtools_testutils/proxy_startup.py | 10 +-- .../devtools_testutils/resource_testcase.py | 6 +- .../devtools_testutils/sanitizers.py | 2 +- .../devtools_testutils/storage/testcase.py | 3 +- .../gh_tools/vnext_issue_creator.py | 43 +++++----- tools/azure-sdk-tools/setup.py | 3 +- tools/azure-sdk-tools/tests/conftest.py | 2 + tools/azure-sdk-tools/tests/example_async.py | 23 ++---- .../tests/integration/proxy/conftest.py | 3 +- .../integration/proxy/test_proxy_startup.py | 2 +- .../service/fake-package/setup.py | 78 +++++++++++++++++++ .../scenarios/ci_yml_present/service/ci.yml | 57 ++++++++++++++ .../service/fake-package/setup.py | 78 +++++++++++++++++++ .../optional_environment_two_options/setup.py | 2 +- .../setup.py | 2 +- .../integration/test_package_discovery.py | 14 ++-- .../azure-sdk-tools/tests/test_ci_metadata.py | 23 ++++++ .../tests/test_individual_functions.py | 1 + .../tests/test_optional_functionality.py | 65 ++++++++-------- .../tests/test_parse_functionality.py | 8 +- .../tests/test_pyproject_interactions.py | 9 +-- .../tests/test_python_snippet_updater.py | 14 +++- .../tests/test_requirements_parse.py | 1 - .../tests/test_whl_discovery.py | 11 ++- .../testutils/common_recordingtestcase.py | 1 + 56 files changed, 608 insertions(+), 284 deletions(-) create mode 100644 tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_not_present/service/fake-package/setup.py create mode 100644 tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/ci.yml create mode 100644 tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/fake-package/setup.py create mode 100644 tools/azure-sdk-tools/tests/test_ci_metadata.py diff --git a/.vscode/cspell.json b/.vscode/cspell.json index 60864c739443..81321a0c4e72 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -470,7 +470,8 @@ "dicom", "WINDOWSVMIMAGE", "LINUXVMIMAGE", - "MACVMIMAGE" + "MACVMIMAGE", + "myuseragent" ], "overrides": [ { diff --git 
a/eng/pipelines/templates/variables/globals.yml b/eng/pipelines/templates/variables/globals.yml index 63309a3390f0..faa1609c97b8 100644 --- a/eng/pipelines/templates/variables/globals.yml +++ b/eng/pipelines/templates/variables/globals.yml @@ -11,3 +11,4 @@ variables: REPOROOT: $(Build.SourcesDirectory) WINDOWS_OUTPUTROOT: $(REPOROOT)\out WindowsContainerImage: 'onebranch.azurecr.io/windows/ltsc2019/vse2022:latest' + GDN_SUPPRESS_FORKED_BUILD_WARNING: true \ No newline at end of file diff --git a/eng/scripts/Language-Settings.ps1 b/eng/scripts/Language-Settings.ps1 index 38f27396130b..06855b2c8275 100644 --- a/eng/scripts/Language-Settings.ps1 +++ b/eng/scripts/Language-Settings.ps1 @@ -391,11 +391,8 @@ function Get-python-DocsMsMetadataForPackage($PackageInfo) { function Import-Dev-Cert-python { - Write-Host "Python Trust Methodology" - - $pathToScript = Resolve-Path (Join-Path -Path $PSScriptRoot -ChildPath "../../scripts/devops_tasks/trust_proxy_cert.py") - python -m pip install requests - python $pathToScript + Write-Host "Python no longer requires an out of proc trust methodology." ` + "The variables SSL_CERT_DIR, SSL_CERT_FILE, and REQUESTS_CA_BUNDLE are now dynamically set in proxy_startup.py" } # Defined in common.ps1 as: diff --git a/scripts/devops_tasks/tox_harness.py b/scripts/devops_tasks/tox_harness.py index 137cb1222e82..e6de27dfc08e 100644 --- a/scripts/devops_tasks/tox_harness.py +++ b/scripts/devops_tasks/tox_harness.py @@ -354,9 +354,9 @@ def prep_and_run_tox(targeted_packages: List[str], parsed_args: Namespace) -> No "setup_execute_tests.py -> tox_harness.py::prep_and_run_tox", ) - return_code = execute_tox_serial(tox_command_tuples) + return_result = execute_tox_serial(tox_command_tuples) if not parsed_args.disablecov: collect_tox_coverage_files(targeted_packages) - sys.exit(return_code) + sys.exit(return_result) #type: ignore diff --git a/tools/azure-sdk-tools/ci_tools/build.py b/tools/azure-sdk-tools/ci_tools/build.py index 40a39e22ec52..17dffebdd359 100644 --- a/tools/azure-sdk-tools/ci_tools/build.py +++ b/tools/azure-sdk-tools/ci_tools/build.py @@ -9,6 +9,7 @@ from ci_tools.versioning.version_shared import set_version_py, set_dev_classifier from ci_tools.versioning.version_set_dev import get_dev_version, format_build_id + def build() -> None: parser = argparse.ArgumentParser( description="""This is the primary entrypoint for the "build" action. 
This command is used to build any package within the azure-sdk-for-python repository.""", @@ -102,7 +103,9 @@ def build() -> None: else: target_dir = repo_root - logging.debug(f"Searching for packages starting from {target_dir} with glob string {args.glob_string} and package filter {args.package_filter_string}") + logging.debug( + f"Searching for packages starting from {target_dir} with glob string {args.glob_string} and package filter {args.package_filter_string}" + ) targeted_packages = discover_targeted_packages( args.glob_string, diff --git a/tools/azure-sdk-tools/ci_tools/conda/conda_functions.py b/tools/azure-sdk-tools/ci_tools/conda/conda_functions.py index 1a03b2a6c184..76226c9de86c 100644 --- a/tools/azure-sdk-tools/ci_tools/conda/conda_functions.py +++ b/tools/azure-sdk-tools/ci_tools/conda/conda_functions.py @@ -239,10 +239,12 @@ def create_setup_files( with open(cfg_location, "w") as f: f.write(SETUP_CFG) + def tolerant_match(pkg_name, input_string): - pattern = pkg_name.replace('-', '[-_]') + pattern = pkg_name.replace("-", "[-_]") return fnmatch.fnmatch(input_string, f"*{pattern}*") + def create_combined_sdist( conda_build: CondaConfiguration, config_assembly_folder: str, config_assembled_folder: str ) -> str: @@ -276,7 +278,8 @@ def create_combined_sdist( [ os.path.join(config_assembled_folder, a) for a in os.listdir(config_assembled_folder) - if os.path.isfile(os.path.join(config_assembled_folder, a)) and tolerant_match(conda_build.name, a) + if os.path.isfile(os.path.join(config_assembled_folder, a)) + and tolerant_match(conda_build.name, a) ] ) ) @@ -526,15 +529,22 @@ def prep_and_create_environment(environment_dir: str) -> None: subprocess.run(["conda", "env", "create", "--prefix", environment_dir], cwd=environment_dir, check=True) subprocess.run( - ["conda", "install", "--yes", "--quiet", "--prefix", environment_dir, "conda-build", "conda-verify", "typing-extensions", "conda-index"], - cwd=environment_dir, - check=True - ) - subprocess.run( - ["conda", "run", "--prefix", environment_dir, "conda", "list"], + [ + "conda", + "install", + "--yes", + "--quiet", + "--prefix", + environment_dir, + "conda-build", + "conda-verify", + "typing-extensions", + "conda-index", + ], cwd=environment_dir, - check=True + check=True, ) + subprocess.run(["conda", "run", "--prefix", environment_dir, "conda", "list"], cwd=environment_dir, check=True) def copy_channel_files(coalescing_channel_dir: str, additional_channel_dir: str) -> None: @@ -573,9 +583,17 @@ def build_conda_packages( if additional_channel_folders: for channel in additional_channel_folders: copy_channel_files(conda_output_dir, channel) - subprocess.run(["conda", "run", "--prefix", conda_env_dir, "python", "-m", "conda_index", conda_output_dir], cwd=repo_root, check=True) + subprocess.run( + ["conda", "run", "--prefix", conda_env_dir, "python", "-m", "conda_index", conda_output_dir], + cwd=repo_root, + check=True, + ) else: - subprocess.run(["conda", "run", "--prefix", conda_env_dir, "python", "-m", "conda_index", conda_output_dir], cwd=repo_root, check=True) + subprocess.run( + ["conda", "run", "--prefix", conda_env_dir, "python", "-m", "conda_index", conda_output_dir], + cwd=repo_root, + check=True, + ) for conda_build in conda_configurations: conda_build_folder = os.path.join(conda_sdist_dir, conda_build.name).replace("\\", "/") @@ -595,7 +613,18 @@ def invoke_conda_build( channels: List[str] = [], ) -> None: - command = ["conda", "run", "--prefix", conda_env_dir, "conda-build", ".", "--output-folder", conda_output_dir, 
"-c", conda_output_dir] + command = [ + "conda", + "run", + "--prefix", + conda_env_dir, + "conda-build", + ".", + "--output-folder", + conda_output_dir, + "-c", + conda_output_dir, + ] for channel in channels: command.extend(["-c", channel]) @@ -641,8 +670,10 @@ def entrypoint(): args = parser.parse_args() - if (not args.config and not args.config_file): - raise argparse.ArgumentError("config arg", "One of either -c (--config) or -f (--file) argument must be provided.") + if not args.config and not args.config_file: + raise argparse.ArgumentError( + "config arg", "One of either -c (--config) or -f (--file) argument must be provided." + ) if args.config_file: with open(args.config_file, "r") as f: diff --git a/tools/azure-sdk-tools/ci_tools/dependency_analysis.py b/tools/azure-sdk-tools/ci_tools/dependency_analysis.py index 674f9a77dae9..6ec422a089b3 100755 --- a/tools/azure-sdk-tools/ci_tools/dependency_analysis.py +++ b/tools/azure-sdk-tools/ci_tools/dependency_analysis.py @@ -11,6 +11,7 @@ import sys import textwrap from typing import List, Set, Dict, Tuple, Any + try: from collections import Sized except: diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py index 1a4c009b53f7..655075de857b 100644 --- a/tools/azure-sdk-tools/ci_tools/functions.py +++ b/tools/azure-sdk-tools/ci_tools/functions.py @@ -42,10 +42,7 @@ ] TEST_COMPATIBILITY_MAP = {} -TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = { - "azure-storage-blob": "pypy", - "azure-eventhub": "pypy" -} +TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = {"azure-storage-blob": "pypy", "azure-eventhub": "pypy"} omit_regression = ( lambda x: "nspkg" not in x @@ -196,11 +193,13 @@ def discover_targeted_packages( # glob the starting package set collected_packages = glob_packages(glob_string, target_root_dir) - logging.info(f"Results for glob_string \"{glob_string}\" and root directory \"{target_root_dir}\" are: {collected_packages}") + logging.info( + f'Results for glob_string "{glob_string}" and root directory "{target_root_dir}" are: {collected_packages}' + ) # apply the additional contains filter collected_packages = [pkg for pkg in collected_packages if additional_contains_filter in pkg] - logging.info(f"Results after additional contains filter: \"{additional_contains_filter}\" {collected_packages}") + logging.info(f'Results after additional contains filter: "{additional_contains_filter}" {collected_packages}') # filter for compatibility, this means excluding a package that doesn't support py36 when we are running a py36 executable if compatibility_filter: @@ -406,9 +405,7 @@ def find_sdist(dist_dir: str, pkg_name: str, pkg_version: str) -> str: packages = [os.path.relpath(w, dist_dir) for w in packages] if not packages: - logging.error( - f"No sdist is found in directory {dist_dir} with package name format {pkg_format}." - ) + logging.error(f"No sdist is found in directory {dist_dir} with package name format {pkg_format}.") return return packages[0] diff --git a/tools/azure-sdk-tools/ci_tools/git_tools.py b/tools/azure-sdk-tools/ci_tools/git_tools.py index 5db07fbce923..498a08b6b10f 100644 --- a/tools/azure-sdk-tools/ci_tools/git_tools.py +++ b/tools/azure-sdk-tools/ci_tools/git_tools.py @@ -1,5 +1,6 @@ """Pure git tools for managing local folder Git. 
""" + import logging from git import Repo, GitCommandError diff --git a/tools/azure-sdk-tools/ci_tools/github_tools.py b/tools/azure-sdk-tools/ci_tools/github_tools.py index e2e91330bd57..5840fb8e2cbc 100644 --- a/tools/azure-sdk-tools/ci_tools/github_tools.py +++ b/tools/azure-sdk-tools/ci_tools/github_tools.py @@ -1,5 +1,6 @@ """Github tools. """ + from contextlib import contextmanager import logging import os diff --git a/tools/azure-sdk-tools/ci_tools/parsing/__init__.py b/tools/azure-sdk-tools/ci_tools/parsing/__init__.py index 3b94023ff674..3ecc50416003 100644 --- a/tools/azure-sdk-tools/ci_tools/parsing/__init__.py +++ b/tools/azure-sdk-tools/ci_tools/parsing/__init__.py @@ -3,12 +3,12 @@ parse_require, get_name_from_specifier, ParsedSetup, - parse_freeze_output, read_setup_py_content, get_build_config, get_config_setting, update_build_config, - compare_string_to_glob_array + compare_string_to_glob_array, + get_ci_config, ) __all__ = [ @@ -16,10 +16,10 @@ "parse_require", "get_name_from_specifier", "ParsedSetup", - "parse_freeze_output", "read_setup_py_content", "get_build_config", "get_config_setting", "update_build_config", - "compare_string_to_glob_array" + "compare_string_to_glob_array", + "get_ci_config", ] diff --git a/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 7ca149b4858f..189417ef2a08 100644 --- a/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -3,6 +3,7 @@ import textwrap import re import fnmatch +import logging try: # py 311 adds this library natively @@ -11,7 +12,7 @@ # otherwise fall back to pypi package tomli import tomli as toml -from typing import Dict, List, Tuple, Any +from typing import Dict, List, Tuple, Any, Optional # Assumes the presence of setuptools from pkg_resources import ( @@ -102,7 +103,7 @@ def from_path(cls, parse_directory_or_file: str): ext_modules, ) - def get_build_config(self) -> Dict[str, Any]: + def get_build_config(self) -> Optional[Dict[str, Any]]: return get_build_config(self.folder) def get_config_setting(self, setting: str, default: Any = True) -> Any: @@ -162,7 +163,7 @@ def get_config_setting(package_path: str, setting: str, default: Any = True) -> return default -def get_build_config(package_path: str) -> Dict[str, Any]: +def get_build_config(package_path: str) -> Optional[Dict[str, Any]]: """ Attempts to retrieve all values within [tools.azure-sdk-build] section of a pyproject.toml. @@ -185,6 +186,29 @@ def get_build_config(package_path: str) -> Dict[str, Any]: return {} +def get_ci_config(package_path: str) -> Optional[Dict[str, Any]]: + """ + Attempts to retrieve the parsed toml content of a CI.yml associated with this package. + """ + if os.path.isfile(package_path): + package_path = os.path.dirname(package_path) + + # this checks exactly one directory up + # for sdk/core/azure-core + # sdk/core/ci.yml is checked only + ci_file = os.path.join(os.path.dirname(package_path), "ci.yml") + + if os.path.exists(ci_file): + import yaml + try: + with open(ci_file, "r") as f: + return yaml.safe_load(f) + except Exception as e: + logging.error(f"Failed to load ci.yml at {ci_file} due to exception {e}") + + return None + + def read_setup_py_content(setup_filename: str) -> str: """ Get setup.py content, returns a string. 
@@ -193,10 +217,9 @@ def read_setup_py_content(setup_filename: str) -> str: content = setup_file.read() return content - def parse_setup( setup_filename: str, -) -> Tuple[str, str, str, List[str], bool, str, str, Dict[str, Any], bool, List[str], str, List[Extension]]: +) -> Tuple[str, str, str, List[str], bool, str, str, Dict[str, Any], bool, List[str], List[str], str, List[Extension]]: """ Used to evaluate a setup.py (or a directory containing a setup.py) and return a tuple containing: ( @@ -234,7 +257,7 @@ def setup(*args, **kwargs): not isinstance(node, ast.Expr) or not isinstance(node.value, ast.Call) or not hasattr(node.value.func, "id") - or node.value.func.id != "setup" + or node.value.func.id != "setup" # type: ignore ): continue parsed.body[index:index] = parsed_mock_setup.body @@ -279,22 +302,23 @@ def setup(*args, **kwargs): ext_package = kwargs.get("ext_package", None) ext_modules = kwargs.get("ext_modules", []) + # fmt: off return ( - name, - version, - python_requires, - requires, - is_new_sdk, - setup_filename, - name_space, - package_data, - include_package_data, - classifiers, - keywords, - ext_package, - ext_modules, + name, # str + version, # str + python_requires, # str + requires, # List[str] + is_new_sdk, # bool + setup_filename, # str + name_space, # str, + package_data, # Dict[str, Any], + include_package_data, # bool, + classifiers, # List[str], + keywords, # List[str] ADJUSTED + ext_package, # str + ext_modules, # List[Extension] ) - + # fmt: on def get_install_requires(setup_path: str) -> List[str]: """ @@ -312,16 +336,6 @@ def parse_require(req: str) -> Requirement: return Requirement.parse(req) -def parse_freeze_output(file_location: str) -> Dict[str, str]: - """ - Takes a python requirements file and returns a dictionary representing the contents. - """ - with open(file_location, "r") as f: - reqs = f.read() - - return dict((req.name, req) for req in parse_requirements(reqs)) - - def get_name_from_specifier(version: str) -> str: """ Given a specifier string of format of , returns the package name. diff --git a/tools/azure-sdk-tools/ci_tools/variables.py b/tools/azure-sdk-tools/ci_tools/variables.py index 088c8518570b..d74417ad8f7d 100644 --- a/tools/azure-sdk-tools/ci_tools/variables.py +++ b/tools/azure-sdk-tools/ci_tools/variables.py @@ -1,5 +1,6 @@ import os + def str_to_bool(input_string: str) -> bool: """ Takes a boolean string representation and returns a bool type value. 
@@ -80,10 +81,14 @@ def in_public() -> int: return 0 + def in_analyze_weekly() -> int: # Returns 4 if the build originates from the tests-weekly analyze job # 0 otherwise - if "tests-weekly" in os.getenv("SYSTEM_DEFINITIONNAME", "") and os.getenv("SYSTEM_STAGEDISPLAYNAME", "") == "Analyze_Test": + if ( + "tests-weekly" in os.getenv("SYSTEM_DEFINITIONNAME", "") + and os.getenv("SYSTEM_STAGEDISPLAYNAME", "") == "Analyze_Test" + ): return 4 return 0 diff --git a/tools/azure-sdk-tools/devtools_testutils/__init__.py b/tools/azure-sdk-tools/devtools_testutils/__init__.py index b006263e6494..5a47b9c6e573 100644 --- a/tools/azure-sdk-tools/devtools_testutils/__init__.py +++ b/tools/azure-sdk-tools/devtools_testutils/__init__.py @@ -116,5 +116,5 @@ "RetryCounter", "FakeTokenCredential", "create_combined_bundle", - "is_live_and_not_recording" + "is_live_and_not_recording", ] diff --git a/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py b/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py index ad7a5c875d50..9120be23f036 100644 --- a/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py +++ b/tools/azure-sdk-tools/devtools_testutils/azure_recorded_testcase.py @@ -88,7 +88,7 @@ def get_credential(self, client_class, **kwargs): secret = os.environ.get("AZURE_CLIENT_SECRET", getattr(os.environ, "CLIENT_SECRET", None)) # Return live credentials only in live mode - if self.is_live: + if self.is_live: # Service principal authentication if tenant_id and client_id and secret: # Create msrestazure class @@ -190,6 +190,7 @@ def generate_sas(self, *args, **kwargs): token = sas_func(*sas_func_pos_args, **kwargs) return token + def get_credential(**kwargs): tenant_id = os.environ.get("AZURE_TENANT_ID", getattr(os.environ, "TENANT_ID", None)) client_id = os.environ.get("AZURE_CLIENT_ID", getattr(os.environ, "CLIENT_ID", None)) @@ -261,6 +262,7 @@ def get_credential(**kwargs): system_access_token = os.environ.get("SYSTEM_ACCESSTOKEN") if service_connection_id and client_id and tenant_id and system_access_token: from azure.identity import AzurePipelinesCredential + if is_async: from azure.identity.aio import AzurePipelinesCredential return AzurePipelinesCredential( @@ -268,12 +270,12 @@ def get_credential(**kwargs): client_id=client_id, service_connection_id=service_connection_id, system_access_token=system_access_token, - **kwargs + **kwargs, ) # This is for testing purposes only, to ensure that the AzurePipelinesCredential is used when available else: force_fallback_dac = os.environ.get("AZURE_TEST_FORCE_FALLBACK_DAC", "false") - if service_connection_id and not(force_fallback_dac): + if service_connection_id and not (force_fallback_dac): # if service_connection_id is set, we believe it is running in CI system_access_token = SANITIZED if system_access_token else None raise ValueError( @@ -282,6 +284,7 @@ def get_credential(**kwargs): ) # Fall back to DefaultAzureCredential from azure.identity import DefaultAzureCredential + if is_async: from azure.identity.aio import DefaultAzureCredential return DefaultAzureCredential(exclude_managed_identity_credential=True, **kwargs) diff --git a/tools/azure-sdk-tools/devtools_testutils/cert.py b/tools/azure-sdk-tools/devtools_testutils/cert.py index 758aae572476..4fd32780de87 100644 --- a/tools/azure-sdk-tools/devtools_testutils/cert.py +++ b/tools/azure-sdk-tools/devtools_testutils/cert.py @@ -1,14 +1,15 @@ from typing import List + def create_combined_bundle(cert_files: List[str], output_location: str) -> None: - """ - 
Combines a list of ascii-encoded PEM certificates into one bundle. - """ - combined_cert_strings: List[str] = [] - - for cert_path in cert_files: - with open(cert_path, 'r') as f: - combined_cert_strings.extend(f.readlines()) - - with open(output_location, 'w') as f: - f.writelines(combined_cert_strings) + """ + Combines a list of ascii-encoded PEM certificates into one bundle. + """ + combined_cert_strings: List[str] = [] + + for cert_path in cert_files: + with open(cert_path, "r") as f: + combined_cert_strings.extend(f.readlines()) + + with open(output_location, "w") as f: + f.writelines(combined_cert_strings) diff --git a/tools/azure-sdk-tools/devtools_testutils/envvariable_loader.py b/tools/azure-sdk-tools/devtools_testutils/envvariable_loader.py index 333d0e0789a0..c5ca037dfb42 100644 --- a/tools/azure-sdk-tools/devtools_testutils/envvariable_loader.py +++ b/tools/azure-sdk-tools/devtools_testutils/envvariable_loader.py @@ -26,7 +26,7 @@ def __init__( random_name_enabled=False, use_cache=True, preparers=None, - **kwargs + **kwargs, ): super(EnvironmentVariableLoader, self).__init__( name_prefix, @@ -128,8 +128,8 @@ def create_resource(self, name, **kwargs): ) else: raise AzureTestError( - 'To pass a live ID you must provide the scrubbed value for recordings to prevent secrets ' - f'from being written to files. {key} was not given. For example: ' + "To pass a live ID you must provide the scrubbed value for recordings to prevent secrets " + f"from being written to files. {key} was not given. For example: " '@EnvironmentVariableLoader("schemaregistry", ' 'schemaregistry_endpoint="fake_endpoint.servicebus.windows.net")' ) diff --git a/tools/azure-sdk-tools/devtools_testutils/fake_credentials.py b/tools/azure-sdk-tools/devtools_testutils/fake_credentials.py index 8c3e92e4087f..4696adca9217 100644 --- a/tools/azure-sdk-tools/devtools_testutils/fake_credentials.py +++ b/tools/azure-sdk-tools/devtools_testutils/fake_credentials.py @@ -4,17 +4,20 @@ SANITIZED = "Sanitized" # General-use fake credentials -FAKE_ACCESS_TOKEN = "eyJhbGciOiJub25lIiwidHlwIjoiSldUIn0.eyJlbWFpbCI6IkJvYkBjb250b3NvLmNvbSIsImdpdmVuX25hbWUiOiJCb2I" \ - "iLCJpc3MiOiJodHRwOi8vRGVmYXVsdC5Jc3N1ZXIuY29tIiwiYXVkIjoiaHR0cDovL0RlZmF1bHQuQXVkaWVuY2UuY29tIiwiaWF0IjoiMTYwNz" \ +FAKE_ACCESS_TOKEN = ( + "eyJhbGciOiJub25lIiwidHlwIjoiSldUIn0.eyJlbWFpbCI6IkJvYkBjb250b3NvLmNvbSIsImdpdmVuX25hbWUiOiJCb2I" + "iLCJpc3MiOiJodHRwOi8vRGVmYXVsdC5Jc3N1ZXIuY29tIiwiYXVkIjoiaHR0cDovL0RlZmF1bHQuQXVkaWVuY2UuY29tIiwiaWF0IjoiMTYwNz" "k3ODY4MyIsIm5iZiI6IjE2MDc5Nzg2ODMiLCJleHAiOiIxNjA3OTc4OTgzIn0." 
+) FAKE_ID = "00000000-0000-0000-0000-000000000000" FAKE_LOGIN_PASSWORD = "F4ke_L0gin_P4ss" # Service-specific fake credentials BATCH_TEST_PASSWORD = "kt#_gahr!@aGERDXA" MGMT_HDINSIGHT_FAKE_KEY = "qFmud5LfxcCxWUvWcGMhKDp0v0KuBRLsO/AIddX734W7lzdInsVMsB5ILVoOrF+0fCfk/IYYy5SJ9Q+2v4aihQ==" -SERVICEBUS_FAKE_SAS = "SharedAccessSignature sr=https%3A%2F%2Ffoo.servicebus.windows.net&sig=dummyValue%3D&se=168726" \ - "7490&skn=dummyKey" +SERVICEBUS_FAKE_SAS = ( + "SharedAccessSignature sr=https%3A%2F%2Ffoo.servicebus.windows.net&sig=dummyValue%3D&se=168726" "7490&skn=dummyKey" +) STORAGE_ACCOUNT_FAKE_KEY = "NzhL3hKZbJBuJ2484dPTR+xF30kYaWSSCbs2BzLgVVI1woqeST/1IgqaLm6QAOTxtGvxctSNbIR/1hW8yH+bJg==" diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/__init__.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/__init__.py index 2556fca820a2..11b2b1d32cdf 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/__init__.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/__init__.py @@ -21,7 +21,7 @@ "WriteStream", "AsyncIteratorRandomStream", "AsyncRandomStream", - "get_random_bytes" + "get_random_bytes", ] diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_async_random_stream.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_async_random_stream.py index ee9e2ab8e0ac..e0f40fc1b226 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_async_random_stream.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_async_random_stream.py @@ -63,8 +63,9 @@ def close(self): class AsyncIteratorRandomStream(AsyncIterator[bytes]): """ - Async random stream of bytes for methods that accept AsyncIterator as input. + Async random stream of bytes for methods that accept AsyncIterator as input. 
""" + def __init__(self, length, initial_buffer_length=_DEFAULT_LENGTH): self._base_data = get_random_bytes(initial_buffer_length) self._data_length = length diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_batch_perf_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_batch_perf_test.py index c1cb29666fb8..25cebf4e0c69 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_batch_perf_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_batch_perf_test.py @@ -26,7 +26,7 @@ def __init__(self, arguments): if self.args.insecure: # Disable SSL verification for SDK Client - self._client_kwargs['connection_verify'] = False + self._client_kwargs["connection_verify"] = False # Disable SSL verification for test proxy session self._session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) @@ -34,7 +34,8 @@ def __init__(self, arguments): # Suppress warnings import warnings from urllib3.exceptions import InsecureRequestWarning - warnings.simplefilter('ignore', InsecureRequestWarning) + + warnings.simplefilter("ignore", InsecureRequestWarning) else: self._session = aiohttp.ClientSession() @@ -42,7 +43,7 @@ def __init__(self, arguments): # Add policy to redirect requests to the test proxy self._test_proxy = self.args.test_proxies[self._parallel_index % len(self.args.test_proxies)] self._test_proxy_policy = PerfTestProxyPolicy(self._test_proxy) - self._client_kwargs['per_retry_policies'] = [self._test_proxy_policy] + self._client_kwargs["per_retry_policies"] = [self._test_proxy_policy] async def post_setup(self) -> None: """ @@ -78,11 +79,8 @@ async def pre_cleanup(self) -> None: """ # cSpell:ignore inmemory # Only stop playback if it was successfully started - if self._test_proxy_policy and self._test_proxy_policy.mode == 'playback': - headers = { - "x-recording-id": self._recording_id, - "x-purge-inmemory-recording": "true" - } + if self._test_proxy_policy and self._test_proxy_policy.mode == "playback": + headers = {"x-recording-id": self._recording_id, "x-purge-inmemory-recording": "true"} url = urljoin(self._test_proxy, "/playback/stop") async with self._session.post(url, headers=headers) as resp: assert resp.status == 200 diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_base.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_base.py index 8c806d733cfd..1052aac0a477 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_base.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_base.py @@ -93,7 +93,7 @@ async def run_all_async(self, duration: int, *, run_profiler: bool = False, **kw """ Run all async tests, including both warmup and duration. 
""" - + @staticmethod @abc.abstractmethod def add_arguments(parser: argparse.ArgumentParser) -> None: @@ -225,20 +225,15 @@ def _save_profile(self, sync: str, output_path: Optional[str] = None) -> None: """ if self._profile: profile_name = output_path or "{}/cProfile-{}-{}-{}.pstats".format( - os.getcwd(), - self.__class__.__name__, - self._parallel_index, - sync) + os.getcwd(), self.__class__.__name__, self._parallel_index, sync + ) print("Dumping profile data to {}".format(profile_name)) self._profile.dump_stats(profile_name) else: print("No profile generated.") def _print_profile_stats( - self, - *, - sort_key: pstats.SortKey = PSTATS_PRINT_DEFAULT_SORT_KEY, - count: int = PSTATS_PRINT_DEFAULT_LINE_COUNT + self, *, sort_key: pstats.SortKey = PSTATS_PRINT_DEFAULT_SORT_KEY, count: int = PSTATS_PRINT_DEFAULT_LINE_COUNT ) -> None: """Print the profile stats to stdout. diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_proc.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_proc.py index 4540b07af03e..34a4e8d65fbb 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_proc.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_proc.py @@ -38,6 +38,7 @@ def _synchronize(stages, ignore_error=False): if not ignore_error: raise + async def _start_tests(index, test_class, num_tests, args, test_stages, results, status): """Create test classes, run setup, tests and cleanup.""" # Create all parallel tests with a global unique index value @@ -111,10 +112,7 @@ async def _run_tests(duration: int, args, tests, results, status, *, with_profil """Run the listed tests either in parallel asynchronously or in a thread pool.""" # Kick of a status monitoring thread. stop_status = threading.Event() - status_thread = threading.Thread( - target=_report_status, - args=(status, tests, stop_status), - daemon=True) + status_thread = threading.Thread(target=_report_status, args=(status, tests, stop_status), daemon=True) status_thread.start() try: diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_runner.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_runner.py index 791727745d58..215ff17351b9 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_runner.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_perf_stress_runner.py @@ -81,7 +81,11 @@ def _parse_args(self) -> str: "-p", "--parallel", nargs="?", type=int, help="Degree of parallelism to run with. Default is 1.", default=1 ) per_test_arg_parser.add_argument( - "--processes", nargs="?", type=int, help="Number of concurrent processes over which to distribute the parallel runs. Default is the number of cores.", default=multiprocessing.cpu_count() + "--processes", + nargs="?", + type=int, + help="Number of concurrent processes over which to distribute the parallel runs. Default is the number of cores.", + default=multiprocessing.cpu_count(), ) per_test_arg_parser.add_argument( "-d", "--duration", nargs="?", type=int, help="Duration of the test in seconds. Default is 10.", default=10 @@ -99,11 +103,13 @@ def _parse_args(self) -> str: "--profile", action="store_true", help="Run tests with profiler. Default is False.", default=False ) per_test_arg_parser.add_argument( - "--profile-path", nargs="?", type=str, help="File path to store profiler results. If not specified, results will be stored in the current directory." 
+ "--profile-path", + nargs="?", + type=str, + help="File path to store profiler results. If not specified, results will be stored in the current directory.", ) per_test_arg_parser.add_argument( - "-x", "--test-proxies", help="URIs of TestProxy Servers (separated by ';')", - type=lambda s: s.split(';') + "-x", "--test-proxies", help="URIs of TestProxy Servers (separated by ';')", type=lambda s: s.split(";") ) per_test_arg_parser.add_argument( "--insecure", action="store_true", help="Disable SSL validation. Default is False.", default=False @@ -127,8 +133,8 @@ def _parse_args(self) -> str: def _discover_tests(self, test_folder_path): base_classes = [PerfStressTest, BatchPerfTest, EventPerfTest] self._test_classes = {} - if os.path.isdir(os.path.join(test_folder_path, 'tests')): - test_folder_path = os.path.join(test_folder_path, 'tests') + if os.path.isdir(os.path.join(test_folder_path, "tests")): + test_folder_path = os.path.join(test_folder_path, "tests") sys.path.append(test_folder_path) self.logger.debug("Searching for tests in {}".format(test_folder_path)) @@ -177,9 +183,9 @@ async def start(self): # of threads that will be run by each process. # E.g. if parallel=10, processes=4: mapping=[(0, 3), (3, 3), (6, 2), (8, 2)] k, m = divmod(self.per_test_args.parallel, processes) - mapping = [(i*k+min(i, m), ((i+1)*k+min(i+1, m)) - (i*k+min(i, m))) for i in range(processes)] + mapping = [(i * k + min(i, m), ((i + 1) * k + min(i + 1, m)) - (i * k + min(i, m))) for i in range(processes)] - ctx = multiprocessing.get_context('spawn') + ctx = multiprocessing.get_context("spawn") self.results = ctx.Queue() self.status = ctx.JoinableQueue() self.status_thread = RepeatedTimer(1, self._print_status, self.per_test_args.parallel, start_now=False) @@ -194,22 +200,27 @@ async def start(self): "Tests": ctx.Barrier(processes + 1), "Pre Cleanup": ctx.Barrier(processes + 1), "Cleanup": ctx.Barrier(processes + 1), - "Finished": ctx.Barrier(processes + 1) + "Finished": ctx.Barrier(processes + 1), } try: - futures = [ctx.Process( - target=run_process, - args=( - index, - self.per_test_args, - self._test_classes[self._test_name][1], - self._test_name, - threads, - self.test_stages, - self.results, - self.status), - daemon=True) for index, threads in mapping] + futures = [ + ctx.Process( + target=run_process, + args=( + index, + self.per_test_args, + self._test_classes[self._test_name][1], + self._test_name, + threads, + self.test_stages, + self.results, + self.status, + ), + daemon=True, + ) + for index, threads in mapping + ] [f.start() for f in futures] # All tests wait to start "Setup". 
@@ -251,7 +262,6 @@ async def start(self): except Exception as e: self.logger.warn("Error closing processes: " + str(e)) - def _report_results(self): """Calculate and log the test run results across all child processes""" operations = [] @@ -269,7 +279,7 @@ def _report_results(self): total_operations, self._format_number(weighted_average_seconds, 4), self._format_number(operations_per_second, 4), - self._format_number(seconds_per_operation, 4) + self._format_number(seconds_per_operation, 4), ) ) else: diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_policies.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_policies.py index 7b1f4f25f977..968fa0ca0c26 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_policies.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_policies.py @@ -18,10 +18,7 @@ def __init__(self, url): def redirect_to_test_proxy(self, request): if self.recording_id and self.mode: live_endpoint = urlparse(request.http_request.url) - redirected = live_endpoint._replace( - scheme=self._proxy_url.scheme, - netloc=self._proxy_url.netloc - ) + redirected = live_endpoint._replace(scheme=self._proxy_url.scheme, netloc=self._proxy_url.netloc) request.http_request.url = redirected.geturl() request.http_request.headers["x-recording-id"] = self.recording_id request.http_request.headers["x-recording-mode"] = self.mode diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_repeated_timer.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_repeated_timer.py index 63a8af99e415..b8acaf8c1335 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_repeated_timer.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/_repeated_timer.py @@ -6,6 +6,7 @@ import itertools from threading import Timer, Lock + # Credit to https://stackoverflow.com/questions/3393612/run-certain-code-every-n-seconds class RepeatedTimer(object): def __init__(self, interval, function, *args, start_now: bool = True, **kwargs): @@ -46,7 +47,7 @@ def __init__(self): def increment(self): next(self._counter) - + def reset(self): with self._read_lock: self._number_of_read = 0 diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/log_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/log_test.py index 6ea05b6ec4e2..4855efba407b 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/log_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/log_test.py @@ -9,6 +9,7 @@ from devtools_testutils.perfstress_tests import PerfStressTest + # Used for logging every step and property of the perf test class LogTest(PerfStressTest): _logged_global_completed_operations = 0 @@ -41,11 +42,13 @@ async def run_async(self): async def cleanup(self): await super().cleanup() - self.log(f'cleanup() - Completed Operations: {self._logged_completed_operations}') + self.log(f"cleanup() - Completed Operations: {self._logged_completed_operations}") async def global_cleanup(self): await super().global_cleanup() - self.log(f'global_cleanup() - Global Completed Operations: {self._logged_global_completed_operations}') + self.log(f"global_cleanup() - Global Completed Operations: {self._logged_global_completed_operations}") def log(self, message): - print(f'[{(time.time() - type(self).start_time):.3f}] [PID: {os.getpid()}] [Parallel: {self._parallel_index}] {message}') + print( + f"[{(time.time() - 
type(self).start_time):.3f}] [PID: {os.getpid()}] [Parallel: {self._parallel_index}] {message}" + ) diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/pipeline_client_get_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/pipeline_client_get_test.py index b25f2fea762c..c83e6df2836e 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/pipeline_client_get_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/pipeline_client_get_test.py @@ -55,6 +55,11 @@ async def close(self): @staticmethod def add_arguments(parser): - parser.add_argument("--first-run-extra-requests", type=int, default=0, help='Extra requests to send on first run. ' + - 'Simulates SDKs which require extra requests (like authentication) on first API call.') + parser.add_argument( + "--first-run-extra-requests", + type=int, + default=0, + help="Extra requests to send on first run. " + + "Simulates SDKs which require extra requests (like authentication) on first API call.", + ) parser.add_argument("-u", "--url", required=True) diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_batch_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_batch_test.py index 16b7263a45ec..de3a9858d5d6 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_batch_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_batch_test.py @@ -6,12 +6,14 @@ from devtools_testutils.perfstress_tests import BatchPerfTest import random -class MockReceiver(): + +class MockReceiver: def receive(self, min_messages=1, max_messages=5): for i in range(random.randint(min_messages, max_messages)): yield i -class AsyncMockReceiver(): + +class AsyncMockReceiver: async def receive(self, min_messages=1, max_messages=5): for i in range(random.randint(min_messages, max_messages)): yield i @@ -27,20 +29,18 @@ def __init__(self, arguments): def run_batch_sync(self) -> int: messages = self.receiver_client.receive( - max_messages=self.args.max_message_count, - min_messages=self.args.min_message_count + max_messages=self.args.max_message_count, min_messages=self.args.min_message_count ) return len(list(messages)) async def run_batch_async(self) -> int: messages = self.async_receiver_client.receive( - max_messages=self.args.max_message_count, - min_messages=self.args.min_message_count + max_messages=self.args.max_message_count, min_messages=self.args.min_message_count ) return len([m async for m in messages]) - + @staticmethod def add_arguments(parser): super(SampleBatchTest, SampleBatchTest).add_arguments(parser) - parser.add_argument('--max-message-count', nargs='?', type=int, default=10) - parser.add_argument('--min-message-count', nargs='?', type=int, default=0) + parser.add_argument("--max-message-count", nargs="?", type=int, default=10) + parser.add_argument("--min-message-count", nargs="?", type=int, default=0) diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_event_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_event_test.py index 4ce04b1d6b2c..4b1b9f90b263 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_event_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sample_event_test.py @@ -11,7 
+11,7 @@ from devtools_testutils.perfstress_tests import EventPerfTest -class MockEventProcessor(): +class MockEventProcessor: def __init__(self, partitions, process_event, process_error, error_after=None, max_events_per_second=None): self.partitions = partitions @@ -20,13 +20,13 @@ def __init__(self, partitions, process_event, process_error, error_after=None, m self.shutdown = False self._error_raised = False self._error_lock = threading.Lock() - self._event_args = [{'partition': i, 'data': 'hello'} for i in range(self.partitions)] + self._event_args = [{"partition": i, "data": "hello"} for i in range(self.partitions)] self._events_raised = [0] * self.partitions self._starttime = None self._process_event = process_event self._process_error = process_error self._executor = ThreadPoolExecutor(max_workers=self.partitions) - + def _process_error_after(self, partition): with self._error_lock: if not self._error_raised: @@ -65,7 +65,7 @@ def _process(self, partition): self._process_event(**event_args) -class AsyncMockEventProcessor(): +class AsyncMockEventProcessor: def __init__(self, partitions, process_event, process_error, error_after=None, max_events_per_second=None): self.partitions = partitions @@ -77,7 +77,7 @@ def __init__(self, partitions, process_event, process_error, error_after=None, m self._starttime = None self._process_event = process_event self._process_error = process_error - self._event_args = [{'partition': i, 'data': 'hello'} for i in range(self.partitions)] + self._event_args = [{"partition": i, "data": "hello"} for i in range(self.partitions)] self._events_raised = [0] * self.partitions async def _process_error_after(self, partition): @@ -131,14 +131,14 @@ def __init__(self, arguments): self.process_event_sync, self.process_error_sync, error_after=self.args.error_after_seconds, - max_events_per_second=self.args.max_events_per_second + max_events_per_second=self.args.max_events_per_second, ) self.async_event_processor = AsyncMockEventProcessor( self.args.partitions, self.process_event_async, self.process_error_async, error_after=self.args.error_after_seconds, - max_events_per_second=self.args.max_events_per_second + max_events_per_second=self.args.max_events_per_second, ) def process_event_sync(self, **kwargs): @@ -176,10 +176,14 @@ async def stop_events_async(self) -> None: Stop the process for receiving events. """ self.async_event_processor.shutdown = True - + @staticmethod def add_arguments(parser): super(SampleEventTest, SampleEventTest).add_arguments(parser) - parser.add_argument('--error-after-seconds', nargs='?', type=int, help='Raise error after this number of seconds.') - parser.add_argument('--max-events-per-second', nargs='?', type=int, help='Maximum events per second across all partitions.') - parser.add_argument('--partitions', nargs='?', type=int, help="Number of partitions. Default is 8.", default=8) + parser.add_argument( + "--error-after-seconds", nargs="?", type=int, help="Raise error after this number of seconds." + ) + parser.add_argument( + "--max-events-per-second", nargs="?", type=int, help="Maximum events per second across all partitions." + ) + parser.add_argument("--partitions", nargs="?", type=int, help="Number of partitions. 
Default is 8.", default=8) diff --git a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sleep_test.py b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sleep_test.py index fa700e752adb..fba0e9ba7f08 100644 --- a/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sleep_test.py +++ b/tools/azure-sdk-tools/devtools_testutils/perfstress_tests/system_perfstress/sleep_test.py @@ -15,7 +15,9 @@ class SleepTest(PerfStressTest): def __init__(self, arguments): super().__init__(arguments) - self.seconds_per_operation = (self.args.initial_delay_ms / 1000) * math.pow(self.args.instance_growth_factor, self._parallel_index) + self.seconds_per_operation = (self.args.initial_delay_ms / 1000) * math.pow( + self.args.instance_growth_factor, self._parallel_index + ) def run_sync(self): time.sleep(self.seconds_per_operation) @@ -28,13 +30,25 @@ async def run_async(self): @staticmethod def add_arguments(parser): super(SleepTest, SleepTest).add_arguments(parser) - parser.add_argument('--initial-delay-ms', nargs='?', type=int, default=1000, help='Initial delay (in milliseconds)') - + parser.add_argument( + "--initial-delay-ms", nargs="?", type=int, default=1000, help="Initial delay (in milliseconds)" + ) + # Used for verifying the perf framework correctly computes average throughput across parallel tests of different speed. # Each instance of this test completes operations at a different rate, to allow for testing scenarios where # some instances are still waiting when time expires. - parser.add_argument('--instance-growth-factor', nargs='?', type=float, default=1, - help='Instance growth factor. The delay of instance N will be (InitialDelayMS * (InstanceGrowthFactor ^ InstanceCount)).') - - parser.add_argument('--iteration-growth-factor', nargs='?', type=float, default=1, - help='Iteration growth factor. The delay of iteration N will be (InitialDelayMS * (IterationGrowthFactor ^ IterationCount)).') + parser.add_argument( + "--instance-growth-factor", + nargs="?", + type=float, + default=1, + help="Instance growth factor. The delay of instance N will be (InitialDelayMS * (InstanceGrowthFactor ^ InstanceCount)).", + ) + + parser.add_argument( + "--iteration-growth-factor", + nargs="?", + type=float, + default=1, + help="Iteration growth factor. 
The delay of iteration N will be (InitialDelayMS * (IterationGrowthFactor ^ IterationCount)).", + ) diff --git a/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py b/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py index 70a924cf9979..8fdd59e5ad2a 100644 --- a/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py +++ b/tools/azure-sdk-tools/devtools_testutils/proxy_startup.py @@ -169,9 +169,9 @@ def check_certificate_location(repo_root: str) -> None: """ existing_root_pem = certifi.where() - local_dev_cert = os.path.abspath(os.path.join(repo_root, 'eng', 'common', 'testproxy', 'dotnet-devcert.crt')) + local_dev_cert = os.path.abspath(os.path.join(repo_root, "eng", "common", "testproxy", "dotnet-devcert.crt")) combined_filename = os.path.basename(local_dev_cert).split(".")[0] + ".pem" - combined_folder = os.path.join(repo_root, '.certificate') + combined_folder = os.path.join(repo_root, ".certificate") combined_location = os.path.join(combined_folder, combined_filename) # If no local certificate folder exists, create one @@ -316,11 +316,7 @@ def set_common_sanitizers() -> None: # General regex sanitizers for sensitive patterns throughout interactions batch_sanitizers[Sanitizer.GENERAL_REGEX] = [ - { - "regex": "(?:[\\?&](sig|se|st|sv)=)(?[^&\\\"\\s]*)", - "group_for_replace": "secret", - "value": SANITIZED - }, + {"regex": '(?:[\\?&](sig|se|st|sv)=)(?[^&\\"\\s]*)', "group_for_replace": "secret", "value": SANITIZED}, ] # Header regex sanitizers for sensitive patterns in request/response headers diff --git a/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py b/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py index 096aa5fd32b3..af91a7aa2e65 100644 --- a/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py +++ b/tools/azure-sdk-tools/devtools_testutils/resource_testcase.py @@ -98,7 +98,7 @@ def create_resource(self, name, **kwargs): id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/" + name, ) if name != self.moniker: - add_general_string_sanitizer(target=name, value=self.moniker) + add_general_string_sanitizer(target=name, value=self.moniker) return { self.parameter_name: self.resource, self.parameter_name_for_location: self.location, @@ -116,7 +116,9 @@ def remove_resource(self, name, **kwargs): raise AzureTestError("Timed out waiting for resource group to be deleted.") else: self.client.resource_groups.begin_delete(name, polling=False).result() - except Exception as err: # NOTE: some track 1 libraries do not have azure-core installed. Cannot use HttpResponseError here + except ( + Exception + ) as err: # NOTE: some track 1 libraries do not have azure-core installed. 
Cannot use HttpResponseError here logging.info("Failed to delete resource group with name {}".format(name)) logging.info("{}".format(err)) pass diff --git a/tools/azure-sdk-tools/devtools_testutils/sanitizers.py b/tools/azure-sdk-tools/devtools_testutils/sanitizers.py index a13cc8157c65..83dbd7be7faa 100644 --- a/tools/azure-sdk-tools/devtools_testutils/sanitizers.py +++ b/tools/azure-sdk-tools/devtools_testutils/sanitizers.py @@ -490,7 +490,7 @@ def remove_batch_sanitizers(sanitizers: List[str], headers: Optional[Dict] = Non if is_live_and_not_recording(): return - data = {"Sanitizers" : sanitizers} + data = {"Sanitizers": sanitizers} headers_to_send = {"Content-Type": "application/json"} if headers is not None: diff --git a/tools/azure-sdk-tools/devtools_testutils/storage/testcase.py b/tools/azure-sdk-tools/devtools_testutils/storage/testcase.py index baadca35453a..4cbfced500ca 100644 --- a/tools/azure-sdk-tools/devtools_testutils/storage/testcase.py +++ b/tools/azure-sdk-tools/devtools_testutils/storage/testcase.py @@ -179,7 +179,6 @@ def assert_download_progress(self, size, max_chunk_size, max_get_size, progress) assert i[0] % max_chunk_size == 0 or i[0] % max_chunk_size == small_chunk_size assert i[1] == size - def get_datetime_variable(self, variables, name, dt): dt_string = variables.setdefault(name, dt.isoformat()) return datetime.strptime(dt_string, "%Y-%m-%dT%H:%M:%S.%f") @@ -216,4 +215,4 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.log_stream.close() # reset logging since we messed with the setting - self.test_case.configure_logging() \ No newline at end of file + self.test_case.configure_logging() diff --git a/tools/azure-sdk-tools/gh_tools/vnext_issue_creator.py b/tools/azure-sdk-tools/gh_tools/vnext_issue_creator.py index d6983f126f6b..50ea9f297601 100644 --- a/tools/azure-sdk-tools/gh_tools/vnext_issue_creator.py +++ b/tools/azure-sdk-tools/gh_tools/vnext_issue_creator.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- -# This script is used to create issues for client libraries failing the vnext of mypy, pyright, and pylint. +# This script is used to create issues for client libraries failing the vnext of mypy, pyright, and pylint. 
import sys import os @@ -22,26 +22,21 @@ def get_version_running(check_type: CHECK_TYPE) -> str: - commands = [ - sys.executable, - "-m", - check_type, - "--version" - ] + commands = [sys.executable, "-m", check_type, "--version"] version = subprocess.run( commands, check=True, capture_output=True, ) version = version.stdout.rstrip().decode("utf-8") - version_running = re.findall(r'(\d+.\d+.\d+)', version)[0] + version_running = re.findall(r"(\d+.\d+.\d+)", version)[0] logging.info(f"Running {check_type} version {version_running}") return version_running def get_build_link(check_type: CHECK_TYPE) -> str: - build_id = os.getenv('BUILD_BUILDID') - job_id = os.getenv('SYSTEM_JOBID') + build_id = os.getenv("BUILD_BUILDID") + job_id = os.getenv("SYSTEM_JOBID") next_id: str if check_type == "mypy": @@ -51,7 +46,9 @@ def get_build_link(check_type: CHECK_TYPE) -> str: if check_type == "pylint": next_id = "e1fa7d9e-8471-5a74-cd7d-e1c9a992e07e" - return f"https://dev.azure.com/azure-sdk/internal/_build/results?buildId={build_id}&view=logs&j={job_id}&t={next_id}" + return ( + f"https://dev.azure.com/azure-sdk/internal/_build/results?buildId={build_id}&view=logs&j={job_id}&t={next_id}" + ) def get_merge_dates(year: str) -> typing.List[datetime.datetime]: @@ -69,8 +66,9 @@ def get_merge_dates(year: str) -> typing.List[datetime.datetime]: merge_dates = [] for month in merge_months: - code_complete = [day for week in month for day in week if \ - day.weekday() == calendar.FRIDAY and day.month in [1, 4, 7, 10]][0] + code_complete = [ + day for week in month for day in week if day.weekday() == calendar.FRIDAY and day.month in [1, 4, 7, 10] + ][0] monday_after_release_week = code_complete + datetime.timedelta(days=10) merge_dates.append(monday_after_release_week) return merge_dates @@ -82,9 +80,9 @@ def get_date_for_version_bump(today: datetime.datetime) -> str: merge_date = min(date for date in merge_dates if date >= today) except ValueError: # today's date is after October merge date, so rollover to next year - merge_dates = get_merge_dates(today.year+1) + merge_dates = get_merge_dates(today.year + 1) merge_date = min(date for date in merge_dates if date >= today) - return merge_date.strftime('%Y-%m-%d') + return merge_date.strftime("%Y-%m-%d") def create_vnext_issue(package_name: str, check_type: CHECK_TYPE) -> None: @@ -104,8 +102,11 @@ def create_vnext_issue(package_name: str, check_type: CHECK_TYPE) -> None: build_link = get_build_link(check_type) merge_date = get_date_for_version_bump(today) error_type = "linting" if check_type == "pylint" else "typing" - guide_link = "[Pylint Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/pylint_checking.md)" \ - if check_type == "pylint" else "[Typing Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/static_type_checking.md#run-mypy)" + guide_link = ( + "[Pylint Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/pylint_checking.md)" + if check_type == "pylint" + else "[Typing Guide](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/static_type_checking.md#run-mypy)" + ) title = f"{package_name} needs {error_type} updates for {check_type} version {version}" template = ( @@ -116,7 +117,7 @@ def create_vnext_issue(package_name: str, check_type: CHECK_TYPE) -> None: f"\n**{check_type.capitalize()} errors:** [Link to build ({today.strftime('%Y-%m-%d')})]({build_link})" f"\n**How to fix:** Run the `next-{check_type}` tox command at the library package-level and resolve " f"the {error_type} 
errors.\n" - f"1) `../{package_name}>pip install \"tox<5\"`\n" + f'1) `../{package_name}>pip install "tox<5"`\n' f"2) `../{package_name}>tox run -e next-{check_type} -c ../../../eng/tox/tox.ini --root .`\n\n" f"See the {guide_link} for more information." ) @@ -124,11 +125,7 @@ def create_vnext_issue(package_name: str, check_type: CHECK_TYPE) -> None: # create an issue for the library failing the vnext check if not vnext_issue: logging.info(f"Issue does not exist for {package_name} with {check_type} version {version}. Creating...") - repo.create_issue( - title=title, - body=template, - labels=[check_type] - ) + repo.create_issue(title=title, body=template, labels=[check_type]) return # an issue exists, let's update it so it reflects the latest typing/linting errors diff --git a/tools/azure-sdk-tools/setup.py b/tools/azure-sdk-tools/setup.py index b18cb0567215..cb40aa24f1e2 100644 --- a/tools/azure-sdk-tools/setup.py +++ b/tools/azure-sdk-tools/setup.py @@ -18,6 +18,7 @@ "urllib3", "tomli-w==1.0.0", "azure-core", + "pyyaml", # Perf/Build "ConfigArgParse>=0.12.0", ] @@ -50,7 +51,7 @@ "sdk_analyze_deps=ci_tools.dependency_analysis:analyze_dependencies", "sdk_find_invalid_versions=ci_tools.versioning.find_invalid_versions:find_invalid_versions_main", "sdk_verify_keywords=ci_tools.keywords_verify:entrypoint", - "systemperf=devtools_testutils.perfstress_tests:run_system_perfstress_tests_cmd" + "systemperf=devtools_testutils.perfstress_tests:run_system_perfstress_tests_cmd", ], }, extras_require={ diff --git a/tools/azure-sdk-tools/tests/conftest.py b/tools/azure-sdk-tools/tests/conftest.py index 676b2c94b07f..f677898af79d 100644 --- a/tools/azure-sdk-tools/tests/conftest.py +++ b/tools/azure-sdk-tools/tests/conftest.py @@ -5,9 +5,11 @@ from typing import List from tempfile import TemporaryDirectory + @pytest.fixture() def tmp_directory_create(): with TemporaryDirectory() as tmp_dir: + def create_temp_directory(fake_creation_paths: List[str]) -> TemporaryDirectory: for file in fake_creation_paths: target_path = os.path.join(tmp_dir, file) diff --git a/tools/azure-sdk-tools/tests/example_async.py b/tools/azure-sdk-tools/tests/example_async.py index 7f439e273ac9..1747e9fe7890 100644 --- a/tools/azure-sdk-tools/tests/example_async.py +++ b/tools/azure-sdk-tools/tests/example_async.py @@ -38,16 +38,14 @@ async def test_example_trio(): async def req(): request = HttpRequest("GET", "https://bing.com/") - policies = [ - UserAgentPolicy("myuseragent"), - AsyncRedirectPolicy() - ] + policies = [UserAgentPolicy("myuseragent"), AsyncRedirectPolicy()] # [START trio] from azure.core.pipeline.transport import TrioRequestsTransport async with AsyncPipeline(TrioRequestsTransport(), policies=policies) as pipeline: return await pipeline.run(request) # [END trio] + response = trio.run(req) assert isinstance(response.http_response.status_code, int) @@ -56,10 +54,7 @@ async def req(): async def test_example_asyncio(): request = HttpRequest("GET", "https://bing.com") - policies = [ - UserAgentPolicy("myuseragent"), - AsyncRedirectPolicy() - ] + policies = [UserAgentPolicy("myuseragent"), AsyncRedirectPolicy()] # [START asyncio] from azure.core.pipeline.transport import AsyncioRequestsTransport @@ -74,10 +69,7 @@ async def test_example_asyncio(): async def test_example_aiohttp(): request = HttpRequest("GET", "https://bing.com") - policies = [ - UserAgentPolicy("myuseragent"), - AsyncRedirectPolicy() - ] + policies = [UserAgentPolicy("myuseragent"), AsyncRedirectPolicy()] # [START aiohttp] from 
azure.core.pipeline.transport import AioHttpTransport @@ -97,10 +89,7 @@ async def test_example_async_pipeline(): # example: create request and policies request = HttpRequest("GET", "https://bing.com") - policies = [ - UserAgentPolicy("myuseragent"), - AsyncRedirectPolicy() - ] + policies = [UserAgentPolicy("myuseragent"), AsyncRedirectPolicy()] # run the pipeline async with AsyncPipeline(transport=AioHttpTransport(), policies=policies) as pipeline: @@ -221,7 +210,7 @@ async def test_example_async_retry_policy(): retry_status=5, retry_backoff_factor=0.5, retry_backoff_max=60, - retry_on_methods=['GET'] + retry_on_methods=["GET"], ) # [END async_retry_policy] diff --git a/tools/azure-sdk-tools/tests/integration/proxy/conftest.py b/tools/azure-sdk-tools/tests/integration/proxy/conftest.py index a2aec1652819..b327a4004a47 100644 --- a/tools/azure-sdk-tools/tests/integration/proxy/conftest.py +++ b/tools/azure-sdk-tools/tests/integration/proxy/conftest.py @@ -1,6 +1,7 @@ import pytest from devtools_testutils import test_proxy + @pytest.fixture(scope="session", autouse=True) def start_proxy(test_proxy): - return + return diff --git a/tools/azure-sdk-tools/tests/integration/proxy/test_proxy_startup.py b/tools/azure-sdk-tools/tests/integration/proxy/test_proxy_startup.py index 94a37a278e0c..bed8df1fd8e3 100644 --- a/tools/azure-sdk-tools/tests/integration/proxy/test_proxy_startup.py +++ b/tools/azure-sdk-tools/tests/integration/proxy/test_proxy_startup.py @@ -23,4 +23,4 @@ class TestProxyIntegration: # Therefore we are not using recorded_by_proxy decorator or recorded_test fixture def test_tool_spinup_http(self): result = http_client.request("GET", PROXY_CHECK_URL) - assert(result.status == 200) + assert result.status == 200 diff --git a/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_not_present/service/fake-package/setup.py b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_not_present/service/fake-package/setup.py new file mode 100644 index 000000000000..0afc081e44e6 --- /dev/null +++ b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_not_present/service/fake-package/setup.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from setuptools import setup, find_packages +import os +from io import open +import re + +# example setup.py Feel free to copy the entire "azure-template" folder into a package folder named +# with "azure-". Ensure that the below arguments to setup() are updated to reflect +# your package. + +# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way +# up from python 2.7. 
Reference here: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/packaging.md + +PACKAGE_NAME = "ci-yml-nonpresent-test" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace("-", "/") +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace("-", ".") + +with open("README.md", encoding="utf-8") as f: + long_description = f.read() + +setup( + name=PACKAGE_NAME, + version="1.0.0", + description=PACKAGE_NAME, + # ensure that these are updated to reflect the package owners' information + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/Azure/azure-sdk-for-python", + keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product + author="Microsoft Corporation", + author_email="azuresdkengsysadmins@microsoft.com", + license="MIT License", + # ensure that the development status reflects the status of your package + classifiers=[ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: MIT License", + ], + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + # This means any folder structure that only consists of a __init__.py. + # For example, for storage, this would mean adding 'azure.storage' + # in addition to the default 'azure' that is seen here. + "azure", + ] + ), + include_package_data=True, + package_data={ + "azure": ["py.typed"], + }, + install_requires=[ + "azure-core<2.0.0,>=1.10.0", + ], + python_requires=">=3.7", + project_urls={ + "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues", + "Source": "https://github.com/Azure/azure-sdk-for-python", + }, +) diff --git a/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/ci.yml b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/ci.yml new file mode 100644 index 000000000000..d358db016420 --- /dev/null +++ b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/ci.yml @@ -0,0 +1,57 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file. 
+ +trigger: + branches: + include: + - main + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/core/ + - eng/ + - tools/ + exclude: + - eng/common/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/core/ + - eng/ + - tools/ + exclude: + - eng/common/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: core + BuildTargetingString: "*" + ValidateFormatting: true + TestProxy: false + TestTimeoutInMinutes: 120 + Artifacts: + - name: azure-core + safeName: azurecore + - name: azure-mgmt-core + safeName: azuremgmtcore + - name: azure-core-tracing-opencensus + safeName: azurecorecoretracingopencensus + - name: azure-core-tracing-opentelemetry + safeName: azurecorecoretracingtelemetry + - name: azure-common + safeName: azurecommon + skipPublishDocMs: true + - name: azure-core-experimental + safeName: azurecoreexperimental + - name: corehttp + safeName: corehttp diff --git a/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/fake-package/setup.py b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/fake-package/setup.py new file mode 100644 index 000000000000..4f9cb87c3a6d --- /dev/null +++ b/tools/azure-sdk-tools/tests/integration/scenarios/ci_yml_present/service/fake-package/setup.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from setuptools import setup, find_packages +import os +from io import open +import re + +# example setup.py Feel free to copy the entire "azure-template" folder into a package folder named +# with "azure-". Ensure that the below arguments to setup() are updated to reflect +# your package. + +# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way +# up from python 2.7. 
Reference here: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/packaging.md + +PACKAGE_NAME = "ci-yml-present-test" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace("-", "/") +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace("-", ".") + +with open("README.md", encoding="utf-8") as f: + long_description = f.read() + +setup( + name=PACKAGE_NAME, + version="1.0.0", + description=PACKAGE_NAME, + # ensure that these are updated to reflect the package owners' information + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/Azure/azure-sdk-for-python", + keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product + author="Microsoft Corporation", + author_email="azuresdkengsysadmins@microsoft.com", + license="MIT License", + # ensure that the development status reflects the status of your package + classifiers=[ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "License :: OSI Approved :: MIT License", + ], + packages=find_packages( + exclude=[ + "tests", + # Exclude packages that will be covered by PEP420 or nspkg + # This means any folder structure that only consists of a __init__.py. + # For example, for storage, this would mean adding 'azure.storage' + # in addition to the default 'azure' that is seen here. + "azure", + ] + ), + include_package_data=True, + package_data={ + "azure": ["py.typed"], + }, + install_requires=[ + "azure-core<2.0.0,>=1.10.0", + ], + python_requires=">=3.7", + project_urls={ + "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues", + "Source": "https://github.com/Azure/azure-sdk-for-python", + }, +) diff --git a/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_two_options/setup.py b/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_two_options/setup.py index 828e6260efad..92419a9fefdb 100644 --- a/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_two_options/setup.py +++ b/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_two_options/setup.py @@ -30,7 +30,7 @@ setup( name=PACKAGE_NAME, - version='1.0.0', + version="1.0.0", description=PACKAGE_NAME, # ensure that these are updated to reflect the package owners' information long_description=long_description, diff --git a/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_zero_options/setup.py b/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_zero_options/setup.py index 828e6260efad..92419a9fefdb 100644 --- a/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_zero_options/setup.py +++ b/tools/azure-sdk-tools/tests/integration/scenarios/optional_environment_zero_options/setup.py @@ -30,7 +30,7 @@ setup( name=PACKAGE_NAME, - version='1.0.0', + version="1.0.0", description=PACKAGE_NAME, # ensure that these are updated to reflect the package owners' information long_description=long_description, diff --git a/tools/azure-sdk-tools/tests/integration/test_package_discovery.py b/tools/azure-sdk-tools/tests/integration/test_package_discovery.py index 248667f97634..6f7cc80a2b78 100644 --- 
a/tools/azure-sdk-tools/tests/integration/test_package_discovery.py +++ b/tools/azure-sdk-tools/tests/integration/test_package_discovery.py @@ -8,6 +8,7 @@ core_service_root = os.path.join(repo_root, "sdk", "core") storage_service_root = os.path.join(repo_root, "sdk", "storage") + def test_discovery(): results = discover_targeted_packages("azure*", core_service_root) @@ -17,6 +18,7 @@ def test_discovery(): assert len(results) > 1 assert len(non_empty_results) == 1 + def test_discovery_omit_mgmt(): results = discover_targeted_packages("azure*", storage_service_root, filter_type="Omit_management") @@ -26,9 +28,10 @@ def test_discovery_omit_mgmt(): "azure-storage-extensions", "azure-storage-file-datalake", "azure-storage-file-share", - "azure-storage-queue" + "azure-storage-queue", ] + def test_discovery_omit_build(): results = discover_targeted_packages("*", core_service_root, filter_type="Build") @@ -38,9 +41,10 @@ def test_discovery_omit_build(): "azure-core-tracing-opencensus", "azure-core-tracing-opentelemetry", "azure-mgmt-core", - "corehttp" + "corehttp", ] + def test_discovery_single_package(): results = discover_targeted_packages("azure-core", core_service_root, filter_type="Build") @@ -48,6 +52,7 @@ def test_discovery_single_package(): "azure-core", ] + def test_discovery_omit_regression(): results = discover_targeted_packages("*", core_service_root, filter_type="Regression") @@ -56,7 +61,7 @@ def test_discovery_omit_regression(): "azure-core-experimental", "azure-core-tracing-opencensus", "azure-core-tracing-opentelemetry", - "corehttp" + "corehttp", ] storage_results = discover_targeted_packages("azure*", storage_service_root, filter_type="Regression") @@ -67,7 +72,7 @@ def test_discovery_omit_regression(): "azure-storage-extensions", "azure-storage-file-datalake", "azure-storage-file-share", - "azure-storage-queue" + "azure-storage-queue", ] @@ -81,7 +86,6 @@ def test_discovery_honors_contains_filter(): ] - def test_discovery_honors_override(): os.environ["ENABLE_AZURE_COMMON"] = "true" os.environ["ENABLE_AZURE_SERVICEMANAGEMENT_LEGACY"] = "false" diff --git a/tools/azure-sdk-tools/tests/test_ci_metadata.py b/tools/azure-sdk-tools/tests/test_ci_metadata.py new file mode 100644 index 000000000000..d126576559b3 --- /dev/null +++ b/tools/azure-sdk-tools/tests/test_ci_metadata.py @@ -0,0 +1,23 @@ +import os, tempfile, shutil + +import pytest + +from ci_tools.parsing import get_ci_config +from ci_tools.environment_exclusions import is_check_enabled + +integration_folder = os.path.join(os.path.dirname(__file__), "integration") +scenario_present = os.path.join(integration_folder, "scenarios", "ci_yml_present", "service", "fake-package") +scenario_not_present = os.path.join(integration_folder, "scenarios", "ci_yml_not_present", "service", "fake-package") + +def test_ci_config_present(): + config = get_ci_config(scenario_present) + assert config is not None + assert type(config) is dict + assert type(config["extends"]) is dict + + should_proxy = config.get("extends", {}).get("parameters", {}).get("TestProxy", True) + assert should_proxy == False + +def test_ci_config_non_present(): + config = get_ci_config(scenario_not_present) + assert config is None \ No newline at end of file diff --git a/tools/azure-sdk-tools/tests/test_individual_functions.py b/tools/azure-sdk-tools/tests/test_individual_functions.py index e5835a545e89..29cab4654e38 100644 --- a/tools/azure-sdk-tools/tests/test_individual_functions.py +++ b/tools/azure-sdk-tools/tests/test_individual_functions.py @@ -6,6 +6,7 @@ from 
ci_tools.parsing import ParsedSetup, compare_string_to_glob_array from typing import List + @pytest.mark.parametrize( "input_string, glob_array, expected_result", [ diff --git a/tools/azure-sdk-tools/tests/test_optional_functionality.py b/tools/azure-sdk-tools/tests/test_optional_functionality.py index 3168b5e0a570..065e85c732c7 100644 --- a/tools/azure-sdk-tools/tests/test_optional_functionality.py +++ b/tools/azure-sdk-tools/tests/test_optional_functionality.py @@ -4,63 +4,64 @@ from ci_tools.parsing import ParsedSetup from ci_tools.functions import get_config_setting -integration_folder = os.path.join(os.path.dirname(__file__), 'integration') +integration_folder = os.path.join(os.path.dirname(__file__), "integration") + def test_toml_result(): - package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_two_options') + package_with_toml = os.path.join(integration_folder, "scenarios", "optional_environment_two_options") parsed_setup = ParsedSetup.from_path(package_with_toml) actual = parsed_setup.get_build_config() expected = { - 'mypy': True, - 'type_check_samples': True, - 'verifytypes': True, - 'pyright': True, - 'pylint': True, - 'black': True, - 'optional':[ + "mypy": True, + "type_check_samples": True, + "verifytypes": True, + "pyright": True, + "pylint": True, + "black": True, + "optional": [ { - 'name': 'no_requests', - 'install': [], - 'uninstall': ['requests'], - 'additional_pytest_args': ['-k', '*_async.py'] + "name": "no_requests", + "install": [], + "uninstall": ["requests"], + "additional_pytest_args": ["-k", "*_async.py"], }, { - 'name': 'no_aiohttp', - 'install': [], - 'uninstall': ['aiohttp'], - 'additional_pytest_args': ['-k', 'not *_async.py'] - } - ] + "name": "no_aiohttp", + "install": [], + "uninstall": ["aiohttp"], + "additional_pytest_args": ["-k", "not *_async.py"], + }, + ], } assert actual == expected def test_optional_specific_get(): - package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_two_options') - actual = get_config_setting(package_with_toml, 'optional') + package_with_toml = os.path.join(integration_folder, "scenarios", "optional_environment_two_options") + actual = get_config_setting(package_with_toml, "optional") expected = [ { - 'name': 'no_requests', - 'install': [], - 'uninstall': ['requests'], - 'additional_pytest_args': ['-k', '*_async.py'] + "name": "no_requests", + "install": [], + "uninstall": ["requests"], + "additional_pytest_args": ["-k", "*_async.py"], }, { - 'name': 'no_aiohttp', - 'install': [], - 'uninstall': ['aiohttp'], - 'additional_pytest_args': ['-k', 'not *_async.py'] - } + "name": "no_aiohttp", + "install": [], + "uninstall": ["aiohttp"], + "additional_pytest_args": ["-k", "not *_async.py"], + }, ] assert expected == actual def test_optional_specific_get_no_result(): - package_with_toml = os.path.join(integration_folder, 'scenarios', 'optional_environment_zero_options') - actual = get_config_setting(package_with_toml, 'optional', None) + package_with_toml = os.path.join(integration_folder, "scenarios", "optional_environment_zero_options") + actual = get_config_setting(package_with_toml, "optional", None) expected = None assert expected == actual diff --git a/tools/azure-sdk-tools/tests/test_parse_functionality.py b/tools/azure-sdk-tools/tests/test_parse_functionality.py index 8765520d5118..d3825bb75e00 100644 --- a/tools/azure-sdk-tools/tests/test_parse_functionality.py +++ b/tools/azure-sdk-tools/tests/test_parse_functionality.py @@ -6,7 +6,10 @@ import pytest 
package_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) -test_folder = os.path.join(os.path.dirname(__file__), ) +test_folder = os.path.join( + os.path.dirname(__file__), +) + def test_parse_require(): test_scenarios = [ @@ -19,7 +22,7 @@ def test_parse_require(): ("azure-core<2.0.0,>=1.2.2", "azure-core", "<2.0.0,>=1.2.2"), ("azure-core[aio]<2.0.0,>=1.26.0", "azure-core", "<2.0.0,>=1.26.0"), ("azure-core[aio,cool_extra]<2.0.0,>=1.26.0", "azure-core", "<2.0.0,>=1.26.0"), - ("azure-core[]", "azure-core", None) + ("azure-core[]", "azure-core", None), ] for scenario in test_scenarios: @@ -120,6 +123,7 @@ def test_sdk_sample_setup(test_patch): assert result.keywords[0] == "azure sdk" assert len(result.keywords) == 2 + @patch("ci_tools.parsing.parse_functions.read_setup_py_content") def test_parse_recognizes_extensions(test_patch): test_patch.return_value = """ diff --git a/tools/azure-sdk-tools/tests/test_pyproject_interactions.py b/tools/azure-sdk-tools/tests/test_pyproject_interactions.py index efcdc141c40e..a6a078513c6e 100644 --- a/tools/azure-sdk-tools/tests/test_pyproject_interactions.py +++ b/tools/azure-sdk-tools/tests/test_pyproject_interactions.py @@ -59,12 +59,10 @@ def test_nonpresent_pyproject_update(): reloaded_build_config = get_build_config(new_path) assert reloaded_build_config == update_result + @pytest.mark.parametrize( "check_name, environment_value, expected_result", - [ - ("mindependency", "true", True), - ("mindependency", "false", False) - ] + [("mindependency", "true", True), ("mindependency", "false", False)], ) def test_environment_override(check_name, environment_value, expected_result): with tempfile.TemporaryDirectory() as temp_dir: @@ -82,6 +80,7 @@ def test_pyproject_update_check_override(): build_config = get_build_config(temp_dir) + assert build_config is not None build_config["pyright"] = True update_result = update_build_config(temp_dir, build_config) @@ -89,4 +88,4 @@ def test_pyproject_update_check_override(): assert update_result == build_config reloaded_build_config = get_build_config(temp_dir) - assert reloaded_build_config == update_result + assert reloaded_build_config == update_result \ No newline at end of file diff --git a/tools/azure-sdk-tools/tests/test_python_snippet_updater.py b/tools/azure-sdk-tools/tests/test_python_snippet_updater.py index 8c5df43f1c6e..f6e6f058a7fe 100644 --- a/tools/azure-sdk-tools/tests/test_python_snippet_updater.py +++ b/tools/azure-sdk-tools/tests/test_python_snippet_updater.py @@ -1,6 +1,11 @@ import os import pytest -from ci_tools.snippet_update.python_snippet_updater import get_snippet, update_snippet, check_snippets, check_not_up_to_date +from ci_tools.snippet_update.python_snippet_updater import ( + get_snippet, + update_snippet, + check_snippets, + check_not_up_to_date, +) def test_get_snippet(): @@ -9,8 +14,9 @@ def test_get_snippet(): get_snippet(file) snippets = check_snippets().keys() assert len(snippets) == 7 - assert 'example_async.trio' in snippets - assert 'example_async.async_retry_policy' in snippets + assert "example_async.trio" in snippets + assert "example_async.async_retry_policy" in snippets + def test_update_snippet(): folder = os.path.dirname(os.path.abspath(__file__)) @@ -19,6 +25,7 @@ def test_update_snippet(): file_1 = os.path.join(folder, "README.md") update_snippet(file_1) + def test_missing_snippet(): folder = os.path.dirname(os.path.abspath(__file__)) file = os.path.join(folder, "example_async.py") @@ -27,6 +34,7 @@ def test_missing_snippet(): with 
pytest.raises(SystemExit): update_snippet(file_1) + def test_out_of_sync(): folder = os.path.dirname(os.path.abspath(__file__)) file = os.path.join(folder, "example_async.py") diff --git a/tools/azure-sdk-tools/tests/test_requirements_parse.py b/tools/azure-sdk-tools/tests/test_requirements_parse.py index bef7f9981953..df36d9ea93cb 100644 --- a/tools/azure-sdk-tools/tests/test_requirements_parse.py +++ b/tools/azure-sdk-tools/tests/test_requirements_parse.py @@ -96,4 +96,3 @@ def test_replace_dev_reqs_remote(tmp_directory_create): replace_dev_reqs(requirements_file, core_location, None) requirements_after = get_requirements_from_file(requirements_file) assert requirements_before == requirements_after - diff --git a/tools/azure-sdk-tools/tests/test_whl_discovery.py b/tools/azure-sdk-tools/tests/test_whl_discovery.py index 46259c87a747..e78a55c6151b 100644 --- a/tools/azure-sdk-tools/tests/test_whl_discovery.py +++ b/tools/azure-sdk-tools/tests/test_whl_discovery.py @@ -1,4 +1,3 @@ - import os from unittest.mock import patch @@ -92,11 +91,11 @@ def test_find_whl_discovers_specific_wheels(test_patch, tmp_directory_create): "azure_storage_extensions-1.0.0b1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", "azure_storage_extensions-1.0.0b1-cp39-cp39-win_amd64.whl", "azure_storage_extensions-1.0.0b1-cp39-cp39-win32.whl", - "azure-storage-extensions-1.0.0b1.tar.gz" + "azure-storage-extensions-1.0.0b1.tar.gz", ] ) - with open(os.path.join(tags_folder, 'from_WSL_310.txt'), 'r', encoding='utf-8') as f: + with open(os.path.join(tags_folder, "from_WSL_310.txt"), "r", encoding="utf-8") as f: compatible_tags = [line.strip() for line in f.readlines()] test_patch.return_value = compatible_tags @@ -140,13 +139,13 @@ def test_find_sdist_discovers_specific_sdist(test_patch, tmp_directory_create): "azure_storage_extensions-1.0.0b1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", "azure_storage_extensions-1.0.0b1-cp39-cp39-win_amd64.whl", "azure_storage_extensions-1.0.0b1-cp39-cp39-win32.whl", - "azure-storage-extensions-1.0.0b1.tar.gz" + "azure-storage-extensions-1.0.0b1.tar.gz", ] ) - with open(os.path.join(tags_folder, 'from_WSL_310.txt'), 'r', encoding='utf-8') as f: + with open(os.path.join(tags_folder, "from_WSL_310.txt"), "r", encoding="utf-8") as f: compatible_tags = [line.strip() for line in f.readlines()] test_patch.return_value = compatible_tags found = find_sdist(tmp_dir, "azure-storage-extensions", "1.0.0b1") - assert isinstance(found, str) \ No newline at end of file + assert isinstance(found, str) diff --git a/tools/azure-sdk-tools/testutils/common_recordingtestcase.py b/tools/azure-sdk-tools/testutils/common_recordingtestcase.py index 3f0a1da53fd6..4e6d083b2763 100644 --- a/tools/azure-sdk-tools/testutils/common_recordingtestcase.py +++ b/tools/azure-sdk-tools/testutils/common_recordingtestcase.py @@ -10,6 +10,7 @@ import os import os.path import time + # We don't vcrpy anymore, if this code is loaded, fail # import vcr import re From bfbd8917300bb5a4b871a08a7e2a9407dd2ba4d3 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Fri, 27 Sep 2024 12:47:38 -0700 Subject: [PATCH 15/22] bump min azure-core that works with 3.13 (#37598) --- sdk/communication/azure-communication-identity/setup.py | 2 +- sdk/communication/azure-communication-sms/setup.py | 2 +- sdk/containerregistry/azure-containerregistry/setup.py | 2 +- sdk/eventhub/azure-eventhub/setup.py | 2 +- 
sdk/textanalytics/azure-ai-textanalytics/setup.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/communication/azure-communication-identity/setup.py b/sdk/communication/azure-communication-identity/setup.py index e18c4e042e7d..8ea708be45e4 100644 --- a/sdk/communication/azure-communication-identity/setup.py +++ b/sdk/communication/azure-communication-identity/setup.py @@ -69,7 +69,7 @@ "pytyped": ["py.typed"], }, python_requires=">=3.8", - install_requires=["msrest>=0.7.1", "azure-core>=1.24.0"], + install_requires=["msrest>=0.7.1", "azure-core>=1.27.0"], extras_require={":python_version<'3.8'": ["typing-extensions"]}, project_urls={ "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues", diff --git a/sdk/communication/azure-communication-sms/setup.py b/sdk/communication/azure-communication-sms/setup.py index 184fc3ce3830..9a97fd302039 100644 --- a/sdk/communication/azure-communication-sms/setup.py +++ b/sdk/communication/azure-communication-sms/setup.py @@ -68,7 +68,7 @@ }, python_requires=">=3.8", install_requires=[ - 'azure-core>=1.24.0', + 'azure-core>=1.27.0', 'msrest>=0.7.1', ], extras_require={ diff --git a/sdk/containerregistry/azure-containerregistry/setup.py b/sdk/containerregistry/azure-containerregistry/setup.py index b200f53d5964..b8908f3673f1 100644 --- a/sdk/containerregistry/azure-containerregistry/setup.py +++ b/sdk/containerregistry/azure-containerregistry/setup.py @@ -56,7 +56,7 @@ ), python_requires=">=3.8", install_requires=[ - "azure-core>=1.24.0,<2.0.0", + "azure-core>=1.27.0,<2.0.0", "isodate>=0.6.0", ], project_urls={ diff --git a/sdk/eventhub/azure-eventhub/setup.py b/sdk/eventhub/azure-eventhub/setup.py index a8fd87775648..e9fd59f0b123 100644 --- a/sdk/eventhub/azure-eventhub/setup.py +++ b/sdk/eventhub/azure-eventhub/setup.py @@ -71,7 +71,7 @@ zip_safe=False, packages=find_packages(exclude=exclude_packages), install_requires=[ - "azure-core>=1.14.0", + "azure-core>=1.27.0", "typing-extensions>=4.0.1", ] ) diff --git a/sdk/textanalytics/azure-ai-textanalytics/setup.py b/sdk/textanalytics/azure-ai-textanalytics/setup.py index 3391b4bae3c8..b9c083bd2c93 100644 --- a/sdk/textanalytics/azure-ai-textanalytics/setup.py +++ b/sdk/textanalytics/azure-ai-textanalytics/setup.py @@ -69,7 +69,7 @@ }, python_requires=">=3.8", install_requires=[ - "azure-core>=1.24.0", + "azure-core>=1.27.0", 'azure-common>=1.1', "isodate>=0.6.1", "typing-extensions>=4.0.1", From 59e903e2c72a55625f62aa17d80f5d8b6a38da7d Mon Sep 17 00:00:00 2001 From: vincenttran-msft <101599632+vincenttran-msft@users.noreply.github.com> Date: Fri, 27 Sep 2024 12:50:54 -0700 Subject: [PATCH 16/22] Update version (#37597) --- sdk/storage/azure-storage-blob/CHANGELOG.md | 6 ++++++ sdk/storage/azure-storage-blob/setup.py | 2 +- sdk/storage/azure-storage-file-datalake/setup.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md index 2ffe8313992c..24923f638ead 100644 --- a/sdk/storage/azure-storage-blob/CHANGELOG.md +++ b/sdk/storage/azure-storage-blob/CHANGELOG.md @@ -1,5 +1,11 @@ # Release History +## 12.24.0b1 (Unreleased) + +### Features Added + + + ## 12.23.1 (2024-09-25) ### Features Added diff --git a/sdk/storage/azure-storage-blob/setup.py b/sdk/storage/azure-storage-blob/setup.py index b011968e8e1c..d21b26946e65 100644 --- a/sdk/storage/azure-storage-blob/setup.py +++ b/sdk/storage/azure-storage-blob/setup.py @@ -56,7 +56,7 @@ 
url='https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob', keywords="azure, azure sdk", classifiers=[ - 'Development Status :: 5 - Production/Stable', + 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3', diff --git a/sdk/storage/azure-storage-file-datalake/setup.py b/sdk/storage/azure-storage-file-datalake/setup.py index 6ad45b351d7d..473b7d05d6a2 100644 --- a/sdk/storage/azure-storage-file-datalake/setup.py +++ b/sdk/storage/azure-storage-file-datalake/setup.py @@ -78,7 +78,7 @@ python_requires=">=3.8", install_requires=[ "azure-core>=1.30.0", - "azure-storage-blob>=12.23.1", + "azure-storage-blob>=12.24.0b1", "typing-extensions>=4.6.0", "isodate>=0.6.1" ], From 8fb928705aae1252caf96043b645191ca986a5ae Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:12:43 -0700 Subject: [PATCH 17/22] Adjust tooling github action to only trigger when tools are changed (#37619) * filter the trigger for the workflow to only changes that include azure-sdk-tools * extend the number of directories that can trigger the 'other' packages properly --- .github/workflows/azure-sdk-tools.yml | 3 +++ eng/scripts/Language-Settings.ps1 | 16 ++++++++++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/.github/workflows/azure-sdk-tools.yml b/.github/workflows/azure-sdk-tools.yml index 5d7f4c30fa0b..79071f34a8b7 100644 --- a/.github/workflows/azure-sdk-tools.yml +++ b/.github/workflows/azure-sdk-tools.yml @@ -4,6 +4,9 @@ on: workflow_dispatch: pull_request: branches: [ main ] + paths: + - "tools/azure-sdk-tools/**" + jobs: build-and-test: diff --git a/eng/scripts/Language-Settings.ps1 b/eng/scripts/Language-Settings.ps1 index 06855b2c8275..97f0ab3e78a6 100644 --- a/eng/scripts/Language-Settings.ps1 +++ b/eng/scripts/Language-Settings.ps1 @@ -9,7 +9,6 @@ $PackageRepositoryUri = "https://pypi.org/project" ."$PSScriptRoot/docs/Docs-ToC.ps1" ."$PSScriptRoot/docs/Docs-Onboarding.ps1" - function Get-python-AdditionalValidationPackagesFromPackageSet { param( [Parameter(Mandatory=$true)] @@ -21,9 +20,22 @@ function Get-python-AdditionalValidationPackagesFromPackageSet { ) $additionalValidationPackages = @() + function isOther($fileName) { + $startsWithPrefixes = @(".config", ".devcontainer", ".github", ".vscode", "common", "conda", "doc", "eng", "scripts") + + $startsWith = $false + foreach ($prefix in $startsWithPrefixes) { + if ($fileName.StartsWith($prefix)) { + $startsWith = $true + } + } + + return $startsWith + } + $toolChanged = $diffObj.ChangedFiles | Where-Object { $_.StartsWith("tool")} $engChanged = $diffObj.ChangedFiles | Where-Object { $_.StartsWith("eng")} - $othersChanged = $diffObj.ChangedFiles | Where-Object { $_.StartsWith("scripts") -or $_.StartsWith("doc") -or $_.StartsWith("common") -or $_.StartsWith("conda") } + $othersChanged = $diffObj.ChangedFiles | Where-Object { isOther($_) } if ($toolChanged) { $additionalPackages = @( From d58cf54cba67424207f3cd8cd4b46a30aa7417ed Mon Sep 17 00:00:00 2001 From: vincenttran-msft <101599632+vincenttran-msft@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:43:59 -0700 Subject: [PATCH 18/22] Nice (#37621) --- sdk/storage/azure-storage-blob/azure/storage/blob/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py 
b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py index 79ac4e25382e..f67466f0741b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_version.py @@ -4,4 +4,4 @@ # license information. # -------------------------------------------------------------------------- -VERSION = "12.23.1" +VERSION = "12.24.0b1" From 912602b29481aec48a39e6545afeda0c991853cc Mon Sep 17 00:00:00 2001 From: Scott Beddall <45376673+scbedd@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:48:54 -0700 Subject: [PATCH 19/22] Disable pypy for azure-storage-file-share (#37618) * disable pypy running against azure-storage-file-share, azure-storage-file-datalake, azure-storage-queue --- sdk/storage/azure-storage-file-share/LICENSE | 2 +- sdk/storage/azure-storage-queue/sdk_packaging.toml | 2 +- tools/azure-sdk-tools/ci_tools/functions.py | 8 +++++++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/sdk/storage/azure-storage-file-share/LICENSE b/sdk/storage/azure-storage-file-share/LICENSE index 63447fd8bbbf..b2f52a2bad4e 100644 --- a/sdk/storage/azure-storage-file-share/LICENSE +++ b/sdk/storage/azure-storage-file-share/LICENSE @@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file +SOFTWARE. diff --git a/sdk/storage/azure-storage-queue/sdk_packaging.toml b/sdk/storage/azure-storage-queue/sdk_packaging.toml index e7687fdae93b..901bc8ccbfa6 100644 --- a/sdk/storage/azure-storage-queue/sdk_packaging.toml +++ b/sdk/storage/azure-storage-queue/sdk_packaging.toml @@ -1,2 +1,2 @@ [packaging] -auto_update = false \ No newline at end of file +auto_update = false diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py index 655075de857b..882284e6d6a9 100644 --- a/tools/azure-sdk-tools/ci_tools/functions.py +++ b/tools/azure-sdk-tools/ci_tools/functions.py @@ -42,7 +42,13 @@ ] TEST_COMPATIBILITY_MAP = {} -TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = {"azure-storage-blob": "pypy", "azure-eventhub": "pypy"} +TEST_PYTHON_DISTRO_INCOMPATIBILITY_MAP = { + "azure-storage-blob": "pypy", + "azure-storage-queue": "pypy", + "azure-storage-file-datalake": "pypy", + "azure-storage-file-share": "pypy", + "azure-eventhub": "pypy", +} omit_regression = ( lambda x: "nspkg" not in x From 99c7fd7bb81a5fd887b28a709057703191413cfe Mon Sep 17 00:00:00 2001 From: Peter Wu <162184229+weirongw23-msft@users.noreply.github.com> Date: Fri, 27 Sep 2024 15:57:32 -0700 Subject: [PATCH 20/22] fixed indentation readme issue for queue (#37614) --- sdk/storage/azure-storage-queue/README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/sdk/storage/azure-storage-queue/README.md b/sdk/storage/azure-storage-queue/README.md index e29b9b37ef68..cfa85dcdb20c 100644 --- a/sdk/storage/azure-storage-queue/README.md +++ b/sdk/storage/azure-storage-queue/README.md @@ -117,14 +117,14 @@ The `credential` parameter may be provided in a number of different forms, depen Use the returned token credential to authenticate the client: ```python - from azure.identity import DefaultAzureCredential - from azure.storage.queue import QueueServiceClient - token_credential = DefaultAzureCredential() - - 
queue_service_client = QueueServiceClient( - account_url="https://.queue.core.windows.net", - credential=token_credential - ) + from azure.identity import DefaultAzureCredential + from azure.storage.queue import QueueServiceClient + token_credential = DefaultAzureCredential() + + queue_service_client = QueueServiceClient( + account_url="https://.queue.core.windows.net", + credential=token_credential + ) ``` #### Creating the client from a connection string From 5ca02fc143164f478c51ad783f6000e6c627b7b7 Mon Sep 17 00:00:00 2001 From: Peter Wu <162184229+weirongw23-msft@users.noreply.github.com> Date: Fri, 27 Sep 2024 16:04:45 -0700 Subject: [PATCH 21/22] fixed indentation issue for token credential example (#37613) --- sdk/storage/azure-storage-blob/README.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/sdk/storage/azure-storage-blob/README.md b/sdk/storage/azure-storage-blob/README.md index 773f0481bbbb..7ebf5915b40f 100644 --- a/sdk/storage/azure-storage-blob/README.md +++ b/sdk/storage/azure-storage-blob/README.md @@ -87,14 +87,14 @@ The `credential` parameter may be provided in a number of different forms, depen Use the returned token credential to authenticate the client: ```python - from azure.identity import DefaultAzureCredential - from azure.storage.blob import BlobServiceClient - token_credential = DefaultAzureCredential() - - blob_service_client = BlobServiceClient( - account_url="https://.blob.core.windows.net", - credential=token_credential - ) + from azure.identity import DefaultAzureCredential + from azure.storage.blob import BlobServiceClient + token_credential = DefaultAzureCredential() + + blob_service_client = BlobServiceClient( + account_url="https://.blob.core.windows.net", + credential=token_credential + ) ``` 2. To use a [shared access signature (SAS) token](https://docs.microsoft.com/azure/storage/common/storage-sas-overview), From b47a0a74d603eb35d3d045edcc72b6ea97879a49 Mon Sep 17 00:00:00 2001 From: swathipil Date: Fri, 27 Sep 2024 16:15:12 -0700 Subject: [PATCH 22/22] skip windows tests --- .../tests/livetest/asynctests/test_properties_async.py | 2 ++ .../azure-eventhub/tests/livetest/synctests/test_properties.py | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_properties_async.py b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_properties_async.py index 4bd258bd749a..c7d4d4ce2404 100644 --- a/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_properties_async.py +++ b/sdk/eventhub/azure-eventhub/tests/livetest/asynctests/test_properties_async.py @@ -4,6 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- +import sys import pytest from azure.eventhub.aio import ( @@ -101,6 +102,7 @@ async def test_get_partition_ids(auth_credentials_async, uamqp_transport): assert partition_ids == ["0", "1"] +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Large negative timestamp to datetime conversion fails on Windows with: https://bugs.python.org/issue36439") @pytest.mark.liveTest @pytest.mark.asyncio async def test_get_partition_properties(auth_credentials_async, uamqp_transport): diff --git a/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_properties.py b/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_properties.py index 81570d66a7a5..97dcede5cf8c 100644 --- a/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_properties.py +++ b/sdk/eventhub/azure-eventhub/tests/livetest/synctests/test_properties.py @@ -4,6 +4,7 @@ # license information. # -------------------------------------------------------------------------- +import sys import pytest from azure.eventhub import EventHubSharedKeyCredential @@ -97,7 +98,7 @@ def test_get_partition_ids(auth_credentials, uamqp_transport): partition_ids = client.get_partition_ids() assert partition_ids == ["0", "1"] - +@pytest.mark.skipif(sys.platform.startswith("win"), reason="Large negative timestamp to datetime conversion fails on Windows with: https://bugs.python.org/issue36439") @pytest.mark.liveTest def test_get_partition_properties(auth_credentials, uamqp_transport): fully_qualified_namespace, eventhub_name, credential = auth_credentials