From 6f839fe5cf4ba055256aa63ad3ae508415767806 Mon Sep 17 00:00:00 2001 From: cloudant-sdks-automation <71659186+cloudant-sdks-automation@users.noreply.github.com> Date: Wed, 22 May 2024 14:42:38 +0000 Subject: [PATCH] feat(generated): update API definition to 1.0.0-dev0.1.10 Generated SDK source code using: - Generator version 3.90.1 - Specification version 1.0.0-dev0.1.10 - Automation (cloudant-sdks) version dcfded7 --- ibmcloudant/cloudant_v1.py | 16 ++++++++++++++++ test/unit/test_cloudant_v1.py | 10 ++++++---- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/ibmcloudant/cloudant_v1.py b/ibmcloudant/cloudant_v1.py index 8607de00..a27df4df 100644 --- a/ibmcloudant/cloudant_v1.py +++ b/ibmcloudant/cloudant_v1.py @@ -10840,6 +10840,10 @@ class DatabaseInformation: :param int doc_count: A count of the documents in the specified database. :param int doc_del_count: Number of deleted documents. :param str engine: (optional) The engine used for the database. + :param str instance_start_time: An opaque string to detect whether a database + has been recreated. The field name is for compatibility with old + replicator versions. Do not use the value to infer timing information. + Typically only used by replicators. :param DatabaseInformationProps props: Schema for database properties. :param ContentInformationSizes sizes: Schema for size information of content. :param str update_seq: An opaque string that describes the state of the @@ -10857,6 +10861,7 @@ def __init__( disk_format_version: int, doc_count: int, doc_del_count: int, + instance_start_time: str, props: 'DatabaseInformationProps', sizes: 'ContentInformationSizes', update_seq: str, @@ -10879,6 +10884,10 @@ def __init__( the data when it is stored on disk. :param int doc_count: A count of the documents in the specified database. :param int doc_del_count: Number of deleted documents. + :param str instance_start_time: An opaque string to detect whether a + database has been recreated. 
The field name is for compatibility with old + replicator versions. Do not use the value to infer timing information. + Typically only used by replicators. :param DatabaseInformationProps props: Schema for database properties. :param ContentInformationSizes sizes: Schema for size information of content. @@ -10902,6 +10911,7 @@ def __init__( self.doc_count = doc_count self.doc_del_count = doc_del_count self.engine = engine + self.instance_start_time = instance_start_time self.props = props self.sizes = sizes self.update_seq = update_seq @@ -10942,6 +10952,10 @@ def from_dict(cls, _dict: Dict) -> 'DatabaseInformation': raise ValueError('Required property \'doc_del_count\' not present in DatabaseInformation JSON') if (engine := _dict.get('engine')) is not None: args['engine'] = engine + if (instance_start_time := _dict.get('instance_start_time')) is not None: + args['instance_start_time'] = instance_start_time + else: + raise ValueError('Required property \'instance_start_time\' not present in DatabaseInformation JSON') if (props := _dict.get('props')) is not None: args['props'] = DatabaseInformationProps.from_dict(props) else: @@ -10989,6 +11003,8 @@ def to_dict(self) -> Dict: _dict['doc_del_count'] = self.doc_del_count if hasattr(self, 'engine') and self.engine is not None: _dict['engine'] = self.engine + if hasattr(self, 'instance_start_time') and self.instance_start_time is not None: + _dict['instance_start_time'] = self.instance_start_time if hasattr(self, 'props') and self.props is not None: if isinstance(self.props, dict): _dict['props'] = self.props diff --git a/test/unit/test_cloudant_v1.py b/test/unit/test_cloudant_v1.py index 159fc7dd..78ffcf1a 100644 --- a/test/unit/test_cloudant_v1.py +++ b/test/unit/test_cloudant_v1.py @@ -1147,7 +1147,7 @@ def test_post_dbs_info_all_params(self): """ # Set up mock url = preprocess_url('/_dbs_info') - mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": 
"committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]' + mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]' responses.add( responses.POST, url, @@ -1192,7 +1192,7 @@ def test_post_dbs_info_value_error(self): """ # Set up mock url = preprocess_url('/_dbs_info') - mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]' + mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", 
"instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]' responses.add( responses.POST, url, @@ -1316,7 +1316,7 @@ def test_get_database_information_all_params(self): """ # Set up mock url = preprocess_url('/testString') - mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}' + mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}' responses.add( responses.GET, url, @@ -1354,7 +1354,7 @@ def test_get_database_information_value_error(self): """ # Set up mock url = preprocess_url('/testString') - mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, 
"update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}' + mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}' responses.add( responses.GET, url, @@ -14365,6 +14365,7 @@ def test_database_information_serialization(self): database_information_model_json['doc_count'] = 0 database_information_model_json['doc_del_count'] = 0 database_information_model_json['engine'] = 'testString' + database_information_model_json['instance_start_time'] = 'testString' database_information_model_json['props'] = database_information_props_model database_information_model_json['sizes'] = content_information_sizes_model database_information_model_json['update_seq'] = 'testString' @@ -14565,6 +14566,7 @@ def test_dbs_info_result_serialization(self): database_information_model['doc_count'] = 0 database_information_model['doc_del_count'] = 0 database_information_model['engine'] = 'testString' + database_information_model['instance_start_time'] = 'testString' database_information_model['props'] = database_information_props_model database_information_model['sizes'] = content_information_sizes_model database_information_model['update_seq'] = 'testString'