Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(generated): update API definition to 1.0.0-dev0.1.10 #654

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions ibmcloudant/cloudant_v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -10840,6 +10840,10 @@ class DatabaseInformation:
:param int doc_count: A count of the documents in the specified database.
:param int doc_del_count: Number of deleted documents.
:param str engine: (optional) The engine used for the database.
:param str instance_start_time: An opaque string to detect whether a database
has been recreated. The field name is for compatibility with old
replicator versions. Do not use the value to infer timing information.
Typically only used by replicators.
:param DatabaseInformationProps props: Schema for database properties.
:param ContentInformationSizes sizes: Schema for size information of content.
:param str update_seq: An opaque string that describes the state of the
Expand All @@ -10857,6 +10861,7 @@ def __init__(
disk_format_version: int,
doc_count: int,
doc_del_count: int,
instance_start_time: str,
props: 'DatabaseInformationProps',
sizes: 'ContentInformationSizes',
update_seq: str,
Expand All @@ -10879,6 +10884,10 @@ def __init__(
the data when it is stored on disk.
:param int doc_count: A count of the documents in the specified database.
:param int doc_del_count: Number of deleted documents.
:param str instance_start_time: An opaque string to detect whether a
database has been recreated. The field name is for compatibility with old
replicator versions. Do not use the value to infer timing information.
Typically only used by replicators.
:param DatabaseInformationProps props: Schema for database properties.
:param ContentInformationSizes sizes: Schema for size information of
content.
Expand All @@ -10902,6 +10911,7 @@ def __init__(
self.doc_count = doc_count
self.doc_del_count = doc_del_count
self.engine = engine
self.instance_start_time = instance_start_time
self.props = props
self.sizes = sizes
self.update_seq = update_seq
Expand Down Expand Up @@ -10942,6 +10952,10 @@ def from_dict(cls, _dict: Dict) -> 'DatabaseInformation':
raise ValueError('Required property \'doc_del_count\' not present in DatabaseInformation JSON')
if (engine := _dict.get('engine')) is not None:
args['engine'] = engine
if (instance_start_time := _dict.get('instance_start_time')) is not None:
args['instance_start_time'] = instance_start_time
else:
raise ValueError('Required property \'instance_start_time\' not present in DatabaseInformation JSON')
if (props := _dict.get('props')) is not None:
args['props'] = DatabaseInformationProps.from_dict(props)
else:
Expand Down Expand Up @@ -10989,6 +11003,8 @@ def to_dict(self) -> Dict:
_dict['doc_del_count'] = self.doc_del_count
if hasattr(self, 'engine') and self.engine is not None:
_dict['engine'] = self.engine
if hasattr(self, 'instance_start_time') and self.instance_start_time is not None:
_dict['instance_start_time'] = self.instance_start_time
if hasattr(self, 'props') and self.props is not None:
if isinstance(self.props, dict):
_dict['props'] = self.props
Expand Down
10 changes: 6 additions & 4 deletions test/unit/test_cloudant_v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -1147,7 +1147,7 @@ def test_post_dbs_info_all_params(self):
"""
# Set up mock
url = preprocess_url('/_dbs_info')
mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]'
mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]'
responses.add(
responses.POST,
url,
Expand Down Expand Up @@ -1192,7 +1192,7 @@ def test_post_dbs_info_value_error(self):
"""
# Set up mock
url = preprocess_url('/_dbs_info')
mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]'
mock_response = '[{"error": "error", "info": {"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}, "key": "key"}]'
responses.add(
responses.POST,
url,
Expand Down Expand Up @@ -1316,7 +1316,7 @@ def test_get_database_information_all_params(self):
"""
# Set up mock
url = preprocess_url('/testString')
mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}'
mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}'
responses.add(
responses.GET,
url,
Expand Down Expand Up @@ -1354,7 +1354,7 @@ def test_get_database_information_value_error(self):
"""
# Set up mock
url = preprocess_url('/testString')
mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}'
mock_response = '{"cluster": {"n": 3, "q": 1, "r": 1, "w": 1}, "committed_update_seq": "committed_update_seq", "compact_running": false, "compacted_seq": "compacted_seq", "db_name": "db_name", "disk_format_version": 19, "doc_count": 0, "doc_del_count": 0, "engine": "engine", "instance_start_time": "instance_start_time", "props": {"partitioned": false}, "sizes": {"active": 6, "external": 8, "file": 4}, "update_seq": "update_seq", "uuid": "uuid", "partitioned_indexes": {"count": 0, "indexes": {"search": 0, "view": 0}, "limit": 10}}'
responses.add(
responses.GET,
url,
Expand Down Expand Up @@ -14365,6 +14365,7 @@ def test_database_information_serialization(self):
database_information_model_json['doc_count'] = 0
database_information_model_json['doc_del_count'] = 0
database_information_model_json['engine'] = 'testString'
database_information_model_json['instance_start_time'] = 'testString'
database_information_model_json['props'] = database_information_props_model
database_information_model_json['sizes'] = content_information_sizes_model
database_information_model_json['update_seq'] = 'testString'
Expand Down Expand Up @@ -14565,6 +14566,7 @@ def test_dbs_info_result_serialization(self):
database_information_model['doc_count'] = 0
database_information_model['doc_del_count'] = 0
database_information_model['engine'] = 'testString'
database_information_model['instance_start_time'] = 'testString'
database_information_model['props'] = database_information_props_model
database_information_model['sizes'] = content_information_sizes_model
database_information_model['update_seq'] = 'testString'
Expand Down