diff --git a/cognite/client/_api/agents/agents.py b/cognite/client/_api/agents/agents.py index bd177be879..383fbe1113 100644 --- a/cognite/client/_api/agents/agents.py +++ b/cognite/client/_api/agents/agents.py @@ -49,10 +49,9 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... AgentUpsert, ... QueryKnowledgeGraphAgentToolUpsert, ... QueryKnowledgeGraphAgentToolConfiguration, - ... DataModelInfo + ... DataModelInfo, ... ) >>> client = CogniteClient() - ... >>> find_assets_tool = QueryKnowledgeGraphAgentToolUpsert( ... name="find assets", ... description="Use this tool to find assets", @@ -65,12 +64,10 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... view_external_ids=["CogniteAsset"], ... ) ... ] - ... ) + ... ), ... ) >>> agent = AgentUpsert( - ... external_id="my_agent", - ... name="My Agent", - ... tools=[find_assets_tool] + ... external_id="my_agent", name="My Agent", tools=[find_assets_tool] ... ) >>> client.agents.upsert(agents=[agent]) @@ -83,9 +80,8 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... DataModelInfo, ... SummarizeDocumentAgentToolUpsert, ... AskDocumentAgentToolUpsert, - ... QueryTimeSeriesDatapointsAgentToolUpsert + ... QueryTimeSeriesDatapointsAgentToolUpsert, ... ) - ... >>> find_assets_tool = QueryKnowledgeGraphAgentToolUpsert( ... name="find assets", ... description="Use this tool to query the knowledge graph for assets", @@ -98,7 +94,7 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... view_external_ids=["CogniteAsset"], ... ) ... ] - ... ) + ... ), ... ) >>> find_files_tool = QueryKnowledgeGraphAgentToolUpsert( ... name="find files", @@ -112,7 +108,7 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... view_external_ids=["CogniteFile"], ... ) ... ] - ... ) + ... ), ... ) >>> find_time_series_tool = QueryKnowledgeGraphAgentToolUpsert( ... name="find time series", @@ -126,26 +122,33 @@ def upsert(self, agents: AgentUpsert | Sequence[AgentUpsert]) -> Agent | AgentLi ... view_external_ids=["CogniteTimeSeries"], ... ) ... ] - ... ) + ... ), ... ) >>> summarize_tool = SummarizeDocumentAgentToolUpsert( ... name="summarize document", - ... description="Use this tool to get a summary of a document" + ... description="Use this tool to get a summary of a document", ... ) >>> ask_doc_tool = AskDocumentAgentToolUpsert( ... name="ask document", - ... description="Use this tool to ask questions about specific documents" + ... description="Use this tool to ask questions about specific documents", ... ) >>> ts_tool = QueryTimeSeriesDatapointsAgentToolUpsert( ... name="query time series", - ... description="Use this tool to query time series data points" + ... description="Use this tool to query time series data points", ... ) >>> agent = AgentUpsert( ... external_id="my_agent", ... name="My agent", ... description="An agent with many tools", ... instructions="You are a helpful assistant that can query knowledge graphs, summarize documents, answer questions about documents, and query time series data points.", - ... tools=[find_assets_tool, find_files_tool, find_time_series_tool, summarize_tool, ask_doc_tool, ts_tool] + ... tools=[ + ... find_assets_tool, + ... find_files_tool, + ... find_time_series_tool, + ... summarize_tool, + ... ask_doc_tool, + ... ts_tool, + ... ], ... 
) >>> client.agents.upsert(agents=[agent]) @@ -268,8 +271,7 @@ def chat( >>> from cognite.client.data_classes.agents import Message >>> client = CogniteClient() >>> response = client.agents.chat( - ... agent_id="my_agent", - ... messages=Message("What can you help me with?") + ... agent_id="my_agent", messages=Message("What can you help me with?") ... ) >>> print(response.text) @@ -278,7 +280,7 @@ def chat( >>> follow_up = client.agents.chat( ... agent_id="my_agent", ... messages=Message("Tell me more about that"), - ... cursor=response.cursor + ... cursor=response.cursor, ... ) Send multiple messages at once: @@ -287,8 +289,8 @@ def chat( ... agent_id="my_agent", ... messages=[ ... Message("Help me find the 1st stage compressor."), - ... Message("Once you have found it, find related time series.") - ... ] + ... Message("Once you have found it, find related time series."), + ... ], ... ) """ self._warnings.warn() diff --git a/cognite/client/_api/ai/tools/documents.py b/cognite/client/_api/ai/tools/documents.py index b7ab4563ea..c1e6c4a9f9 100644 --- a/cognite/client/_api/ai/tools/documents.py +++ b/cognite/client/_api/ai/tools/documents.py @@ -43,9 +43,7 @@ def summarize( You can also use external ID or instance ID: >>> from cognite.client.data_classes.data_modeling import NodeId - >>> client.ai.tools.documents.summarize( - ... instance_id=NodeId("my-space", "my-xid") - ... ) + >>> client.ai.tools.documents.summarize(instance_id=NodeId("my-space", "my-xid")) """ ident = IdentifierSequenceWithInstanceId.load(id, external_id, instance_id).as_singleton() res = self._post(self._RESOURCE_PATH + "/summarize", json={"items": ident.as_dicts()}) diff --git a/cognite/client/_api/annotations.py b/cognite/client/_api/annotations.py index 3ad4f45a1d..9d7184807a 100644 --- a/cognite/client/_api/annotations.py +++ b/cognite/client/_api/annotations.py @@ -233,7 +233,9 @@ def list(self, filter: AnnotationFilter | dict, limit: int | None = DEFAULT_LIMI >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import AnnotationFilter >>> client = CogniteClient() - >>> flt = AnnotationFilter(annotated_resource_type="file", annotated_resource_ids=[{"id": 123}]) + >>> flt = AnnotationFilter( + ... annotated_resource_type="file", annotated_resource_ids=[{"id": 123}] + ... ) >>> res = client.annotations.list(flt, limit=None) """ assert_type(filter, "filter", [AnnotationFilter, dict], allow_none=False) diff --git a/cognite/client/_api/assets.py b/cognite/client/_api/assets.py index 37f4e0a608..6b01c7bf42 100644 --- a/cognite/client/_api/assets.py +++ b/cognite/client/_api/assets.py @@ -281,7 +281,9 @@ def retrieve_multiple( Get assets by external id: - >>> res = client.assets.retrieve_multiple(external_ids=["abc", "def"], ignore_unknown_ids=True) + >>> res = client.assets.retrieve_multiple( + ... external_ids=["abc", "def"], ignore_unknown_ids=True + ... ) """ identifiers = IdentifierSequence.load(ids=ids, external_ids=external_ids) return self._retrieve_multiple( @@ -303,7 +305,9 @@ def aggregate(self, filter: AssetFilter | dict[str, Any] | None = None) -> list[ >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> aggregate_by_prefix = client.assets.aggregate(filter={"external_id_prefix": "prefix"}) + >>> aggregate_by_prefix = client.assets.aggregate( + ... filter={"external_id_prefix": "prefix"} + ... ) """ warnings.warn( f"This method is deprecated. 
Use {self.__class__.__name__}.aggregate_count instead.", DeprecationWarning @@ -382,8 +386,8 @@ def aggregate_cardinality_values( >>> from cognite.client.data_classes.assets import AssetProperty >>> is_critical = Search(AssetProperty.description, "critical") >>> critical_assets = client.assets.aggregate_cardinality_values( - ... AssetProperty.metadata_key("timezone"), - ... advanced_filter=is_critical) + ... AssetProperty.metadata_key("timezone"), advanced_filter=is_critical + ... ) """ self._validate_filter(advanced_filter) return self._advanced_aggregate( @@ -458,7 +462,9 @@ def aggregate_unique_values( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.assets import AssetProperty >>> client = CogniteClient() - >>> result = client.assets.aggregate_unique_values(AssetProperty.metadata_key("timezone")) + >>> result = client.assets.aggregate_unique_values( + ... AssetProperty.metadata_key("timezone") + ... ) >>> print(result.unique) Get the different labels with count used for assets created after 2020-01-01 in your CDF project: @@ -467,8 +473,12 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.assets import AssetProperty >>> from cognite.client.utils import timestamp_to_ms >>> from datetime import datetime - >>> created_after_2020 = filters.Range(AssetProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.assets.aggregate_unique_values(AssetProperty.labels, advanced_filter=created_after_2020) + >>> created_after_2020 = filters.Range( + ... AssetProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.assets.aggregate_unique_values( + ... AssetProperty.labels, advanced_filter=created_after_2020 + ... ) >>> print(result.unique) Get the different labels with count for assets updated after 2020-01-01 in your CDF project, but exclude all labels that @@ -478,8 +488,14 @@ def aggregate_unique_values( >>> from cognite.client.data_classes import aggregations >>> from cognite.client.data_classes import filters >>> not_test = aggregations.Not(aggregations.Prefix("test")) - >>> created_after_2020 = filters.Range(AssetProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.assets.aggregate_unique_values(AssetProperty.labels, advanced_filter=created_after_2020, aggregate_filter=not_test) + >>> created_after_2020 = filters.Range( + ... AssetProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.assets.aggregate_unique_values( + ... AssetProperty.labels, + ... advanced_filter=created_after_2020, + ... aggregate_filter=not_test, + ... ) >>> print(result.unique) """ @@ -632,7 +648,8 @@ def create_hierarchy( >>> assets = [ ... Asset(external_id="root", name="root"), ... Asset(external_id="child1", parent_external_id="root", name="child1"), - ... Asset(external_id="child2", parent_external_id="root", name="child2")] + ... Asset(external_id="child2", parent_external_id="root", name="child2"), + ... 
] >>> res = client.assets.create_hierarchy(assets) Create an asset hierarchy, but run update for existing assets: @@ -723,7 +740,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.assets.delete(id=[1,2,3], external_id="3") + >>> client.assets.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -765,7 +782,11 @@ def update( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import AssetUpdate >>> client = CogniteClient() - >>> my_update = AssetUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) + >>> my_update = ( + ... AssetUpdate(id=1) + ... .description.set("New description") + ... .metadata.add({"key": "value"}) + ... ) >>> res1 = client.assets.update(my_update) >>> # Remove an already set field like so >>> another_update = AssetUpdate(id=1).description.set(None) @@ -889,8 +910,8 @@ def filter( >>> from cognite.client.data_classes.assets import AssetProperty, SortableAssetProperty >>> in_timezone = filters.Prefix(AssetProperty.metadata_key("timezone"), "Europe") >>> res = client.assets.filter( - ... filter=in_timezone, - ... sort=(SortableAssetProperty.external_id, "asc")) + ... filter=in_timezone, sort=(SortableAssetProperty.external_id, "asc") + ... ) """ warnings.warn( @@ -951,12 +972,14 @@ def search( Search for assets using multiple filters, finding all assets with name similar to `xyz` with parent asset `123` or `456` with source `some source`: - >>> res = client.assets.search(name="xyz",filter={"parent_ids": [123,456],"source": "some source"}) + >>> res = client.assets.search( + ... name="xyz", filter={"parent_ids": [123, 456], "source": "some source"} + ... ) Search for an asset with an attached label: >>> my_label_filter = LabelFilter(contains_all=["PUMP"]) - >>> res = client.assets.search(name="xyz",filter=AssetFilter(labels=my_label_filter)) + >>> res = client.assets.search(name="xyz", filter=AssetFilter(labels=my_label_filter)) """ return self._search( list_cls=AssetList, @@ -1075,12 +1098,12 @@ def list( Iterate over assets: >>> for asset in client.assets: - ... asset # do something with the asset + ... asset # do something with the asset Iterate over chunks of assets to reduce memory load: >>> for asset_list in client.assets(chunk_size=2500): - ... asset_list # do something with the assets + ... asset_list # do something with the assets Filter assets based on labels: @@ -1105,17 +1128,19 @@ def list( >>> from cognite.client.data_classes.assets import AssetProperty, SortableAssetProperty >>> in_timezone = filters.Prefix(AssetProperty.metadata_key("timezone"), "Europe") >>> res = client.assets.list( - ... advanced_filter=in_timezone, - ... sort=(SortableAssetProperty.external_id, "asc")) + ... advanced_filter=in_timezone, sort=(SortableAssetProperty.external_id, "asc") + ... ) Combine filter and advanced filter: >>> from cognite.client.data_classes import filters >>> not_instrument_lvl5 = filters.And( - ... filters.ContainsAny("labels", ["Level5"]), - ... filters.Not(filters.ContainsAny("labels", ["Instrument"])) + ... filters.ContainsAny("labels", ["Level5"]), + ... filters.Not(filters.ContainsAny("labels", ["Instrument"])), + ... ) + >>> res = client.assets.list( + ... asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5 ... 
) - >>> res = client.assets.list(asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5) """ agg_props = self._process_aggregated_props(aggregated_properties) diff --git a/cognite/client/_api/data_modeling/containers.py b/cognite/client/_api/data_modeling/containers.py index 5b471de9c8..5462dbadc4 100644 --- a/cognite/client/_api/data_modeling/containers.py +++ b/cognite/client/_api/data_modeling/containers.py @@ -111,13 +111,14 @@ def retrieve(self, ids: ContainerIdentifier | Sequence[ContainerIdentifier]) -> >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.data_modeling.containers.retrieve(('mySpace', 'myContainer')) + >>> res = client.data_modeling.containers.retrieve(("mySpace", "myContainer")) Fetch using the ContainerId: >>> from cognite.client.data_classes.data_modeling import ContainerId >>> res = client.data_modeling.containers.retrieve( - ... ContainerId(space='mySpace', external_id='myContainer')) + ... ContainerId(space="mySpace", external_id="myContainer") + ... ) """ identifier = _load_identifier(ids, "container") return self._retrieve_multiple( @@ -244,12 +245,12 @@ def list( Iterate over containers: >>> for container in client.data_modeling.containers: - ... container # do something with the container + ... container # do something with the container Iterate over chunks of containers to reduce memory load: >>> for container_list in client.data_modeling.containers(chunk_size=10): - ... container_list # do something with the containers + ... container_list # do something with the containers """ flt = _ContainerFilter(space, include_global) return self._list( @@ -281,19 +282,25 @@ def apply(self, container: ContainerApply | Sequence[ContainerApply]) -> Contain >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.data_modeling import ( - ... ContainerApply, ContainerProperty, Text, Float64) + ... ContainerApply, + ... ContainerProperty, + ... Text, + ... Float64, + ... ) >>> client = CogniteClient() - >>> container = ContainerApply( - ... space="mySpace", - ... external_id="myContainer", - ... properties={ - ... "name": ContainerProperty(type=Text, name="name"), - ... "numbers": ContainerProperty( - ... type=Float64(is_list=True, max_list_size=200), - ... description="very important numbers", - ... ), - ... }, - ... ), + >>> container = ( + ... ContainerApply( + ... space="mySpace", + ... external_id="myContainer", + ... properties={ + ... "name": ContainerProperty(type=Text, name="name"), + ... "numbers": ContainerProperty( + ... type=Float64(is_list=True, max_list_size=200), + ... description="very important numbers", + ... ), + ... }, + ... ), + ... ) >>> res = client.data_modeling.containers.apply(container) Create new container with unit-aware properties: @@ -309,11 +316,8 @@ def apply(self, container: ContainerApply | Sequence[ContainerApply]) -> Contain ... description="Maximum Pump Pressure", ... name="maxPressure", ... type=Float64( - ... unit=UnitReference( - ... external_id="pressure:bar", - ... source_unit="BAR" - ... ) - ... ) + ... unit=UnitReference(external_id="pressure:bar", source_unit="BAR") + ... ), ... ), ... "rotationConfigurations": ContainerProperty( ... nullable=True, @@ -321,12 +325,10 @@ def apply(self, container: ContainerApply | Sequence[ContainerApply]) -> Contain ... name="rotationConfigurations", ... type=Float64( ... is_list=True, - ... unit=UnitReference( - ... external_id="angular_velocity:rev-per-min" - ... ) - ... ) - ... ) - ... } + ... 
unit=UnitReference(external_id="angular_velocity:rev-per-min"), + ... ), + ... ), + ... }, ... ) >>> res = client.data_modeling.containers.apply(container) @@ -335,10 +337,25 @@ def apply(self, container: ContainerApply | Sequence[ContainerApply]) -> Contain ``nullable``, ``auto_increment``, ``default_value`` and ``immutable`` that may be specified, depending on the choice of property type (e.g. ``auto_increment`` only works with integer types). - >>> from cognite.client.data_classes.data_modeling.data_types import UnitReference, EnumValue >>> from cognite.client.data_classes.data_modeling.data_types import ( - ... Boolean, Date, DirectRelation, Enum, FileReference, Float32, Float64, - ... Int32, Int64, Json, SequenceReference, Text, TimeSeriesReference, Timestamp + ... UnitReference, + ... EnumValue, + ... ) + >>> from cognite.client.data_classes.data_modeling.data_types import ( + ... Boolean, + ... Date, + ... DirectRelation, + ... Enum, + ... FileReference, + ... Float32, + ... Float64, + ... Int32, + ... Int64, + ... Json, + ... SequenceReference, + ... Text, + ... TimeSeriesReference, + ... Timestamp, ... ) >>> container_properties = { ... "prop01": ContainerProperty(Boolean), @@ -355,8 +372,13 @@ def apply(self, container: ContainerApply | Sequence[ContainerApply]) -> Contain ... "prop10": ContainerProperty(DirectRelation(is_list=True)), ... # Note: Enum also supports `unknown_value`: The value to use when the enum value is unknown. ... "prop11": ContainerProperty( - ... Enum({"Closed": EnumValue("Valve is closed"), - ... "Opened": EnumValue("Valve is opened")})), + ... Enum( + ... { + ... "Closed": EnumValue("Valve is closed"), + ... "Opened": EnumValue("Valve is opened"), + ... } + ... ) + ... ), ... # Note: Floats support unit references, e.g. `unit=UnitReference("pressure:bar")`: ... "prop12": ContainerProperty(Float32), ... "prop13": ContainerProperty(Float32(is_list=True)), diff --git a/cognite/client/_api/data_modeling/data_models.py b/cognite/client/_api/data_modeling/data_models.py index eb5fa705b5..d9fefca24c 100644 --- a/cognite/client/_api/data_modeling/data_models.py +++ b/cognite/client/_api/data_modeling/data_models.py @@ -210,12 +210,12 @@ def list( Iterate over data models: >>> for data_model in client.data_modeling.data_models: - ... data_model # do something with the data_model + ... data_model # do something with the data_model Iterate over chunks of data models to reduce memory load: >>> for data_model_list in client.data_modeling.data_models(chunk_size=10): - ... data_model_list # do something with the data model + ... data_model_list # do something with the data model """ filter = DataModelFilter(space, inline_views, all_versions, include_global) @@ -250,8 +250,19 @@ def apply(self, data_model: DataModelApply | Sequence[DataModelApply]) -> DataMo >>> from cognite.client.data_classes.data_modeling import DataModelApply, ViewId >>> client = CogniteClient() >>> data_models = [ - ... DataModelApply(space="mySpace",external_id="myDataModel",version="v1",views=[ViewId("mySpace","myView","v1")]), - ... DataModelApply(space="mySpace",external_id="myOtherDataModel",version="v1",views=[ViewId("mySpace","myView","v1")])] + ... DataModelApply( + ... space="mySpace", + ... external_id="myDataModel", + ... version="v1", + ... views=[ViewId("mySpace", "myView", "v1")], + ... ), + ... DataModelApply( + ... space="mySpace", + ... external_id="myOtherDataModel", + ... version="v1", + ... views=[ViewId("mySpace", "myView", "v1")], + ... ),
] >>> res = client.data_modeling.data_models.apply(data_models) """ return self._create_multiple( diff --git a/cognite/client/_api/data_modeling/graphql.py b/cognite/client/_api/data_modeling/graphql.py index c1bbc41d93..cd4ee2f026 100644 --- a/cognite/client/_api/data_modeling/graphql.py +++ b/cognite/client/_api/data_modeling/graphql.py @@ -85,7 +85,7 @@ def apply_dml( ... id=("mySpaceExternalId", "myModelExternalId", "1"), ... dml="type MyType { id: String! }", ... name="My model name", - ... description="My model description" + ... description="My model description", ... ) """ graphql_body = """ diff --git a/cognite/client/_api/data_modeling/instances.py b/cognite/client/_api/data_modeling/instances.py index 744a8c632c..dc1d83e569 100644 --- a/cognite/client/_api/data_modeling/instances.py +++ b/cognite/client/_api/data_modeling/instances.py @@ -359,32 +359,45 @@ def retrieve_edges( We strongly suggest you use snake_cased attribute names, as is done here: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling import EdgeId, TypedEdge, PropertyOptions, DirectRelationReference, ViewId + >>> from cognite.client.data_classes.data_modeling import ( + ... EdgeId, + ... TypedEdge, + ... PropertyOptions, + ... DirectRelationReference, + ... ViewId, + ... ) >>> class Flow(TypedEdge): - ... flow_rate = PropertyOptions(identifier="flowRate") - ... - ... def __init__( - ... self, - ... space: str, - ... external_id: str, - ... version: int, - ... type: DirectRelationReference, - ... last_updated_time: int, - ... created_time: int, - ... flow_rate: float, - ... start_node: DirectRelationReference, - ... end_node: DirectRelationReference, - ... deleted_time: int | None = None, - ... ) -> None: - ... super().__init__( - ... space, external_id, version, type, last_updated_time, created_time, start_node, end_node, deleted_time - ... ) - ... self.flow_rate = flow_rate + ... flow_rate = PropertyOptions(identifier="flowRate") ... - ... @classmethod - ... def get_source(cls) -> ViewId: - ... return ViewId("sp_model_space", "flow", "1") + ... def __init__( + ... self, + ... space: str, + ... external_id: str, + ... version: int, + ... type: DirectRelationReference, + ... last_updated_time: int, + ... created_time: int, + ... flow_rate: float, + ... start_node: DirectRelationReference, + ... end_node: DirectRelationReference, + ... deleted_time: int | None = None, + ... ) -> None: + ... super().__init__( + ... space, + ... external_id, + ... version, + ... type, + ... last_updated_time, + ... created_time, + ... start_node, + ... end_node, + ... deleted_time, + ... ) + ... self.flow_rate = flow_rate ... + ... @classmethod + ... def get_source(cls) -> ViewId: + ... return ViewId("sp_model_space", "flow", "1") >>> client = CogniteClient() >>> res = client.data_modeling.instances.retrieve_edges( ... EdgeId("mySpace", "theFlow"), edge_cls=Flow @@ -461,38 +474,43 @@ def retrieve_nodes( >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling import NodeId, TypedNode, PropertyOptions, DirectRelationReference, ViewId + >>> from cognite.client.data_classes.data_modeling import ( + ... NodeId, + ... TypedNode, + ... PropertyOptions, + ... DirectRelationReference, + ... ViewId, + ... ) >>> class Person(TypedNode): - ... birth_year = PropertyOptions(identifier="birthYear") - ... - ... def __init__( - ... self, - ... space: str, - ... external_id: str, - ... version: int, - ... last_updated_time: int, - ... created_time: int, - ... 
name: str, - ... birth_year: int | None = None, - ... type: DirectRelationReference | None = None, - ... deleted_time: int | None = None, - ... ): - ... super().__init__( - ... space=space, - ... external_id=external_id, - ... version=version, - ... last_updated_time=last_updated_time, - ... created_time=created_time, - ... type=type, - ... deleted_time=deleted_time - ... ) - ... self.name = name - ... self.birth_year = birth_year + ... birth_year = PropertyOptions(identifier="birthYear") ... - ... @classmethod - ... def get_source(cls) -> ViewId: - ... return ViewId("myModelSpace", "Person", "1") + ... def __init__( + ... self, + ... space: str, + ... external_id: str, + ... version: int, + ... last_updated_time: int, + ... created_time: int, + ... name: str, + ... birth_year: int | None = None, + ... type: DirectRelationReference | None = None, + ... deleted_time: int | None = None, + ... ): + ... super().__init__( + ... space=space, + ... external_id=external_id, + ... version=version, + ... last_updated_time=last_updated_time, + ... created_time=created_time, + ... type=type, + ... deleted_time=deleted_time, + ... ) + ... self.name = name + ... self.birth_year = birth_year ... + ... @classmethod + ... def get_source(cls) -> ViewId: + ... return ViewId("myModelSpace", "Person", "1") >>> client = CogniteClient() >>> res = client.data_modeling.instances.retrieve_nodes( ... NodeId("myDataSpace", "myPerson"), node_cls=Person @@ -533,7 +551,8 @@ def retrieve( >>> res = client.data_modeling.instances.retrieve( ... nodes=("mySpace", "myNodeExternalId"), ... edges=("mySpace", "myEdgeExternalId"), - ... sources=("mySpace", "myViewExternalId", "myViewVersion")) + ... sources=("mySpace", "myViewExternalId", "myViewVersion"), + ... ) Retrieve nodes and edges using the built-in data class >>> from cognite.client.data_classes.data_modeling import NodeId, EdgeId, ViewId >>> res = client.data_modeling.instances.retrieve( ... NodeId("mySpace", "myNode"), ... EdgeId("mySpace", "myEdge"), - ... ViewId("mySpace", "myViewExternalId", "myViewVersion")) + ... ViewId("mySpace", "myViewExternalId", "myViewVersion"), + ... ) Retrieve nodes and edges using the view object as source >>> res = client.data_modeling.instances.retrieve( ... NodeId("mySpace", "myNode"), ... EdgeId("mySpace", "myEdge"), - ... sources=("myspace", "myView")) + ... sources=("myspace", "myView"), + ... ) """ return self._retrieve_typed( nodes=nodes, edges=edges, sources=sources, include_typing=include_typing, node_cls=Node, edge_cls=Edge ) @@ -686,13 +707,17 @@ def delete( Delete nodes and edges using the built-in data class >>> from cognite.client.data_classes.data_modeling import NodeId, EdgeId - >>> client.data_modeling.instances.delete(NodeId('mySpace', 'myNode'), EdgeId('mySpace', 'myEdge')) + >>> client.data_modeling.instances.delete( + ... NodeId("mySpace", "myNode"), EdgeId("mySpace", "myEdge") + ... ) Delete all nodes from a NodeList >>> from cognite.client.data_classes.data_modeling import NodeId, EdgeId - >>> my_view = client.data_modeling.views.retrieve(('mySpace', 'myView')) - >>> my_nodes = client.data_modeling.instances.list(instance_type='node', sources=my_view, limit=None) + >>> my_view = client.data_modeling.views.retrieve(("mySpace", "myView")) + >>> my_nodes = client.data_modeling.instances.list( + ... instance_type="node", sources=my_view, limit=None + ... 
) >>> client.data_modeling.instances.delete(nodes=my_nodes.as_ids()) """ identifiers = self._load_node_and_edge_ids(nodes, edges) @@ -801,21 +826,28 @@ def subscribe( Subscribe to a given query and print the changed data: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling.query import Query, QueryResult, NodeResultSetExpression, Select, SourceSelector + >>> from cognite.client.data_classes.data_modeling.query import ( + ... Query, + ... QueryResult, + ... NodeResultSetExpression, + ... Select, + ... SourceSelector, + ... ) >>> from cognite.client.data_classes.data_modeling import ViewId >>> from cognite.client.data_classes.filters import Range >>> >>> client = CogniteClient() >>> def just_print_the_result(result: QueryResult) -> None: ... print(result) - ... >>> view_id = ViewId("someSpace", "someView", "v1") >>> filter = Range(view_id.as_property_ref("createdYear"), lt=2023) >>> query = Query( ... with_={"work_orders": NodeResultSetExpression(filter=filter)}, - ... select={"work_orders": Select([SourceSelector(view_id, ["createdYear"])])} + ... select={"work_orders": Select([SourceSelector(view_id, ["createdYear"])])}, + ... ) + >>> subscription_context = client.data_modeling.instances.subscribe( + ... query, just_print_the_result ... ) - >>> subscription_context = client.data_modeling.instances.subscribe(query, just_print_the_result) >>> subscription_context.cancel() """ for result_set_expression in query.with_.values(): @@ -930,14 +962,24 @@ def apply( Create a new node without data: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling import EdgeApply, NodeOrEdgeData, NodeApply + >>> from cognite.client.data_classes.data_modeling import ( + ... EdgeApply, + ... NodeOrEdgeData, + ... NodeApply, + ... ) >>> client = CogniteClient() >>> node = NodeApply("mySpace", "myNodeId") >>> res = client.data_modeling.instances.apply(node) Create two nodes with data and a one-to-many edge between them - >>> from cognite.client.data_classes.data_modeling import ContainerId, EdgeApply, NodeOrEdgeData, NodeApply, ViewId + >>> from cognite.client.data_classes.data_modeling import ( + ... ContainerId, + ... EdgeApply, + ... NodeOrEdgeData, + ... NodeApply, + ... ViewId, + ... ) >>> work_order = NodeApply( ... space="industrial", ... external_id="work_order:123", ... sources=[ ... # Insert data through a view ... NodeOrEdgeData( ... ViewId("mySpace", "WorkOrderView", "v1"), - ... {"title": "Repair pump", "createdYear": 2023} + ... {"title": "Repair pump", "createdYear": 2023}, ... ) - ... ] + ... ], ... ) >>> pump = NodeApply( ... space="industrial", ... external_id="pump:456", ... sources=[ ... # Insert data directly to the container ... NodeOrEdgeData( ... ContainerId("mySpace", "PumpContainer"), - ... {"name": "Pump 456", "location": "Subsea"} + ... {"name": "Pump 456", "location": "Subsea"}, ... ) - ... ] + ... ], ... ) ... # This is a one-to-many edge, in this case from a work order to a pump >>> work_order_to_pump = EdgeApply( ... space="industrial", ... external_id="relation:work_order:123:pump:456", ... type=("industrial", "work-order-to-pump"), ... start_node=("industrial", "work_order:123"), ... end_node=("industrial", "pump:456"), ... ) >>> res = client.data_modeling.instances.apply([work_order, pump], [work_order_to_pump]) Create an edge with auto-created start and end nodes: >>> res = client.data_modeling.instances.apply( ... edges=work_order_to_pump, ... auto_create_start_nodes=True, ... auto_create_end_nodes=True, ... 
) Using helper function to create valid graphql timestamp for a datetime object: @@ -991,7 +1033,9 @@ def apply( >>> from cognite.client.utils import datetime_to_ms_iso_timestamp >>> from datetime import datetime, timezone >>> my_date = datetime(2020, 3, 14, 15, 9, 26, 535000, tzinfo=timezone.utc) - >>> data_model_timestamp = datetime_to_ms_iso_timestamp(my_date) # "2020-03-14T15:09:26.535+00:00" + >>> data_model_timestamp = datetime_to_ms_iso_timestamp( + ... my_date + ... ) # "2020-03-14T15:09:26.535+00:00" Create a typed node apply. Any property that you want to look up by a different attribute name, e.g. you want `my_node.birth_year` to return the data for property `birthYear`, must use the PropertyOptions as shown below. @@ -1005,9 +1049,9 @@ def apply( ... super().__init__(space, external_id, type=("sp_model_space", "Person")) ... self.name = name ... self.birth_year = birth_year + ... ... def get_source(self): ... return ViewId("sp_model_space", "Person", "v1") - ... >>> person = PersonApply("sp_date_space", "my_person", "John Doe", 1980) >>> res = client.data_modeling.instances.apply(nodes=person) """ @@ -1140,9 +1184,8 @@ def search( >>> from cognite.client.data_classes.data_modeling import ViewId >>> client = CogniteClient() >>> res = client.data_modeling.instances.search( - ... ViewId("mySpace", "PersonView", "v1"), - ... query="Arnold", - ... properties=["name"]) + ... ViewId("mySpace", "PersonView", "v1"), query="Arnold", properties=["name"] + ... ) Search for Quentin in the person view in the name property, but only born after 1970: @@ -1153,7 +1196,8 @@ def search( ... ViewId("mySpace", "PersonView", "v1"), ... query="Quentin", ... properties=["name"], - ... filter=born_after_1970) + ... filter=born_after_1970, + ... ) """ self._validate_filter(filter) @@ -1287,7 +1331,9 @@ def aggregate( >>> client = CogniteClient() >>> avg_run_time = aggs.Avg("runTimeMinutes") >>> view_id = ViewId("mySpace", "PumpView", "v1") - >>> res = client.data_modeling.instances.aggregate(view_id, avg_run_time, group_by="releaseYear") + >>> res = client.data_modeling.instances.aggregate( + ... view_id, avg_run_time, group_by="releaseYear" + ... ) """ if instance_type not in ("node", "edge"): @@ -1449,21 +1495,37 @@ def query(self, query: Query, include_typing: bool = False) -> QueryResult: Find work orders created before 2023 sorted by title: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling.query import Query, Select, NodeResultSetExpression, EdgeResultSetExpression, SourceSelector + >>> from cognite.client.data_classes.data_modeling.query import ( + ... Query, + ... Select, + ... NodeResultSetExpression, + ... EdgeResultSetExpression, + ... SourceSelector, + ... ) >>> from cognite.client.data_classes.filters import Range, Equals >>> from cognite.client.data_classes.data_modeling.ids import ViewId >>> client = CogniteClient() >>> work_order_id = ViewId("mySpace", "WorkOrderView", "v1") >>> pump_id = ViewId("mySpace", "PumpView", "v1") >>> query = Query( - ... with_ = { - ... "work_orders": NodeResultSetExpression(filter=Range(work_order_id.as_property_ref("createdYear"), lt=2023)), - ... "work_orders_to_pumps": EdgeResultSetExpression(from_="work_orders", filter=Equals(["edge", "type"], {"space": work_order_id.space, "externalId": "WorkOrder.asset"})), + ... with_={ + ... "work_orders": NodeResultSetExpression( + ... filter=Range(work_order_id.as_property_ref("createdYear"), lt=2023) + ... ), + ... "work_orders_to_pumps": EdgeResultSetExpression( + ... 
from_="work_orders", + ... filter=Equals( + ... ["edge", "type"], + ... {"space": work_order_id.space, "externalId": "WorkOrder.asset"}, + ... ), + ... ), ... "pumps": NodeResultSetExpression(from_="work_orders_to_pumps"), ... }, - ... select = { + ... select={ ... "pumps": Select( - ... [SourceSelector(pump_id, ["name"])], sort=[InstanceSort(pump_id.as_property_ref("name"))]), + ... [SourceSelector(pump_id, ["name"])], + ... sort=[InstanceSort(pump_id.as_property_ref("name"))], + ... ), ... }, ... ) >>> res = client.data_modeling.instances.query(query) @@ -1472,7 +1534,10 @@ def query(self, query: Query, include_typing: bool = False) -> QueryResult: a UnitReference or a UnitSystemReference. Note that in order for a property to be converted, they need to have a unit defined in the underlying container. - >>> from cognite.client.data_classes.data_modeling.data_types import UnitReference, UnitSystemReference + >>> from cognite.client.data_classes.data_modeling.data_types import ( + ... UnitReference, + ... UnitSystemReference, + ... ) >>> selected_source = SourceSelector( ... source=ViewId("my-space", "my-xid", "v1"), ... properties=["f32_prop1", "f32_prop2", "f64_prop1", "f64_prop2"], @@ -1509,21 +1574,37 @@ def sync(self, query: Query, include_typing: bool = False) -> QueryResult: >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.data_modeling.instances import InstanceSort - >>> from cognite.client.data_classes.data_modeling.query import Query, Select, NodeResultSetExpression, EdgeResultSetExpression, SourceSelector + >>> from cognite.client.data_classes.data_modeling.query import ( + ... Query, + ... Select, + ... NodeResultSetExpression, + ... EdgeResultSetExpression, + ... SourceSelector, + ... ) >>> from cognite.client.data_classes.filters import Range, Equals >>> from cognite.client.data_classes.data_modeling.ids import ViewId >>> client = CogniteClient() >>> work_order_id = ViewId("mySpace", "WorkOrderView", "v1") >>> pump_id = ViewId("mySpace", "PumpView", "v1") >>> query = Query( - ... with_ = { - ... "work_orders": NodeResultSetExpression(filter=Range(work_order_id.as_property_ref("createdYear"), lt=2023)), - ... "work_orders_to_pumps": EdgeResultSetExpression(from_="work_orders", filter=Equals(["edge", "type"], {"space": work_order_id.space, "externalId": "WorkOrder.asset"})), + ... with_={ + ... "work_orders": NodeResultSetExpression( + ... filter=Range(work_order_id.as_property_ref("createdYear"), lt=2023) + ... ), + ... "work_orders_to_pumps": EdgeResultSetExpression( + ... from_="work_orders", + ... filter=Equals( + ... ["edge", "type"], + ... {"space": work_order_id.space, "externalId": "WorkOrder.asset"}, + ... ), + ... ), ... "pumps": NodeResultSetExpression(from_="work_orders_to_pumps"), ... }, - ... select = { + ... select={ ... "pumps": Select( - ... [SourceSelector(pump_id, ["name"])], sort=[InstanceSort(pump_id.as_property_ref("name"))]), + ... [SourceSelector(pump_id, ["name"])], + ... sort=[InstanceSort(pump_id.as_property_ref("name"))], + ... ), ... }, ... ) >>> res = client.data_modeling.instances.sync(query) @@ -1637,25 +1718,28 @@ def list( >>> from cognite.client.data_classes.data_modeling import InstanceSort >>> property_sort = InstanceSort( - ... property=('space', 'view_xid/view_version', 'some_property'), + ... property=("space", "view_xid/view_version", "some_property"), ... direction="descending", - ... nulls_first=True) + ... nulls_first=True, + ... 
) >>> instance_list = client.data_modeling.instances.list(sort=property_sort) Iterate over instances (note: returns nodes): >>> for instance in client.data_modeling.instances: - ... instance # do something with the instance + ... instance # do something with the instance Iterate over chunks of instances to reduce memory load: >>> for instance_list in client.data_modeling.instances(chunk_size=100): - ... instance_list # do something with the instances + ... instance_list # do something with the instances List instances with a view as source: >>> from cognite.client.data_classes.data_modeling import ViewId - >>> instance_list = client.data_modeling.instances.list(sources=ViewId("mySpace", "myView", "v1")) + >>> instance_list = client.data_modeling.instances.list( + ... sources=ViewId("mySpace", "myView", "v1") + ... ) Convert instances to pandas DataFrame with expanded properties: diff --git a/cognite/client/_api/data_modeling/spaces.py b/cognite/client/_api/data_modeling/spaces.py index 748095122b..5ab1ca41c8 100644 --- a/cognite/client/_api/data_modeling/spaces.py +++ b/cognite/client/_api/data_modeling/spaces.py @@ -91,11 +91,13 @@ def retrieve(self, spaces: str | SequenceNotStr[str]) -> Space | SpaceList | Non >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.data_modeling.spaces.retrieve(spaces='mySpace') + >>> res = client.data_modeling.spaces.retrieve(spaces="mySpace") Get multiple spaces by id: - >>> res = client.data_modeling.spaces.retrieve(spaces=["MySpace", "MyAwesomeSpace", "MyOtherSpace"]) + >>> res = client.data_modeling.spaces.retrieve( + ... spaces=["MySpace", "MyAwesomeSpace", "MyOtherSpace"] + ... ) """ identifier = _load_space_identifier(spaces) @@ -157,12 +159,12 @@ def list( Iterate over spaces: >>> for space in client.data_modeling.spaces: - ... space # do something with the space + ... space # do something with the space Iterate over chunks of spaces to reduce memory load: >>> for space_list in client.data_modeling.spaces(chunk_size=2500): - ... space_list # do something with the spaces + ... space_list # do something with the spaces """ return self._list( list_cls=SpaceList, @@ -194,8 +196,12 @@ def apply(self, spaces: SpaceApply | Sequence[SpaceApply]) -> Space | SpaceList: >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.data_modeling import SpaceApply >>> client = CogniteClient() - >>> spaces = [SpaceApply(space="mySpace", description="My first space", name="My Space"), - ... SpaceApply(space="myOtherSpace", description="My second space", name="My Other Space")] + >>> spaces = [ + ... SpaceApply(space="mySpace", description="My first space", name="My Space"), + ... SpaceApply( + ... space="myOtherSpace", description="My second space", name="My Other Space" + ... ), + ... ] >>> res = client.data_modeling.spaces.apply(spaces) """ return self._create_multiple( diff --git a/cognite/client/_api/data_modeling/statistics.py b/cognite/client/_api/data_modeling/statistics.py index cc99aa0d51..571d9368f9 100644 --- a/cognite/client/_api/data_modeling/statistics.py +++ b/cognite/client/_api/data_modeling/statistics.py @@ -50,9 +50,7 @@ def retrieve( >>> result = client.data_modeling.statistics.spaces.retrieve("my-space") Fetch statistics for multiple spaces: - >>> res = client.data_modeling.statistics.spaces.retrieve( - ... ["my-space1", "my-space2"] - ... 
) + >>> res = client.data_modeling.statistics.spaces.retrieve(["my-space1", "my-space2"]) """ return self._retrieve_multiple( diff --git a/cognite/client/_api/data_modeling/views.py b/cognite/client/_api/data_modeling/views.py index c53f766257..99be237c12 100644 --- a/cognite/client/_api/data_modeling/views.py +++ b/cognite/client/_api/data_modeling/views.py @@ -123,7 +123,7 @@ def retrieve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.data_modeling.views.retrieve(('mySpace', 'myView', 'v1')) + >>> res = client.data_modeling.views.retrieve(("mySpace", "myView", "v1")) """ identifier = _load_identifier(ids, "view") @@ -152,7 +152,7 @@ def delete(self, ids: ViewIdentifier | Sequence[ViewIdentifier]) -> list[ViewId] >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.data_modeling.views.delete(('mySpace', 'myView', 'v1')) + >>> client.data_modeling.views.delete(("mySpace", "myView", "v1")) """ deleted_views = cast( list, @@ -196,12 +196,12 @@ def list( Iterate over views: >>> for view in client.data_modeling.views: - ... view # do something with the view + ... view # do something with the view Iterate over chunks of views to reduce memory load: >>> for view_list in client.data_modeling.views(chunk_size=10): - ... view_list # do something with the views + ... view_list # do something with the views """ filter_ = ViewFilter(space, include_inherited_properties, all_versions, include_global) @@ -229,7 +229,11 @@ def apply(self, view: ViewApply | Sequence[ViewApply]) -> View | ViewList: Create new views: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.data_modeling import ViewApply, MappedPropertyApply, ContainerId + >>> from cognite.client.data_classes.data_modeling import ( + ... ViewApply, + ... MappedPropertyApply, + ... ContainerId, + ... ) >>> client = CogniteClient() >>> views = [ ... ViewApply( @@ -241,8 +245,8 @@ def apply(self, view: ViewApply | Sequence[ViewApply]) -> View | ViewList: ... container=ContainerId("mySpace", "myContainer"), ... container_property_identifier="someProperty", ... ), - ... } - ... ) + ... }, + ... ) ... ] >>> res = client.data_modeling.views.apply(views) @@ -255,9 +259,11 @@ def apply(self, view: ViewApply | Sequence[ViewApply]) -> View | ViewList: ... MappedPropertyApply, ... MultiEdgeConnectionApply, ... ViewApply, - ... ViewId + ... ViewId, + ... ) + >>> work_order_for_asset = DirectRelationReference( + ... space="mySpace", external_id="work_order_for_asset" ... ) - >>> work_order_for_asset = DirectRelationReference(space="mySpace", external_id="work_order_for_asset") >>> work_order_view = ViewApply( ... space="mySpace", ... external_id="WorkOrder", @@ -274,7 +280,7 @@ def apply(self, view: ViewApply | Sequence[ViewApply]) -> View | ViewList: ... source=ViewId("mySpace", "Asset", "v1"), ... name="asset", ... ), - ... } + ... }, ... ) >>> asset_view = ViewApply( ... space="mySpace", @@ -293,7 +299,7 @@ def apply(self, view: ViewApply | Sequence[ViewApply]) -> View | ViewList: ... source=ViewId("mySpace", "WorkOrder", "v1"), ... name="work_orders", ... ), - ... } + ... }, ... 
) >>> res = client.data_modeling.views.apply([work_order_view, asset_view]) """ diff --git a/cognite/client/_api/data_sets.py b/cognite/client/_api/data_sets.py index 950411f192..928982ce95 100644 --- a/cognite/client/_api/data_sets.py +++ b/cognite/client/_api/data_sets.py @@ -182,7 +182,9 @@ def retrieve_multiple( Get data sets by external id: - >>> res = client.data_sets.retrieve_multiple(external_ids=["abc", "def"], ignore_unknown_ids=True) + >>> res = client.data_sets.retrieve_multiple( + ... external_ids=["abc", "def"], ignore_unknown_ids=True + ... ) """ identifiers = IdentifierSequence.load(ids=ids, external_ids=external_ids) return self._retrieve_multiple( @@ -250,7 +252,9 @@ def update( Perform a partial update on a data set, updating the description and removing a field from metadata: >>> from cognite.client.data_classes import DataSetUpdate - >>> my_update = DataSetUpdate(id=1).description.set("New description").metadata.remove(["key"]) + >>> my_update = ( + ... DataSetUpdate(id=1).description.set("New description").metadata.remove(["key"]) + ... ) >>> res = client.data_sets.update(my_update) """ return self._update_multiple( @@ -290,12 +294,12 @@ def list( Iterate over data sets: >>> for data_set in client.data_sets: - ... data_set # do something with the data_set + ... data_set # do something with the data_set Iterate over chunks of data sets to reduce memory load: >>> for data_set_list in client.data_sets(chunk_size=2500): - ... data_set_list # do something with the list + ... data_set_list # do something with the list """ filter = DataSetFilter( diff --git a/cognite/client/_api/datapoints.py b/cognite/client/_api/datapoints.py index 3943d85ac4..b9b3ac954a 100644 --- a/cognite/client/_api/datapoints.py +++ b/cognite/client/_api/datapoints.py @@ -591,7 +591,7 @@ def __call__( >>> queries = [ ... DatapointsQuery(id=123), ... DatapointsQuery(external_id="foo"), - ... DatapointsQuery(instance_id=NodeId("my-space", "my-ts-xid")) + ... DatapointsQuery(instance_id=NodeId("my-space", "my-ts-xid")), ... ] >>> for chunk_lst in client.time_series.data(query, return_arrays=False): ... if chunk_lst.get(id=123) is None: @@ -998,11 +998,12 @@ def retrieve( (milliseconds after epoch). In the below example, we fetch them using their external ids: >>> dps_lst = client.time_series.data.retrieve( - ... external_id=["foo", "bar"], - ... start=1514764800000, - ... end=1546300800000, - ... aggregates=["max", "average"], - ... granularity="1d") + ... external_id=["foo", "bar"], + ... start=1514764800000, + ... end=1546300800000, + ... aggregates=["max", "average"], + ... granularity="1d", + ... ) In the two code examples above, we have a `dps` object (an instance of ``Datapoints``), and a `dps_lst` object (an instance of ``DatapointsList``). On `dps`, which in this case contains raw datapoints, you may access the underlying data directly by using the `.value` attribute. This works for @@ -1039,7 +1040,8 @@ def retrieve( ... ], ... external_id=DatapointsQuery(external_id="foo", aggregates="max"), ... start="5d-ago", - ... granularity="1h") + ... granularity="1h", + ... ) Certain aggregates are very useful when they follow the calendar, for example electricity consumption per day, week, month or year. You may request such calendar-based aggregates in a specific timezone to make them even more useful: daylight savings (DST) @@ -1047,10 +1049,8 @@ def retrieve( can be used independently. To get monthly local aggregates in Oslo, Norway you can do: >>> dps = client.time_series.data.retrieve( - ... 
id=123, - ... aggregates="sum", - ... granularity="1month", - ... timezone="Europe/Oslo") + ... id=123, aggregates="sum", granularity="1month", timezone="Europe/Oslo" + ... ) When requesting multiple time series, an easy way to get the datapoints of a specific one is to use the `.get` method on the returned ``DatapointsList`` object, then specify if you want `id` or `external_id`. Note: If you fetch a time series @@ -1080,9 +1080,12 @@ def retrieve( ... external_id=[ ... DatapointsQuery(external_id=sensor_xid, start=ev.start_time, end=ev.end_time) ... for ev in periods - ... ]) + ... ], + ... ) >>> ts_44 = dps_lst.get(id=44) # Single ``Datapoints`` object - >>> ts_lst = dps_lst.get(external_id=sensor_xid) # List of ``len(periods)`` ``Datapoints`` objects + >>> ts_lst = dps_lst.get( + ... external_id=sensor_xid + ... ) # List of ``len(periods)`` ``Datapoints`` objects The API has an endpoint to :py:meth:`~DatapointsAPI.retrieve_latest`, i.e. "before", but not "after". Luckily, we can emulate that behaviour easily. Let's say we have a very dense time series and do not want to fetch all of the available raw data (or fetch less precise @@ -1091,10 +1094,14 @@ def retrieve( >>> import itertools >>> month_starts = [ ... datetime(year, month, 1, tzinfo=utc) - ... for year, month in itertools.product(range(2000, 2011), range(1, 13))] + ... for year, month in itertools.product(range(2000, 2011), range(1, 13)) + ... ] >>> dps_lst = client.time_series.data.retrieve( - ... external_id=[DatapointsQuery(external_id="foo", start=start) for start in month_starts], - ... limit=1) + ... external_id=[ + ... DatapointsQuery(external_id="foo", start=start) for start in month_starts + ... ], + ... limit=1, + ... ) To get *all* historic and future datapoints for a time series, e.g. to do a backup, you may want to import the two integer constants: ``MIN_TIMESTAMP_MS`` and ``MAX_TIMESTAMP_MS``, to make sure you do not miss any. **Performance warning**: This pattern of @@ -1102,28 +1109,30 @@ def retrieve( >>> from cognite.client.utils import MIN_TIMESTAMP_MS, MAX_TIMESTAMP_MS >>> dps_backup = client.time_series.data.retrieve( - ... id=123, - ... start=MIN_TIMESTAMP_MS, - ... end=MAX_TIMESTAMP_MS + 1) # end is exclusive + ... id=123, start=MIN_TIMESTAMP_MS, end=MAX_TIMESTAMP_MS + 1 + ... ) # end is exclusive If you have a time series with 'unit_external_id' set, you can use the 'target_unit' parameter to convert the datapoints to the desired unit. In the example below, we are converting temperature readings from a sensor measured and stored in Celsius, to Fahrenheit (we're assuming that the time series has e.g. ``unit_external_id="temperature:deg_c"`` ): >>> client.time_series.data.retrieve( - ... id=42, start="2w-ago", target_unit="temperature:deg_f") + ... id=42, start="2w-ago", target_unit="temperature:deg_f" + ... ) Or alternatively, you can use the 'target_unit_system' parameter to convert the datapoints to the desired unit system: >>> client.time_series.data.retrieve( - ... id=42, start="2w-ago", target_unit_system="Imperial") + ... id=42, start="2w-ago", target_unit_system="Imperial" + ... ) To retrieve status codes for a time series, pass ``include_status=True``. This is only possible for raw datapoint queries. You would typically also pass ``ignore_bad_datapoints=False`` to not hide all the datapoints that are marked as uncertain or bad, which is the API's default behaviour. You may also use ``treat_uncertain_as_bad`` to control how uncertain values are interpreted. 
>>> dps = client.time_series.data.retrieve( - ... id=42, include_status=True, ignore_bad_datapoints=False) + ... id=42, include_status=True, ignore_bad_datapoints=False + ... ) >>> dps.status_code # list of integer codes, e.g.: [0, 1073741824, 2147483648] >>> dps.status_symbol # list of symbolic representations, e.g. [Good, Uncertain, Bad] @@ -1351,7 +1360,8 @@ def retrieve_arrays( ... id=42, ... start=datetime(2020, 1, 1, tzinfo=timezone.utc), ... aggregates=["min", "max"], - ... granularity="7d") + ... granularity="7d", + ... ) >>> weekly_range = dps.max - dps.min Get up to 2 million raw datapoints for the last 48 hours for a noisy time series with external_id="ts-noisy", @@ -1359,9 +1369,8 @@ >>> import numpy as np >>> dps = client.time_series.data.retrieve_arrays( - ... external_id="ts-noisy", - ... start="2d-ago", - ... limit=2_000_000) + ... external_id="ts-noisy", start="2d-ago", limit=2_000_000 + ... ) >>> smooth = np.convolve(dps.value, np.ones(5) / 5) # doctest: +SKIP >>> smoother = np.convolve(dps.value, np.ones(20) / 20) # doctest: +SKIP @@ -1370,10 +1379,8 @@ >>> id_lst = [42, 43, 44] >>> dps_lst = client.time_series.data.retrieve_arrays( - ... id=id_lst, - ... start="2h-ago", - ... include_outside_points=True, - ... ignore_unknown_ids=True) + ... id=id_lst, start="2h-ago", include_outside_points=True, ignore_unknown_ids=True + ... ) >>> largest_gaps = [np.max(np.diff(dps.timestamp)) for dps in dps_lst] Get raw datapoints for a time series with external_id="bar" from the last 10 weeks, then convert to a ``pandas.Series`` @@ -1483,11 +1490,8 @@ def retrieve_dataframe( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> df = client.time_series.data.retrieve_dataframe( - ... id=12345, - ... start="2w-ago", - ... end="now", - ... limit=100, - ... column_names="id") + ... id=12345, start="2w-ago", end="now", limit=100, column_names="id" + ... ) Get the pandas dataframe with a uniform index (fixed spacing between points) of 1 day, for two time series with individually specified aggregates, from 1990 through 2020: >>> from datetime import datetime, timezone >>> from cognite.client.data_classes import DatapointsQuery >>> df = client.time_series.data.retrieve_dataframe( ... external_id=[ ... DatapointsQuery(external_id="foo", aggregates="discrete_variance"), - ... DatapointsQuery(external_id="bar", aggregates=["total_variation", "continuous_variance"]), + ... DatapointsQuery( + ... external_id="bar", aggregates=["total_variation", "continuous_variance"] + ... ), ... ], ... granularity="1d", ... start=datetime(1990, 1, 1, tzinfo=timezone.utc), ... end=datetime(2020, 12, 31, tzinfo=timezone.utc), - ... uniform_index=True) + ... uniform_index=True, + ... ) Get a pandas dataframe containing the 'average' aggregate for two time series using a 30-day granularity, starting Jan 1, 1970 all the way up to present, without having the aggregate name in the column names: >>> df = client.time_series.data.retrieve_dataframe( ... external_id=["foo", "bar"], ... aggregates="average", ... granularity="30d", - ... include_aggregate_name=False) + ... include_aggregate_name=False, + ... ) You may also use ``pandas.Timestamp`` to define start and end: >>> import pandas as pd >>> df = client.time_series.data.retrieve_dataframe( ... external_id="foo", ... start=pd.Timestamp("2023-01-01"), - ... end=pd.Timestamp("2023-02-01")) + ... end=pd.Timestamp("2023-02-01"), + ... 
) """ _, pd = local_import("numpy", "pandas") # Verify that deps are available or raise CogniteImportError query = _FullDatapointsQuery( @@ -1926,8 +1935,8 @@ def retrieve_latest( >>> from cognite.client.data_classes.data_modeling import NodeId >>> res = client.time_series.data.retrieve_latest( - ... external_id=["foo", "bar"], - ... instance_id=NodeId("my-space", "my-ts-xid")) + ... external_id=["foo", "bar"], instance_id=NodeId("my-space", "my-ts-xid") + ... ) You can also get the first datapoint before a specific time: @@ -1939,13 +1948,17 @@ def retrieve_latest( You can also retrieve the datapoint in a different unit or unit system: - >>> res = client.time_series.data.retrieve_latest(id=1, target_unit="temperature:deg_f")[0] + >>> res = client.time_series.data.retrieve_latest(id=1, target_unit="temperature:deg_f")[ + ... 0 + ... ] >>> res = client.time_series.data.retrieve_latest(id=1, target_unit_system="Imperial")[0] You may also pass an instance of LatestDatapointQuery: >>> from cognite.client.data_classes import LatestDatapointQuery - >>> res = client.time_series.data.retrieve_latest(id=LatestDatapointQuery(id=1, before=60_000))[0] + >>> res = client.time_series.data.retrieve_latest( + ... id=LatestDatapointQuery(id=1, before=60_000) + ... )[0] If you need the latest datapoint for multiple time series, simply give a list of ids. Note that we are using external ids here, but either will work: @@ -1963,16 +1976,23 @@ def retrieve_latest( >>> id_queries = [ ... 123, ... LatestDatapointQuery(id=456, before="1w-ago"), - ... LatestDatapointQuery(id=789, before=datetime(2018,1,1, tzinfo=timezone.utc)), - ... LatestDatapointQuery(id=987, target_unit="temperature:deg_f")] + ... LatestDatapointQuery(id=789, before=datetime(2018, 1, 1, tzinfo=timezone.utc)), + ... LatestDatapointQuery(id=987, target_unit="temperature:deg_f"), + ... ] >>> ext_id_queries = [ ... "foo", - ... LatestDatapointQuery(external_id="abc", before="3h-ago", target_unit_system="Imperial"), + ... LatestDatapointQuery( + ... external_id="abc", before="3h-ago", target_unit_system="Imperial" + ... ), ... LatestDatapointQuery(external_id="def", include_status=True), ... LatestDatapointQuery(external_id="ghi", treat_uncertain_as_bad=False), - ... LatestDatapointQuery(external_id="jkl", include_status=True, ignore_bad_datapoints=False)] + ... LatestDatapointQuery( + ... external_id="jkl", include_status=True, ignore_bad_datapoints=False + ... ), + ... ] >>> res = client.time_series.data.retrieve_latest( - ... id=id_queries, external_id=ext_id_queries) + ... id=id_queries, external_id=ext_id_queries + ... ) """ fetcher = RetrieveLatestDpsFetcher( id=id, @@ -2039,10 +2059,10 @@ def insert( >>> from datetime import datetime, timezone >>> client = CogniteClient() >>> datapoints = [ - ... (datetime(2018,1,1, tzinfo=timezone.utc), 1000), - ... (datetime(2018,1,2, tzinfo=timezone.utc), 2000, StatusCode.Good), - ... (datetime(2018,1,3, tzinfo=timezone.utc), 3000, StatusCode.Uncertain), - ... (datetime(2018,1,4, tzinfo=timezone.utc), None, StatusCode.Bad), + ... (datetime(2018, 1, 1, tzinfo=timezone.utc), 1000), + ... (datetime(2018, 1, 2, tzinfo=timezone.utc), 2000, StatusCode.Good), + ... (datetime(2018, 1, 3, tzinfo=timezone.utc), 3000, StatusCode.Uncertain), + ... (datetime(2018, 1, 4, tzinfo=timezone.utc), None, StatusCode.Bad), ... ] >>> client.time_series.data.insert(datapoints, id=1) @@ -2055,7 +2075,9 @@ def insert( ... (160000000000, 2000, 3145728), ... (170000000000, 2000, 2147483648), # Same as StatusCode.Bad ... 
] - >>> client.time_series.data.insert(datapoints, instance_id=NodeId("my-space", "my-ts-xid")) + >>> client.time_series.data.insert( + ... datapoints, instance_id=NodeId("my-space", "my-ts-xid") + ... ) Or they can be a list of dictionaries: @@ -2065,7 +2087,11 @@ def insert( ... {"timestamp": 160000000000, "value": 2000}, ... {"timestamp": 170000000000, "value": 3000, "status": {"code": 0}}, ... {"timestamp": 180000000000, "value": 4000, "status": {"symbol": "Uncertain"}}, - ... {"timestamp": 190000000000, "value": math.nan, "status": {"code": StatusCode.Bad, "symbol": "Bad"}}, + ... { + ... "timestamp": 190000000000, + ... "value": math.nan, + ... "status": {"code": StatusCode.Bad, "symbol": "Bad"}, + ... }, ... ] >>> client.time_series.data.insert(datapoints, external_id="abcd") @@ -2127,35 +2153,61 @@ def insert_multiple( >>> from datetime import datetime, timezone >>> client = CogniteClient() >>> to_insert = [ - ... {"id": 1, "datapoints": [ - ... (datetime(2018,1,1, tzinfo=timezone.utc), 1000), - ... (datetime(2018,1,2, tzinfo=timezone.utc), 2000, StatusCode.Good)], + ... { + ... "id": 1, + ... "datapoints": [ + ... (datetime(2018, 1, 1, tzinfo=timezone.utc), 1000), + ... (datetime(2018, 1, 2, tzinfo=timezone.utc), 2000, StatusCode.Good), + ... ], ... }, - ... {"external_id": "foo", "datapoints": [ - ... (datetime(2018,1,3, tzinfo=timezone.utc), 3000), - ... (datetime(2018,1,4, tzinfo=timezone.utc), 4000, StatusCode.Uncertain)], + ... { + ... "external_id": "foo", + ... "datapoints": [ + ... (datetime(2018, 1, 3, tzinfo=timezone.utc), 3000), + ... (datetime(2018, 1, 4, tzinfo=timezone.utc), 4000, StatusCode.Uncertain), + ... ], + ... }, + ... { + ... "instance_id": NodeId("my-space", "my-ts-xid"), + ... "datapoints": [ + ... (datetime(2018, 1, 5, tzinfo=timezone.utc), 5000), + ... (datetime(2018, 1, 6, tzinfo=timezone.utc), None, StatusCode.Bad), + ... ], ... }, - ... {"instance_id": NodeId("my-space", "my-ts-xid"), "datapoints": [ - ... (datetime(2018,1,5, tzinfo=timezone.utc), 5000), - ... (datetime(2018,1,6, tzinfo=timezone.utc), None, StatusCode.Bad)], - ... } ... ] Passing datapoints using the dictionary format with timestamp given in milliseconds since epoch: >>> import math >>> to_insert.append( - ... {"external_id": "bar", "datapoints": [ - ... {"timestamp": 170000000, "value": 7000}, - ... {"timestamp": 180000000, "value": 8000, "status": {"symbol": "Uncertain"}}, - ... {"timestamp": 190000000, "value": None, "status": {"code": StatusCode.Bad}}, - ... {"timestamp": 200000000, "value": math.inf, "status": {"code": StatusCode.Bad, "symbol": "Bad"}}, - ... ]}) + ... { + ... "external_id": "bar", + ... "datapoints": [ + ... {"timestamp": 170000000, "value": 7000}, + ... { + ... "timestamp": 180000000, + ... "value": 8000, + ... "status": {"symbol": "Uncertain"}, + ... }, + ... { + ... "timestamp": 190000000, + ... "value": None, + ... "status": {"code": StatusCode.Bad}, + ... }, + ... { + ... "timestamp": 200000000, + ... "value": math.inf, + ... "status": {"code": StatusCode.Bad, "symbol": "Bad"}, + ... }, + ... ], + ... } + ... ) If the Datapoints or DatapointsArray are fetched with status codes, these will be automatically used in the insert: >>> data_to_clone = client.time_series.data.retrieve( - ... external_id="bar", include_status=True, ignore_bad_datapoints=False) + ... external_id="bar", include_status=True, ignore_bad_datapoints=False + ... 
)
 >>> to_insert.append({"external_id": "bar-clone", "datapoints": data_to_clone})
 >>> client.time_series.data.insert_multiple(to_insert)
 """
@@ -2213,8 +2265,10 @@ def delete_ranges(self, ranges: list[dict[str, Any]]) -> None:
 >>> from cognite.client import CogniteClient
 >>> client = CogniteClient()
- >>> ranges = [{"id": 1, "start": "2d-ago", "end": "now"},
- ... {"external_id": "abc", "start": "2d-ago", "end": "2d-ahead"}]
+ >>> ranges = [
+ ... {"id": 1, "start": "2d-ago", "end": "now"},
+ ... {"external_id": "abc", "start": "2d-ago", "end": "2d-ahead"},
+ ... ]
 >>> client.time_series.data.delete_ranges(ranges)
 """
 valid_ranges = []
diff --git a/cognite/client/_api/datapoints_subscriptions.py b/cognite/client/_api/datapoints_subscriptions.py
index 1269e9a74e..9ae100a8e2 100644
--- a/cognite/client/_api/datapoints_subscriptions.py
+++ b/cognite/client/_api/datapoints_subscriptions.py
@@ -82,7 +82,8 @@ def create(self, subscription: DataPointSubscriptionWrite) -> DatapointSubscript
 ... external_id="my_subscription",
 ... name="My subscription",
 ... partition_count=1,
- ... time_series_ids=["myFistTimeSeries", "mySecondTimeSeries"])
+ ... time_series_ids=["myFirstTimeSeries", "mySecondTimeSeries"],
+ ... )
 >>> created = client.time_series.subscriptions.create(sub)
 Create a subscription with explicit time series IDs given as Node IDs
 >>> from cognite.client.data_classes.data_modeling import NodeId
 >>> sub = DataPointSubscriptionWrite(
 ... external_id="my_subscription",
 ... name="My subscription with Data Model Ids",
 ... partition_count=1,
- ... instance_ids=[NodeId("my_space", "myFistTimeSeries"), NodeId("my_space", "mySecondTimeSeries")])
+ ... instance_ids=[
+ ... NodeId("my_space", "myFirstTimeSeries"),
+ ... NodeId("my_space", "mySecondTimeSeries"),
+ ... ],
+ ... )
 >>> created = client.time_series.subscriptions.create(sub)
 Create a filter defined subscription for all numeric time series that are stepwise:
 >>> from cognite.client.data_classes import DataPointSubscriptionWrite
 >>> from cognite.client.data_classes import filters as flt
 >>> from cognite.client.data_classes.datapoints_subscriptions import (
+ ... DatapointSubscriptionProperty,
+ ... )
 >>> is_numeric_stepwise = flt.And(
 ... flt.Equals(DatapointSubscriptionProperty.is_string, False),
- ... flt.Equals(DatapointSubscriptionProperty.is_step, True))
+ ... flt.Equals(DatapointSubscriptionProperty.is_step, True),
+ ... )
 >>> sub = DataPointSubscriptionWrite(
 ... external_id="my_subscription",
 ... name="My subscription for numeric, stepwise time series",
 ... partition_count=1,
- ... filter=is_numeric_stepwise)
+ ... filter=is_numeric_stepwise,
+ ... )
 >>> created = client.time_series.subscriptions.create(sub)
 """
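# A minimal sketch of checking which time series a filter-defined subscription actually
# resolved to, reusing the "my_subscription" external id created above. It assumes the
# subscriptions API exposes ``list_member_time_series`` and that members carry an
# ``external_id`` attribute; neither appears in this patch, so verify both against your
# SDK version before relying on them.
from cognite.client import CogniteClient

client = CogniteClient()
# The filter is evaluated server-side; this lists the subscription's current members.
members = client.time_series.subscriptions.list_member_time_series("my_subscription")
print([member.external_id for member in members])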
@@ -234,7 +243,9 @@ def update(
 Add a time series to a preexisting subscription:
 >>> from cognite.client.data_classes import DataPointSubscriptionUpdate
- >>> update = DataPointSubscriptionUpdate("my_subscription").time_series_ids.add(["MyNewTimeSeriesExternalId"])
+ >>> update = DataPointSubscriptionUpdate("my_subscription").time_series_ids.add(
+ ... ["MyNewTimeSeriesExternalId"]
+ ... )
 >>> updated = client.time_series.subscriptions.update(update)
 """
@@ -302,7 +313,9 @@ def iterate_data(
 Iterate continuously over all changes to the subscription newer than 3 days:
- >>> for batch in client.time_series.subscriptions.iterate_data("my_subscription", "3d-ago"):
+ >>> for batch in client.time_series.subscriptions.iterate_data(
+ ... "my_subscription", "3d-ago"
+ ... ):
 ... pass # do something
 """
 current_partitions = [DatapointSubscriptionPartition(partition, cursor)]
diff --git a/cognite/client/_api/diagrams.py b/cognite/client/_api/diagrams.py
index d01441741f..ab7c4396eb 100644
--- a/cognite/client/_api/diagrams.py
+++ b/cognite/client/_api/diagrams.py
@@ -220,8 +220,9 @@ def detect(
 >>> client = CogniteClient()
 >>> detect_job = client.diagrams.detect(
 ... entities=[
- ... {"userDefinedField": "21PT1017","ignoredField": "AA11"},
- ... {"userDefinedField": "21PT1018"}],
+ ... {"userDefinedField": "21PT1017", "ignoredField": "AA11"},
+ ... {"userDefinedField": "21PT1018"},
+ ... ],
 ... search_field="userDefinedField",
 ... partial_match=True,
 ... min_tokens=2,
@@ -229,8 +230,9 @@
 ... file_external_ids=["Test1"],
 ... file_references=[
 ... FileReference(id=20, first_page=1, last_page=10),
- ... FileReference(external_id="ext_20", first_page=11, last_page=20)
- ... ])
+ ... FileReference(external_id="ext_20", first_page=11, last_page=20),
+ ... ],
+ ... )
 >>> result = detect_job.result
 >>> print(result)
@@ -266,13 +268,16 @@ def detect(
 To use beta configuration options you can use a dictionary or `DiagramDetectConfig` object for convenience:
- >>> from cognite.client.data_classes.contextualization import ConnectionFlags, DiagramDetectConfig
+ >>> from cognite.client.data_classes.contextualization import (
+ ... ConnectionFlags,
+ ... DiagramDetectConfig,
+ ... )
 >>> config = DiagramDetectConfig(
 ... remove_leading_zeros=True,
 ... connection_flags=ConnectionFlags(
 ... no_text_inbetween=True,
 ... natural_reading_order=True,
- ... )
+ ... ),
 ... )
 >>> job = client.diagrams.detect(entities=[{"name": "A1"}], file_id=123, config=config)
diff --git a/cognite/client/_api/documents.py b/cognite/client/_api/documents.py
index 1c367c5c3f..3428ac7eed 100644
--- a/cognite/client/_api/documents.py
+++ b/cognite/client/_api/documents.py
@@ -52,7 +52,9 @@ def download_page_as_png_bytes(self, id: int, page_number: int = 1) -> bytes:
 Download an image preview and display using IPython.display.Image (for example in a Jupyter Notebook):
 >>> from IPython.display import Image
- >>> binary_png = client.documents.previews.download_page_as_png_bytes(id=123, page_number=5)
+ >>> binary_png = client.documents.previews.download_page_as_png_bytes(
+ ... id=123, page_number=5
+ ... )
 >>> Image(binary_png)
 """
 res = self._do_request(
@@ -270,7 +272,7 @@ def aggregate_count(self, query: str | None = None, filter: Filter | dict[str, A
 >>> client.documents.aggregate_count(
 ... filter=filters.InAssetSubtree(
 ... property=DocumentProperty.asset_external_ids,
- ... values=['Plant_1', 'Plant_2'],
+ ... values=["Plant_1", "Plant_2"],
 ... )
 ... 
) """ @@ -311,7 +313,9 @@ def aggregate_cardinality_values( >>> from cognite.client.data_classes import filters >>> from cognite.client.data_classes.documents import DocumentProperty >>> is_plain_text = filters.Equals(DocumentProperty.mime_type, "text/plain") - >>> plain_text_author_count = client.documents.aggregate_cardinality_values(DocumentProperty.author, filter=is_plain_text) + >>> plain_text_author_count = client.documents.aggregate_cardinality_values( + ... DocumentProperty.author, filter=is_plain_text + ... ) Count the number of types of documents in your CDF project but exclude documents that start with "text": @@ -319,7 +323,9 @@ def aggregate_cardinality_values( >>> from cognite.client.data_classes import aggregations >>> agg = aggregations >>> is_not_text = agg.Not(agg.Prefix("text")) - >>> type_count_excluded_text = client.documents.aggregate_cardinality_values(DocumentProperty.type, aggregate_filter=is_not_text) + >>> type_count_excluded_text = client.documents.aggregate_cardinality_values( + ... DocumentProperty.type, aggregate_filter=is_not_text + ... ) """ self._validate_filter(filter) @@ -402,7 +408,9 @@ def aggregate_unique_values( >>> from cognite.client.data_classes import filters >>> from cognite.client.data_classes.documents import DocumentProperty >>> is_abc = filters.Prefix(DocumentProperty.external_id, "abc") - >>> result = client.documents.aggregate_unique_values(DocumentProperty.language, filter=is_abc) + >>> result = client.documents.aggregate_unique_values( + ... DocumentProperty.language, filter=is_abc + ... ) >>> unique_languages = result.unique Get the unique mime types with count of documents, but exclude mime types that start with text: @@ -411,7 +419,9 @@ def aggregate_unique_values( >>> from cognite.client.data_classes import aggregations >>> agg = aggregations >>> is_not_text = agg.Not(agg.Prefix("text")) - >>> result = client.documents.aggregate_unique_values(DocumentProperty.mime_type, aggregate_filter=is_not_text) + >>> result = client.documents.aggregate_unique_values( + ... DocumentProperty.mime_type, aggregate_filter=is_not_text + ... ) >>> unique_mime_types = result.unique """ self._validate_filter(filter) @@ -591,11 +601,15 @@ def search( >>> from cognite.client.data_classes.documents import DocumentProperty >>> from cognite.client.utils import timestamp_to_ms >>> is_plain_text = filters.Equals(DocumentProperty.mime_type, "text/plain") - >>> last_week = filters.Range(DocumentProperty.created_time, - ... gt=timestamp_to_ms(datetime.now() - timedelta(days=7))) - >>> documents = client.documents.search('"CPLEX Error 1217: No Solution exists."', + >>> last_week = filters.Range( + ... DocumentProperty.created_time, + ... gt=timestamp_to_ms(datetime.now() - timedelta(days=7)), + ... ) + >>> documents = client.documents.search( + ... '"CPLEX Error 1217: No Solution exists."', ... highlight=True, - ... filter=filters.And(is_plain_text, last_week)) + ... filter=filters.And(is_plain_text, last_week), + ... ) """ self._validate_filter(filter) results = [] @@ -661,7 +675,7 @@ def list( >>> from cognite.client.data_classes.documents import DocumentProperty >>> for document in client.documents: - ... print(document.name) + ... 
print(document.name) List all documents in your CDF project sorted by mime/type in descending order: diff --git a/cognite/client/_api/entity_matching.py b/cognite/client/_api/entity_matching.py index 3846722379..46178f6dd0 100644 --- a/cognite/client/_api/entity_matching.py +++ b/cognite/client/_api/entity_matching.py @@ -78,7 +78,7 @@ def retrieve_multiple( Examples: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> retrieved_models = client.entity_matching.retrieve_multiple([1,2,3]) + >>> retrieved_models = client.entity_matching.retrieve_multiple([1, 2, 3]) """ identifiers = IdentifierSequence.load(ids=ids, external_ids=external_ids) @@ -223,14 +223,16 @@ def fit( Example: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> sources = [{'id': 101, 'name': 'ChildAsset1', 'description': 'Child of ParentAsset1'}] - >>> targets = [{'id': 1, 'name': 'ParentAsset1', 'description': 'Parent to ChildAsset1'}] + >>> sources = [ + ... {"id": 101, "name": "ChildAsset1", "description": "Child of ParentAsset1"} + ... ] + >>> targets = [{"id": 1, "name": "ParentAsset1", "description": "Parent to ChildAsset1"}] >>> true_matches = [(1, 101)] >>> model = client.entity_matching.fit( ... sources=sources, ... targets=targets, ... true_matches=true_matches, - ... description="AssetMatchingJob1" + ... description="AssetMatchingJob1", ... ) """ @@ -291,15 +293,11 @@ def predict( Examples: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> sources = {'id': 101, 'name': 'ChildAsset1', 'description': 'Child of ParentAsset1'} - >>> targets = {'id': 1, 'name': 'ParentAsset1', 'description': 'Parent to ChildAsset1'} + >>> sources = {"id": 101, "name": "ChildAsset1", "description": "Child of ParentAsset1"} + >>> targets = {"id": 1, "name": "ParentAsset1", "description": "Parent to ChildAsset1"} >>> true_matches = [(1, 101)] >>> model = client.entity_matching.predict( - ... sources = sources, - ... targets = targets, - ... num_matches = 1, - ... score_threshold = 0.6, - ... id=1 + ... sources=sources, targets=targets, num_matches=1, score_threshold=0.6, id=1 ... ) """ @@ -334,10 +332,14 @@ def refit( Examples: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> sources = [{'id': 101, 'name': 'ChildAsset1', 'description': 'Child of ParentAsset1'}] - >>> targets = [{'id': 1, 'name': 'ParentAsset1', 'description': 'Parent to ChildAsset1'}] + >>> sources = [ + ... {"id": 101, "name": "ChildAsset1", "description": "Child of ParentAsset1"} + ... ] + >>> targets = [{"id": 1, "name": "ParentAsset1", "description": "Parent to ChildAsset1"}] >>> true_matches = [(1, 101)] - >>> model = client.entity_matching.refit(true_matches = true_matches, description="AssetMatchingJob1", id=1) + >>> model = client.entity_matching.refit( + ... true_matches=true_matches, description="AssetMatchingJob1", id=1 + ... 
) """ model = self.retrieve(id=id, external_id=external_id) assert model diff --git a/cognite/client/_api/events.py b/cognite/client/_api/events.py index 8a6a5462d6..2664402090 100644 --- a/cognite/client/_api/events.py +++ b/cognite/client/_api/events.py @@ -303,8 +303,12 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.events import EventProperty >>> from cognite.client.utils import timestamp_to_ms >>> from datetime import datetime - >>> is_after_2020 = filters.Range(EventProperty.start_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.events.aggregate_unique_values(EventProperty.type, advanced_filter=is_after_2020) + >>> is_after_2020 = filters.Range( + ... EventProperty.start_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.events.aggregate_unique_values( + ... EventProperty.type, advanced_filter=is_after_2020 + ... ) >>> print(result.unique) Get the unique types of events after 2020-01-01 in your CDF project, but exclude all types that start with @@ -314,8 +318,12 @@ def aggregate_unique_values( >>> from cognite.client.data_classes import aggregations >>> agg = aggregations >>> not_planned = agg.Not(agg.Prefix("planned")) - >>> is_after_2020 = filters.Range(EventProperty.start_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.events.aggregate_unique_values(EventProperty.type, advanced_filter=is_after_2020, aggregate_filter=not_planned) + >>> is_after_2020 = filters.Range( + ... EventProperty.start_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.events.aggregate_unique_values( + ... EventProperty.type, advanced_filter=is_after_2020, aggregate_filter=not_planned + ... ) >>> print(result.unique) """ @@ -399,7 +407,9 @@ def aggregate_cardinality_values( >>> from cognite.client.data_classes import filters >>> from cognite.client.data_classes.events import EventProperty >>> is_asset = filters.ContainsAny(EventProperty.asset_ids, 123) - >>> plain_text_author_count = client.events.aggregate_cardinality_values(EventProperty.type, advanced_filter=is_asset) + >>> plain_text_author_count = client.events.aggregate_cardinality_values( + ... EventProperty.type, advanced_filter=is_asset + ... ) """ self._validate_filter(advanced_filter) @@ -533,7 +543,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.events.delete(id=[1,2,3], external_id="3") + >>> client.events.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -582,7 +592,11 @@ def update( Perform a partial update on a event, updating the description and adding a new field to metadata: >>> from cognite.client.data_classes import EventUpdate - >>> my_update = EventUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) + >>> my_update = ( + ... EventUpdate(id=1) + ... .description.set("New description") + ... .metadata.add({"key": "value"}) + ... ) >>> res = client.events.update(my_update) """ return self._update_multiple( @@ -690,7 +704,8 @@ def filter( >>> is_workorder = filters.Prefix("external_id", "workorder") >>> has_failure = filters.Search("description", "failure") >>> res = client.events.filter( - ... filter=filters.And(is_workorder, has_failure), sort=("start_time", "desc")) + ... filter=filters.And(is_workorder, has_failure), sort=("start_time", "desc") + ... 
) Note that you can check the API documentation above to see which properties you can filter on with which filters. @@ -704,7 +719,8 @@ def filter( >>> has_failure = filters.Search(EventProperty.description, "failure") >>> res = client.events.filter( ... filter=filters.And(is_workorder, has_failure), - ... sort=(SortableEventProperty.start_time, "desc")) + ... sort=(SortableEventProperty.start_time, "desc"), + ... ) """ warnings.warn( f"{self.__class__.__name__}.filter() method is deprecated and will be removed in the next major version of the SDK. Please use the {self.__class__.__name__}.list() method with advanced_filter parameter instead.", @@ -792,12 +808,12 @@ def list( Iterate over events: >>> for event in client.events: - ... event # do something with the event + ... event # do something with the event Iterate over chunks of events to reduce memory load: >>> for event_list in client.events(chunk_size=2500): - ... event_list # do something with the events + ... event_list # do something with the events Using advanced filter, find all events that have a metadata key 'timezone' starting with 'Europe', and sort by external id ascending: @@ -816,17 +832,19 @@ def list( >>> from cognite.client.data_classes.events import EventProperty, SortableEventProperty >>> in_timezone = filters.Prefix(EventProperty.metadata_key("timezone"), "Europe") >>> res = client.events.list( - ... advanced_filter=in_timezone, - ... sort=(SortableEventProperty.external_id, "asc")) + ... advanced_filter=in_timezone, sort=(SortableEventProperty.external_id, "asc") + ... ) Combine filter and advanced filter: >>> from cognite.client.data_classes import filters >>> not_instrument_lvl5 = filters.And( - ... filters.ContainsAny("labels", ["Level5"]), - ... filters.Not(filters.ContainsAny("labels", ["Instrument"])) + ... filters.ContainsAny("labels", ["Level5"]), + ... filters.Not(filters.ContainsAny("labels", ["Instrument"])), + ... ) + >>> res = client.events.list( + ... asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5 ... ) - >>> res = client.events.list(asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5) """ asset_subtree_ids_processed = process_asset_subtree_ids(asset_subtree_ids, asset_subtree_external_ids) diff --git a/cognite/client/_api/extractionpipelines.py b/cognite/client/_api/extractionpipelines.py index fe3fdf530b..792328daa6 100644 --- a/cognite/client/_api/extractionpipelines.py +++ b/cognite/client/_api/extractionpipelines.py @@ -130,7 +130,9 @@ def retrieve_multiple( Get assets by external id: - >>> res = client.extraction_pipelines.retrieve_multiple(external_ids=["abc", "def"], ignore_unknown_ids=True) + >>> res = client.extraction_pipelines.retrieve_multiple( + ... external_ids=["abc", "def"], ignore_unknown_ids=True + ... ) """ identifiers = IdentifierSequence.load(ids=ids, external_ids=external_ids) return self._retrieve_multiple( @@ -219,7 +221,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.extraction_pipelines.delete(id=[1,2,3], external_id="3") + >>> client.extraction_pipelines.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple(identifiers=IdentifierSequence.load(id, external_id), wrap_ids=True, extra_body_fields={}) @@ -309,12 +311,16 @@ def list( Filter extraction pipeline runs on a given status: - >>> runs_list = client.extraction_pipelines.runs.list(external_id="test ext id", statuses=["seen"], limit=5) + >>> runs_list = client.extraction_pipelines.runs.list( + ... 
external_id="test ext id", statuses=["seen"], limit=5 + ... ) Get all failed pipeline runs in the last 24 hours for pipeline 'extId': >>> from cognite.client.data_classes import ExtractionPipelineRun - >>> res = client.extraction_pipelines.runs.list(external_id="extId", statuses="failure", created_time="24h-ago") + >>> res = client.extraction_pipelines.runs.list( + ... external_id="extId", statuses="failure", created_time="24h-ago" + ... ) """ if isinstance(created_time, str): created_time = TimestampRange(min=timestamp_to_ms(created_time)) @@ -375,7 +381,8 @@ def create( >>> from cognite.client.data_classes import ExtractionPipelineRunWrite >>> client = CogniteClient() >>> res = client.extraction_pipelines.runs.create( - ... ExtractionPipelineRunWrite(status="success", extpipe_external_id="extId")) + ... ExtractionPipelineRunWrite(status="success", extpipe_external_id="extId") + ... ) """ assert_type(run, "run", [ExtractionPipelineRunCore, Sequence]) return self._create_multiple( @@ -454,7 +461,9 @@ def create(self, config: ExtractionPipelineConfig | ExtractionPipelineConfigWrit >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import ExtractionPipelineConfigWrite >>> client = CogniteClient() - >>> res = client.extraction_pipelines.config.create(ExtractionPipelineConfigWrite(external_id="extId", config="my config contents")) + >>> res = client.extraction_pipelines.config.create( + ... ExtractionPipelineConfigWrite(external_id="extId", config="my config contents") + ... ) """ if isinstance(config, ExtractionPipelineConfig): config = config.as_write() diff --git a/cognite/client/_api/files.py b/cognite/client/_api/files.py index 28e84d69eb..4b5f3e5c11 100644 --- a/cognite/client/_api/files.py +++ b/cognite/client/_api/files.py @@ -332,7 +332,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.files.delete(id=[1,2,3], external_id="3") + >>> client.files.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -385,7 +385,9 @@ def update( Perform a partial update on file metadata, updating the source and adding a new field to metadata: >>> from cognite.client.data_classes import FileMetadataUpdate - >>> my_update = FileMetadataUpdate(id=1).source.set("new source").metadata.add({"key": "value"}) + >>> my_update = ( + ... FileMetadataUpdate(id=1).source.set("new source").metadata.add({"key": "value"}) + ... ) >>> res = client.files.update(my_update) Attach labels to a files: @@ -437,7 +439,9 @@ def search( Search for an asset with an attached label: >>> my_label_filter = LabelFilter(contains_all=["WELL LOG"]) - >>> res = client.assets.search(name="xyz",filter=FileMetadataFilter(labels=my_label_filter)) + >>> res = client.assets.search( + ... name="xyz", filter=FileMetadataFilter(labels=my_label_filter) + ... ) """ return self._search(list_cls=FileMetadataList, search={"name": name}, filter=filter or {}, limit=limit) @@ -529,13 +533,17 @@ def upload( Upload a file with a label: >>> from cognite.client.data_classes import Label - >>> res = client.files.upload("/path/to/file", name="my_file", labels=[Label(external_id="WELL LOG")]) + >>> res = client.files.upload( + ... "/path/to/file", name="my_file", labels=[Label(external_id="WELL LOG")] + ... 
) Upload a file with a geo_location: >>> from cognite.client.data_classes import GeoLocation, Geometry >>> geometry = Geometry(type="LineString", coordinates=[[30, 10], [10, 30], [40, 40]]) - >>> res = client.files.upload("/path/to/file", geo_location=GeoLocation(type="Feature", geometry=geometry)) + >>> res = client.files.upload( + ... "/path/to/file", geo_location=GeoLocation(type="Feature", geometry=geometry) + ... ) """ file_metadata = FileMetadata( @@ -612,14 +620,14 @@ def upload_content_bytes( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.files.upload_content_bytes( - ... b"some content", external_id="my_file_xid") + >>> res = client.files.upload_content_bytes(b"some content", external_id="my_file_xid") ...or by using instance_id: >>> from cognite.client.data_classes.data_modeling import NodeId >>> res = client.files.upload_content_bytes( - ... b"some content", instance_id=NodeId("my-space", "my_file_xid")) + ... b"some content", instance_id=NodeId("my-space", "my_file_xid") + ... ) """ identifiers = IdentifierSequence.load(external_ids=external_id, instance_ids=instance_id).as_singleton() @@ -709,7 +717,7 @@ def upload_bytes( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.files.upload_bytes(b"some content", name="my_file", asset_ids=[1,2,3]) + >>> res = client.files.upload_bytes(b"some content", name="my_file", asset_ids=[1, 2, 3]) """ if isinstance(content, str): content = content.encode("utf-8") @@ -879,7 +887,9 @@ def multipart_upload_content_session( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> with client.files.multipart_upload_content_session(external_id="external-id", parts=2) as session: + >>> with client.files.multipart_upload_content_session( + ... external_id="external-id", parts=2 + ... ) as session: ... # Note that the minimum chunk size is 5 MiB. ... session.upload_part(0, "hello" * 1_200_000) ... session.upload_part(1, " world") @@ -1045,11 +1055,13 @@ def download( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.files.download(directory="my_directory", id=[1,2,3], external_id=["abc", "def"]) + >>> client.files.download( + ... directory="my_directory", id=[1, 2, 3], external_id=["abc", "def"] + ... ) Download files by id to the current directory: - >>> client.files.download(directory=".", id=[1,2,3]) + >>> client.files.download(directory=".", id=[1, 2, 3]) """ identifiers = IdentifierSequence.load(ids=id, external_ids=external_id, instance_ids=instance_id) @@ -1279,12 +1291,12 @@ def list( Iterate over files metadata: >>> for file_metadata in client.files: - ... file_metadata # do something with the file metadata + ... file_metadata # do something with the file metadata Iterate over chunks of files metadata to reduce memory load: >>> for file_list in client.files(chunk_size=2500): - ... file_list # do something with the files + ... file_list # do something with the files Filter files based on labels: @@ -1295,7 +1307,9 @@ def list( Filter files based on geoLocation: >>> from cognite.client.data_classes import GeoLocationFilter, GeometryFilter - >>> my_geo_location_filter = GeoLocationFilter(relation="intersects", shape=GeometryFilter(type="Point", coordinates=[35,10])) + >>> my_geo_location_filter = GeoLocationFilter( + ... relation="intersects", shape=GeometryFilter(type="Point", coordinates=[35, 10]) + ... 
) >>> file_list = client.files.list(geo_location=my_geo_location_filter) """ asset_subtree_ids_processed = process_asset_subtree_ids(asset_subtree_ids, asset_subtree_external_ids) diff --git a/cognite/client/_api/functions.py b/cognite/client/_api/functions.py index f23035a5d6..8fde7dc71b 100644 --- a/cognite/client/_api/functions.py +++ b/cognite/client/_api/functions.py @@ -263,14 +263,14 @@ def create( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> function = client.functions.create( - ... name="myfunction", - ... folder="path/to/code", - ... function_path="path/to/function.py") + ... name="myfunction", folder="path/to/code", function_path="path/to/function.py" + ... ) Create function with file_id from already uploaded source code: >>> function = client.functions.create( - ... name="myfunction", file_id=123, function_path="path/to/function.py") + ... name="myfunction", file_id=123, function_path="path/to/function.py" + ... ) Create function with predefined function object named `handle`: @@ -1285,7 +1285,9 @@ def create( ... name="My schedule", ... function_id=123, ... cron_expression="*/5 * * * *", - ... client_credentials=ClientCredentials("my-client-id", os.environ["MY_CLIENT_SECRET"]), + ... client_credentials=ClientCredentials( + ... "my-client-id", os.environ["MY_CLIENT_SECRET"] + ... ), ... description="This schedule does magic stuff.", ... data={"magic": "stuff"}, ... ) @@ -1312,7 +1314,7 @@ def create( ... function_id=456, ... cron_expression="*/5 * * * *", ... description="A schedule just used for some temporary testing.", - ... nonce=session.nonce + ... nonce=session.nonce, ... ), ... ) @@ -1369,7 +1371,7 @@ def delete(self, id: int) -> None: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.functions.schedules.delete(id = 123) + >>> client.functions.schedules.delete(id=123) """ url = f"{self._RESOURCE_PATH}/delete" diff --git a/cognite/client/_api/geospatial.py b/cognite/client/_api/geospatial.py index b832737834..2337985180 100644 --- a/cognite/client/_api/geospatial.py +++ b/cognite/client/_api/geospatial.py @@ -134,7 +134,7 @@ def list_feature_types(self) -> FeatureTypeList: >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> for feature_type in client.geospatial.list_feature_types(): - ... feature_type # do something with the feature type definition + ... feature_type # do something with the feature type definition """ return self._list( list_cls=FeatureTypeList, @@ -193,26 +193,29 @@ def patch_feature_types(self, patch: FeatureTypePatch | Sequence[FeatureTypePatc >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> res = client.geospatial.patch_feature_types( - ... patch=FeatureTypePatch( - ... external_id="wells", - ... property_patches=Patches(add={"altitude": {"type": "DOUBLE"}}), - ... search_spec_patches=Patches( - ... add={ - ... "altitude_idx": {"properties": ["altitude"]}, - ... "composite_idx": {"properties": ["location", "altitude"]} - ... } - ... ) - ... ) + ... patch=FeatureTypePatch( + ... external_id="wells", + ... property_patches=Patches(add={"altitude": {"type": "DOUBLE"}}), + ... search_spec_patches=Patches( + ... add={ + ... "altitude_idx": {"properties": ["altitude"]}, + ... "composite_idx": {"properties": ["location", "altitude"]}, + ... } + ... ), + ... ) ... ) Add an additional index to an existing property >>> from cognite.client.data_classes.geospatial import Patches >>> res = client.geospatial.patch_feature_types( - ... 
patch=FeatureTypePatch( + ... patch=FeatureTypePatch( ... external_id="wells", - ... search_spec_patches=Patches(add={"location_idx": {"properties": ["location"]}}) - ... )) + ... search_spec_patches=Patches( + ... add={"location_idx": {"properties": ["location"]}} + ... ), + ... ) + ... ) """ if isinstance(patch, FeatureTypePatch): @@ -278,18 +281,16 @@ def create_features( ... external_id="my_feature_type", ... properties={ ... "location": {"type": "POINT", "srid": 4326}, - ... "temperature": {"type": "DOUBLE"} - ... } + ... "temperature": {"type": "DOUBLE"}, + ... }, ... ) ... ] >>> res = client.geospatial.create_feature_types(feature_types) >>> res = client.geospatial.create_features( ... feature_type_external_id="my_feature_type", ... feature=FeatureWrite( - ... external_id="my_feature", - ... location={"wkt": "POINT(1 1)"}, - ... temperature=12.4 - ... ) + ... external_id="my_feature", location={"wkt": "POINT(1 1)"}, temperature=12.4 + ... ), ... ) """ if chunk_size is not None and (chunk_size < 1 or chunk_size > self._CREATE_LIMIT): @@ -326,8 +327,7 @@ def delete_features( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> client.geospatial.delete_features( - ... feature_type_external_id="my_feature_type", - ... external_id=my_feature + ... feature_type_external_id="my_feature_type", external_id=my_feature ... ) """ resource_path = self._feature_resource_path(feature_type_external_id) @@ -375,8 +375,7 @@ def retrieve_features( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> client.geospatial.retrieve_features( - ... feature_type_external_id="my_feature_type", - ... external_id="my_feature" + ... feature_type_external_id="my_feature_type", external_id="my_feature" ... ) """ resource_path = self._feature_resource_path(feature_type_external_id) @@ -416,11 +415,11 @@ def update_features( >>> client = CogniteClient() >>> my_feature = client.geospatial.create_features( ... feature_type_external_id="my_feature_type", - ... feature=Feature(external_id="my_feature", temperature=12.4) + ... feature=Feature(external_id="my_feature", temperature=12.4), ... ) >>> my_updated_feature = client.geospatial.update_features( ... feature_type_external_id="my_feature_type", - ... feature=Feature(external_id="my_feature", temperature=6.237) + ... feature=Feature(external_id="my_feature", temperature=6.237), ... ) """ if chunk_size is not None and (chunk_size < 1 or chunk_size > self._UPDATE_LIMIT): @@ -478,14 +477,12 @@ def list_features( >>> my_feature = client.geospatial.create_features( ... feature_type_external_id=my_feature_type, ... feature=Feature( - ... external_id="my_feature", - ... temperature=12.4, - ... location={"wkt": "POINT(0 1)"} - ... ) + ... external_id="my_feature", temperature=12.4, location={"wkt": "POINT(0 1)"} + ... ), ... ) >>> res = client.geospatial.list_features( ... feature_type_external_id="my_feature_type", - ... filter={"range": {"property": "temperature", "gt": 12.0}} + ... filter={"range": {"property": "temperature", "gt": 12.0}}, ... ) >>> for f in res: ... # do something with the features @@ -495,17 +492,19 @@ def list_features( >>> res = client.geospatial.list_features( ... feature_type_external_id=my_feature_type, ... filter={}, - ... properties={"temperature": {}, "pressure": {}} + ... properties={"temperature": {}, "pressure": {}}, ... ) Search for features with spatial filters: >>> res = client.geospatial.list_features( ... feature_type_external_id=my_feature_type, - ... filter={"stWithin": { - ... 
"property": "location", - ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"} - ... }} + ... filter={ + ... "stWithin": { + ... "property": "location", + ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"}, + ... } + ... }, ... ) """ return self._list( @@ -562,14 +561,12 @@ def search_features( >>> my_feature = client.geospatial.create_features( ... feature_type_external_id=my_feature_type, ... feature=Feature( - ... external_id="my_feature", - ... temperature=12.4, - ... location={"wkt": "POINT(0 1)"} - ... ) + ... external_id="my_feature", temperature=12.4, location={"wkt": "POINT(0 1)"} + ... ), ... ) >>> res = client.geospatial.search_features( ... feature_type_external_id="my_feature_type", - ... filter={"range": {"property": "temperature", "gt": 12.0}} + ... filter={"range": {"property": "temperature", "gt": 12.0}}, ... ) >>> for f in res: ... # do something with the features @@ -579,7 +576,7 @@ def search_features( >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, ... filter={}, - ... properties={"temperature": {}, "pressure": {}} + ... properties={"temperature": {}, "pressure": {}}, ... ) Search for features and do CRS conversion on an output property: @@ -587,7 +584,7 @@ def search_features( >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, ... filter={}, - ... properties={"location": {"srid": 3995}} + ... properties={"location": {"srid": 3995}}, ... ) Search for features and order results: @@ -595,43 +592,51 @@ def search_features( >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, ... filter={}, - ... order_by=[ - ... OrderSpec("temperature", "ASC"), - ... OrderSpec("pressure", "DESC")] + ... order_by=[OrderSpec("temperature", "ASC"), OrderSpec("pressure", "DESC")], ... ) Search for features with spatial filters: >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, - ... filter={"stWithin": { - ... "property": "location", - ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"} - ... }} + ... filter={ + ... "stWithin": { + ... "property": "location", + ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"}, + ... } + ... }, ... ) Combining multiple filters: >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, - ... filter={"and": [ - ... {"range": {"property": "temperature", "gt": 12.0}}, - ... {"stWithin": { - ... "property": "location", - ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"} - ... }} - ... ]} + ... filter={ + ... "and": [ + ... {"range": {"property": "temperature", "gt": 12.0}}, + ... { + ... "stWithin": { + ... "property": "location", + ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"}, + ... } + ... }, + ... ] + ... }, ... ) >>> res = client.geospatial.search_features( ... feature_type_external_id=my_feature_type, - ... filter={"or": [ - ... {"range": {"property": "temperature", "gt": 12.0}}, - ... {"stWithin": { - ... "property": "location", - ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"} - ... }} - ... ]} + ... filter={ + ... "or": [ + ... {"range": {"property": "temperature", "gt": 12.0}}, + ... { + ... "stWithin": { + ... "property": "location", + ... "value": {"wkt": "POLYGON((0 0, 0 1, 1 1, 0 0))"}, + ... } + ... }, + ... ] + ... }, ... 
) """ resource_path = self._feature_resource_path(feature_type_external_id) + "/search" @@ -682,11 +687,11 @@ def stream_features( >>> client = CogniteClient() >>> my_feature = client.geospatial.create_features( ... feature_type_external_id="my_feature_type", - ... feature=Feature(external_id="my_feature", temperature=12.4) + ... feature=Feature(external_id="my_feature", temperature=12.4), ... ) >>> features = client.geospatial.stream_features( ... feature_type_external_id="my_feature_type", - ... filter={"range": {"property": "temperature", "gt": 12.0}} + ... filter={"range": {"property": "temperature", "gt": 12.0}}, ... ) >>> for f in features: ... # do something with the features @@ -696,7 +701,7 @@ def stream_features( >>> features = client.geospatial.stream_features( ... feature_type_external_id="my_feature_type", ... filter={}, - ... properties={"temperature": {}, "pressure": {}} + ... properties={"temperature": {}, "pressure": {}}, ... ) >>> for f in features: ... # do something with the features @@ -745,16 +750,17 @@ def aggregate_features( >>> client = CogniteClient() >>> my_feature = client.geospatial.create_features( ... feature_type_external_id="my_feature_type", - ... feature=Feature(external_id="my_feature", temperature=12.4) + ... feature=Feature(external_id="my_feature", temperature=12.4), ... ) >>> res = client.geospatial.aggregate_features( ... feature_type_external_id="my_feature_type", ... filter={"range": {"property": "temperature", "gt": 12.0}}, ... group_by=["category"], ... order_by=[OrderSpec("category", "ASC")], - ... output={"min_temperature": {"min": {"property": "temperature"}}, - ... "max_volume": {"max": {"property": "volume"}} - ... } + ... output={ + ... "min_temperature": {"min": {"property": "temperature"}}, + ... "max_volume": {"max": {"property": "volume"}}, + ... }, ... ) >>> for a in res: ... # loop over aggregates in different groups @@ -845,37 +851,37 @@ def create_coordinate_reference_systems( >>> from cognite.client.data_classes import CoordinateReferenceSystemWrite >>> client = CogniteClient() >>> custom_crs = CoordinateReferenceSystemWrite( - ... srid = 121111, + ... srid=121111, ... wkt=( - ... 'PROJCS["NTF (Paris) / Lambert zone II",' - ... ' GEOGCS["NTF (Paris)",' - ... ' DATUM["Nouvelle_Triangulation_Francaise_Paris",' - ... ' SPHEROID["Clarke 1880 (IGN)",6378249.2,293.4660212936265,' - ... ' AUTHORITY["EPSG","7011"]],' - ... ' TOWGS84[-168,-60,320,0,0,0,0],' - ... ' AUTHORITY["EPSG","6807"]],' - ... ' PRIMEM["Paris",2.33722917,' - ... ' AUTHORITY["EPSG","8903"]],' - ... ' UNIT["grad",0.01570796326794897,' - ... ' AUTHORITY["EPSG","9105"]], ' - ... ' AUTHORITY["EPSG","4807"]],' - ... ' PROJECTION["Lambert_Conformal_Conic_1SP"],' - ... ' PARAMETER["latitude_of_origin",52],' - ... ' PARAMETER["central_meridian",0],' - ... ' PARAMETER["scale_factor",0.99987742],' - ... ' PARAMETER["false_easting",600000],' - ... ' PARAMETER["false_northing",2200000],' - ... ' UNIT["metre",1,' - ... ' AUTHORITY["EPSG","9001"]],' - ... ' AXIS["X",EAST],' - ... ' AXIS["Y",NORTH],' - ... ' AUTHORITY["EPSG","27572"]]' + ... 'PROJCS["NTF (Paris) / Lambert zone II",' + ... ' GEOGCS["NTF (Paris)",' + ... ' DATUM["Nouvelle_Triangulation_Francaise_Paris",' + ... ' SPHEROID["Clarke 1880 (IGN)",6378249.2,293.4660212936265,' + ... ' AUTHORITY["EPSG","7011"]],' + ... " TOWGS84[-168,-60,320,0,0,0,0]," + ... ' AUTHORITY["EPSG","6807"]],' + ... ' PRIMEM["Paris",2.33722917,' + ... ' AUTHORITY["EPSG","8903"]],' + ... ' UNIT["grad",0.01570796326794897,' + ... 
' AUTHORITY["EPSG","9105"]], ' + ... ' AUTHORITY["EPSG","4807"]],' + ... ' PROJECTION["Lambert_Conformal_Conic_1SP"],' + ... ' PARAMETER["latitude_of_origin",52],' + ... ' PARAMETER["central_meridian",0],' + ... ' PARAMETER["scale_factor",0.99987742],' + ... ' PARAMETER["false_easting",600000],' + ... ' PARAMETER["false_northing",2200000],' + ... ' UNIT["metre",1,' + ... ' AUTHORITY["EPSG","9001"]],' + ... ' AXIS["X",EAST],' + ... ' AXIS["Y",NORTH],' + ... ' AUTHORITY["EPSG","27572"]]' ... ), ... proj_string=( - ... '+proj=lcc +lat_1=46.8 +lat_0=46.8 +lon_0=0 +k_0=0.99987742 ' - ... '+x_0=600000 +y_0=2200000 +a=6378249.2 +b=6356515 ' - ... '+towgs84=-168,-60,320,0,0,0,0 +pm=paris +units=m +no_defs' - ... ) + ... "+proj=lcc +lat_1=46.8 +lat_0=46.8 +lon_0=0 +k_0=0.99987742 " + ... "+x_0=600000 +y_0=2200000 +a=6378249.2 +b=6356515 " + ... "+towgs84=-168,-60,320,0,0,0,0 +pm=paris +units=m +no_defs" + ... ), ... ) >>> crs = client.geospatial.create_coordinate_reference_systems(custom_crs) """ @@ -952,8 +958,14 @@ def put_raster( >>> feature_type = ... >>> feature = ... >>> raster_property_name = ... - >>> metadata = client.geospatial.put_raster(feature_type.external_id, feature.external_id, - ... raster_property_name, "XYZ", 3857, file) + >>> metadata = client.geospatial.put_raster( + ... feature_type.external_id, + ... feature.external_id, + ... raster_property_name, + ... "XYZ", + ... 3857, + ... file, + ... ) """ query_params = f"format={raster_format}&srid={raster_srid}" if allow_crs_transformation: @@ -999,7 +1011,9 @@ def delete_raster( >>> feature_type = ... >>> feature = ... >>> raster_property_name = ... - >>> client.geospatial.delete_raster(feature_type.external_id, feature.external_id, raster_property_name) + >>> client.geospatial.delete_raster( + ... feature_type.external_id, feature.external_id, raster_property_name + ... ) """ url_path = ( self._raster_resource_path(feature_type_external_id, feature_external_id, raster_property_name) + "/delete" @@ -1047,8 +1061,13 @@ def get_raster( >>> feature_type = ... >>> feature = ... >>> raster_property_name = ... - >>> raster_data = client.geospatial.get_raster(feature_type.external_id, feature.external_id, - ... raster_property_name, "XYZ", {"SIGNIFICANT_DIGITS": "4"}) + >>> raster_data = client.geospatial.get_raster( + ... feature_type.external_id, + ... feature.external_id, + ... raster_property_name, + ... "XYZ", + ... {"SIGNIFICANT_DIGITS": "4"}, + ... ) """ url_path = self._raster_resource_path(feature_type_external_id, feature_external_id, raster_property_name) res = self._do_request( @@ -1083,10 +1102,18 @@ def compute( Compute the transformation of an ewkt geometry from one SRID to another: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.geospatial import GeospatialGeometryTransformComputeFunction, GeospatialGeometryValueComputeFunction + >>> from cognite.client.data_classes.geospatial import ( + ... GeospatialGeometryTransformComputeFunction, + ... GeospatialGeometryValueComputeFunction, + ... ) >>> client = CogniteClient() - >>> compute_function = GeospatialGeometryTransformComputeFunction(GeospatialGeometryValueComputeFunction("SRID=4326;POLYGON((0 0,10 0,10 10,0 10,0 0))"), srid=23031) - >>> compute_result = client.geospatial.compute(output = {"output": compute_function}) + >>> compute_function = GeospatialGeometryTransformComputeFunction( + ... GeospatialGeometryValueComputeFunction( + ... "SRID=4326;POLYGON((0 0,10 0,10 10,0 10,0 0))" + ... ), + ... srid=23031, + ... 
)
+ >>> compute_result = client.geospatial.compute(output={"output": compute_function})
 """
 res = self._do_request(
 "POST",
diff --git a/cognite/client/_api/hosted_extractors/destinations.py b/cognite/client/_api/hosted_extractors/destinations.py
index 77ce16d0bb..ca81fdcdf4 100644
--- a/cognite/client/_api/hosted_extractors/destinations.py
+++ b/cognite/client/_api/hosted_extractors/destinations.py
@@ -107,11 +107,13 @@ def retrieve(
 >>> from cognite.client import CogniteClient
 >>> client = CogniteClient()
- >>> res = client.hosted_extractors.destinations.retrieve('myDestination')
+ >>> res = client.hosted_extractors.destinations.retrieve("myDestination")
 Get multiple destinations by id:
- >>> res = client.hosted_extractors.destinations.retrieve(["myDestination", "myDestination2"], ignore_unknown_ids=True)
+ >>> res = client.hosted_extractors.destinations.retrieve(
+ ... ["myDestination", "myDestination2"], ignore_unknown_ids=True
+ ... )
 """
 self._warning.warn()
@@ -176,9 +178,16 @@ def create(self, items: DestinationWrite | Sequence[DestinationWrite]) -> Destin
 Create new destination:
 >>> from cognite.client import CogniteClient
- >>> from cognite.client.data_classes.hosted_extractors import DestinationWrite, SessionWrite
+ >>> from cognite.client.data_classes.hosted_extractors import (
+ ... DestinationWrite,
+ ... SessionWrite,
+ ... )
 >>> client = CogniteClient()
- >>> destination = DestinationWrite(external_id='my_dest', credentials=SessionWrite("my_nonce"), target_data_set_id=123)
+ >>> destination = DestinationWrite(
+ ... external_id="my_dest",
+ ... credentials=SessionWrite("my_nonce"),
+ ... target_data_set_id=123,
+ ... )
 >>> res = client.hosted_extractors.destinations.create(destination)
 """
 self._warning.warn()
@@ -225,7 +234,7 @@ def update(
 >>> from cognite.client import CogniteClient
 >>> from cognite.client.data_classes.hosted_extractors import DestinationUpdate
 >>> client = CogniteClient()
- >>> destination = DestinationUpdate('my_dest').target_data_set_id.set(123)
+ >>> destination = DestinationUpdate("my_dest").target_data_set_id.set(123)
 >>> res = client.hosted_extractors.destinations.update(destination)
 """
 self._warning.warn()
@@ -261,12 +270,12 @@ def list(
 Iterate over destinations:
 >>> for destination in client.hosted_extractors.destinations:
- ... destination # do something with the destination
+ ... destination # do something with the destination
 Iterate over chunks of destinations to reduce memory load:
 >>> for destination_list in client.hosted_extractors.destinations(chunk_size=25):
- ... destination_list # do something with the destinationss
+ ... destination_list # do something with the destinations
 """
 self._warning.warn()
 return self._list(
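# A minimal sketch of how the "my_nonce" placeholder in the DestinationWrite example above
# might be produced in practice. It assumes session-based credentials obtained through
# ``client.iam.sessions.create()`` (the same session/nonce pattern the function-schedule
# examples earlier in this patch rely on); treat it as illustrative, not as the only
# supported flow.
from cognite.client import CogniteClient
from cognite.client.data_classes.hosted_extractors import DestinationWrite, SessionWrite

client = CogniteClient()
session = client.iam.sessions.create()  # new session bound to the client's credentials
destination = DestinationWrite(
    external_id="my_dest",
    credentials=SessionWrite(session.nonce),  # hand over the one-time nonce, not a token
    target_data_set_id=123,
)
client.hosted_extractors.destinations.create(destination)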
) """ self._warning.warn() @@ -178,7 +180,9 @@ def create(self, items: JobWrite | Sequence[JobWrite]) -> Job | JobList: >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import EventHubSourceWrite >>> client = CogniteClient() - >>> job_write = EventHubSourceWrite('my_event_hub', 'http://myeventhub.com', "My EventHub", 'my_key', 'my_value') + >>> job_write = EventHubSourceWrite( + ... "my_event_hub", "http://myeventhub.com", "My EventHub", "my_key", "my_value" + ... ) >>> job = client.hosted_extractors.jobs.create(job_write) """ self._warning.warn() @@ -225,7 +229,7 @@ def update( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import EventHubSourceUpdate >>> client = CogniteClient() - >>> job = EventHubSourceUpdate('my_event_hub').event_hub_name.set("My Updated EventHub") + >>> job = EventHubSourceUpdate("my_event_hub").event_hub_name.set("My Updated EventHub") >>> updated_job = client.hosted_extractors.jobs.update(job) """ self._warning.warn() @@ -261,12 +265,12 @@ def list( Iterate over jobs: >>> for job in client.hosted_extractors.jobs: - ... job # do something with the job + ... job # do something with the job Iterate over chunks of jobs to reduce memory load: >>> for job_list in client.hosted_extractors.jobs(chunk_size=25): - ... job_list # do something with the jobs + ... job_list # do something with the jobs """ self._warning.warn() return self._list( diff --git a/cognite/client/_api/hosted_extractors/mappings.py b/cognite/client/_api/hosted_extractors/mappings.py index f7d2b02ba5..c6d2fa84ad 100644 --- a/cognite/client/_api/hosted_extractors/mappings.py +++ b/cognite/client/_api/hosted_extractors/mappings.py @@ -107,11 +107,13 @@ def retrieve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.hosted_extractors.mappings.retrieve('myMapping') + >>> res = client.hosted_extractors.mappings.retrieve("myMapping") Get multiple mappings by id: - >>> res = client.hosted_extractors.mappings.retrieve(["myMapping", "myMapping2"], ignore_unknown_ids=True) + >>> res = client.hosted_extractors.mappings.retrieve( + ... ["myMapping", "myMapping2"], ignore_unknown_ids=True + ... ) """ self._warning.warn() @@ -177,7 +179,12 @@ def create(self, items: MappingWrite | Sequence[MappingWrite]) -> Mapping | Mapp >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import MappingWrite, CustomMapping >>> client = CogniteClient() - >>> mapping = MappingWrite(external_id="my_mapping", mapping=CustomMapping("some expression"), published=True, input="json") + >>> mapping = MappingWrite( + ... external_id="my_mapping", + ... mapping=CustomMapping("some expression"), + ... published=True, + ... input="json", + ... ) >>> res = client.hosted_extractors.mappings.create(mapping) """ self._warning.warn() @@ -213,7 +220,7 @@ def update( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import MappingUpdate >>> client = CogniteClient() - >>> mapping = MappingUpdate('my_mapping').published.set(False) + >>> mapping = MappingUpdate("my_mapping").published.set(False) >>> res = client.hosted_extractors.mappings.update(mapping) """ self._warning.warn() @@ -248,12 +255,12 @@ def list( Iterate over mappings: >>> for mapping in client.hosted_extractors.mappings: - ... mapping # do something with the mapping + ... 
mapping # do something with the mapping Iterate over chunks of mappings to reduce memory load: >>> for mapping_list in client.hosted_extractors.mappings(chunk_size=25): - ... mapping_list # do something with the mappings + ... mapping_list # do something with the mappings """ self._warning.warn() return self._list( diff --git a/cognite/client/_api/hosted_extractors/sources.py b/cognite/client/_api/hosted_extractors/sources.py index 6ef02474d5..60cbaea437 100644 --- a/cognite/client/_api/hosted_extractors/sources.py +++ b/cognite/client/_api/hosted_extractors/sources.py @@ -102,11 +102,13 @@ def retrieve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.hosted_extractors.sources.retrieve('myMQTTSource') + >>> res = client.hosted_extractors.sources.retrieve("myMQTTSource") Get multiple sources by id: - >>> res = client.hosted_extractors.sources.retrieve(["myMQTTSource", "MyEventHubSource"], ignore_unknown_ids=True) + >>> res = client.hosted_extractors.sources.retrieve( + ... ["myMQTTSource", "MyEventHubSource"], ignore_unknown_ids=True + ... ) """ self._warning.warn() @@ -171,7 +173,9 @@ def create(self, items: SourceWrite | Sequence[SourceWrite]) -> Source | SourceL >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import EventHubSourceWrite >>> client = CogniteClient() - >>> source = EventHubSourceWrite('my_event_hub', 'http://myeventhub.com', "My EventHub", 'my_key', 'my_value') + >>> source = EventHubSourceWrite( + ... "my_event_hub", "http://myeventhub.com", "My EventHub", "my_key", "my_value" + ... ) >>> res = client.hosted_extractors.sources.create(source) """ self._warning.warn() @@ -218,7 +222,9 @@ def update( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.hosted_extractors import EventHubSourceUpdate >>> client = CogniteClient() - >>> source = EventHubSourceUpdate('my_event_hub').event_hub_name.set("My Updated EventHub") + >>> source = EventHubSourceUpdate("my_event_hub").event_hub_name.set( + ... "My Updated EventHub" + ... ) >>> res = client.hosted_extractors.sources.update(source) """ self._warning.warn() @@ -267,12 +273,12 @@ def list( Iterate over sources: >>> for source in client.hosted_extractors.sources: - ... source # do something with the source + ... source # do something with the source Iterate over chunks of sources to reduce memory load: >>> for source_list in client.hosted_extractors.sources(chunk_size=25): - ... source_list # do something with the sources + ... source_list # do something with the sources """ self._warning.warn() return self._list( diff --git a/cognite/client/_api/iam.py b/cognite/client/_api/iam.py index 9fbb280be6..33ef4fb66c 100644 --- a/cognite/client/_api/iam.py +++ b/cognite/client/_api/iam.py @@ -145,26 +145,28 @@ def compare_capabilities( >>> to_check = [ ... AssetsAcl( ... actions=[AssetsAcl.Action.Read, AssetsAcl.Action.Write], - ... scope=AssetsAcl.Scope.All()), + ... scope=AssetsAcl.Scope.All(), + ... ), ... EventsAcl( ... actions=[EventsAcl.Action.Write], ... scope=EventsAcl.Scope.DataSet([123]), - ... )] + ... ), + ... ] >>> missing = client.iam.compare_capabilities( - ... existing_capabilities=my_groups, - ... desired_capabilities=to_check) + ... existing_capabilities=my_groups, desired_capabilities=to_check + ... ) >>> if missing: ... pass # do something Capabilities can also be passed as dictionaries: >>> to_check = [ - ... {'assetsAcl': {'actions': ['READ', 'WRITE'], 'scope': {'all': {}}}}, - ... 
{'eventsAcl': {'actions': ['WRITE'], 'scope': {'datasetScope': {'ids': [123]}}}}, + ... {"assetsAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}}, + ... {"eventsAcl": {"actions": ["WRITE"], "scope": {"datasetScope": {"ids": [123]}}}}, ... ] >>> missing = client.iam.compare_capabilities( - ... existing_capabilities=my_groups, - ... desired_capabilities=to_check) + ... existing_capabilities=my_groups, desired_capabilities=to_check + ... ) >>> if missing: ... pass # do something Capabilities can also be passed as dictionaries: >>> to_check = [ - ... {'assetsAcl': {'actions': ['READ', 'WRITE'], 'scope': {'all': {}}}}, - ... {'eventsAcl': {'actions': ['WRITE'], 'scope': {'datasetScope': {'ids': [123]}}}}, + ... {"assetsAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}}, + ... {"eventsAcl": {"actions": ["WRITE"], "scope": {"datasetScope": {"ids": [123]}}}}, ... ] >>> missing = client.iam.compare_capabilities( - ... existing_capabilities=my_groups, - ... desired_capabilities=to_check) + ... existing_capabilities=my_groups, desired_capabilities=to_check + ... ) You may also load capabilities from a dict-representation directly into ACLs (access-control list) by using ``Capability.load``. This will also ensure that the capabilities are valid. @@ -246,19 +248,21 @@ def verify_capabilities( >>> to_check = [ ... AssetsAcl( ... actions=[AssetsAcl.Action.Read, AssetsAcl.Action.Write], - ... scope=AssetsAcl.Scope.All()), + ... scope=AssetsAcl.Scope.All(), + ... ), ... EventsAcl( ... actions=[EventsAcl.Action.Write], ... scope=EventsAcl.Scope.DataSet([123]), - ... )] + ... ), + ... ] >>> if missing := client.iam.verify_capabilities(to_check): ... pass # do something Capabilities can also be passed as dictionaries: >>> to_check = [ - ... {'assetsAcl': {'actions': ['READ', 'WRITE'], 'scope': {'all': {}}}}, - ... {'eventsAcl': {'actions': ['WRITE'], 'scope': {'datasetScope': {'ids': [123]}}}}, + ... {"assetsAcl": {"actions": ["READ", "WRITE"], "scope": {"all": {}}}}, + ... {"eventsAcl": {"actions": ["WRITE"], "scope": {"datasetScope": {"ids": [123]}}}}, ... ] >>> missing = client.iam.verify_capabilities(to_check) @@ -361,7 +365,8 @@ def create(self, group: Group | GroupWrite | Sequence[Group] | Sequence[GroupWri >>> client = CogniteClient() >>> my_capabilities = [ ... AssetsAcl([AssetsAcl.Action.Read], AssetsAcl.Scope.All()), - ... EventsAcl([EventsAcl.Action.Write], EventsAcl.Scope.DataSet([123, 456]))] + ... EventsAcl([EventsAcl.Action.Write], EventsAcl.Scope.DataSet([123, 456])), + ... ] >>> my_group = GroupWrite(name="My Group", capabilities=my_capabilities) >>> res = client.iam.groups.create(my_group) @@ -372,7 +377,8 @@ def create(self, group: Group | GroupWrite | Sequence[Group] | Sequence[GroupWri >>> grp = GroupWrite( ... name="Externally managed group", ... capabilities=my_capabilities, - ... source_id="b7c9a5a4...") + ... source_id="b7c9a5a4...", + ... ) >>> res = client.iam.groups.create(grp) Create a group whose members are managed internally by Cognite. This group may grant access through @@ -389,7 +395,8 @@ def create(self, group: Group | GroupWrite | Sequence[Group] | Sequence[GroupWri >>> user_list_group = GroupWrite( ... name="Specific users only", ... capabilities=my_capabilities, - ... members=["XRsSD1k3mTIKG", "M0SxY6bM9Jl"]) + ... members=["XRsSD1k3mTIKG", "M0SxY6bM9Jl"], + ... ) >>> res = client.iam.groups.create([user_list_group, all_group]) Capabilities are often defined in configuration files, like YAML or JSON. You may convert capabilities @@ -398,8 +405,8 @@ def create(self, group: Group | GroupWrite | Sequence[Group] | Sequence[GroupWri >>> from cognite.client.data_classes.capabilities import Capability >>> unparsed_capabilities = [ - ... {'assetsAcl': {'actions': ['READ', 'WRITE'], 'scope': {'all': {}}}}, - ... {'eventsAcl': {'actions': ['WRITE'], 'scope': {'datasetScope': {'ids': [123]}}}}, ... 
] >>> acls = [Capability.load(cap) for cap in unparsed_capabilities] >>> group = GroupWrite(name="Another group", capabilities=acls) diff --git a/cognite/client/_api/labels.py b/cognite/client/_api/labels.py index 11a59f3038..95cc3212a3 100644 --- a/cognite/client/_api/labels.py +++ b/cognite/client/_api/labels.py @@ -162,12 +162,12 @@ def list( Iterate over label definitions: >>> for label in client.labels: - ... label # do something with the label definition + ... label # do something with the label definition Iterate over chunks of label definitions to reduce memory load: >>> for label_list in client.labels(chunk_size=2500): - ... label_list # do something with the type definitions + ... label_list # do something with the label definitions """ data_set_ids_processed = process_data_set_ids(data_set_ids, data_set_external_ids) @@ -205,7 +205,12 @@ def create( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import LabelDefinitionWrite >>> client = CogniteClient() - >>> labels = [LabelDefinitionWrite(external_id="ROTATING_EQUIPMENT", name="Rotating equipment"), LabelDefinitionWrite(external_id="PUMP", name="pump")] + >>> labels = [ + ... LabelDefinitionWrite( + ... external_id="ROTATING_EQUIPMENT", name="Rotating equipment" + ... ), + ... LabelDefinitionWrite(external_id="PUMP", name="pump"), + ... ] >>> res = client.labels.create(labels) """ if isinstance(label, Sequence): diff --git a/cognite/client/_api/postgres_gateway/tables.py b/cognite/client/_api/postgres_gateway/tables.py index b93a19b433..d7249c7c0f 100644 --- a/cognite/client/_api/postgres_gateway/tables.py +++ b/cognite/client/_api/postgres_gateway/tables.py @@ -97,8 +97,11 @@ def create(self, username: str, items: pg.TableWrite | Sequence[pg.TableWrite]) >>> from cognite.client.data_classes.data_modeling import ViewId >>> from cognite.client.data_classes.postgres_gateway import ViewTableWrite >>> client = CogniteClient() - >>> table = ViewTableWrite(tablename="myCustom", options=ViewId(space="mySpace", external_id="myExternalId", version="v1")) - >>> res = client.postgres_gateway.tables.create("myUserName",table) + >>> table = ViewTableWrite( + ... tablename="myCustom", + ... options=ViewId(space="mySpace", external_id="myExternalId", version="v1"), + ... ) + >>> res = client.postgres_gateway.tables.create("myUserName", table) """ return self._create_multiple( @@ -141,11 +144,13 @@ def retrieve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.postgres_gateway.tables.retrieve("myUserName", 'myCustom') + >>> res = client.postgres_gateway.tables.retrieve("myUserName", "myCustom") Get multiple custom tables by id: - >>> res = client.postgres_gateway.tables.retrieve("myUserName", ["myCustom", "myCustom2"]) + >>> res = client.postgres_gateway.tables.retrieve( + ... "myUserName", ["myCustom", "myCustom2"] + ... ) """ return self._retrieve_multiple( @@ -211,12 +216,12 @@ def list( Iterate over tables: >>> for table in client.postgres_gateway.tables: - ... table # do something with the custom table + ... table # do something with the custom table Iterate over chunks of tables to reduce memory load: >>> for table_list in client.postgres_gateway.tables(chunk_size=25): - ... 
table_list # do something with the custom tables """ return self._list( diff --git a/cognite/client/_api/postgres_gateway/users.py b/cognite/client/_api/postgres_gateway/users.py index 3c320ab677..3f24c9e543 100644 --- a/cognite/client/_api/postgres_gateway/users.py +++ b/cognite/client/_api/postgres_gateway/users.py @@ -104,12 +104,15 @@ def create(self, user: UserWrite | Sequence[UserWrite]) -> UserCreated | UserCre >>> import os >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.postgres_gateway import UserWrite, SessionCredentials + >>> from cognite.client.data_classes.postgres_gateway import ( + ... UserWrite, + ... SessionCredentials, + ... ) >>> from cognite.client.data_classes import ClientCredentials >>> client = CogniteClient() >>> session = client.iam.sessions.create( ... ClientCredentials(os.environ["IDP_CLIENT_ID"], os.environ["IDP_CLIENT_SECRET"]), - ... session_type="CLIENT_CREDENTIALS" + ... session_type="CLIENT_CREDENTIALS", ... ) >>> user = UserWrite(credentials=SessionCredentials(nonce=session.nonce)) >>> res = client.postgres_gateway.users.create(user) @@ -145,14 +148,19 @@ def update(self, items: UserUpdate | UserWrite | Sequence[UserUpdate | UserWrite >>> import os >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.postgres_gateway import UserUpdate, SessionCredentials + >>> from cognite.client.data_classes.postgres_gateway import ( + ... UserUpdate, + ... SessionCredentials, + ... ) >>> from cognite.client.data_classes import ClientCredentials >>> client = CogniteClient() >>> session = client.iam.sessions.create( ... ClientCredentials(os.environ["IDP_CLIENT_ID"], os.environ["IDP_CLIENT_SECRET"]), - ... session_type="CLIENT_CREDENTIALS" + ... session_type="CLIENT_CREDENTIALS", + ... ) + >>> update = UserUpdate("myUser").credentials.set( + ... SessionCredentials(nonce=session.nonce) ... ) - >>> update = UserUpdate('myUser').credentials.set(SessionCredentials(nonce=session.nonce)) >>> res = client.postgres_gateway.users.update(update) """ @@ -248,12 +256,12 @@ def list(self, limit: int = DEFAULT_LIMIT_READ) -> UserList: Iterate over users: >>> for user in client.postgres_gateway.users: - ... user # do something with the user + ... user # do something with the user Iterate over chunks of users to reduce memory load: >>> for user_list in client.postgres_gateway.users(chunk_size=25): - ... user_list # do something with the users + ... user_list # do something with the users """ return self._list( diff --git a/cognite/client/_api/raw.py b/cognite/client/_api/raw.py index a9afb814cd..f1753c7ec9 100644 --- a/cognite/client/_api/raw.py +++ b/cognite/client/_api/raw.py @@ -157,12 +157,12 @@ def list(self, limit: int | None = DEFAULT_LIMIT_READ) -> DatabaseList: Iterate over databases: >>> for db in client.raw.databases: - ... db # do something with the db + ... db # do something with the db Iterate over chunks of databases to reduce memory load: >>> for db_list in client.raw.databases(chunk_size=2500): - ... db_list # do something with the dbs + ... db_list # do something with the dbs """ return self._list(list_cls=DatabaseList, resource_cls=Database, method="GET", limit=limit) @@ -307,12 +307,12 @@ def list(self, db_name: str, limit: int | None = DEFAULT_LIMIT_READ) -> raw.Tabl Iterate over tables: >>> for table in client.raw.tables(db_name="db1"): - ... table # do something with the table + ... 
table # do something with the table Iterate over chunks of tables to reduce memory load: >>> for table_list in client.raw.tables(db_name="db1", chunk_size=2500): - ... table_list # do something with the tables + ... table_list # do something with the tables """ tb = self._list( list_cls=raw.TableList, @@ -529,8 +529,10 @@ def insert( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import RowWrite >>> client = CogniteClient() - >>> rows = [RowWrite(key="r1", columns={"col1": "val1", "col2": "val1"}), - ... RowWrite(key="r2", columns={"col1": "val2", "col2": "val2"})] + >>> rows = [ + ... RowWrite(key="r1", columns={"col1": "val1", "col2": "val1"}), + ... RowWrite(key="r2", columns={"col1": "val2", "col2": "val2"}), + ... ] >>> client.raw.rows.insert("db1", "table1", rows) You may also insert a dictionary directly: @@ -582,10 +584,9 @@ def insert_dataframe( >>> >>> client = CogniteClient() >>> df = pd.DataFrame( - ... {"col-a": [1, 3, None], "col-b": [2, -1, 9]}, - ... index=["r1", "r2", "r3"]) - >>> res = client.raw.rows.insert_dataframe( - ... "db1", "table1", df, dropna=True) + ... {"col-a": [1, 3, None], "col-b": [2, -1, 9]}, index=["r1", "r2", "r3"] + ... ) + >>> res = client.raw.rows.insert_dataframe("db1", "table1", df, dropna=True) """ if not dataframe.index.is_unique: raise ValueError("Dataframe index is not unique (used for the row keys)") @@ -818,7 +819,7 @@ def list( Iterate through all rows one-by-one to reduce memory load (no concurrency used): - >>> for row in client.raw.rows("db1", "t1", columns=["col1","col2"]): + >>> for row in client.raw.rows("db1", "t1", columns=["col1", "col2"]): ... val1 = row["col1"] # You may access the data directly ... val2 = row.get("col2") # ...or use '.get' when keys can be missing diff --git a/cognite/client/_api/relationships.py b/cognite/client/_api/relationships.py index 08e4578be4..3950ecfa9f 100644 --- a/cognite/client/_api/relationships.py +++ b/cognite/client/_api/relationships.py @@ -277,7 +277,7 @@ def list( Iterate over relationships: >>> for relationship in client.relationships: - ... relationship # do something with the relationship + ... relationship # do something with the relationship """ data_set_ids_processed = process_data_set_ids(data_set_ids, data_set_external_ids) filter = RelationshipFilter( @@ -382,7 +382,7 @@ def create( ... target_external_id="target_ext_id", ... target_type="event", ... confidence=0.1, - ... data_set_id=1234 + ... data_set_id=1234, ... ) >>> flowrel2 = Relationship( ... external_id="flow_2", @@ -391,9 +391,9 @@ def create( ... target_external_id="target_ext_id", ... target_type="event", ... confidence=0.1, - ... data_set_id=1234 + ... data_set_id=1234, ... ) - >>> res = client.relationships.create([flowrel1,flowrel2]) + >>> res = client.relationships.create([flowrel1, flowrel2]) """ assert_type(relationship, "relationship", [RelationshipCore, Sequence]) if isinstance(relationship, Sequence): @@ -444,7 +444,11 @@ def update( Perform a partial update on a relationship, setting a source_external_id and a confidence: >>> from cognite.client.data_classes import RelationshipUpdate - >>> my_update = RelationshipUpdate(external_id="flow_1").source_external_id.set("alternate_source").confidence.set(0.97) + >>> my_update = ( + ... RelationshipUpdate(external_id="flow_1") + ... .source_external_id.set("alternate_source") + ... .confidence.set(0.97) + ... 
) >>> res1 = client.relationships.update(my_update) >>> # Remove an already set optional field like so >>> another_update = RelationshipUpdate(external_id="flow_1").confidence.set(None) @@ -503,8 +507,12 @@ def upsert( >>> client = CogniteClient() >>> existing_relationship = client.relationships.retrieve(id=1) >>> existing_relationship.description = "New description" - >>> new_relationship = Relationship(external_id="new_relationship", source_external_id="new_source") - >>> res = client.relationships.upsert([existing_relationship, new_relationship], mode="replace") + >>> new_relationship = Relationship( + ... external_id="new_relationship", source_external_id="new_source" + ... ) + >>> res = client.relationships.upsert( + ... [existing_relationship, new_relationship], mode="replace" + ... ) """ return self._upsert_multiple( item, @@ -527,7 +535,7 @@ def delete(self, external_id: str | SequenceNotStr[str], ignore_unknown_ids: boo >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.relationships.delete(external_id=["a","b"]) + >>> client.relationships.delete(external_id=["a", "b"]) """ self._delete_multiple( identifiers=IdentifierSequence.load(external_ids=external_id), diff --git a/cognite/client/_api/sequences.py b/cognite/client/_api/sequences.py index 6ebb56130d..242c58f621 100644 --- a/cognite/client/_api/sequences.py +++ b/cognite/client/_api/sequences.py @@ -331,7 +331,9 @@ def aggregate_cardinality_values( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.sequences import SequenceProperty >>> client = CogniteClient() - >>> count = client.sequences.aggregate_cardinality_values(SequenceProperty.metadata_key("efficiency")) + >>> count = client.sequences.aggregate_cardinality_values( + ... SequenceProperty.metadata_key("efficiency") + ... ) Count the number of timezones (metadata key) for sequences with the word "critical" in the description in your CDF project, but exclude timezones from america: @@ -343,7 +345,8 @@ def aggregate_cardinality_values( >>> timezone_count = client.sequences.aggregate_cardinality_values( ... SequenceProperty.metadata_key("timezone"), ... advanced_filter=is_critical, - ... aggregate_filter=not_america) + ... aggregate_filter=not_america, + ... ) """ self._validate_filter(advanced_filter) return self._advanced_aggregate( @@ -417,7 +420,9 @@ def aggregate_unique_values( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.sequences import SequenceProperty >>> client = CogniteClient() - >>> result = client.sequences.aggregate_unique_values(SequenceProperty.metadata_key("timezone")) + >>> result = client.sequences.aggregate_unique_values( + ... SequenceProperty.metadata_key("timezone") + ... ) >>> print(result.unique) Get the different metadata keys with count used for sequences created after 2020-01-01 in your CDF project: @@ -426,8 +431,12 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.sequences import SequenceProperty >>> from cognite.client.utils import timestamp_to_ms >>> from datetime import datetime - >>> created_after_2020 = filters.Range(SequenceProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.sequences.aggregate_unique_values(SequenceProperty.metadata, advanced_filter=created_after_2020) + >>> created_after_2020 = filters.Range( + ... SequenceProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.sequences.aggregate_unique_values( + ... 
SequenceProperty.metadata, advanced_filter=created_after_2020 + ... ) >>> print(result.unique) Get the different metadata keys with count for sequences updated after 2020-01-01 in your CDF project, but exclude all metadata keys that @@ -436,8 +445,14 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.sequences import SequenceProperty >>> from cognite.client.data_classes import aggregations as aggs, filters >>> not_test = aggs.Not(aggs.Prefix("test")) - >>> created_after_2020 = filters.Range(SequenceProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.sequences.aggregate_unique_values(SequenceProperty.metadata, advanced_filter=created_after_2020, aggregate_filter=not_test) + >>> created_after_2020 = filters.Range( + ... SequenceProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.sequences.aggregate_unique_values( + ... SequenceProperty.metadata, + ... advanced_filter=created_after_2020, + ... aggregate_filter=not_test, + ... ) >>> print(result.unique) """ self._validate_filter(advanced_filter) @@ -521,14 +536,20 @@ def create( >>> from cognite.client.data_classes import SequenceWrite, SequenceColumnWrite >>> client = CogniteClient() >>> column_def = [ - ... SequenceColumnWrite(value_type="String", external_id="user", description="some description"), - ... SequenceColumnWrite(value_type="Double", external_id="amount") + ... SequenceColumnWrite( + ... value_type="String", external_id="user", description="some description" + ... ), + ... SequenceColumnWrite(value_type="Double", external_id="amount"), ... ] - >>> seq = client.sequences.create(SequenceWrite(external_id="my_sequence", columns=column_def)) + >>> seq = client.sequences.create( + ... SequenceWrite(external_id="my_sequence", columns=column_def) + ... ) Create a new sequence with the same column specifications as an existing sequence: - >>> seq2 = client.sequences.create(SequenceWrite(external_id="my_copied_sequence", columns=column_def)) + >>> seq2 = client.sequences.create( + ... SequenceWrite(external_id="my_copied_sequence", columns=column_def) + ... ) """ assert_type(sequence, "sequences", [typing.Sequence, SequenceCore]) @@ -556,7 +577,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.sequences.delete(id=[1,2,3], external_id="3") + >>> client.sequences.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -605,7 +626,11 @@ def update( Perform a partial update on a sequence, updating the description and adding a new field to metadata: >>> from cognite.client.data_classes import SequenceUpdate - >>> my_update = SequenceUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) + >>> my_update = ( + ... SequenceUpdate(id=1) + ... .description.set("New description") + ... .metadata.add({"key": "value"}) + ... ) >>> res = client.sequences.update(my_update) **Updating column definitions** @@ -616,7 +641,11 @@ def update( >>> from cognite.client.data_classes import SequenceUpdate, SequenceColumn >>> - >>> my_update = SequenceUpdate(id=1).columns.add(SequenceColumn(value_type ="String",external_id="user", description ="some description")) + >>> my_update = SequenceUpdate(id=1).columns.add( + ... SequenceColumn( + ... value_type="String", external_id="user", description="some description" + ... ) + ... 
) >>> res = client.sequences.update(my_update) Add multiple new columns: @@ -624,8 +653,11 @@ def update( >>> from cognite.client.data_classes import SequenceUpdate, SequenceColumn >>> >>> column_def = [ - ... SequenceColumn(value_type ="String",external_id="user", description ="some description"), - ... SequenceColumn(value_type="Double", external_id="amount")] + ... SequenceColumn( + ... value_type="String", external_id="user", description="some description" + ... ), + ... SequenceColumn(value_type="Double", external_id="amount"), + ... ] >>> my_update = SequenceUpdate(id=1).columns.add(column_def) >>> res = client.sequences.update(my_update) @@ -640,7 +672,9 @@ def update( >>> from cognite.client.data_classes import SequenceUpdate >>> - >>> my_update = SequenceUpdate(id=1).columns.remove(["col_external_id1","col_external_id2"]) + >>> my_update = SequenceUpdate(id=1).columns.remove( + ... ["col_external_id1", "col_external_id2"] + ... ) >>> res = client.sequences.update(my_update) Update existing columns: @@ -648,8 +682,12 @@ def update( >>> from cognite.client.data_classes import SequenceUpdate, SequenceColumnUpdate >>> >>> column_updates = [ - ... SequenceColumnUpdate(external_id="col_external_id_1").external_id.set("new_col_external_id"), - ... SequenceColumnUpdate(external_id="col_external_id_2").description.set("my new description"), + ... SequenceColumnUpdate(external_id="col_external_id_1").external_id.set( + ... "new_col_external_id" + ... ), + ... SequenceColumnUpdate(external_id="col_external_id_2").description.set( + ... "my new description" + ... ), ... ] >>> my_update = SequenceUpdate(id=1).columns.modify(column_updates) >>> res = client.sequences.update(my_update) @@ -702,7 +740,7 @@ def upsert( >>> new_sequence = SequenceWrite( ... external_id="new_sequence", ... description="New sequence", - ... columns=[SequenceColumnWrite(external_id="col1", value_type="String")] + ... columns=[SequenceColumnWrite(external_id="col1", value_type="String")], ... ) >>> res = client.sequences.upsert([existing_sequence, new_sequence], mode="replace") """ @@ -870,7 +908,9 @@ def filter( >>> client = CogniteClient() >>> asset_filter = filters.Equals("asset_id", 123) >>> is_efficiency = filters.Equals(["metadata", "type"], "efficiency") - >>> res = client.sequences.filter(filter=filters.And(asset_filter, is_efficiency), sort="created_time") + >>> res = client.sequences.filter( + ... filter=filters.And(asset_filter, is_efficiency), sort="created_time" + ... ) Note that you can check the API documentation above to see which properties you can filter on with which filters. @@ -879,12 +919,16 @@ def filter( for filtering and sorting, you can also use the `SequenceProperty` and `SortableSequenceProperty` enums. >>> from cognite.client.data_classes import filters - >>> from cognite.client.data_classes.sequences import SequenceProperty, SortableSequenceProperty + >>> from cognite.client.data_classes.sequences import ( + ... SequenceProperty, + ... SortableSequenceProperty, + ... ) >>> asset_filter = filters.Equals(SequenceProperty.asset_id, 123) >>> is_efficiency = filters.Equals(SequenceProperty.metadata_key("type"), "efficiency") >>> res = client.sequences.filter( ... filter=filters.And(asset_filter, is_efficiency), - ... sort=SortableSequenceProperty.created_time) + ... sort=SortableSequenceProperty.created_time, + ... ) """ warnings.warn( @@ -962,12 +1006,12 @@ def list( Iterate over sequences: >>> for seq in client.sequences: - ... seq # do something with the sequence + ... 
seq # do something with the sequence Iterate over chunks of sequences to reduce memory load: >>> for seq_list in client.sequences(chunk_size=2500): - ... seq_list # do something with the sequences + ... seq_list # do something with the sequences Using advanced filter, find all sequences that have a metadata key 'timezone' starting with 'Europe', and sort by external id ascending: @@ -983,20 +1027,25 @@ def list( for filtering and sorting, you can also use the `SequenceProperty` and `SortableSequenceProperty` Enums. >>> from cognite.client.data_classes import filters - >>> from cognite.client.data_classes.sequences import SequenceProperty, SortableSequenceProperty + >>> from cognite.client.data_classes.sequences import ( + ... SequenceProperty, + ... SortableSequenceProperty, + ... ) >>> in_timezone = filters.Prefix(SequenceProperty.metadata_key("timezone"), "Europe") >>> res = client.sequences.list( - ... advanced_filter=in_timezone, - ... sort=(SortableSequenceProperty.external_id, "asc")) + ... advanced_filter=in_timezone, sort=(SortableSequenceProperty.external_id, "asc") + ... ) Combine filter and advanced filter: >>> from cognite.client.data_classes import filters >>> not_instrument_lvl5 = filters.And( - ... filters.ContainsAny("labels", ["Level5"]), - ... filters.Not(filters.ContainsAny("labels", ["Instrument"])) + ... filters.ContainsAny("labels", ["Level5"]), + ... filters.Not(filters.ContainsAny("labels", ["Instrument"])), + ... ) + >>> res = client.sequences.list( + ... asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5 ... ) - >>> res = client.sequences.list(asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5) """ asset_subtree_ids_processed = process_asset_subtree_ids(asset_subtree_ids, asset_subtree_external_ids) @@ -1063,25 +1112,36 @@ def insert( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import Sequence, SequenceColumn >>> client = CogniteClient() - >>> seq = client.sequences.create(Sequence(columns=[SequenceColumn(value_type="String", external_id="col_a"), - ... SequenceColumn(value_type="Double", external_id ="col_b")])) - >>> data = [(1, ['pi',3.14]), (2, ['e',2.72]) ] - >>> client.sequences.data.insert(columns=["col_a","col_b"], rows=data, id=1) + >>> seq = client.sequences.create( + ... Sequence( + ... columns=[ + ... SequenceColumn(value_type="String", external_id="col_a"), + ... SequenceColumn(value_type="Double", external_id="col_b"), + ... ] + ... ) + ... ) + >>> data = [(1, ["pi", 3.14]), (2, ["e", 2.72])] + >>> client.sequences.data.insert(columns=["col_a", "col_b"], rows=data, id=1) They can also be provided as a list of API-style objects with a rowNumber and values field: - >>> data = [{"rowNumber": 123, "values": ['str',3]}, {"rowNumber": 456, "values": ["bar",42]} ] - >>> client.sequences.data.insert(data, id=1, columns=["col_a","col_b"]) # implicit columns are retrieved from metadata + >>> data = [ + ... {"rowNumber": 123, "values": ["str", 3]}, + ... {"rowNumber": 456, "values": ["bar", 42]}, + ... ] + >>> client.sequences.data.insert( + ... data, id=1, columns=["col_a", "col_b"] + ... 
) # implicit columns are retrieved from metadata Or they can be given as a dictionary with row number as the key, and the value is the data to be inserted at that row: - >>> data = {123 : ['str',3], 456 : ['bar',42] } - >>> client.sequences.data.insert(columns=['stringColumn','intColumn'], rows=data, id=1) + >>> data = {123: ["str", 3], 456: ["bar", 42]} + >>> client.sequences.data.insert(columns=["stringColumn", "intColumn"], rows=data, id=1) Finally, they can be a SequenceData object retrieved from another request. In this case columns from this object are used as well. - >>> data = client.sequences.data.retrieve(id=2,start=0,end=10) - >>> client.sequences.data.insert(rows=data, id=1,columns=None) + >>> data = client.sequences.data.retrieve(id=2, start=0, end=10) + >>> client.sequences.data.insert(rows=data, id=1, columns=None) """ columns = handle_renamed_argument(columns, "columns", "column_external_ids", "insert", kwargs, False) if isinstance(rows, SequenceRows): @@ -1132,7 +1192,7 @@ def insert_dataframe( >>> from cognite.client import CogniteClient >>> import pandas as pd >>> client = CogniteClient() - >>> df = pd.DataFrame({'col_a': [1, 2, 3], 'col_b': [4, 5, 6]}, index=[1, 2, 3]) + >>> df = pd.DataFrame({"col_a": [1, 2, 3], "col_b": [4, 5, 6]}, index=[1, 2, 3]) >>> client.sequences.data.insert_dataframe(df, id=123) """ if dropna: @@ -1158,7 +1218,7 @@ def delete(self, rows: typing.Sequence[int], id: int | None = None, external_id: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.sequences.data.delete(id=1, rows=[1,2,42]) + >>> client.sequences.data.delete(id=1, rows=[1, 2, 42]) """ post_obj = Identifier.of_either(id, external_id).as_dict() post_obj["rows"] = rows @@ -1262,10 +1322,14 @@ def retrieve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> res = client.sequences.data.retrieve(id=1) - >>> tuples = [(r,v) for r,v in res.items()] # You can use this iterator in for loops and list comprehensions, - >>> single_value = res[23] # ... get the values at a single row number, - >>> col = res.get_column(external_id='columnExtId') # ... get the array of values for a specific column, - >>> df = res.to_pandas() # ... or convert the result to a dataframe + >>> tuples = [ + ... (r, v) for r, v in res.items() + ... ] # You can use this iterator in for loops and list comprehensions, + >>> single_value = res[23] # ... get the values at a single row number, + >>> col = res.get_column( + ... external_id="columnExtId" + ... ) # ... get the array of values for a specific column, + >>> df = res.to_pandas() # ... 
or convert the result to a dataframe """ columns = handle_renamed_argument(columns, "columns", "column_external_ids", "insert", kwargs, False) diff --git a/cognite/client/_api/simulators/integrations.py b/cognite/client/_api/simulators/integrations.py index 7210afec8f..fa618f4974 100644 --- a/cognite/client/_api/simulators/integrations.py +++ b/cognite/client/_api/simulators/integrations.py @@ -142,7 +142,7 @@ def delete( Delete simulator integrations by id or external id: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.simulators.integrations.delete(ids=[1,2,3], external_ids="foo") + >>> client.simulators.integrations.delete(ids=[1, 2, 3], external_ids="foo") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=ids, external_ids=external_ids), diff --git a/cognite/client/_api/simulators/models.py b/cognite/client/_api/simulators/models.py index 7728ac4613..67aaaacbc0 100644 --- a/cognite/client/_api/simulators/models.py +++ b/cognite/client/_api/simulators/models.py @@ -63,10 +63,7 @@ def list( >>> from cognite.client.data_classes.simulators.filters import PropertySort >>> res = client.simulators.models.list( ... simulator_external_ids=["simulator_external_id"], - ... sort=PropertySort( - ... property="createdTime", - ... order="asc" - ... ) + ... sort=PropertySort(property="createdTime", order="asc"), ... ) """ @@ -127,7 +124,7 @@ def retrieve( >>> res = client.simulators.models.retrieve(external_ids="model_external_id") Get multiple simulator models by ids: - >>> res = client.simulators.models.retrieve(ids=[1,2]) + >>> res = client.simulators.models.retrieve(ids=[1, 2]) Get multiple simulator models by external ids: >>> res = client.simulators.models.retrieve( @@ -222,13 +219,19 @@ def create(self, items: SimulatorModelWrite | Sequence[SimulatorModelWrite]) -> >>> client = CogniteClient() >>> models = [ ... SimulatorModelWrite( - ... name="model1", simulator_external_id="sim1", type="SteadyState", - ... data_set_id=1, external_id="model_external_id" + ... name="model1", + ... simulator_external_id="sim1", + ... type="SteadyState", + ... data_set_id=1, + ... external_id="model_external_id", ... ), ... SimulatorModelWrite( - ... name="model2", simulator_external_id="sim2", type="SteadyState", - ... data_set_id=2, external_id="model_external_id2" - ... ) + ... name="model2", + ... simulator_external_id="sim2", + ... type="SteadyState", + ... data_set_id=2, + ... external_id="model_external_id2", + ... ), ... ] >>> res = client.simulators.models.create(models) """ @@ -257,7 +260,7 @@ def delete( Delete simulator models by id or external id: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.simulators.models.delete(ids=[1,2,3], external_ids="model_external_id") + >>> client.simulators.models.delete(ids=[1, 2, 3], external_ids="model_external_id") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=ids, external_ids=external_ids), diff --git a/cognite/client/_api/simulators/models_revisions.py b/cognite/client/_api/simulators/models_revisions.py index 7aaff8261e..94394797d3 100644 --- a/cognite/client/_api/simulators/models_revisions.py +++ b/cognite/client/_api/simulators/models_revisions.py @@ -70,7 +70,7 @@ def list( ... created_time=TimestampRange(min=0, max=1000000), ... last_updated_time=TimestampRange(min=0, max=1000000), ... sort=PropertySort(order="asc", property="createdTime"), - ... limit=10 + ... limit=10, ... 
) """ model_revisions_filter = SimulatorModelRevisionsFilter( @@ -137,7 +137,7 @@ def retrieve( ... ) Get multiple simulator model revisions by ids: - >>> res = client.simulators.models.revisions.retrieve(ids=[1,2]) + >>> res = client.simulators.models.revisions.retrieve(ids=[1, 2]) Get multiple simulator model revisions by external ids: >>> res = client.simulators.models.revisions.retrieve( @@ -247,7 +247,11 @@ def create( Examples: Create new simulator model revisions: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes.simulators import SimulatorModelRevisionWrite, SimulatorModelDependencyFileId, SimulatorModelRevisionDependency + >>> from cognite.client.data_classes.simulators import ( + ... SimulatorModelRevisionWrite, + ... SimulatorModelDependencyFileId, + ... SimulatorModelRevisionDependency, + ... ) >>> client = CogniteClient() >>> revisions = [ ... SimulatorModelRevisionWrite( @@ -259,7 +263,7 @@ def create( ... external_id="revision2", ... file_id=2, ... model_external_id="a_2", - ... external_dependencies = [ + ... external_dependencies=[ ... SimulatorModelRevisionDependency( ... file=SimulatorModelDependencyFileId(id=123), ... arguments={ @@ -267,7 +271,7 @@ def create( ... "fieldB": "value2", ... }, ... ) - ... ] + ... ], ... ), ... ] >>> res = client.simulators.models.revisions.create(revisions) diff --git a/cognite/client/_api/simulators/routine_revisions.py b/cognite/client/_api/simulators/routine_revisions.py index ddf82b1710..312acced3e 100644 --- a/cognite/client/_api/simulators/routine_revisions.py +++ b/cognite/client/_api/simulators/routine_revisions.py @@ -357,7 +357,7 @@ def list( ... routine_external_ids=["routine_1"], ... all_versions=True, ... sort=PropertySort(order="asc", property="createdTime"), - ... include_all_fields=True + ... include_all_fields=True, ... ) """ diff --git a/cognite/client/_api/simulators/routines.py b/cognite/client/_api/simulators/routines.py index 42ea544f0b..fcb3fd4a0d 100644 --- a/cognite/client/_api/simulators/routines.py +++ b/cognite/client/_api/simulators/routines.py @@ -135,7 +135,7 @@ def create( ... external_id="routine_ext_id_2", ... simulator_integration_external_id="integration_ext_id_2", ... model_external_id="model_ext_id_2", - ... ) + ... ), ... ] >>> res = client.simulators.routines.create(routines) """ @@ -165,7 +165,7 @@ def delete( Delete simulator routines by id or external id: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.simulators.routines.delete(ids=[1,2,3], external_ids="foo") + >>> client.simulators.routines.delete(ids=[1, 2, 3], external_ids="foo") """ self._warning.warn() self._delete_multiple( @@ -203,10 +203,7 @@ def list( >>> from cognite.client.data_classes.simulators.filters import PropertySort >>> res = client.simulators.routines.list( ... simulator_integration_external_ids=["integration_ext_id"], - ... sort=PropertySort( - ... property="createdTime", - ... order="desc" - ... ) + ... sort=PropertySort(property="createdTime", order="desc"), ... ) """ @@ -257,8 +254,7 @@ def run( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> run = client.simulators.routines.run( - ... routine_external_id="routine1", - ... log_severity="Debug" + ... routine_external_id="routine1", log_severity="Debug" ... 
) """ self._warning.warn() diff --git a/cognite/client/_api/simulators/runs.py b/cognite/client/_api/simulators/runs.py index efce3ccf66..a2cc2f3e4c 100644 --- a/cognite/client/_api/simulators/runs.py +++ b/cognite/client/_api/simulators/runs.py @@ -187,8 +187,7 @@ def list( Filter runs by status and simulator external ids: >>> res = client.simulators.runs.list( - ... simulator_external_ids=["PROSPER", "DWSIM"], - ... status="success" + ... simulator_external_ids=["PROSPER", "DWSIM"], status="success" ... ) Filter runs by time ranges: diff --git a/cognite/client/_api/synthetic_time_series.py b/cognite/client/_api/synthetic_time_series.py index 23b18acfed..df44a8c511 100644 --- a/cognite/client/_api/synthetic_time_series.py +++ b/cognite/client/_api/synthetic_time_series.py @@ -89,9 +89,8 @@ def query( ... + ts{space:'my-space',externalId:'my-ts-xid'} ... ''' >>> dps = client.time_series.data.synthetic.query( - ... expressions=expression, - ... start="2w-ago", - ... end="now") + ... expressions=expression, start="2w-ago", end="now" + ... ) You can also specify variables for an easier query syntax: @@ -103,20 +102,22 @@ def query( ... "C": NodeId("my-space", "my-ts-xid"), ... } >>> dps = client.time_series.data.synthetic.query( - ... expressions="A+B+C", start="2w-ago", end="2w-ahead", variables=variables) + ... expressions="A+B+C", start="2w-ago", end="2w-ahead", variables=variables + ... ) Use sympy to build complex expressions: >>> from sympy import symbols, cos, sin >>> x, y = symbols("x y") >>> dps = client.time_series.data.synthetic.query( - ... [sin(x), y*cos(x)], + ... [sin(x), y * cos(x)], ... start="2w-ago", ... end="now", ... variables={x: "foo", y: "bar"}, ... aggregate="interpolation", ... granularity="15m", - ... target_unit="temperature:deg_c") + ... target_unit="temperature:deg_c", + ... ) """ if is_unlimited(limit): limit = cast(int, float("inf")) diff --git a/cognite/client/_api/templates.py b/cognite/client/_api/templates.py index 86b4299205..56b5316dc4 100644 --- a/cognite/client/_api/templates.py +++ b/cognite/client/_api/templates.py @@ -266,7 +266,9 @@ def upsert(self, external_id: str, version: TemplateGroupVersion) -> TemplateGro >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import TemplateGroup >>> client = CogniteClient() - >>> template_group = TemplateGroup("sdk-test-group", "This template group models Covid-19 spread") + >>> template_group = TemplateGroup( + ... "sdk-test-group", "This template group models Covid-19 spread" + ... ) >>> client.templates.groups.create(template_group) >>> schema = ''' >>> type Demographics @template { @@ -313,7 +315,9 @@ def list( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> template_group_list = client.templates.versions.list("template-group-ext-id", limit=5) + >>> template_group_list = client.templates.versions.list( + ... "template-group-ext-id", limit=5 + ... ) """ TemplatesAPI._deprecation_warning() resource_path = interpolate_and_url_encode(self._RESOURCE_PATH, external_id) @@ -388,7 +392,9 @@ def create( >>> "growthRate": ConstantResolver(value=0.02) >>> } >>> ) - >>> client.templates.instances.create("sdk-test-group", 1, [template_instance_1, template_instance_2]) + >>> client.templates.instances.create( + ... "sdk-test-group", 1, [template_instance_1, template_instance_2] + ... 
) """ TemplatesAPI._deprecation_warning() resource_path = interpolate_and_url_encode(self._RESOURCE_PATH, external_id, version) @@ -436,7 +442,9 @@ def upsert( >>> "growthRate": ConstantResolver(0.02) >>> } >>> ) - >>> client.templates.instances.upsert("sdk-test-group", 1, [template_instance_1, template_instance_2]) + >>> client.templates.instances.upsert( + ... "sdk-test-group", 1, [template_instance_1, template_instance_2] + ... ) """ TemplatesAPI._deprecation_warning() if isinstance(instances, TemplateInstance): @@ -469,7 +477,9 @@ def update( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import TemplateInstanceUpdate >>> client = CogniteClient() - >>> my_update = TemplateInstanceUpdate(external_id="test").field_resolvers.add({ "name": ConstantResolver("Norway") }) + >>> my_update = TemplateInstanceUpdate(external_id="test").field_resolvers.add( + ... {"name": ConstantResolver("Norway")} + ... ) >>> res = client.templates.instances.update("sdk-test-group", 1, my_update) """ TemplatesAPI._deprecation_warning() @@ -501,7 +511,9 @@ def retrieve_multiple( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.templates.instances.retrieve_multiple(external_id="sdk-test-group", version=1, external_ids=["abc", "def"]) + >>> res = client.templates.instances.retrieve_multiple( + ... external_id="sdk-test-group", version=1, external_ids=["abc", "def"] + ... ) """ TemplatesAPI._deprecation_warning() resource_path = interpolate_and_url_encode(self._RESOURCE_PATH, external_id, version) @@ -540,7 +552,9 @@ def list( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> template_instances_list = client.templates.instances.list("template-group-ext-id", 1, limit=5) + >>> template_instances_list = client.templates.instances.list( + ... "template-group-ext-id", 1, limit=5 + ... ) """ TemplatesAPI._deprecation_warning() resource_path = interpolate_and_url_encode(self._RESOURCE_PATH, external_id, version) @@ -697,7 +711,9 @@ def resolve( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.templates.views.resolve("template-group-ext-id", 1, "view", { "startTime": 10 }, limit=5) + >>> client.templates.views.resolve( + ... "template-group-ext-id", 1, "view", {"startTime": 10}, limit=5 + ... ) """ TemplatesAPI._deprecation_warning() url_path = interpolate_and_url_encode(self._RESOURCE_PATH, external_id, version) + "/resolve" diff --git a/cognite/client/_api/three_d.py b/cognite/client/_api/three_d.py index 6bfb437771..6a1cfc9cc1 100644 --- a/cognite/client/_api/three_d.py +++ b/cognite/client/_api/three_d.py @@ -129,12 +129,12 @@ def list(self, published: bool | None = None, limit: int | None = DEFAULT_LIMIT_ Iterate over 3d models: >>> for three_d_model in client.three_d.models: - ... three_d_model # do something with the 3d model + ... three_d_model # do something with the 3d model Iterate over chunks of 3d models to reduce memory load: >>> for three_d_model in client.three_d.models(chunk_size=50): - ... three_d_model # do something with the 3d model + ... 
three_d_model # do something with the 3d model """ return self._list( list_cls=ThreeDModelList, @@ -429,7 +429,11 @@ def update( Perform a partial update on a revision, updating the published property and adding a new field to metadata: >>> from cognite.client.data_classes import ThreeDModelRevisionUpdate - >>> my_update = ThreeDModelRevisionUpdate(id=1).published.set(False).metadata.add({"key": "value"}) + >>> my_update = ( + ... ThreeDModelRevisionUpdate(id=1) + ... .published.set(False) + ... .metadata.add({"key": "value"}) + ... ) >>> res = client.three_d.revisions.update(model_id=1, item=my_update) """ return self._update_multiple( @@ -555,7 +559,17 @@ def filter_nodes( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.three_d.revisions.filter_nodes(model_id=1, revision_id=1, properties={ "PDMS": { "Area": ["AB76", "AB77", "AB78"], "Type": ["PIPE", "BEND", "PIPESUP"] } }, limit=10) + >>> res = client.three_d.revisions.filter_nodes( + ... model_id=1, + ... revision_id=1, + ... properties={ + ... "PDMS": { + ... "Area": ["AB76", "AB77", "AB78"], + ... "Type": ["PIPE", "BEND", "PIPESUP"], + ... } + ... }, + ... limit=10, + ... ) """ resource_path = interpolate_and_url_encode(self._RESOURCE_PATH + "/{}/nodes", model_id, revision_id) return self._list( @@ -663,7 +677,8 @@ def list( >>> from cognite.client.data_classes import BoundingBox3D >>> bbox = BoundingBox3D(min=[0.0, 0.0, 0.0], max=[1.0, 1.0, 1.0]) >>> res = client.three_d.asset_mappings.list( - ... model_id=1, revision_id=1, intersects_bounding_box=bbox) + ... model_id=1, revision_id=1, intersects_bounding_box=bbox + ... ) """ path = interpolate_and_url_encode(self._RESOURCE_PATH, model_id, revision_id) flt: dict[str, str | int | None] = {"nodeId": node_id, "assetId": asset_id} diff --git a/cognite/client/_api/time_series.py b/cognite/client/_api/time_series.py index a89825b2eb..5ea2aa3884 100644 --- a/cognite/client/_api/time_series.py +++ b/cognite/client/_api/time_series.py @@ -365,7 +365,8 @@ def aggregate_cardinality_values( >>> timezone_count = client.time_series.aggregate_cardinality_values( ... TimeSeriesProperty.metadata_key("timezone"), ... advanced_filter=is_critical, - ... aggregate_filter=not_america) + ... aggregate_filter=not_america, + ... ) """ self._validate_filter(advanced_filter) @@ -401,7 +402,9 @@ def aggregate_cardinality_properties( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.time_series import TimeSeriesProperty >>> client = CogniteClient() - >>> key_count = client.time_series.aggregate_cardinality_properties(TimeSeriesProperty.metadata) + >>> key_count = client.time_series.aggregate_cardinality_properties( + ... TimeSeriesProperty.metadata + ... ) """ self._validate_filter(advanced_filter) return self._advanced_aggregate( @@ -437,7 +440,9 @@ def aggregate_unique_values( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes.time_series import TimeSeriesProperty >>> client = CogniteClient() - >>> result = client.time_series.aggregate_unique_values(TimeSeriesProperty.metadata_key("timezone")) + >>> result = client.time_series.aggregate_unique_values( + ... TimeSeriesProperty.metadata_key("timezone") + ... 
) >>> print(result.unique) Get the different units with count used for time series created after 2020-01-01 in your CDF project: @@ -446,8 +451,12 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.time_series import TimeSeriesProperty >>> from cognite.client.utils import timestamp_to_ms >>> from datetime import datetime - >>> created_after_2020 = filters.Range(TimeSeriesProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.time_series.aggregate_unique_values(TimeSeriesProperty.unit, advanced_filter=created_after_2020) + >>> created_after_2020 = filters.Range( + ... TimeSeriesProperty.created_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.time_series.aggregate_unique_values( + ... TimeSeriesProperty.unit, advanced_filter=created_after_2020 + ... ) >>> print(result.unique) Get the different units with count for time series updated after 2020-01-01 in your CDF project, but exclude all units that @@ -456,8 +465,14 @@ def aggregate_unique_values( >>> from cognite.client.data_classes.time_series import TimeSeriesProperty >>> from cognite.client.data_classes import aggregations as aggs, filters >>> not_test = aggs.Not(aggs.Prefix("test")) - >>> created_after_2020 = filters.Range(TimeSeriesProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1))) - >>> result = client.time_series.aggregate_unique_values(TimeSeriesProperty.unit, advanced_filter=created_after_2020, aggregate_filter=not_test) + >>> created_after_2020 = filters.Range( + ... TimeSeriesProperty.last_updated_time, gte=timestamp_to_ms(datetime(2020, 1, 1)) + ... ) + >>> result = client.time_series.aggregate_unique_values( + ... TimeSeriesProperty.unit, + ... advanced_filter=created_after_2020, + ... aggregate_filter=not_test, + ... ) >>> print(result.unique) """ self._validate_filter(advanced_filter) @@ -529,7 +544,9 @@ def create( >>> from cognite.client import CogniteClient >>> from cognite.client.data_classes import TimeSeriesWrite >>> client = CogniteClient() - >>> ts = client.time_series.create(TimeSeriesWrite(name="my_ts", data_set_id=123, external_id="foo")) + >>> ts = client.time_series.create( + ... TimeSeriesWrite(name="my_ts", data_set_id=123, external_id="foo") + ... ) """ return self._create_multiple( list_cls=TimeSeriesList, @@ -557,7 +574,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.time_series.delete(id=[1,2,3], external_id="3") + >>> client.time_series.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -609,7 +626,11 @@ def update( Perform a partial update on a time series, updating the description and adding a new field to metadata: >>> from cognite.client.data_classes import TimeSeriesUpdate - >>> my_update = TimeSeriesUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) + >>> my_update = ( + ... TimeSeriesUpdate(id=1) + ... .description.set("New description") + ... .metadata.add({"key": "value"}) + ... 
) >>> res = client.time_series.update(my_update) Perform a partial update on a time series by instance id: @@ -667,8 +688,12 @@ def upsert( >>> client = CogniteClient() >>> existing_time_series = client.time_series.retrieve(id=1) >>> existing_time_series.description = "New description" - >>> new_time_series = TimeSeries(external_id="new_timeSeries", description="New timeSeries") - >>> res = client.time_series.upsert([existing_time_series, new_time_series], mode="replace") + >>> new_time_series = TimeSeries( + ... external_id="new_timeSeries", description="New timeSeries" + ... ) + >>> res = client.time_series.upsert( + ... [existing_time_series, new_time_series], mode="replace" + ... ) """ return self._upsert_multiple( @@ -711,7 +736,7 @@ def search( Search for all time series connected to asset with id 123: - >>> res = client.time_series.search(filter={"asset_ids":[123]}) + >>> res = client.time_series.search(filter={"asset_ids": [123]}) """ return self._search( @@ -758,9 +783,14 @@ def filter( for filtering and sorting, you can also use the `TimeSeriesProperty` and `SortableTimeSeriesProperty` enums. >>> from cognite.client.data_classes.filters import Equals - >>> from cognite.client.data_classes.time_series import TimeSeriesProperty, SortableTimeSeriesProperty + >>> from cognite.client.data_classes.time_series import ( + ... TimeSeriesProperty, + ... SortableTimeSeriesProperty, + ... ) >>> is_numeric = Equals(TimeSeriesProperty.is_string, False) - >>> res = client.time_series.filter(filter=is_numeric, sort=SortableTimeSeriesProperty.external_id) + >>> res = client.time_series.filter( + ... filter=is_numeric, sort=SortableTimeSeriesProperty.external_id + ... ) """ warnings.warn( f"{self.__class__.__name__}.filter() method is deprecated and will be removed in the next major version of the SDK. Use the {self.__class__.__name__}.list() method with advanced_filter parameter instead.", @@ -847,19 +877,21 @@ def list( Iterate over time series: >>> for ts in client.time_series: - ... ts # do something with the time series + ... ts # do something with the time series Iterate over chunks of time series to reduce memory load: >>> for ts_list in client.time_series(chunk_size=2500): - ... ts_list # do something with the time series + ... ts_list # do something with the time series Using advanced filter, find all time series that have a metadata key 'timezone' starting with 'Europe', and sort by external id ascending: >>> from cognite.client.data_classes import filters >>> in_timezone = filters.Prefix(["metadata", "timezone"], "Europe") - >>> res = client.time_series.list(advanced_filter=in_timezone, sort=("external_id", "asc")) + >>> res = client.time_series.list( + ... advanced_filter=in_timezone, sort=("external_id", "asc") + ... ) Note that you can check the API documentation above to see which properties you can filter on with which filters. @@ -868,20 +900,25 @@ def list( for filtering and sorting, you can also use the `TimeSeriesProperty` and `SortableTimeSeriesProperty` Enums. >>> from cognite.client.data_classes import filters - >>> from cognite.client.data_classes.time_series import TimeSeriesProperty, SortableTimeSeriesProperty + >>> from cognite.client.data_classes.time_series import ( + ... TimeSeriesProperty, + ... SortableTimeSeriesProperty, + ... ) >>> in_timezone = filters.Prefix(TimeSeriesProperty.metadata_key("timezone"), "Europe") >>> res = client.time_series.list( - ... advanced_filter=in_timezone, - ... sort=(SortableTimeSeriesProperty.external_id, "asc")) + ... 
advanced_filter=in_timezone, sort=(SortableTimeSeriesProperty.external_id, "asc") + ... ) Combine filter and advanced filter: >>> from cognite.client.data_classes import filters >>> not_instrument_lvl5 = filters.And( - ... filters.ContainsAny("labels", ["Level5"]), - ... filters.Not(filters.ContainsAny("labels", ["Instrument"])) + ... filters.ContainsAny("labels", ["Level5"]), + ... filters.Not(filters.ContainsAny("labels", ["Instrument"])), + ... ) + >>> res = client.time_series.list( + ... asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5 ... ) - >>> res = client.time_series.list(asset_subtree_ids=[123456], advanced_filter=not_instrument_lvl5) """ asset_subtree_ids_processed = process_asset_subtree_ids(asset_subtree_ids, asset_subtree_external_ids) data_set_ids_processed = process_data_set_ids(data_set_ids, data_set_external_ids) diff --git a/cognite/client/_api/transformations/__init__.py b/cognite/client/_api/transformations/__init__.py index 0c161bb55c..6261af2466 100644 --- a/cognite/client/_api/transformations/__init__.py +++ b/cognite/client/_api/transformations/__init__.py @@ -176,8 +176,15 @@ def create( Create new transformations: >>> from cognite.client import CogniteClient - >>> from cognite.client.data_classes import TransformationWrite, TransformationDestination - >>> from cognite.client.data_classes.transformations.common import ViewInfo, EdgeType, DataModelInfo + >>> from cognite.client.data_classes import ( + ... TransformationWrite, + ... TransformationDestination, + ... ) + >>> from cognite.client.data_classes.transformations.common import ( + ... ViewInfo, + ... EdgeType, + ... DataModelInfo, + ... ) >>> client = CogniteClient() >>> transformations = [ >>> TransformationWrite( @@ -402,7 +409,9 @@ def retrieve_multiple( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> res = client.transformations.retrieve_multiple(ids=[1,2,3], external_ids=['transform-1','transform-2']) + >>> res = client.transformations.retrieve_multiple( + ... ids=[1, 2, 3], external_ids=["transform-1", "transform-2"] + ... ) """ identifiers = IdentifierSequence.load(ids=ids, external_ids=external_ids) return self._retrieve_multiple( @@ -456,7 +465,11 @@ def update( Perform a partial update on a transformation, updating the query and making it private: >>> from cognite.client.data_classes import TransformationUpdate - >>> my_update = TransformationUpdate(id=1).query.set("SELECT * FROM _cdf.assets").is_public.set(False) + >>> my_update = ( + ... TransformationUpdate(id=1) + ... .query.set("SELECT * FROM _cdf.assets") + ... .is_public.set(False) + ... ) >>> res = client.transformations.update(my_update) Update the session used for reading (source) and writing (destination) when authenticating for all @@ -468,7 +481,7 @@ def update( >>> new_nonce = NonceCredentials( ... session_id=new_session.id, ... nonce=new_session.nonce, - ... cdf_project_name=client.config.project + ... cdf_project_name=client.config.project, ... ) >>> for tr in to_update: ... 
tr.source_nonce = new_nonce @@ -526,12 +539,12 @@ def run( >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> - >>> res = client.transformations.run(transformation_id = 1) + >>> res = client.transformations.run(transformation_id=1) Start running transformation by id: >>> - >>> res = client.transformations.run(transformation_id = 1, wait = False) + >>> res = client.transformations.run(transformation_id=1, wait=False) """ IdentifierSequence.load(transformation_id, transformation_external_id).assert_singleton() @@ -598,7 +611,7 @@ def cancel(self, transformation_id: int | None = None, transformation_external_i >>> from cognite.client.data_classes import TransformationJobStatus >>> client = CogniteClient() >>> - >>> res = client.transformations.run(id = 1, timeout = 60.0) + >>> res = client.transformations.run(transformation_id=1, timeout=60.0) >>> if res.status == TransformationJobStatus.RUNNING: >>> res.cancel() """ @@ -649,14 +662,18 @@ def preview( For example, given that you have a query that reads from a raw table with 10,903 rows >>> - >>> result = client.transformations.preview(query="select * from my_raw_db.my_raw_table", limit=None) + >>> result = client.transformations.preview( + ... query="select * from my_raw_db.my_raw_table", limit=None + ... ) >>> print(result.results) 100 To get all rows, you also need to set the `source_limit` to None: >>> - >>> result = client.transformations.preview(query="select * from my_raw_db.my_raw_table", limit=None, source_limit=None) + >>> result = client.transformations.preview( + ... query="select * from my_raw_db.my_raw_table", limit=None, source_limit=None + ... ) >>> print(result.results) 10903 diff --git a/cognite/client/_api/transformations/notifications.py b/cognite/client/_api/transformations/notifications.py index 97d89b4753..7cdf6ffa76 100644 --- a/cognite/client/_api/transformations/notifications.py +++ b/cognite/client/_api/transformations/notifications.py @@ -181,6 +181,6 @@ def delete(self, id: int | Sequence[int] | None = None) -> None: >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.transformations.notifications.delete(id=[1,2,3]) + >>> client.transformations.notifications.delete(id=[1, 2, 3]) """ self._delete_multiple(identifiers=IdentifierSequence.load(ids=id), wrap_ids=True) diff --git a/cognite/client/_api/transformations/schedules.py b/cognite/client/_api/transformations/schedules.py index 78a05fefca..6d0bbf461d 100644 --- a/cognite/client/_api/transformations/schedules.py +++ b/cognite/client/_api/transformations/schedules.py @@ -220,7 +220,7 @@ def delete( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> client.transformations.schedules.delete(id=[1,2,3], external_id="3") + >>> client.transformations.schedules.delete(id=[1, 2, 3], external_id="3") """ self._delete_multiple( identifiers=IdentifierSequence.load(ids=id, external_ids=external_id), @@ -272,7 +272,9 @@ def update( Perform a partial update on a transformation schedule, updating the interval and unpausing it: >>> from cognite.client.data_classes import TransformationScheduleUpdate - >>> my_update = TransformationScheduleUpdate(id=1).interval.set("0 * * * *").is_paused.set(False) + >>> my_update = ( + ... TransformationScheduleUpdate(id=1).interval.set("0 * * * *").is_paused.set(False) + ... 
+                ... )
                 >>> res = client.transformations.schedules.update(my_update)
         """
         return self._update_multiple(
diff --git a/cognite/client/_api/units.py b/cognite/client/_api/units.py
index a07788fbd8..9e0f488554 100644
--- a/cognite/client/_api/units.py
+++ b/cognite/client/_api/units.py
@@ -75,11 +75,11 @@ def retrieve(
 
                 >>> from cognite.client import CogniteClient
                 >>> client = CogniteClient()
-                >>> res = client.units.retrieve('temperature:deg_c')
+                >>> res = client.units.retrieve("temperature:deg_c")
 
             Retrieve units 'temperature:deg_c' and 'pressure:bar':
 
-                >>> res = client.units.retrieve(['temperature:deg_c', 'pressure:bar'])
+                >>> res = client.units.retrieve(["temperature:deg_c", "pressure:bar"])
         """
         identifier = IdentifierSequence.load(external_ids=external_id)
@@ -141,11 +141,11 @@ def from_alias(
 
                 >>> from cognite.client import CogniteClient
                 >>> client = CogniteClient()
-                >>> unit = client.units.from_alias('cmol / L')
+                >>> unit = client.units.from_alias("cmol / L")
 
             Look up ambiguous alias 'F' by passing quantity 'Temperature':
 
-                >>> unit = client.units.from_alias('F', 'Temperature')
+                >>> unit = client.units.from_alias("F", "Temperature")
 
             Search for the closest matching unit of 'kilo watt' (should be 'kilowatt'):
diff --git a/cognite/client/_api/vision.py b/cognite/client/_api/vision.py
index 70cb142b21..864b4f5daf 100644
--- a/cognite/client/_api/vision.py
+++ b/cognite/client/_api/vision.py
@@ -82,7 +82,9 @@ def extract(
                 >>> from cognite.client import CogniteClient
                 >>> from cognite.client.data_classes.contextualization import VisionFeature
                 >>> client = CogniteClient()
-                >>> extract_job = client.vision.extract(features=VisionFeature.ASSET_TAG_DETECTION, file_ids=[1])
+                >>> extract_job = client.vision.extract(
+                ...     features=VisionFeature.ASSET_TAG_DETECTION, file_ids=[1]
+                ... )
                 >>> extract_job.wait_for_completion()
                 >>> for item in extract_job.items:
                 ...     predictions = item.predictions
diff --git a/cognite/client/_api/workflows.py b/cognite/client/_api/workflows.py
index 2e6668f480..9236d5f7ce 100644
--- a/cognite/client/_api/workflows.py
+++ b/cognite/client/_api/workflows.py
@@ -80,13 +80,18 @@ def upsert(
             Create or update a scheduled trigger for a workflow:
 
                 >>> from cognite.client import CogniteClient
-                >>> from cognite.client.data_classes.workflows import WorkflowTriggerUpsert, WorkflowScheduledTriggerRule
+                >>> from cognite.client.data_classes.workflows import (
+                ...     WorkflowTriggerUpsert,
+                ...     WorkflowScheduledTriggerRule,
+                ... )
                 >>> from zoneinfo import ZoneInfo
                 >>> client = CogniteClient()
                 >>> client.workflows.triggers.upsert(
                 ...     WorkflowTriggerUpsert(
                 ...         external_id="my_trigger",
-                ...         trigger_rule=WorkflowScheduledTriggerRule(cron_expression="0 0 * * *", timezone=ZoneInfo("UTC")),
+                ...         trigger_rule=WorkflowScheduledTriggerRule(
+                ...             cron_expression="0 0 * * *", timezone=ZoneInfo("UTC")
+                ...         ),
                 ...         workflow_external_id="my_workflow",
                 ...         workflow_version="1",
                 ...         input={"a": 1, "b": 2},
@@ -96,8 +101,15 @@ def upsert(
 
             Create or update a data modeling trigger for a workflow:
 
-                >>> from cognite.client.data_classes.workflows import WorkflowDataModelingTriggerRule, WorkflowTriggerDataModelingQuery
-                >>> from cognite.client.data_classes.data_modeling.query import NodeResultSetExpression, Select, SourceSelector
+                >>> from cognite.client.data_classes.workflows import (
+                ...     WorkflowDataModelingTriggerRule,
+                ...     WorkflowTriggerDataModelingQuery,
+                ... )
+                >>> from cognite.client.data_classes.data_modeling.query import (
+                ...     NodeResultSetExpression,
+                ...     Select,
+                ...     SourceSelector,
+                ... )
                 >>> from cognite.client.data_classes.data_modeling import ViewId
                 >>> from cognite.client.data_classes.filters import Equals
                 >>> view_id = ViewId("my_space_id", "view_external_id", "v1")
@@ -106,7 +118,13 @@
                 ...         external_id="my_trigger",
                 ...         trigger_rule=WorkflowDataModelingTriggerRule(
                 ...             data_modeling_query=WorkflowTriggerDataModelingQuery(
-                ...                 with_={"timeseries": NodeResultSetExpression(filter=Equals(view_id.as_property_ref("name"), value="my_name"))},
+                ...                 with_={
+                ...                     "timeseries": NodeResultSetExpression(
+                ...                         filter=Equals(
+                ...                             view_id.as_property_ref("name"), value="my_name"
+                ...                         )
+                ...                     )
+                ...                 },
                 ...                 select={"timeseries": Select([SourceSelector(view_id, ["name"])])},
                 ...             ),
                 ...             batch_size=500,
@@ -274,11 +292,15 @@ def update(
 
                 >>> from cognite.client import CogniteClient
                 >>> client = CogniteClient()
-                >>> res = client.workflows.tasks.update("000560bc-9080-4286-b242-a27bb4819253", "completed")
+                >>> res = client.workflows.tasks.update(
+                ...     "000560bc-9080-4286-b242-a27bb4819253", "completed"
+                ... )
 
             Update task with id '000560bc-9080-4286-b242-a27bb4819253' to status 'failed' with output '{"a": 1, "b": 2}':
 
-                >>> res = client.workflows.tasks.update("000560bc-9080-4286-b242-a27bb4819253", "failed", output={"a": 1, "b": 2})
+                >>> res = client.workflows.tasks.update(
+                ...     "000560bc-9080-4286-b242-a27bb4819253", "failed", output={"a": 1, "b": 2}
+                ... )
 
             Trigger workflow, retrieve detailed task execution and update status of the second task (assumed to be async) to 'completed':
@@ -312,7 +334,9 @@ def retrieve_detailed(self, id: str) -> WorkflowExecutionDetailed | None:
 
                 >>> from cognite.client import CogniteClient
                 >>> client = CogniteClient()
-                >>> res = client.workflows.executions.retrieve_detailed("000560bc-9080-4286-b242-a27bb4819253")
+                >>> res = client.workflows.executions.retrieve_detailed(
+                ...     "000560bc-9080-4286-b242-a27bb4819253"
+                ... )
 
             List workflow executions and retrieve detailed information for the first one:
@@ -376,7 +400,9 @@ def run(
                 ...     external_id="my_workflow-task1",
                 ...     parameters=FunctionTaskParameters(
                 ...         external_id="cdf_deployed_function:my_function",
-                ...         data={"workflow_data": "${workflow.input}"}))
+                ...         data={"workflow_data": "${workflow.input}"},
+                ...     ),
+                ... )
 
             Tip: You can create a session via the Sessions API, using the client.iam.sessions.create() method.
@@ -449,8 +475,7 @@ def list(
             Get all workflow executions from the last 24 hours:
 
                 >>> from cognite.client.utils import timestamp_to_ms
-                >>> res = client.workflows.executions.list(
-                ...     created_time_start=timestamp_to_ms("1d-ago"))
+                >>> res = client.workflows.executions.list(created_time_start=timestamp_to_ms("1d-ago"))
         """
         # Passing at least one filter criterion is required:
@@ -621,8 +646,10 @@ def upsert(
                 >>> from cognite.client import CogniteClient
                 >>> from cognite.client.data_classes import (
-                ...     WorkflowVersionUpsert, WorkflowDefinitionUpsert,
-                ...     WorkflowTask, FunctionTaskParameters,
+                ...     WorkflowVersionUpsert,
+                ...     WorkflowDefinitionUpsert,
+                ...     WorkflowTask,
+                ...     FunctionTaskParameters,
                 ... )
                 >>> client = CogniteClient()
                 >>> function_task = WorkflowTask(
                 ...     external_id="my_workflow-task1",
                 ...     parameters=FunctionTaskParameters(
                 ...         external_id="cdf_deployed_function:my_function",
                 ...     ),
                 ... )
                 >>> new_version = WorkflowVersionUpsert(
-                ...        workflow_external_id="my_workflow",
-                ...        version="1",
-                ...        workflow_definition=WorkflowDefinitionUpsert(
-                ...            tasks=[function_task],
-                ...            description="This workflow has one step",
-                ...        ),
+                ...     workflow_external_id="my_workflow",
+                ...     version="1",
+                ...     workflow_definition=WorkflowDefinitionUpsert(
+                ...         tasks=[function_task],
+                ...         description="This workflow has one step",
+                ...     ),
                 ... )
                 >>> res = client.workflows.versions.upsert(new_version)
         """
@@ -676,7 +703,9 @@ def delete(
             Delete workflow version "1" of workflow "my workflow" and workflow version "2" of workflow "my workflow 2" using the WorkflowVersionId class:
 
                 >>> from cognite.client.data_classes import WorkflowVersionId
-                >>> client.workflows.versions.delete([WorkflowVersionId("my workflow", "1"), WorkflowVersionId("my workflow 2", "2")])
+                >>> client.workflows.versions.delete(
+                ...     [WorkflowVersionId("my workflow", "1"), WorkflowVersionId("my workflow 2", "2")]
+                ... )
         """
         identifiers = WorkflowIds.load(workflow_version_id).dump(camel_case=True)
@@ -814,12 +843,12 @@ def list(
 
                 >>> from cognite.client.data_classes import WorkflowVersionId
                 >>> res = client.workflows.versions.list(
-                ...        [WorkflowVersionId("my_workflow"), WorkflowVersionId("my_workflow_2")])
+                ...     [WorkflowVersionId("my_workflow"), WorkflowVersionId("my_workflow_2")]
+                ... )
 
             Get all workflow versions for workflows 'my_workflow' version '1' and 'my_workflow_2' version '2' using tuples:
 
-                >>> res = client.workflows.versions.list(
-                ...     [("my_workflow", "1"), ("my_workflow_2", "2")])
+                >>> res = client.workflows.versions.list([("my_workflow", "1"), ("my_workflow_2", "2")])
         """
         return self._list(
diff --git a/cognite/client/credentials.py b/cognite/client/credentials.py
index b6e8a1dc90..7353759a4e 100644
--- a/cognite/client/credentials.py
+++ b/cognite/client/credentials.py
@@ -719,7 +719,7 @@ class OAuthClientCredentials(_OAuthCredentialProviderWithTokenRefresh):
            ...     client_secret=os.environ["OAUTH_CLIENT_SECRET"],
            ...     scopes=["https://greenfield.cognitedata.com/.default"],
            ...     # Any additional IDP-specific token args. e.g.
-           ...     audience="some-audience"
+           ...     audience="some-audience",
            ... )
    """
@@ -822,7 +822,7 @@ def load(cls, config: dict[str, Any] | str) -> OAuthClientCredentials:
            ...     "client_id": "abcd",
            ...     "client_secret": os.environ["OAUTH_CLIENT_SECRET"],
            ...     "scopes": ["https://greenfield.cognitedata.com/.default"],
-           ...     "audience": "some-audience"
+           ...     "audience": "some-audience",
            ... }
            >>> credentials = OAuthClientCredentials.load(config)
    """
diff --git a/cognite/client/data_classes/contextualization.py b/cognite/client/data_classes/contextualization.py
index 1c5f54d15a..bbf5002cb8 100644
--- a/cognite/client/data_classes/contextualization.py
+++ b/cognite/client/data_classes/contextualization.py
@@ -1162,14 +1162,17 @@ class DiagramDetectConfig(CogniteObject):
        Configure a call to the diagrams detect endpoint:
 
            >>> from cognite.client import CogniteClient
-           >>> from cognite.client.data_classes.contextualization import ConnectionFlags, DiagramDetectConfig
+           >>> from cognite.client.data_classes.contextualization import (
+           ...     ConnectionFlags,
+           ...     DiagramDetectConfig,
+           ... )
            >>> client = CogniteClient()
            >>> config = DiagramDetectConfig(
            ...     remove_leading_zeros=True,
            ...     connection_flags=ConnectionFlags(
            ...         no_text_inbetween=True,
            ...         natural_reading_order=True,
-           ...     )
+           ...     ),
            ... )
            >>> job = client.diagrams.detect(entities=[{"name": "A1"}], file_id=123, config=config)
diff --git a/cognite/client/data_classes/filters.py b/cognite/client/data_classes/filters.py
index 8bcad66333..cf8a2e3c2e 100644
--- a/cognite/client/data_classes/filters.py
+++ b/cognite/client/data_classes/filters.py
@@ -337,14 +337,16 @@ class And(CompoundFilter):
            >>> from cognite.client.data_classes.filters import And, Equals, In
            >>> flt = And(
            ...     Equals(("space", "view_xid/version", "some_property"), 42),
-           ...     In(("space", "view_xid/version", "another_property"), ["a", "b", "c"]))
+           ...     In(("space", "view_xid/version", "another_property"), ["a", "b", "c"]),
+           ... )
 
        - Using the ``View.as_property_ref`` method to reference the property:
 
            >>> from cognite.client.data_classes.filters import And, Equals, In
            >>> flt = And(
            ...     Equals(my_view.as_property_ref("some_property"), 42),
-           ...     In(my_view.as_property_ref("another_property"), ["a", "b", "c"]))
+           ...     In(my_view.as_property_ref("another_property"), ["a", "b", "c"]),
+           ... )
 
        Using the "&" operator:
@@ -369,13 +371,15 @@ class Or(CompoundFilter):
            >>> from cognite.client.data_classes.filters import Or, Equals, In
            >>> flt = Or(
            ...     Equals(("space", "view_xid/version", "some_property"), 42),
-           ...     In(("space", "view_xid/version", "another_property"), ["a", "b", "c"]))
+           ...     In(("space", "view_xid/version", "another_property"), ["a", "b", "c"]),
+           ... )
 
        - Using the ``View.as_property_ref`` method to reference the property:
 
            >>> flt = Or(
            ...     Equals(my_view.as_property_ref("some_property"), 42),
-           ...     In(my_view.as_property_ref("another_property"), ["a", "b", "c"]))
+           ...     In(my_view.as_property_ref("another_property"), ["a", "b", "c"]),
+           ... )
 
        Using the "|" operator:
@@ -437,13 +441,15 @@ class Nested(Filter):
            >>> from cognite.client.data_classes.filters import Nested, Equals
            >>> flt = Nested(
            ...     scope=("space", "viewA_xid/view_version", "viewB-ID"),
-           ...     filter=Equals(("space", "viewB_xid/view_version", "viewB-Property"), 42))
+           ...     filter=Equals(("space", "viewB_xid/view_version", "viewB-Property"), 42),
+           ... )
 
        - Composing the property reference using the ``View.as_property_ref`` method:
 
            >>> flt = Nested(
            ...     scope=viewA.as_property_ref("viewB-ID"),
-           ...     filter=Equals(viewB.as_property_ref("viewB-Property"), 42))
+           ...     filter=Equals(viewB.as_property_ref("viewB-Property"), 42),
+           ... )
    """
 
    _filter_name = "nested"
@@ -595,14 +601,18 @@ class Overlaps(Filter):
            >>> flt = Overlaps(
            ...     ("space", "view_xid/version", "some_start_property"),
            ...     ("space", "view_xid/version", "some_end_property"),
-           ...     gt=42, lt=100)
+           ...     gt=42,
+           ...     lt=100,
+           ... )
 
        - Composing the property reference using the ``View.as_property_ref`` method:
 
            >>> flt = Overlaps(
            ...     my_view.as_property_ref("some_start_property"),
            ...     my_view.as_property_ref("some_end_property"),
-           ...     gt=42, lt=100)
+           ...     gt=42,
+           ...     lt=100,
+           ... )
    """
 
    _filter_name = "overlaps"
@@ -843,7 +853,7 @@ class InAssetSubtree(FilterWithPropertyAndValueList):
            >>> client.documents.aggregate_count(
            ...     filter=filters.InAssetSubtree(
            ...         property=DocumentProperty.asset_external_ids,
-           ...         values=['Plant_1', 'Plant_2'],
+           ...         values=["Plant_1", "Plant_2"],
            ...     )
) """ diff --git a/cognite/client/data_classes/geospatial.py b/cognite/client/data_classes/geospatial.py index 2983818d0c..d8c3bc1b80 100644 --- a/cognite/client/data_classes/geospatial.py +++ b/cognite/client/data_classes/geospatial.py @@ -361,10 +361,7 @@ def to_geopandas(self, geometry: str, camel_case: bool = False) -> geopandas.Geo >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> features = client.geospatial.search_features(...) - >>> gdf = features.to_geopandas( - ... geometry="position", - ... camel_case=False - ... ) + >>> gdf = features.to_geopandas(geometry="position", camel_case=False) >>> gdf.head() """ df = self.to_pandas(camel_case) @@ -399,12 +396,16 @@ def from_geopandas( >>> from cognite.client import CogniteClient >>> client = CogniteClient() - >>> my_feature_type = ... # some feature type with 'position' and 'temperature' properties + >>> my_feature_type = ( + ... ... + ... ) # some feature type with 'position' and 'temperature' properties >>> my_geodataframe = ... # some geodataframe with 'center_xy', 'temp' and 'id' columns >>> feature_list = FeatureList.from_geopandas(feature_type=my_feature_type, geodataframe=my_geodataframe, >>> external_id_column="id", data_set_id_column="dataSetId", >>> property_column_mapping={'position': 'center_xy', 'temperature': 'temp'}) - >>> created_features = client.geospatial.create_features(my_feature_type.external_id, feature_list) + >>> created_features = client.geospatial.create_features( + ... my_feature_type.external_id, feature_list + ... ) """ features = [] diff --git a/cognite/client/data_classes/transformations/jobs.py b/cognite/client/data_classes/transformations/jobs.py index d312775b2a..67d67af2b4 100644 --- a/cognite/client/data_classes/transformations/jobs.py +++ b/cognite/client/data_classes/transformations/jobs.py @@ -153,8 +153,8 @@ def wait(self, polling_interval: float = 1, timeout: float | None = None) -> Tra >>> from cognite.client import CogniteClient >>> client = CogniteClient() >>> - >>> job1 = client.transformations.run(id = 1, wait = False) - >>> job2 = client.transformations.run(id = 2, wait = False) + >>> job1 = client.transformations.run(id=1, wait=False) + >>> job2 = client.transformations.run(id=2, wait=False) >>> job1.wait() >>> job2.wait() >>> if TransformationJobStatus.FAILED not in [job1.status, job2.status]: @@ -163,14 +163,14 @@ def wait(self, polling_interval: float = 1, timeout: float | None = None) -> Tra wait transformation for 5 minutes and do something if still running: >>> - >>> job = client.transformations.run(id = 1, wait = False) - >>> job.wait(timeout = 5.0*60) + >>> job = client.transformations.run(id=1, wait=False) + >>> job.wait(timeout=5.0 * 60) >>> if job.status == TransformationJobStatus.FAILED: - >>> # do something if job failed + >>> # do something if job failed >>> elif job.status == TransformationJobStatus.COMPLETED: - >>> # do something if job completed successfully + >>> # do something if job completed successfully >>> else: - >>> # do something if job is still running + >>> # do something if job is still running """ self.update() if timeout is None: @@ -223,11 +223,11 @@ async def wait_async(self, polling_interval: float = 1, timeout: float | None = >>> job = client.transformations.run(id = 1, wait = False) >>> await job.wait_async(timeout = 5.0*60) >>> if job.status == TransformationJobStatus.FAILED: - >>> # do something if job failed + >>> # do something if job failed >>> elif job.status == TransformationJobStatus.COMPLETED: - >>> # do something 
-               >>>     # do something if job completed successfully
+               >>>         # do something if job completed successfully
                >>> else:
-               >>>     # do something if job is still running
+               >>>         # do something if job is still running
                >>>
                >>> ensure_future(run_successive_transformations())
        """
diff --git a/cognite/client/data_classes/workflows.py b/cognite/client/data_classes/workflows.py
index fa35416476..35b682b674 100644
--- a/cognite/client/data_classes/workflows.py
+++ b/cognite/client/data_classes/workflows.py
@@ -193,7 +193,7 @@ class FunctionTaskParameters(WorkflowTaskParameters):
        For example, if you have a workflow containing two tasks, and the external_id of the first task is `task1`,
        then you can specify the data for the second task as follows:
 
-       >>> from cognite.client.data_classes import WorkflowTask,  FunctionTaskParameters
+       >>> from cognite.client.data_classes import WorkflowTask, FunctionTaskParameters
        >>> task = WorkflowTask(
        ...     external_id="task2",
        ...     parameters=FunctionTaskParameters(
@@ -201,7 +201,7 @@ class FunctionTaskParameters(WorkflowTaskParameters):
        ...         data={
        ...             "workflow_data": "${workflow.input}",
        ...             "task1_input": "${task1.input}",
-       ...             "task1_output": "${task1.output}"
+       ...             "task1_output": "${task1.output}",
        ...         },
        ...     ),
        ... )
diff --git a/cognite/client/utils/_time.py b/cognite/client/utils/_time.py
index 92cfe3a631..becded909d 100644
--- a/cognite/client/utils/_time.py
+++ b/cognite/client/utils/_time.py
@@ -267,8 +267,8 @@ def timestamp_to_ms(timestamp: int | float | str | datetime) -> int:
        >>> from datetime import datetime
        >>> timestamp_to_ms(datetime(2021, 1, 7, 12, 0, 0))
        >>> timestamp_to_ms("now")
-       >>> timestamp_to_ms("2w-ago") # 2 weeks ago
-       >>> timestamp_to_ms("3d-ahead") # 3 days ahead from now
+       >>> timestamp_to_ms("2w-ago")  # 2 weeks ago
+       >>> timestamp_to_ms("3d-ahead")  # 3 days ahead from now
    """
    if isinstance(timestamp, numbers.Number):  # float, int, int64 etc
        ms = int(timestamp)  # type: ignore[arg-type]
@@ -401,7 +401,7 @@ class WeekAligner(DateTimeAligner):
    def ceil(cls, date: datetime) -> datetime:
        """
        Ceils the date to the next Monday
-       >>> WeekAligner.ceil(datetime(2023, 4, 9 ))
+       >>> WeekAligner.ceil(datetime(2023, 4, 9))
        datetime.datetime(2023, 4, 10, 0, 0)
        """
        date = cls.normalize(date)
diff --git a/pyproject.toml b/pyproject.toml
index cc6534a8ea..ff930b4d95 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,6 +18,11 @@ packages = [{ include="cognite", from="." }]
 # let scripts use print statements
 "scripts/*" = ["T201"]
 
+[tool.ruff.format]
+# format code examples in docstrings to avoid users scrolling horizontally
+docstring-code-format = true
+docstring-code-line-length = 85
+
 [tool.poetry.dependencies]
 python = "^3.10"
 
@@ -79,4 +84,4 @@ tenacity = "^9.1.2"
 
 [build-system]
 requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
\ No newline at end of file
+build-backend = "poetry.core.masonry.api"
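
A note on the [tool.ruff.format] settings added to pyproject.toml above: with
`docstring-code-format = true`, `ruff format` also reformats doctest examples embedded
in docstrings, and `docstring-code-line-length = 85` wraps that embedded code at 85
columns so readers do not have to scroll horizontally in the rendered API docs. A
minimal sketch of the effect on a hypothetical docstring (the `things.retrieve` method
and its parameters below are made up for illustration and are not part of this diff):

    # Before formatting, a doctest line longer than 85 columns:
    #
    #     >>> res = client.things.retrieve(external_id="my-very-long-external-id", include_metadata=True)
    #
    # After running `ruff format` with the settings above, the call is wrapped:
    #
    #     >>> res = client.things.retrieve(
    #     ...     external_id="my-very-long-external-id", include_metadata=True
    #     ... )

Note that ruff only reformats examples it can parse as valid Python; doctest blocks
that use `>>>` where a `...` continuation prompt is required are skipped, so such
examples still have to be cleaned up by hand.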