diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/classic/root.Asset.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/classic/root.Asset.yaml new file mode 100644 index 00000000..dc522f28 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/classic/root.Asset.yaml @@ -0,0 +1,9 @@ +- externalId: {{ root_asset }} + name: Asset Centric Location Filter Example + description: This is an example of an asset centric location filter + dataSetExternalId: {{ ds_asset }} +- externalId: {{ root_asset2 }} + name: Asset Centric Location Filter Example 2 + description: This is an example of an asset centric location filter 2 + dataSetExternalId: {{ ds_asset }} + diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/data_sets/data_sets.DataSet.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/data_sets/data_sets.DataSet.yaml new file mode 100644 index 00000000..09e288ef --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/data_sets/data_sets.DataSet.yaml @@ -0,0 +1,3 @@ +- externalId: {{ ds_asset }} + name: LocationFilter Asset DataSet + description: This is an example of a location filter asset centric data set diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/default.config.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/default.config.yaml new file mode 100644 index 00000000..167bcd32 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/default.config.yaml @@ -0,0 +1,4 @@ +ds_asset: ds_asset_location_filter +root_asset: my_root_asset +root_asset2: my_root_asset2 +ts_prefix: sensor diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/individualFilters.LocationFilter.yaml 
b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/individualFilters.LocationFilter.yaml new file mode 100644 index 00000000..836d251d --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/individualFilters.LocationFilter.yaml @@ -0,0 +1,19 @@ +externalId: asset_centric_individual_filter_location_filter +name: Asset Centric Individual Filter Location Example +description: This is an example of an asset centric individual filter location +assetCentric: + assets: + # Typically, you would use either dataSetExternalIds, assetSubtreeIds, or externalIdPrefix + # not all of them. They are shown in this example for demonstration purposes. + dataSetExternalIds: + - '{{ ds_asset }}' + assetSubtreeIds: + - externalId: '{{ root_asset }}' + externalIdPrefix: pump_ + timeseries: + dataSetExternalIds: + - '{{ ds_asset }}' + assetSubtreeIds: + - externalId: '{{ root_asset2 }}' + externalIdPrefix: '{{ ts_prefix }}' +# events, sequences, and files are also supported. \ No newline at end of file diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/oneFilter.LocationFilter.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/oneFilter.LocationFilter.yaml new file mode 100644 index 00000000..107a187d --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/locations/oneFilter.LocationFilter.yaml @@ -0,0 +1,12 @@ +externalId: asset_centric_shared_location_filter +name: Asset Centric Shared Location Example +description: This is an example of an asset centric shared location +assetCentric: + # Typically, you would use either dataSetExternalIds, assetSubtreeIds, or externalIdPrefix + # not all of them. They are shown in this example for demonstration purposes. 
+ dataSetExternalIds: + - '{{ ds_asset }}' + assetSubtreeIds: + - externalId: '{{ root_asset }}' + - externalId: '{{ root_asset2 }}' + externalIdPrefix: pump_ diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/module.toml b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/module.toml new file mode 100644 index 00000000..6e9e7069 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_asset_centric/module.toml @@ -0,0 +1,4 @@ +[module] +title = "Example location filter asset centric" +id = "dp:acc:industrial_tools:cdf_location_filter_asset_centric" +package_id = "dp:industrial_tools" \ No newline at end of file diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/data_models/instance.Space.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/data_models/instance.Space.yaml new file mode 100644 index 00000000..9a264494 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/data_models/instance.Space.yaml @@ -0,0 +1,2 @@ +- space: {{ instance_space1 }} +- space: {{ instance_space2 }} diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/default.config.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/default.config.yaml new file mode 100644 index 00000000..d9206305 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/default.config.yaml @@ -0,0 +1,2 @@ +instance_space1: my_instance_space1 +instance_space2: my_instance_space2 diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/locations/dataModel.LocationFilter.yaml b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/locations/dataModel.LocationFilter.yaml new file mode 100644 index 00000000..6fac3abe --- /dev/null +++ 
b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/locations/dataModel.LocationFilter.yaml @@ -0,0 +1,17 @@ +externalId: view_based_location_filter +name: Data Model Based Location Filter Example +description: This is an example of a data model based location filter +dataModels: + - externalId: CogniteProcessIndustries + space: cdf_idm + version: v1 +instanceSpaces: + - {{ instance_space1 }} + - {{ instance_space2 }} +# If you have your own data model, with your own views, you can +# specify what the views are representing. This is used by Cognite Applications. +#views: +# - externalId: CogniteAsset +# space: cdf_cdm +# version: v1 +# representsEntity: ASSET # Can be MAINTENANCE_ORDER, OPERATION, NOTIFICATION, ASSET \ No newline at end of file diff --git a/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/module.toml b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/module.toml new file mode 100644 index 00000000..376c0642 --- /dev/null +++ b/modules/accelerators/industrial_tools/cdf_location_filter_datamodel_based/module.toml @@ -0,0 +1,4 @@ +[module] +title = "Example location filter datamodel based" +id = "dp:acc:industrial_tools:cdf_location_filter_datamodel_based" +package_id = "dp:industrial_tools" \ No newline at end of file diff --git a/modules/accelerators/industrial_tools/cdf_search/module.toml b/modules/accelerators/industrial_tools/cdf_search/module.toml index 0a97e7de..9f4b8398 100644 --- a/modules/accelerators/industrial_tools/cdf_search/module.toml +++ b/modules/accelerators/industrial_tools/cdf_search/module.toml @@ -2,4 +2,4 @@ title = "Search configuration with location filter" is_selected_by_default = false id = "dp:acc:industrial_tools:cdf_search" -package_id = "dp:accelerators" +package_id = "dp:industrial_tools" diff --git a/modules/accelerators/inrobot/README.md b/modules/accelerators/inrobot/README.md new file mode 100644 index 00000000..692251c1 --- /dev/null +++ 
b/modules/accelerators/inrobot/README.md @@ -0,0 +1,41 @@ +# InRobot Module + +This module allows you to quickly set up an InRobot project. There are a few pre-requisites: + +- You must have an asset hierarchy already set up. Specifically you will need the external id of the root asset for + your location. +- You must have a 3D model already uploaded in Fusion. Note its model id, its revision id, and its name. +- You must have created 4 groups in your source system: Users*, Admin*, Robot*1*, + Run_Function_User. The naming does not matter specifically, but you may need to add more locations and/or robots in + the future, so it would be ideal to name the groups accordingly. +- You must have created an app registration for the robot, and added the app registration to the robot user group. Note + the client id and the secret for the robot app registration. +- You must have already activated the functions service for your project. This can be done in Fusion, and can take up + to 2 hours to become activated. + +This module is meant to be used in conjunction with the toolkit common module and the cdf_apm_base module. + +For now, until the next version release of toolkit, you must also enable the following toolkit feature flags: + +cdf features set fun-schedule --enable +cdf features set robotics --enable + +## Configuration Variables + +Specific inrobot variables you will need to define in your config YAML file: + +| Variable Name | Description | +|-------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| `first_root_asset_external_id` | This is the asset external ID for your root asset. | +| `first_location` | A human readable name that will be included as part of different location-specific spaces and groups. | +| `inrobot_admin_first_location_source_id` | The ID for the admin group for the location.
| +| `inrobot_users_first_location_source_id` | The ID for the users group for the location. | +| `robot_1_first_location_source_id` | The ID for the robot group for the location. | +| `run_function_user_group_source_id` | The ID for the run function group. | +| `run_function_client_id` | The run function client ID (app registration in Azure). | +| `run_function_secret` | The secret for the run function app registration. This will be stored in your env file and should be referenced securely. | +| `robot_1_dataset_ext_id` | This is the data set for your robot. You can give this whatever value you want. | +| `three_d_model_name` | The name of the 3D model as named in Fusion. | +| `three_d_type` | The type of 3D model. This will be either `THREEDMODEL` or `POINTCLOUD`. | +| `three_d_model_id` | The model ID of your 3D model. | +| `three_d_revision_id` | The revision ID of your 3D model. | diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/README.md new file mode 100644 index 00000000..4b60de22 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/README.md @@ -0,0 +1,53 @@ +# cdf_inrobot_common + +This module contains shared configurations across multiple locations for InRobot. + +## Auth + +The module creates one group that needs one matching group in the identity provider that the CDF +project is configured with. The group is: + +- Run Function User. This role is used by our functions when they need to interact with CDF, for example, to add + annotations to images, to create timeseries, etc. This makes reference to the robot's data set, so each new robot + requires a new run function user. + +## Data models + +There is one space created in this module called cognite_app_data. This is the space where user and user profile +data is stored. We also populate the APM_Config node with our default config, including the information about the +location. 
If new locations are added, this node must be updated to include the new locations. + +## Data sets + +This module creates a new data set for the robot. All the robot data in CDF will be stored in this data set. The +external id is specified in the config.yaml file, and this data_set is referred to in many other modules. Any new +robots must have a new data set. + +## Functions + +This module contains four (4) functions that we deploy: Contextualize Robot Data, Gauge Reading, Get IR Data from +IR Raw, and ThreeSixty. These are functions that run every minute on files with certain labels that +the robot uploads to CDF. + +Because the function itself must be stored in a dataset and we use the dataset of the robot, a new function must be +defined for each robot in the robots.functions.yaml file. Additionally, most schedules use the robot data set id as a +data parameters - this means that a new schedule must be created for every new robot. + +## Labels + +This module creates two labels that are needed for InRobot to work: robot_gauge and read_ir. These are scoped to +the robot's data set. This means that new labels must be created for each additional robot. + +## Robotics + +This module creates some robotics specific resources: RobotCapability and DataPostProcessing. + +Currently we support the following RobotCapability resources: acoustic_video, pt_ir_video, ptz_ir, ptz_video, +ptz, threesixty, and threesixty_video. + +Currently we support the following DataPostProcessing resources: +process_threesixty, read_dial_gauge, read_digital_gauge, read_level_gauge, read_valve. + +If there are any of these robot capabilities you do not want, you can remove the YAML file. Note that the +process_threesixty data post processing requires the threesixty capability to be present. Similarly, +the various gauge data post processing options require the ptz robot capability. 
diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/auth/run-function-user.Group.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/auth/run-function-user.Group.yaml new file mode 100644 index 00000000..9bdd694e --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/auth/run-function-user.Group.yaml @@ -0,0 +1,89 @@ +name: "gp_Run_Function_User" +sourceId: "{{run_function_user_group_source_id}}" +metadata: + origin: "cdf-project-templates" +capabilities: + - datasetsAcl: + actions: + - READ + scope: + all: {} + - groupsAcl: + actions: + - LIST + scope: + all: {} + - sessionsAcl: + actions: + - CREATE + - LIST + scope: + all: {} + - projectsAcl: + actions: + - LIST + scope: + all: {} + - functionsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - labelsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - assetsAcl: + actions: + - READ + scope: + all: {} + - timeSeriesAcl: + actions: + - READ + - WRITE + scope: + all: {} + - filesAcl: + actions: + - READ + - WRITE + scope: + all: {} + - visionModelAcl: + actions: + - READ + scope: + all: {} + - dataModelsAcl: + actions: + - READ + scope: + all: {} + - dataModelInstancesAcl: + actions: + - READ + - WRITE + scope: + all: {} + - eventsAcl: + actions: + - READ + - WRITE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - roboticsAcl: + actions: + - READ + scope: + all: {} + - annotationsAcl: + actions: + - WRITE + scope: + all: {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/classic/robot.Label.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/classic/robot.Label.yaml new file mode 100644 index 00000000..a09b116d --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/classic/robot.Label.yaml @@ -0,0 +1,6 @@ +- name: robot_gauge + externalId: robot_gauge + dataSetExternalId: "{{ robot_1_dataset_ext_id }}" +- name: read_ir + externalId: read_ir + dataSetExternalId: "{{ robot_1_dataset_ext_id }}" diff --git 
a/modules/accelerators/inrobot/cdf_inrobot_common/data_models/cogniteAppData.space.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/data_models/cogniteAppData.space.yaml new file mode 100644 index 00000000..5534ace2 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/data_models/cogniteAppData.space.yaml @@ -0,0 +1,3 @@ +space: cognite_app_data +name: cognite_app_data +description: Space for User and User Preferences Data diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/data_models/inrobotApmConfig.node.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/data_models/inrobotApmConfig.node.yaml new file mode 100644 index 00000000..9859ce77 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/data_models/inrobotApmConfig.node.yaml @@ -0,0 +1,17 @@ +- space: "APM_Config" + externalId: "default-config" + sources: + - source: + space: APM_Config + externalId: APM_Config + version: "1" + type: view + properties: + featureConfiguration: + rootLocationConfigurations: + - assetExternalId: "{{ first_root_asset_external_id }}" + appDataInstanceSpace: sp_{{ first_location }}_app_data + sourceDataInstanceSpace: sp_{{ first_location }}_source_data + customerDataSpaceId: APM_SourceData + customerDataSpaceVersion: "1" + name: InRobot APM App Config diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/data_sets/robot_1.dataset.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/data_sets/robot_1.dataset.yaml new file mode 100644 index 00000000..3c69b050 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/data_sets/robot_1.dataset.yaml @@ -0,0 +1,2 @@ +- externalId: "{{ robot_1_dataset_ext_id }}" + name: "Robot 1 Dataset" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/default.config.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/default.config.yaml new file mode 100644 index 00000000..ac70e621 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/default.config.yaml @@ 
-0,0 +1,3 @@ +run_function_user_group_source_id: +run_function_client_id: +run_function_secret: diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/.gitkeep b/modules/accelerators/inrobot/cdf_inrobot_common/functions/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/README.md new file mode 100644 index 00000000..bb8b7add --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/README.md @@ -0,0 +1,30 @@ +# Contextualize robot files + +Function for contextualizing files that are uploaded from a robot to CDF. The function connects the file to +the correct asset, and connects the correct time series to the file. If no time series exist, they are created. +The function also assigns the correct action label to the file, indicating what action should be taken on the file, +e.g., read dial gauge, read valve. + +List all files with metadata field `{"processed":"false"}` in input data set. If the image has an asset id in the +"asset_id" metadata field, check that the asset exists and add that as "assetId" to the file. + +If the image is a gauge reading image, check if the asset has a time series with label "GAUGE_TIME_SERIES". If it does, +add metadata field "ts_external_id" to the image. +If not, create the timeseries. + +In this case we could write metadata to the time series if we are able to read it in the gauge reader service. So the +gauge reader would check if the TS has metadata or the file has metadata. + +1. If the TS has metadata, use ts metadata. +2. If the TS does not have metadata and the file has metadata, use file metadata and write that metadata to TS. +3. If the TS does not have metadata and the file does not have complete metadata, use incomplete metadata set +4.
and read remaining metadata and write metadata to TS if successful + +(Assume always complete or no metadata on timeseries) + +With this we could instruct the user to take a very close and good picture of the gauge initially and that would work +in some cases. Optionally, the user could take an image of the gauge, add correct metadata in the vision app and +that metadata will be written to the timeseries. + +It will not be easy to change the metadata if we read wrong metadata initially. An option could be to not write +metadata to timeseries in case 3 (when metadata is read from image). diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/__init__.py new file mode 100644 index 00000000..eee45ff1 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/__init__.py @@ -0,0 +1 @@ +"""Contextualize robot data.""" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/README.md new file mode 100644 index 00000000..f44fc406 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/README.md @@ -0,0 +1,58 @@ +# Shared-code folder: `common` + +## TL;DR + +> This entire folder hierarchy of `common` is copied into all functions automatically. + +## No, I want to read the long story + +A common use case when you deal with the deployment of multiple functions simultaneously, is that you do not want to +replicate shared code between all the function folders.
In order to accommodate this, as part of the workflow +`function-action` this template relies on, we copy all the contents in this folder (can be specified by `common_folder` +in the workflow files, see below) into the functions we upload to Cognite Functions. If this is not specified, we check if +`common/` exists in the root folder and if so, _**we use it automatically**_ :rocket: + +### Handling imports + +A typical setup looks like this: + +```bash +├── common +│ └── utils.py +└── my_function + └── handler.py +``` + +The code we zip and send off to the FilesAPI will look like this: + +```bash +├── common +│ └── utils.py +└── handler.py +``` + +This means your `handler.py`-file should do imports from `common/utils.py` like this: + +```python +from common.utils import my_helper1, my_helper2 +import common.utils as utils # alternative +``` + +## No, I want `common` to be named `snowflake_utilities` + +No problem mate, locate `.github/workflow` and open the following files: + +- `deploy-pr.yaml` +- `deploy-push.yaml` + +Then, in each file, scroll down until you see: + +```yaml + +- name: Deploy and schedule ${{ matrix.function }} + uses: cognitedata/function-action@v2 + with: + function_name: ... + common_folder: snowflake_utilities # <-- add it here + ...
+``` diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/apm_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/apm_helpers.py new file mode 100644 index 00000000..620abc88 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/apm_helpers.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +import json +import logging +from datetime import datetime +from typing import Any, Optional + +import pytz # type: ignore +from cognite.client import CogniteClient +from common.dataclass.common import Vec3f + +logger = logging.getLogger(__name__) + + +def create_apm_observation_from_reading( + client: CogniteClient, + file_external_id: str, + timeseries_external_id: str, + value: float, + timestamp: int, + pose: Vec3f, + message: str, + apm_checklistitem_external_id: Optional[str] = None, +): + """Create an APM observation from a gauge reading. 
+ + NOTE: this is a preliminary implementation, where we're not yet using the + gql pygen SDK made by Cognite (which will make this a lot cleaner) + + """ + apm_config = _get_apm_config(client=client, config_external_id="inrobot") + if apm_config is None: + logger.error("Could not find APM config") + return + + space_id = str(apm_config.get("appDataSpaceId")) + appdata_space_version = str(apm_config.get("appDataSpaceVersion")) + + views = _get_views( + client=client, + space=space_id, + datamodel_id=space_id, + datamodel_version=appdata_space_version, + view_ids=["APM_Measurement", "Vec3f", "APM_Observation"], + ) + + def _get_view_version(views: list[dict[str, Any]], view_id: str) -> Optional[str]: + view = next((item for item in views if item["externalId"] == view_id), None) + if view is None: + raise ValueError(f"Could not find view {view_id}") + return view.get("version") + + # create the APM measurement + if isinstance(timestamp, int): + timestamp_s = round(timestamp / 1000) + timestamp_dt = datetime.fromtimestamp(float(timestamp_s), pytz.UTC) + elif isinstance(timestamp, datetime): + timestamp_dt = timestamp + else: + raise ValueError(f"Timestamp is of type {type(timestamp)}, needs to be either `int` or `datetime`.") + + apm_measurement_external_id = f"measurement_{timeseries_external_id}_{timestamp}" + apm_measurement = { + "timeSeries": timeseries_external_id, + "measuredAt": timestamp_dt.isoformat(), + "numericValue": value, + } + + vec3f_external_id = f"position_{timeseries_external_id}_{timestamp}" + vec3f = {"x": pose.x, "y": pose.y, "z": pose.z} + + apm_observation_external_id = f"observation_{timeseries_external_id}_{timestamp}" + apm_observation = { + "fileIds": [file_external_id], + "position": {"externalId": vec3f_external_id, "space": space_id}, + "description": "Gauge reading", + "note": message, + "createdById": "CDF Gauge Reader Function", + "updatedById": "CDF Gauge Reader Function", + } + + # edge apm_observation -> apm_measurement + 
edge_apm_observation_apm_measurement = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_observation_external_id}.measurements_{apm_measurement_external_id}", + "startNode": {"externalId": apm_observation_external_id, "space": space_id}, + "endNode": {"externalId": apm_measurement_external_id, "space": space_id}, + "type": {"externalId": "APM_Observation.measurements", "space": space_id}, + } + + # assemble all instances (nodes and edges) + instances = [ + { + "instanceType": "node", + "space": space_id, + "externalId": apm_measurement_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Measurement", + "version": _get_view_version(views, "APM_Measurement"), + }, + "properties": apm_measurement, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": vec3f_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "Vec3f", + "version": _get_view_version(views, "Vec3f"), + }, + "properties": vec3f, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": apm_observation_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Observation", + "version": _get_view_version(views, "APM_Observation"), + }, + "properties": apm_observation, + } + ], + }, + edge_apm_observation_apm_measurement, + ] + + if apm_checklistitem_external_id is not None: + edge_apm_checklist_apm_observation = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_checklistitem_external_id}.observations_{apm_observation_external_id}", + "startNode": {"externalId": apm_checklistitem_external_id, "space": space_id}, + "endNode": {"externalId": apm_observation_external_id, "space": space_id}, + "type": {"externalId": "APM_ChecklistItem.observations", "space": space_id}, + } + instances.append(edge_apm_checklist_apm_observation) + + logger.debug(f"Creating instances request: 
{json.dumps(instances, indent=2)}") + + response = client.post( + f"/api/v1/projects/{client.config.project}/models/instances", json={"items": instances} + ).json() + + logger.debug(f"Creating instances response: {json.dumps(response, indent=2)}") + + +def _get_views( + client: CogniteClient, space: str, datamodel_id: str, datamodel_version: Optional[str] = None, view_ids=list[str] +) -> list[dict[str, Any]]: + response = client.post( + f"/api/v1/projects/{client.config.project}/models/datamodels/byids", + json={ + "items": [{"space": space, "externalId": datamodel_id, "version": datamodel_version}], + }, + ).json() + + # get latest datamodel, if no version is specified + datamodel = max(response["items"], key=lambda item: item["createdTime"], default=None) + if datamodel is None: + raise ValueError(f"No datamodels with id {datamodel_id} found with version {datamodel_version}.") + + views = datamodel.get("views") + if views is None or views == []: + raise ValueError(f"Datamodel {datamodel_id} has no views.") + + available_views = {view.get("externalId"): view.get("version") for view in views} + + items = [ + {"space": space, "externalId": requested_view_id, "version": available_views.get(requested_view_id)} + for requested_view_id in view_ids + if available_views.get(requested_view_id) is not None + ] + + if len(items) != len(view_ids): + missing_views = set(view_ids) - set([item["externalId"] for item in items]) + raise ValueError(f"Views {', '.join(missing_views)} not found in datamodel {datamodel_id}.") + + views = client.post( + f"/api/v1/projects/{client.config.project}/models/views/byids", + json={ + "items": items, + }, + ).json() + + return views["items"] + + +def _get_apm_config(client: CogniteClient, config_external_id: str) -> Optional[dict[str, Any]]: + space = "APM_Config" + datamodel_id = "APM_Config" + view_id = "APM_Config" + instance_external_ids = [config_external_id, "default-config"] + + views = _get_views(client=client, space=space, 
datamodel_id=datamodel_id, view_ids=[view_id]) + +    if len(views) != 1: +        raise ValueError(f"Expected to find exactly one view for {view_id}, found {len(views)}.") + +    view = views[0] + +    response = client.post( +        f"/api/v1/projects/{client.config.project}/models/instances/byids", +        json={ +            "items": [ +                {"instanceType": "node", "externalId": external_id, "space": space} +                for external_id in instance_external_ids +            ], +            "sources": [ +                { +                    "source": { +                        "type": "view", +                        "space": space, +                        "externalId": view.get("externalId"), +                        "version": view.get("version"), +                    } +                } +            ], +        }, +    ).json() + +    if response.get("items") == []: +        logger.error("Could not find APM config, not upserting any APM_Observations now.") +        return None + +    # Try to find the first item with "externalId" == config_external_id +    apm_config = next((item for item in response["items"] if item["externalId"] == config_external_id), None) + +    # If no such item is found, try to find the first item with "externalId" == "default-config" +    if apm_config is None: +        apm_config = next((item for item in response["items"] if item["externalId"] == "default-config"), None) +    if apm_config is None: +        raise ValueError(f"Could not find APM config with externalId {config_external_id} or `default-config`.") + +    apm_config = apm_config.get("properties").get(space).get(f"{view.get('externalId')}/{view.get('version')}") + +    return apm_config + + +def _get_position_from_metadata_to_vec3f(metadata: dict[str, Any]) -> Vec3f: +    """Get position from metadata.""" +    x = metadata.get("waypoint_tform_body_x") +    y = metadata.get("waypoint_tform_body_y") +    z = metadata.get("waypoint_tform_body_z") + +    if not x or not y or not z: +        raise ValueError( +            f"Missing metadata field. Required metadata fields \ +                are waypoint_tform_body_x, waypoint_tform_body_y, waypoint_tform_body_z.
File metadata keys: {metadata.keys()}" + ) + + return Vec3f(x=float(x), y=float(y), z=float(z)) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/cdf_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/cdf_helpers.py new file mode 100644 index 00000000..d9cfa076 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/cdf_helpers.py @@ -0,0 +1,112 @@ +from __future__ import annotations + +import logging +import time +from typing import Any, Optional + +from cognite.client import CogniteClient +from cognite.client.data_classes import Annotation, LabelDefinition + +logger = logging.getLogger(__name__) + + +def call_vision_api_endpoint( + client: CogniteClient, + url: str, + input_data: dict, + max_get_job_attempts: int = 20, + headers: Optional[dict[str, Any]] = None, +): + """Post to an API endpoint, wait for response and return response.""" + # TODO: Use this function in people detector function (https://cognitedata.atlassian.net/browse/DMVP-855) + res = client.post(url=url, json=input_data, headers=headers).json() + + job_id = res.get("jobId") + for i in range(max_get_job_attempts): + logger.info(f"Attempt nr. 
{i} to get job status.") + res = client.get(url=f"{url}/{job_id}").json() + if res.get("status") in ["Queued", "Running"]: + logger.info(f"API job status: {res.get('status')}") + time.sleep(10) + elif res.get("status") == "Completed": + logger.info("API job completed") + break + else: # Use for/else in case of failed job or timeout + logger.info(f"API job failed or timed out: {res}") + return None + + return res + + +def create_missing_labels(client: CogniteClient, label_ids: list[str]): + """Create missing labels from a list of label IDs.""" + for label_id in label_ids: + if not check_label_exists(client, label_id): + client.labels.create([LabelDefinition(external_id=label_id, name=label_id)]) + + +def check_label_exists(client: CogniteClient, label_id: str): + """Check if a label exists in CDF given its external_id (label_id).""" + labels = client.labels.list(external_id_prefix=label_id) + return any(label.external_id == label_id for label in labels) + + +def create_annotations( + client: CogniteClient, + gauge_reading_result: dict, + file_id: int, + gauge_type: str, + bounding_box_label: str, + keypoint_label: str, +): + annotations = gauge_reading_result["items"][0]["predictions"][gauge_type + "GaugePredictions"] + annotations_list = [] + + for annotation in annotations: + print(f"ANNOTATION TYPE: {annotation}") + if gauge_type == "digital": + bounding_box = annotation["boundingBox"] + else: + keypoints = annotation["keypointCollection"]["keypoints"] + bounding_box = annotation["objectDetection"]["boundingBox"] + # Create bounding box annotation + bounding_box_annotation = Annotation( + annotation_type="images.ObjectDetection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={ + "label": bounding_box_label, + "boundingBox": bounding_box, + }, + ) + annotations_list.append(bounding_box_annotation) + + if gauge_type in 
["level", "dial"]: + # Create keypoint annotation + keypoint_names = list(keypoints.keys()) + keypoint_data = {} + for i in range(len(keypoint_names)): + keypoint_name = keypoint_names[i] + print(keypoint_name) + point = keypoints[keypoint_name]["point"] + print(point) + keypoint_data[keypoint_name] = {"point": point} + print(keypoint_data) + + keypoint_annotation = Annotation( + annotation_type="images.KeypointCollection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={"label": keypoint_label, "keypoints": keypoint_data}, + ) + annotations_list.append(keypoint_annotation) + + client.annotations.create(annotations_list) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/common.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/common.py new file mode 100644 index 00000000..b07197dc --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/common.py @@ -0,0 +1,16 @@ +from dataclasses import asdict, dataclass + + +@dataclass +class Dataclass: + def to_dict(self, ignore_none=False): + if ignore_none: + return asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v}) + return asdict(self) + + +@dataclass +class Vec3f(Dataclass): + x: float + y: float + z: float diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/vision.py 
b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/vision.py new file mode 100644 index 00000000..570d0a2d --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/dataclass/vision.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Optional + +from common.dataclass.common import Dataclass + + +@dataclass +class File(Dataclass): + fileId: int + fileExternalId: Optional[str] = None + + +@dataclass +class RequestCustomModelPrediction(Dataclass): + items: list[File] + modelFile: File + threshold: float + + +@dataclass +class Vertex(Dataclass): + x: float + y: float + + +@dataclass +class Region(Dataclass): + shape: str # points, rectangle, polyline, polygon + vertices: list[Vertex] + + +@dataclass +class VisionAnnotation(Dataclass): + text: str + confidence: Optional[float] = None + region: Optional[Region] = None + + +@dataclass +class Item(Dataclass): + fileId: int + annotations: Optional[list[VisionAnnotation]] = None + fileExternalId: Optional[str] = None + width: Optional[float] = None + height: Optional[float] = None + + +@dataclass +class FailedBatchSchema(Dataclass): + errorMessage: Optional[str] = None + items: Optional[list[File]] = None + + +@dataclass +class ResponseCustomModelPrediction(Dataclass): + status: str # "Queued" "Running" "Completed" "Failed" + createdTime: int + startTime: int + statusTime: int + jobId: int + items: list[Item] + modelFile: File + threshold: Optional[float] = None + failedItems: Optional[list[FailedBatchSchema]] = None diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/utils.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/utils.py new file mode 100644 index 00000000..c3ce1ce9 --- /dev/null +++ 
b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/common/utils.py @@ -0,0 +1,17 @@ +import io + +from PIL import Image + + +def batch(iterable, n=1): + """Batch iterable.""" + len_iterable = len(iterable) + for ndx in range(0, len_iterable, n): + yield iterable[ndx : min(ndx + n, len_iterable)] + + +def image_to_byte_array(image: Image) -> bytes: + """Convert PIL image to byte array.""" + img_byte_arr = io.BytesIO() + image.save(img_byte_arr, format=image.format) + return img_byte_arr.getvalue() diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/handler.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/handler.py new file mode 100644 index 00000000..ce69385b --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/handler.py @@ -0,0 +1,269 @@ +"""Contextualize handler.""" + +import logging +import sys + +from cognite.client.data_classes import FileMetadataUpdate, TimeSeries +from common.cdf_helpers import create_missing_labels + +MAX_GET_JOB_ATTEMPTS = 10 +logger = logging.getLogger(__name__) + + +def get_time_series_label(camera_type): + if camera_type == "ir": + time_series_label = "IR_TIME_SERIES" + else: + time_series_label = "ANALOG_GAUGE_TIME_SERIES" + return time_series_label + + +def has_label(label_list, label_external_id): + """Check if the label_external_id is in a list of labels.""" + for label in label_list: + if label.external_id == label_external_id: + return True + return False + + +def create_timeseries( + client, + asset, + data_set_external_id, + time_series_label, + gauge_number=1, + total_number_of_gauges=1, + time_series_naming_tag="", +): + """Create time series on the correct format.""" + if time_series_label == "IR_TIME_SERIES": + ts_external_id = ( + f"{time_series_label}_{asset.external_id}_{time_series_naming_tag}" + if asset.external_id is not None + else 
f"{asset.id}_{time_series_label}_{time_series_naming_tag}" + ) + ts_name = f"{asset.name}_ir_{time_series_naming_tag}" + + elif gauge_number <= 1 and total_number_of_gauges <= 1: + ts_external_id = ( + f"{time_series_label}_{asset.external_id}_0" + if asset.external_id is not None + else f"{asset.id}_{time_series_label}_0" + ) + ts_name = f"{asset.name}_gauge_values" + else: + ts_external_id = ( + f"{time_series_label}_{asset.external_id}_{gauge_number}" + if asset.external_id is not None + else f"{asset.id}_{time_series_label}_{gauge_number}" + ) + ts_name = f"{asset.name}_gauge_values_{gauge_number}" + + logger.info(f"Creates timeseries with name: {ts_name}") + data_set_id = client.data_sets.retrieve(external_id=data_set_external_id).id + return client.time_series.create( + TimeSeries( + metadata={"TYPE": time_series_label}, + asset_id=asset.id, + external_id=ts_external_id, + data_set_id=data_set_id, + name=ts_name, + ) + ) + + +def get_asset(client, file): + """Get asset from metadata field.""" + if file.metadata.get("asset_id") not in [None, "0", "None"]: + try: + asset_id = int(file.metadata["asset_id"]) + asset = client.assets.retrieve(asset_id) + except Exception as e: + logger.exception(e) + return None + return asset + + +def get_timeseries_external_id(client, asset, data_set_external_id, time_series_label, number_of_gauges=1): + """Get time series external id.""" + ts = client.time_series.list(metadata={"TYPE": time_series_label}, asset_ids=[asset.id]) + number_of_timeseries = len(ts) + ts_external_ids = [] + logger.info(f"{ts}") + + # Create one timeseries for min temperature and one for max temperature for IR images + if time_series_label == "IR_TIME_SERIES": + if number_of_timeseries == 0: + print("CREATE IR TIMESERIES") + ir_ts_naming_tags = ["min", "max"] + for naming_tag in ir_ts_naming_tags: + ts = create_timeseries( + client, + asset, + data_set_external_id, + time_series_label, + time_series_naming_tag=naming_tag, + ) + 
ts_external_ids.append(ts.external_id) + else: + ts_external_ids = [ts[i].external_id for i in range(number_of_timeseries)] + + # Logic for creating gauge timeseries + elif number_of_timeseries >= 1 and number_of_gauges == number_of_timeseries: + ts_external_ids = [ts[i].external_id for i in range(number_of_timeseries)] + elif number_of_timeseries >= 1 and number_of_gauges > number_of_timeseries: + ts_external_ids.append(ts[0].external_id) + diff = number_of_gauges - number_of_timeseries + for gauge_number in range(diff): + logger.info("CREATE GAUGE TIMESERIES") + new_gauge_number = gauge_number + number_of_timeseries + ts = create_timeseries( + client, asset, data_set_external_id, time_series_label, new_gauge_number, number_of_gauges + ) + ts_external_ids.append(ts.external_id) + elif number_of_timeseries == 0: + logger.info("CREATE GAUGE TIMESERIES") + if number_of_gauges <= 1: + ts = create_timeseries(client, asset, data_set_external_id, time_series_label) + ts_external_ids.append(ts.external_id) + else: + for gauge_number in range(number_of_gauges): + ts = create_timeseries( + client, asset, data_set_external_id, time_series_label, gauge_number, number_of_gauges + ) + ts_external_ids.append(ts.external_id) + + return ts_external_ids + + +def get_number_of_gauges(client, asset): + """Get the total number of gauges associated with an asset.""" + try: + asset_data = client.assets.retrieve(id=asset.id) + asset_metadata = asset_data.metadata + if "number_of_gauges" in asset_metadata: + return int(asset_metadata["number_of_gauges"]) + else: + return 1 + except Exception as e: + logger.exception(e) + return 1 + + +def handle(data, client): + """Contextualize robot data.""" + print("Start gauge reading.") + required_input_data_fields = { + "data_set_external_id", + "read_dial_gauge_label", + "read_multiple_dial_gauges_label", + "read_digital_gauge_label", + "read_level_gauge_label", + "read_valve_label", + "read_ir_raw_label", + "spill_detection_label", + 
"gauge_context_label", + } + if not required_input_data_fields <= data.keys(): + raise RuntimeError(f"Data should contain all keys: {required_input_data_fields}. Current data: {data}") + + logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=logging.BASIC_FORMAT) + create_missing_labels( + client, + [ + data["read_dial_gauge_label"], + data["read_multiple_dial_gauges_label"], + data["read_digital_gauge_label"], + data["read_level_gauge_label"], + data["read_valve_label"], + data["read_ir_raw_label"], + data["spill_detection_label"], + "threesixty", + ], + ) + files = client.files.list( + data_set_external_ids=[data["data_set_external_id"]], + metadata={"processed": "false"}, + limit=-1, + uploaded=True, + ) + + logger.info(f"Number of files to process in dataset {data['data_set_external_id']}: {len(files)}") + print(f"Number of files to process in dataset {data['data_set_external_id']}: {len(files)}") + + for file in files: + number_of_gauges = 1 # Default is one gauge per image + metadata = file.metadata + logger.info(f"Processing file with external id: {file.external_id}, id: {file.id}") + + update = FileMetadataUpdate(file.id) + asset = get_asset(client, file) + update.asset_ids.add([asset.id]) if asset else None + + print(f"The labels on this file are: {file.labels}") + if file.labels and has_label(file.labels, data["gauge_context_label"]): + gauge_type = metadata.get("gauge_type", "dial") # Default gauge type from robot is dial + print(f"This file has gauge type: {gauge_type}") + + camera_type = metadata.get("camera", None) + time_series_label = get_time_series_label(camera_type) + + if gauge_type == "valve": + current_action_label = "read_valve_label" + elif gauge_type == "dial": + if asset: + number_of_gauges = get_number_of_gauges(client, asset) + if number_of_gauges <= 1: + current_action_label = "read_dial_gauge_label" + else: + current_action_label = "read_multiple_dial_gauges_label" + else: + current_action_label = 
f"read_{gauge_type}_gauge_label" + + logger.info(f"Setting action label to {data[current_action_label]}") + print(f"Setting action label to {data[current_action_label]}") + + update.labels.add(data[current_action_label]) + update.labels.remove(data["gauge_context_label"]) + + if asset and gauge_type != "spill": + logger.info(f"Number of gauges: {number_of_gauges}.") + ts_eid = get_timeseries_external_id( + client, asset, data["data_set_external_id"], time_series_label, number_of_gauges + ) + logger.info(f"Time series linked to asset: {ts_eid}.") + if ts_eid: + if time_series_label == "IR_TIME_SERIES": + for i in range(len(ts_eid)): + metadata[f"ts_external_id_{i}"] = ts_eid[i] + elif number_of_gauges <= 1: + metadata["ts_external_id"] = ts_eid[0] + else: + for gauge_number in range(number_of_gauges): + if metadata.get("ts_external_id", None): + if gauge_number == 0: + continue + else: + metadata[f"ts_external_id_{gauge_number}"] = ts_eid[gauge_number] + else: + metadata[f"ts_external_id_{gauge_number}"] = ts_eid[gauge_number] + elif "threesixty" in file.external_id: + print("This is a 360 image, adding threesixty label.") + update.labels.add(["threesixty"]) + + elif asset and metadata.get("method") == "process_ir_raw": + current_action_label = "read_ir_raw_label" + time_series_label = "IR_TIME_SERIES" + print(f"this is the asset: {asset}") + + ts_eid = get_timeseries_external_id( + client, asset, data["data_set_external_id"], time_series_label, number_of_gauges + ) + if time_series_label == "IR_TIME_SERIES": + for i in range(len(ts_eid)): + metadata[f"ts_external_id_{i}"] = ts_eid[i] + + metadata["processed"] = "true" + update.metadata.add(metadata) + client.files.update(update) + return {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/requirements.txt b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/requirements.txt new file mode 100644 index 00000000..a15d8762 --- 
/dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_contextualize_robot_data/requirements.txt @@ -0,0 +1,22 @@ +arrow==1.0.3 +certifi==2023.11.17 +chardet==4.0.0 +cognite-extractor-utils==1.4.2 +cognite-sdk==2.56.1 +dacite==1.8.1 +decorator==4.4.2 +idna==3.7 +numpy==1.20.1 +oauthlib==3.1.0 +pandas==1.2.3 +prometheus-client==0.9.0 +psutil==5.8.0 +py==1.10.0 +python-dateutil==2.8.1 +pytz==2021.1 +PyYAML==5.4.1 +requests==2.32.0 +requests-oauthlib==1.3.1 +retry==0.9.2 +six==1.15.0 +urllib3==2.6.3 diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/README.md new file mode 100644 index 00000000..39bb574e --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/README.md @@ -0,0 +1,56 @@ +# Gauge reading + +Uses the API endpoint. +Takes all files with the specified label and reads an analog gauge in the image. + +## Input parameters + +- *gauge_type*: The gauge type to use in the API call +- *input_label*: Label to list files by +- *output_label*: Label for processed files. +- *success_label*: The label added to files where the gauge reading was successful. +- *failed_label*: The label added to files where the gauge reading has failed. + +## Gauge reading + +- The function performs gauge reading on all files with the input label defined in the schedule file. +- Each input file can contain none, some of or all the metadata fields: + + - min_level + - max_level + - unit + - dead_angle + - ts_external_id + +- If the file has the ts_external_id metadata field, we first try to use the metadata from the time series as input + metadata (min_level, max_level, unit, dead_angle). If the ts do not contain all these fields and the file contains + a complete metadata set (min_level, max_level, unit, dead_angle), the timeseries is updated with the correct metadata. 
+- If the file does not have the ts_external_id metadata field or no complete metadata set is found, the existing file + metadata fields are used as input metadata to the API. For example if the file has a metadata field "unit", + but not "min_level" etc, only unit is given as input to the API. +- If the reading returns a complete metadata reading and the timeseries did not have metadata, the time series is + updated with metadata. +- After the file is read the input_label is removed and the output_label is added. +- If the reading fails, the failed_label is added to the image. +- If the reading succeeds, the success_label is added to the image and value and metadata is added to file metadata. +- If the file contains the ts_external_id field in metadata, the value is written to the timeseries. +- If the reading fails, all attributes that are read are still written to the image. If for example unit and min_level +- is read from the image, unit and min_level are added in the file metadata. + +## Labels + +- The function checks that all labels exist, and creates the fields that do not exist. +- The function removes the input label from the image and adds the output label for processed files to the file + as well as the success or fail label. + +## Timestamp + +Timestamp is found from + +1. Metadata field called timestamp +2. Source created time +3.
Uploaded time + +## Example request and response from the gauge reading endpoint + +[cognitedata/context-api-workers/pull/981](https://github.com/cognitedata/context-api-workers/pull/981) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/__init__.py new file mode 100644 index 00000000..9e8f6ee9 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/__init__.py @@ -0,0 +1 @@ +"""Gauge reader.""" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/README.md new file mode 100644 index 00000000..f44fc406 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/README.md @@ -0,0 +1,58 @@ +# Shared-code folder: `common` + +## TL;DR + +> This entire folder hierarchy of `common` is copied into all functions automatically. + +## No, I want to read the long story + +A common use case when you deal with the deployment of multiple functions simultaneously, is that you do not want to +replicate shared code between all the function folders. In order to accommodate this, as part of the workflow +`function-action` this template relies on, we copy all the contents in this folder (can be specified by `common_folder` +in the workflow files, see below) to the functions we upload to Cognite Functions.
If this is not specified, we check if +`common/` exists in the root folder and if so, _**we use it automatically**_ :rocket: + +### Handling imports + +A typical setup looks like this: + +```bash +├── common +│ └── utils.py +└── my_function + └── handler.py +``` + +The code we zip and send off to the FilesAPI will look like this: + +```bash +├── common +│ └── utils.py +└── handler.py +``` + +This means your `handler.py`-file should do imports from `common/utils.py` like this: + +```python +from common.utils import my_helper1, my_helper2 +import common.utils as utils # alternative +``` + +## No, I want `common` to be named `snowflake_utilities` + +No problem mate, locate `.github/workflow` and open the following files: + +- `deploy-pr.yaml` +- `deploy-push.yaml` + +Then, in each file, scroll down until you see: + +```yaml + +- name: Deploy and schedule ${{ matrix.function }} + uses: cognitedata/function-action@v2 + with: + function_name: ... + common_folder: snowflake_utilities # <-- add it here + ... 
+``` diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/apm_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/apm_helpers.py new file mode 100644 index 00000000..620abc88 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/apm_helpers.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +import json +import logging +from datetime import datetime +from typing import Any, Optional + +import pytz # type: ignore +from cognite.client import CogniteClient +from common.dataclass.common import Vec3f + +logger = logging.getLogger(__name__) + + +def create_apm_observation_from_reading( + client: CogniteClient, + file_external_id: str, + timeseries_external_id: str, + value: float, + timestamp: int, + pose: Vec3f, + message: str, + apm_checklistitem_external_id: Optional[str] = None, +): + """Create an APM observation from a gauge reading. 
+ + NOTE: this is a preliminary implementation, where we're not yet using the + gql pygen SDK made by Cognite (which will make this a lot cleaner) + + """ + apm_config = _get_apm_config(client=client, config_external_id="inrobot") + if apm_config is None: + logger.error("Could not find APM config") + return + + space_id = str(apm_config.get("appDataSpaceId")) + appdata_space_version = str(apm_config.get("appDataSpaceVersion")) + + views = _get_views( + client=client, + space=space_id, + datamodel_id=space_id, + datamodel_version=appdata_space_version, + view_ids=["APM_Measurement", "Vec3f", "APM_Observation"], + ) + + def _get_view_version(views: list[dict[str, Any]], view_id: str) -> Optional[str]: + view = next((item for item in views if item["externalId"] == view_id), None) + if view is None: + raise ValueError(f"Could not find view {view_id}") + return view.get("version") + + # create the APM measurement + if isinstance(timestamp, int): + timestamp_s = round(timestamp / 1000) + timestamp_dt = datetime.fromtimestamp(float(timestamp_s), pytz.UTC) + elif isinstance(timestamp, datetime): + timestamp_dt = timestamp + else: + raise ValueError(f"Timestamp is of type {type(timestamp)}, needs to be either `int` or `datetime`.") + + apm_measurement_external_id = f"measurement_{timeseries_external_id}_{timestamp}" + apm_measurement = { + "timeSeries": timeseries_external_id, + "measuredAt": timestamp_dt.isoformat(), + "numericValue": value, + } + + vec3f_external_id = f"position_{timeseries_external_id}_{timestamp}" + vec3f = {"x": pose.x, "y": pose.y, "z": pose.z} + + apm_observation_external_id = f"observation_{timeseries_external_id}_{timestamp}" + apm_observation = { + "fileIds": [file_external_id], + "position": {"externalId": vec3f_external_id, "space": space_id}, + "description": "Gauge reading", + "note": message, + "createdById": "CDF Gauge Reader Function", + "updatedById": "CDF Gauge Reader Function", + } + + # edge apm_observation -> apm_measurement + 
edge_apm_observation_apm_measurement = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_observation_external_id}.measurements_{apm_measurement_external_id}", + "startNode": {"externalId": apm_observation_external_id, "space": space_id}, + "endNode": {"externalId": apm_measurement_external_id, "space": space_id}, + "type": {"externalId": "APM_Observation.measurements", "space": space_id}, + } + + # assemble all instances (nodes and edges) + instances = [ + { + "instanceType": "node", + "space": space_id, + "externalId": apm_measurement_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Measurement", + "version": _get_view_version(views, "APM_Measurement"), + }, + "properties": apm_measurement, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": vec3f_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "Vec3f", + "version": _get_view_version(views, "Vec3f"), + }, + "properties": vec3f, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": apm_observation_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Observation", + "version": _get_view_version(views, "APM_Observation"), + }, + "properties": apm_observation, + } + ], + }, + edge_apm_observation_apm_measurement, + ] + + if apm_checklistitem_external_id is not None: + edge_apm_checklist_apm_observation = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_checklistitem_external_id}.observations_{apm_observation_external_id}", + "startNode": {"externalId": apm_checklistitem_external_id, "space": space_id}, + "endNode": {"externalId": apm_observation_external_id, "space": space_id}, + "type": {"externalId": "APM_ChecklistItem.observations", "space": space_id}, + } + instances.append(edge_apm_checklist_apm_observation) + + logger.debug(f"Creating instances request: 
{json.dumps(instances, indent=2)}") + + response = client.post( + f"/api/v1/projects/{client.config.project}/models/instances", json={"items": instances} + ).json() + + logger.debug(f"Creating instances response: {json.dumps(response, indent=2)}") + + +def _get_views( + client: CogniteClient, space: str, datamodel_id: str, datamodel_version: Optional[str] = None, view_ids=list[str] +) -> list[dict[str, Any]]: + response = client.post( + f"/api/v1/projects/{client.config.project}/models/datamodels/byids", + json={ + "items": [{"space": space, "externalId": datamodel_id, "version": datamodel_version}], + }, + ).json() + + # get latest datamodel, if no version is specified + datamodel = max(response["items"], key=lambda item: item["createdTime"], default=None) + if datamodel is None: + raise ValueError(f"No datamodels with id {datamodel_id} found with version {datamodel_version}.") + + views = datamodel.get("views") + if views is None or views == []: + raise ValueError(f"Datamodel {datamodel_id} has no views.") + + available_views = {view.get("externalId"): view.get("version") for view in views} + + items = [ + {"space": space, "externalId": requested_view_id, "version": available_views.get(requested_view_id)} + for requested_view_id in view_ids + if available_views.get(requested_view_id) is not None + ] + + if len(items) != len(view_ids): + missing_views = set(view_ids) - set([item["externalId"] for item in items]) + raise ValueError(f"Views {', '.join(missing_views)} not found in datamodel {datamodel_id}.") + + views = client.post( + f"/api/v1/projects/{client.config.project}/models/views/byids", + json={ + "items": items, + }, + ).json() + + return views["items"] + + +def _get_apm_config(client: CogniteClient, config_external_id: str) -> Optional[dict[str, Any]]: + space = "APM_Config" + datamodel_id = "APM_Config" + view_id = "APM_Config" + instance_external_ids = [config_external_id, "default-config"] + + views = _get_views(client=client, space=space, 
datamodel_id=datamodel_id, view_ids=[view_id]) + + if len(views) != 1: + raise ValueError(f"Expected to find exactly one view for {view_id}, found {len(views)}.") + + view = views[0] + + response = client.post( + f"/api/v1/projects/{client.config.project}/models/instances/byids", + json={ + "items": [ + {"instanceType": "node", "externalId": external_id, "space": space} + for external_id in instance_external_ids + ], + "sources": [ + { + "source": { + "type": "view", + "space": space, + "externalId": view.get("externalId"), + "version": view.get("version"), + } + } + ], + }, + ).json() + + if response.get("items") == []: + logger.error("Could not find APM config, not upserting any APM_Observations now.") + return None + + # Try to find the first item with "externalId" == config_external_id + apm_config = next((item for item in response["items"] if item["externalId"] == config_external_id), None) + + # If no such item is found, try to find the first item with "externalId" == "default" + if apm_config is None: + apm_config = next((item for item in response["items"] if item["externalId"] == "default-config"), None) + if apm_config is None: + raise ValueError(f"Could not find APM config with externalId {config_external_id} or `default-config`.") + + apm_config = apm_config.get("properties").get(space).get(f"{view.get('externalId')}/{view.get('version')}") + + return apm_config + + +def _get_position_from_metadata_to_vec3f(metadata: dict[str, Any]) -> Vec3f: + """Get position from metadata.""" + x = metadata.get("waypoint_tform_body_x") + y = metadata.get("waypoint_tform_body_x") + z = metadata.get("waypoint_tform_body_x") + + if not x or not y or not z: + raise ValueError( + f"Missing metadata field. Required metadata fields \ + are waypoint_tform_body_x, waypoint_tform_body_x, waypoint_tform_body_x. 
File metadata keys: {metadata.keys()}" + ) + + return Vec3f(x=float(x), y=float(y), z=float(z)) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/cdf_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/cdf_helpers.py new file mode 100644 index 00000000..d9cfa076 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/cdf_helpers.py @@ -0,0 +1,112 @@ +from __future__ import annotations + +import logging +import time +from typing import Any, Optional + +from cognite.client import CogniteClient +from cognite.client.data_classes import Annotation, LabelDefinition + +logger = logging.getLogger(__name__) + + +def call_vision_api_endpoint( + client: CogniteClient, + url: str, + input_data: dict, + max_get_job_attempts: int = 20, + headers: Optional[dict[str, Any]] = None, +): + """Post to an API endpoint, wait for response and return response.""" + # TODO: Use this function in people detector function (https://cognitedata.atlassian.net/browse/DMVP-855) + res = client.post(url=url, json=input_data, headers=headers).json() + + job_id = res.get("jobId") + for i in range(max_get_job_attempts): + logger.info(f"Attempt nr. 
{i} to get job status.") + res = client.get(url=f"{url}/{job_id}").json() + if res.get("status") in ["Queued", "Running"]: + logger.info(f"API job status: {res.get('status')}") + time.sleep(10) + elif res.get("status") == "Completed": + logger.info("API job completed") + break + else: # Use for/else in case of failed job or timeout + logger.info(f"API job failed or timed out: {res}") + return None + + return res + + +def create_missing_labels(client: CogniteClient, label_ids: list[str]): + """Create missing labels from a list of label IDs.""" + for label_id in label_ids: + if not check_label_exists(client, label_id): + client.labels.create([LabelDefinition(external_id=label_id, name=label_id)]) + + +def check_label_exists(client: CogniteClient, label_id: str): + """Check if a label exists in CDF given its external_id (label_id).""" + labels = client.labels.list(external_id_prefix=label_id) + return any(label.external_id == label_id for label in labels) + + +def create_annotations( + client: CogniteClient, + gauge_reading_result: dict, + file_id: int, + gauge_type: str, + bounding_box_label: str, + keypoint_label: str, +): + annotations = gauge_reading_result["items"][0]["predictions"][gauge_type + "GaugePredictions"] + annotations_list = [] + + for annotation in annotations: + print(f"ANNOTATION TYPE: {annotation}") + if gauge_type == "digital": + bounding_box = annotation["boundingBox"] + else: + keypoints = annotation["keypointCollection"]["keypoints"] + bounding_box = annotation["objectDetection"]["boundingBox"] + # Create bounding box annotation + bounding_box_annotation = Annotation( + annotation_type="images.ObjectDetection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={ + "label": bounding_box_label, + "boundingBox": bounding_box, + }, + ) + annotations_list.append(bounding_box_annotation) + + if gauge_type in 
["level", "dial"]: + # Create keypoint annotation + keypoint_names = list(keypoints.keys()) + keypoint_data = {} + for i in range(len(keypoint_names)): + keypoint_name = keypoint_names[i] + print(keypoint_name) + point = keypoints[keypoint_name]["point"] + print(point) + keypoint_data[keypoint_name] = {"point": point} + print(keypoint_data) + + keypoint_annotation = Annotation( + annotation_type="images.KeypointCollection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={"label": keypoint_label, "keypoints": keypoint_data}, + ) + annotations_list.append(keypoint_annotation) + + client.annotations.create(annotations_list) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/common.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/common.py new file mode 100644 index 00000000..b07197dc --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/common.py @@ -0,0 +1,16 @@ +from dataclasses import asdict, dataclass + + +@dataclass +class Dataclass: + def to_dict(self, ignore_none=False): + if ignore_none: + return asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v}) + return asdict(self) + + +@dataclass +class Vec3f(Dataclass): + x: float + y: float + z: float diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/vision.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/vision.py new file mode 100644 index 
00000000..570d0a2d --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/dataclass/vision.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Optional + +from common.dataclass.common import Dataclass + + +@dataclass +class File(Dataclass): + fileId: int + fileExternalId: Optional[str] = None + + +@dataclass +class RequestCustomModelPrediction(Dataclass): + items: list[File] + modelFile: File + threshold: float + + +@dataclass +class Vertex(Dataclass): + x: float + y: float + + +@dataclass +class Region(Dataclass): + shape: str # points, rectangle, polyline, polygon + vertices: list[Vertex] + + +@dataclass +class VisionAnnotation(Dataclass): + text: str + confidence: Optional[float] = None + region: Optional[Region] = None + + +@dataclass +class Item(Dataclass): + fileId: int + annotations: Optional[list[VisionAnnotation]] = None + fileExternalId: Optional[str] = None + width: Optional[float] = None + height: Optional[float] = None + + +@dataclass +class FailedBatchSchema(Dataclass): + errorMessage: Optional[str] = None + items: Optional[list[File]] = None + + +@dataclass +class ResponseCustomModelPrediction(Dataclass): + status: str # "Queued" "Running" "Completed" "Failed" + createdTime: int + startTime: int + statusTime: int + jobId: int + items: list[Item] + modelFile: File + threshold: Optional[float] = None + failedItems: Optional[list[FailedBatchSchema]] = None diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/utils.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/utils.py new file mode 100644 index 00000000..c3ce1ce9 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/common/utils.py @@ -0,0 +1,17 @@ +import io + +from PIL import Image + + +def batch(iterable, n=1): + """Batch iterable.""" + len_iterable = len(iterable) + for 
ndx in range(0, len_iterable, n): + yield iterable[ndx : min(ndx + n, len_iterable)] + + +def image_to_byte_array(image: Image) -> bytes: + """Convert PIL image to byte array.""" + img_byte_arr = io.BytesIO() + image.save(img_byte_arr, format=image.format) + return img_byte_arr.getvalue() diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/handler.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/handler.py new file mode 100644 index 00000000..370cf6af --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/handler.py @@ -0,0 +1,396 @@ +"""Gauge reader handler.""" + +from __future__ import annotations + +import logging +import sys +from typing import Optional + +from cognite.client import CogniteClient +from cognite.client.data_classes import FileMetadataUpdate, LabelFilter, TimeSeriesUpdate +from cognite.extractorutils.uploader import TimeSeriesUploadQueue +from common.apm_helpers import _get_position_from_metadata_to_vec3f, create_apm_observation_from_reading +from common.cdf_helpers import call_vision_api_endpoint, create_annotations, create_missing_labels + +MAX_GET_JOB_ATTEMPTS = 20 + +METADATA = { + "dial": {"required": [], "all": ["min_level", "max_level", "dead_angle"]}, + "digital": { + "required": ["comma_pos"], + "all": ["comma_pos", "min_num_digits", "max_num_digits"], + }, + "level": {"required": [], "all": ["min_level", "max_level"]}, + "valve": {"required": [], "all": []}, +} +SUPPORTED_GAUGES = ["dial", "digital", "level", "valve"] +GAUGE_FEATURE_TYPES = { + "dial": "DialGaugeDetection", + "digital": "DigitalGaugeDetection", + "level": "LevelGaugeDetection", + "valve": "ValveDetection", +} + +GAUGE_PARAMETERS_TYPES = { + "dial": "dialGaugeDetectionParameters", + "digital": "digitalGaugeDetectionParameters", + "level": "levelGaugeDetectionParameters", + "valve": "valveDetectionParameters", +} + +logger = logging.getLogger(__name__) + 
+BOUNDING_BOX_LABELS = {"dial": "dial-gauge", "digital": "digital-gauge", "level": "level-gauge", "valve": "valve"} +KEYPOINT_LABELS = { + "dial": "dial-gauge-keypoints", + "digital": "digital-gauge-keypoints", + "level": "level-gauge-keypoints", + "valve": "valve-keypoints", +} + + +def update_ts_metadata(client, file_metadata, metadata_keys): + """Update a timeseries with gauge reading metadata.""" + metadata = {key: file_metadata.get(key) for key in metadata_keys} + logger.info(f"Timeseries {file_metadata['ts_external_id']} updated with metadata {metadata}") + client.time_series.update(TimeSeriesUpdate(external_id=file_metadata["ts_external_id"]).metadata.add(metadata)) + + +def gauge_reading_attributes_from_response(res: dict, gauge_type: str) -> Optional[dict]: + """Return the gauge reading annotations from the API response.""" + if not res: + logger.error("API call failed. Did not get a response.") + return None + if not res["items"]: + logger.error(f"No items in result: {res}") + return None + + if gauge_type == "valve": + valve_readings = [annotation for annotation in res["items"][0]["predictions"]["valvePredictions"]] + if not valve_readings: + logger.error(f"No annotation of type {gauge_type} in result.") + return None + for reading in valve_readings: + if reading.get("keypointCollection", None): + data = reading["keypointCollection"] + else: + gauge_readings = [annotation for annotation in res["items"][0]["predictions"][gauge_type + "GaugePredictions"]] + gauge_attributes = [] + + for gauge_reading in gauge_readings: + if gauge_type == "digital": + if gauge_reading.get("attributes", None): + gauge_attributes.append(gauge_reading) + else: + if gauge_reading.get("keypointCollection", None): + gauge_attributes.append(gauge_reading["keypointCollection"]) + + if not gauge_attributes: + logger.error(f"No annotation of type {gauge_type} in result.") + return None + + if "attributes" not in gauge_attributes[0]: + logger.error("No attributes in result.") + return 
None + data = gauge_attributes[0]["attributes"] + + return data + + +def handle_failed_upload(client: CogniteClient, id: int, error_message: str, data: dict, metadata: dict | None = None): + """Log error message and update a file that has failed.""" + logger.error(error_message) + + if data["gauge_type"] == "valve": + client.files.update( + FileMetadataUpdate(id=id) + .labels.remove(data["input_label"]) + .labels.add([data["output_label"], data["failed_label"]]) + .metadata.add({"error_message": error_message}) + ) + else: + client.files.update( + FileMetadataUpdate(id=id) + .labels.remove(data["input_label"]) + .labels.add([data["output_label"], data["failed_label"]]) + .metadata.add( + {"error_message": error_message, **metadata} + if metadata is not None + else {"error_message": error_message} + ) + ) + + +def to_input_metadata(keys: list[str], metadata: dict): + return {to_camel_case(key): metadata.get(key) for key in keys} + + +def get_timestamp(file): + """Get timestamp from file.""" + timestamp = file.metadata.get("timestamp") + if not timestamp: + timestamp = file.source_created_time + if not timestamp: + # This definitely exists + timestamp = file.uploaded_time + return timestamp + + +def to_camel_case(snake): + """Convert from snake case to camel case.""" + components = snake.split("_") + return components[0] + "".join(x.title() for x in components[1:]) if len(components) > 1 else components[0] + + +def handle(data, client): + """Gauge reader handle. 
Only analog, digital and level gauges supported at the moment.""" + if not {"gauge_type", "input_label", "output_label", "success_label", "failed_label"} <= data.keys(): + raise RuntimeError( + "Data should contain all keys: 'gauge_type', 'input_label', 'output_label', 'success_label', 'failed_label'" + ) + + if data["gauge_type"] not in SUPPORTED_GAUGES: + raise NotImplementedError(f"Only {SUPPORTED_GAUGES} gauge reading supported, not {data['gauge_type']}") + + logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=logging.BASIC_FORMAT) + + files = client.files.list( + labels=LabelFilter(contains_all=[data["input_label"]]), + limit=15, + uploaded=True, + ) + logger.info(f"Number of files to process with label {data['input_label']}: {len(files)}") + + create_missing_labels(client, [data["output_label"], data["success_label"], data["failed_label"]]) + upload_queue = TimeSeriesUploadQueue( + client, + max_upload_interval=1, + create_missing=True, + max_queue_size=100, + ) + project = client.config.project + for file in files: + ts_metadata = False + logger.info(f"Processing file with external id {file.external_id}, id {file.id}") + file.metadata = {} if file.metadata is None else file.metadata + + # If the file has a timeseries external id, try to get a complete metadata set from the timeseries. + # If the timeseries does not have all metadata, try to get a complete metadata set from the file. 
+ if "ts_external_id" in file.metadata: + ts = client.time_series.retrieve(external_id=file.metadata.get("ts_external_id")) + logger.info(f"Retrieve timeseries {file.metadata.get('ts_external_id')}") + if not ts: + handle_failed_upload( + client=client, + id=file.id, + error_message=f"Time series with metadata {file.metadata.get('ts_external_id')} does not exist", + data=data, + ) + continue + if ts.metadata and all(key in ts.metadata for key in METADATA[data["gauge_type"]]["all"]): + metadata = to_input_metadata(METADATA[data["gauge_type"]]["all"], ts.metadata) + ts_metadata = True + logger.info(f"Metadata from timeseries: {metadata}") + for key in METADATA[data["gauge_type"]]["all"]: + file.metadata[key] = str(ts.metadata[key]) + if "unit" in ts.metadata: + file.metadata["unit"] = ts.metadata["unit"] + + elif all(key in file.metadata for key in METADATA[data["gauge_type"]]["all"]): + metadata = to_input_metadata(METADATA[data["gauge_type"]]["all"], file.metadata) + logger.info(f"Metadata from file: {metadata}") + update_ts_metadata(client, file.metadata, METADATA[data["gauge_type"]]["all"]) + logger.info(f"Complete metadata from image: {metadata}") + ts_metadata = True + + if not ts_metadata: + if not all(key in file.metadata for key in METADATA[data["gauge_type"]]["required"]): + handle_failed_upload( + client=client, + id=file.id, + error_message=f"Some required metadata fields missing. Required metadata is: {METADATA[data['gauge_type']]['required']}. Cannot process image.", + data=data, + ) + continue + + # If we did not find complete metadata from time series or file, use (possibly incomplete) metadata from file. 
+ metadata = {} + for key in METADATA[data["gauge_type"]]["all"]: + if key in file.metadata: + metadata[to_camel_case(key)] = file.metadata[key] + logger.info(f"Not complete metadata from image: {metadata}") + + # Check gauge_type and make api-call based on the gauge_type + if data["gauge_type"] == "valve": + res = call_vision_api_endpoint( + client=client, + url=f"/api/v1/projects/{project}/context/vision/extract", + input_data={"items": [{"fileId": file.id}], "features": ["ValveDetection"]}, + max_get_job_attempts=MAX_GET_JOB_ATTEMPTS, + headers={"cdf-version": "beta"}, + ) + else: + res = call_vision_api_endpoint( + client=client, + url=f"/api/v1/projects/{project}/context/vision/extract", + input_data={ + "items": [{"fileId": file.id}], + "features": [GAUGE_FEATURE_TYPES[data["gauge_type"]]], + "parameters": {GAUGE_PARAMETERS_TYPES[data["gauge_type"]]: metadata}, + }, + max_get_job_attempts=MAX_GET_JOB_ATTEMPTS, + headers={"cdf-version": "beta"}, + ) + + print(f"RES: {res}") + if not res: + handle_failed_upload( + client=client, id=file.id, error_message="API call failed. Did not get a response", data=data + ) + continue + + gauge_reading_attributes = gauge_reading_attributes_from_response(res, data["gauge_type"]) + logger.info("-----------------------------GAUGE READING-------------------------------") + logger.info(f"External id: {file.external_id}, Reading: {gauge_reading_attributes}") + logger.info("-------------------------------------------------------------------------") + if gauge_reading_attributes is None: + handle_failed_upload( + client=client, + id=file.id, + error_message=f"Failed to read gauge {file.external_id}. 
No gauge found in image.", + data=data, + ) + continue + read_value = ( + gauge_reading_attributes["attributes"]["valveState"]["value"] + if data["gauge_type"] == "valve" + else gauge_reading_attributes[data["gauge_type"] + "GaugeValue"]["value"] + ) + if read_value is None: + handle_failed_upload( + client=client, + id=file.id, + error_message=f"Failed to read gauge {file.external_id}. Could not read value.", + data=data, + metadata=gauge_reading_attributes.get("metadata", None), + ) + continue + + # If the reading was successful + if data["gauge_type"] == "valve": + file.metadata["state"] = read_value + logger.info(f"Predicted state: {file.metadata['state'] }") + + else: + # If the readings were successful, draw the annotations on the image + create_annotations( + client=client, + gauge_reading_result=res, + file_id=file.id, + gauge_type=data["gauge_type"], + bounding_box_label=BOUNDING_BOX_LABELS[data["gauge_type"]], + keypoint_label=KEYPOINT_LABELS[data["gauge_type"]], + ) + + if data["gauge_type"] == "digital": + file.metadata["value"] = read_value + else: + file.metadata["value"] = f"{read_value:.2f}" + + for key in METADATA[data["gauge_type"]]["all"]: + key_attribute = to_camel_case(key) + if gauge_reading_attributes is not None: + key_object = gauge_reading_attributes.get(key_attribute, None) + if key_object is not None: + key_val = key_object.get("value", None) + if key_val is not None: + file.metadata[key] = str(key_val) + elif key_object is None and key == "dead_angle": + file.metadata[key] = "90" # Dead angle is not returned when it is 90 (default) + logger.info(f"Predicted value: {file.metadata['value']}") + + if "ts_external_id" in file.metadata and not ts_metadata: + # If the reading was successful and timeseries metadata does not exist, add ts metadata from the reading. 
+ metadata = to_input_metadata(METADATA[data["gauge_type"]]["all"], file.metadata) + update_ts_metadata(client, file.metadata, METADATA[data["gauge_type"]]["all"]) + + if "ts_external_id" in file.metadata: + # Upload datapoint to timeseries if the file has ts_external_id. + timestamp = int(get_timestamp(file)) + if data["gauge_type"] == "valve": + timeseries_value = 1.0 if read_value == "on" else 0.0 if read_value == "off" else -1.0 + else: + timeseries_value = float(read_value) + + upload_queue.add_to_upload_queue( + datapoints=[(timestamp, timeseries_value)], + external_id=file.metadata.get("ts_external_id"), + ) + + try: + position = _get_position_from_metadata_to_vec3f(file.metadata) + if data["gauge_type"] in ["dial", "digital", "level"]: + # create a observation if value is above or below threshold + _value_threshold_max = float(ts.metadata.get("observation_threshold_max", "inf")) + _value_threshold_min = float(ts.metadata.get("observation_threshold_min", "-inf")) + + if timeseries_value < _value_threshold_min: + create_apm_observation_from_reading( + client=client, + file_external_id=file.external_id, + timeseries_external_id=file.metadata.get("ts_external_id"), + value=timeseries_value, + timestamp=timestamp, + pose=position, + message="WARNING: Value below threshold", + apm_checklistitem_external_id=file.metadata.get("action_run_id"), + ) + elif timeseries_value > _value_threshold_max: + create_apm_observation_from_reading( + client=client, + file_external_id=file.external_id, + timeseries_external_id=file.metadata.get("ts_external_id"), + value=timeseries_value, + timestamp=timestamp, + pose=position, + message="WARNING: Value over threshold", + apm_checklistitem_external_id=file.metadata.get("action_run_id"), + ) + elif data["gauge_type"] == "valve": + _expected_state = ts.metadata.get("expected_valve_state", "no_state_specified") + if (_expected_state == "on" or _expected_state == "1.0") and timeseries_value == 0.0: + 
create_apm_observation_from_reading( + client=client, + file_external_id=file.external_id, + timeseries_external_id=file.metadata.get("ts_external_id"), + value=timeseries_value, + timestamp=timestamp, + pose=position, + message="WARNING: Valve is off", + apm_checklistitem_external_id=file.metadata.get("action_run_id"), + ) + elif (_expected_state == "off" or _expected_state == "0.0") and timeseries_value == 1.0: + create_apm_observation_from_reading( + client=client, + file_external_id=file.external_id, + timeseries_external_id=file.metadata.get("ts_external_id"), + value=timeseries_value, + timestamp=timestamp, + pose=position, + message="WARNING: Valve is on", + apm_checklistitem_external_id=file.metadata.get("action_run_id"), + ) + + except Exception as e: + logger.error(f"Failed to create APM observation: {e}") + + client.files.update( + FileMetadataUpdate(id=file.id) + .labels.remove(data["input_label"]) + .labels.add([data["output_label"], data["success_label"]]) + .metadata.add(file.metadata) + ) + logger.info(f"Gauge reading completed successfully for file {file.external_id}, id: {file.id}.") + + upload_queue.upload() + return {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/requirements.txt b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/requirements.txt new file mode 100644 index 00000000..04e0d2f8 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_gauge_reading/requirements.txt @@ -0,0 +1,22 @@ +arrow==1.0.3 +certifi==2023.11.17 +chardet==4.0.0 +cognite-extractor-utils==1.4.2 +cognite-sdk==2.56.1 +dacite==1.8.1 +decorator==4.4.2 +idna==3.7 +numpy==1.20.3 +oauthlib==3.1.0 +pandas==1.5.3 +prometheus-client==0.9.0 +psutil==5.8.0 +py==1.10.0 +python-dateutil==2.8.1 +pytz==2021.1 +PyYAML==5.4.1 +requests==2.32.0 +requests-oauthlib==1.3.1 +retry==0.9.2 +six==1.15.0 +urllib3==2.6.3 diff --git 
a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/README.md new file mode 100644 index 00000000..8a68f2a0 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/README.md @@ -0,0 +1,18 @@ +# Get IR data from IR raw files + +Function for converting the raw IR files that are uploaded from Spot into IR images (RGB images with colour based on +temperature) and temperature files (CSV files where one cell corresponds to one pixel value, and contains the +temperature of that pixel value). + +The raw IR files are read from CDF, and after conversion, the IR images and temperature files are uploaded to CDF. +The raw IR files are kept in CDF after conversion. + +## Required parameters + +- `input_label`: the raw IR files get this label when they are uploaded from Spot. The label is used to list the raw + IR files. By default, this label is set to `read_ir`. +- `output_label`: the raw IR files get this label when the conversion into IR images and temperature files are finished. + By default, this label is set to `ir_finished`. +- `success_label`: the raw IR files get this label if the conversion was successful. +- `failed_label`: the raw IR files get this label if the conversion failed. +- `data_set_id`: the ID of the dataset that the function reads the files from. 
diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/__init__.py new file mode 100644 index 00000000..07e1fbcb --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/__init__.py @@ -0,0 +1 @@ +"""Extract ir image data and ir temperature data from the ir raw file.""" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/README.md new file mode 100644 index 00000000..f44fc406 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/README.md @@ -0,0 +1,58 @@ +# Shared-code folder: `common` + +## TL;DR + +> This entire folder hierarchy of `common` is copied into all functions automatically. + +## No, I wan't to read the long story + +A common use case when you deal with the deployment of multiple functions simultaneously, is that you do not want to +replicate shared code between all the function folders. In order to accomodate this, as part of the workflow +`function-action` this template relies on, we copy all the contents in this folder (can be specified by `common_folder` +in the workflow files, see below) the functions we upload to Cognite Functions. 
If this is not specified, we check if +`common/` exists in the root folder and if so, _**we use it automatically**_ :rocket: + +### Handling imports + +A typical setup looks like this: + +```bash +├── common +│ └── utils.py +└── my_function + └── handler.py +``` + +The code we zip and send off to the FilesAPI will look like this: + +```bash +├── common +│ └── utils.py +└── handler.py +``` + +This means your `handler.py`-file should do imports from `common/utils.py` like this: + +```python +from common.utils import my_helper1, my_helper2 +import common.utils as utils # alternative +``` + +## No, I want `common` to be named `snowflake_utilities` + +No problem mate, locate `.github/workflow` and open the following files: + +- `deploy-pr.yaml` +- `deploy-push.yaml` + +Then, in each file, scroll down until you see: + +```yaml + +- name: Deploy and schedule ${{ matrix.function }} + uses: cognitedata/function-action@v2 + with: + function_name: ... + common_folder: snowflake_utilities # <-- add it here + ... 
+``` diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/apm_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/apm_helpers.py new file mode 100644 index 00000000..620abc88 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/apm_helpers.py @@ -0,0 +1,269 @@ +from __future__ import annotations + +import json +import logging +from datetime import datetime +from typing import Any, Optional + +import pytz # type: ignore +from cognite.client import CogniteClient +from common.dataclass.common import Vec3f + +logger = logging.getLogger(__name__) + + +def create_apm_observation_from_reading( + client: CogniteClient, + file_external_id: str, + timeseries_external_id: str, + value: float, + timestamp: int, + pose: Vec3f, + message: str, + apm_checklistitem_external_id: Optional[str] = None, +): + """Create an APM observation from a gauge reading. 
+ + NOTE: this is a preliminary implementation, where we're not yet using the + gql pygen SDK made by Cognite (which will make this a lot cleaner) + + """ + apm_config = _get_apm_config(client=client, config_external_id="inrobot") + if apm_config is None: + logger.error("Could not find APM config") + return + + space_id = str(apm_config.get("appDataSpaceId")) + appdata_space_version = str(apm_config.get("appDataSpaceVersion")) + + views = _get_views( + client=client, + space=space_id, + datamodel_id=space_id, + datamodel_version=appdata_space_version, + view_ids=["APM_Measurement", "Vec3f", "APM_Observation"], + ) + + def _get_view_version(views: list[dict[str, Any]], view_id: str) -> Optional[str]: + view = next((item for item in views if item["externalId"] == view_id), None) + if view is None: + raise ValueError(f"Could not find view {view_id}") + return view.get("version") + + # create the APM measurement + if isinstance(timestamp, int): + timestamp_s = round(timestamp / 1000) + timestamp_dt = datetime.fromtimestamp(float(timestamp_s), pytz.UTC) + elif isinstance(timestamp, datetime): + timestamp_dt = timestamp + else: + raise ValueError(f"Timestamp is of type {type(timestamp)}, needs to be either `int` or `datetime`.") + + apm_measurement_external_id = f"measurement_{timeseries_external_id}_{timestamp}" + apm_measurement = { + "timeSeries": timeseries_external_id, + "measuredAt": timestamp_dt.isoformat(), + "numericValue": value, + } + + vec3f_external_id = f"position_{timeseries_external_id}_{timestamp}" + vec3f = {"x": pose.x, "y": pose.y, "z": pose.z} + + apm_observation_external_id = f"observation_{timeseries_external_id}_{timestamp}" + apm_observation = { + "fileIds": [file_external_id], + "position": {"externalId": vec3f_external_id, "space": space_id}, + "description": "Gauge reading", + "note": message, + "createdById": "CDF Gauge Reader Function", + "updatedById": "CDF Gauge Reader Function", + } + + # edge apm_observation -> apm_measurement + 
edge_apm_observation_apm_measurement = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_observation_external_id}.measurements_{apm_measurement_external_id}", + "startNode": {"externalId": apm_observation_external_id, "space": space_id}, + "endNode": {"externalId": apm_measurement_external_id, "space": space_id}, + "type": {"externalId": "APM_Observation.measurements", "space": space_id}, + } + + # assemble all instances (nodes and edges) + instances = [ + { + "instanceType": "node", + "space": space_id, + "externalId": apm_measurement_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Measurement", + "version": _get_view_version(views, "APM_Measurement"), + }, + "properties": apm_measurement, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": vec3f_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "Vec3f", + "version": _get_view_version(views, "Vec3f"), + }, + "properties": vec3f, + } + ], + }, + { + "instanceType": "node", + "space": space_id, + "externalId": apm_observation_external_id, + "sources": [ + { + "source": { + "type": "view", + "space": space_id, + "externalId": "APM_Observation", + "version": _get_view_version(views, "APM_Observation"), + }, + "properties": apm_observation, + } + ], + }, + edge_apm_observation_apm_measurement, + ] + + if apm_checklistitem_external_id is not None: + edge_apm_checklist_apm_observation = { + "instanceType": "edge", + "space": space_id, + "externalId": f"{apm_checklistitem_external_id}.observations_{apm_observation_external_id}", + "startNode": {"externalId": apm_checklistitem_external_id, "space": space_id}, + "endNode": {"externalId": apm_observation_external_id, "space": space_id}, + "type": {"externalId": "APM_ChecklistItem.observations", "space": space_id}, + } + instances.append(edge_apm_checklist_apm_observation) + + logger.debug(f"Creating instances request: 
{json.dumps(instances, indent=2)}") + + response = client.post( + f"/api/v1/projects/{client.config.project}/models/instances", json={"items": instances} + ).json() + + logger.debug(f"Creating instances response: {json.dumps(response, indent=2)}") + + +def _get_views( + client: CogniteClient, space: str, datamodel_id: str, datamodel_version: Optional[str] = None, view_ids=list[str] +) -> list[dict[str, Any]]: + response = client.post( + f"/api/v1/projects/{client.config.project}/models/datamodels/byids", + json={ + "items": [{"space": space, "externalId": datamodel_id, "version": datamodel_version}], + }, + ).json() + + # get latest datamodel, if no version is specified + datamodel = max(response["items"], key=lambda item: item["createdTime"], default=None) + if datamodel is None: + raise ValueError(f"No datamodels with id {datamodel_id} found with version {datamodel_version}.") + + views = datamodel.get("views") + if views is None or views == []: + raise ValueError(f"Datamodel {datamodel_id} has no views.") + + available_views = {view.get("externalId"): view.get("version") for view in views} + + items = [ + {"space": space, "externalId": requested_view_id, "version": available_views.get(requested_view_id)} + for requested_view_id in view_ids + if available_views.get(requested_view_id) is not None + ] + + if len(items) != len(view_ids): + missing_views = set(view_ids) - set([item["externalId"] for item in items]) + raise ValueError(f"Views {', '.join(missing_views)} not found in datamodel {datamodel_id}.") + + views = client.post( + f"/api/v1/projects/{client.config.project}/models/views/byids", + json={ + "items": items, + }, + ).json() + + return views["items"] + + +def _get_apm_config(client: CogniteClient, config_external_id: str) -> Optional[dict[str, Any]]: + space = "APM_Config" + datamodel_id = "APM_Config" + view_id = "APM_Config" + instance_external_ids = [config_external_id, "default-config"] + + views = _get_views(client=client, space=space, 
datamodel_id=datamodel_id, view_ids=[view_id]) + + if len(views) != 1: + raise ValueError(f"Expected to find exactly one view for {view_id}, found {len(views)}.") + + view = views[0] + + response = client.post( + f"/api/v1/projects/{client.config.project}/models/instances/byids", + json={ + "items": [ + {"instanceType": "node", "externalId": external_id, "space": space} + for external_id in instance_external_ids + ], + "sources": [ + { + "source": { + "type": "view", + "space": space, + "externalId": view.get("externalId"), + "version": view.get("version"), + } + } + ], + }, + ).json() + + if response.get("items") == []: + logger.error("Could not find APM config, not upserting any APM_Observations now.") + return None + + # Try to find the first item with "externalId" == config_external_id + apm_config = next((item for item in response["items"] if item["externalId"] == config_external_id), None) + + # If no such item is found, try to find the first item with "externalId" == "default" + if apm_config is None: + apm_config = next((item for item in response["items"] if item["externalId"] == "default-config"), None) + if apm_config is None: + raise ValueError(f"Could not find APM config with externalId {config_external_id} or `default-config`.") + + apm_config = apm_config.get("properties").get(space).get(f"{view.get('externalId')}/{view.get('version')}") + + return apm_config + + +def _get_position_from_metadata_to_vec3f(metadata: dict[str, Any]) -> Vec3f: + """Get position from metadata.""" + x = metadata.get("waypoint_tform_body_x") + y = metadata.get("waypoint_tform_body_x") + z = metadata.get("waypoint_tform_body_x") + + if not x or not y or not z: + raise ValueError( + f"Missing metadata field. Required metadata fields \ + are waypoint_tform_body_x, waypoint_tform_body_x, waypoint_tform_body_x. 
File metadata keys: {metadata.keys()}" + ) + + return Vec3f(x=float(x), y=float(y), z=float(z)) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/cdf_helpers.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/cdf_helpers.py new file mode 100644 index 00000000..d9cfa076 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/cdf_helpers.py @@ -0,0 +1,112 @@ +from __future__ import annotations + +import logging +import time +from typing import Any, Optional + +from cognite.client import CogniteClient +from cognite.client.data_classes import Annotation, LabelDefinition + +logger = logging.getLogger(__name__) + + +def call_vision_api_endpoint( + client: CogniteClient, + url: str, + input_data: dict, + max_get_job_attempts: int = 20, + headers: Optional[dict[str, Any]] = None, +): + """Post to an API endpoint, wait for response and return response.""" + # TODO: Use this function in people detector function (https://cognitedata.atlassian.net/browse/DMVP-855) + res = client.post(url=url, json=input_data, headers=headers).json() + + job_id = res.get("jobId") + for i in range(max_get_job_attempts): + logger.info(f"Attempt nr. 
{i} to get job status.") + res = client.get(url=f"{url}/{job_id}").json() + if res.get("status") in ["Queued", "Running"]: + logger.info(f"API job status: {res.get('status')}") + time.sleep(10) + elif res.get("status") == "Completed": + logger.info("API job completed") + break + else: # Use for/else in case of failed job or timeout + logger.info(f"API job failed or timed out: {res}") + return None + + return res + + +def create_missing_labels(client: CogniteClient, label_ids: list[str]): + """Create missing labels from a list of label IDs.""" + for label_id in label_ids: + if not check_label_exists(client, label_id): + client.labels.create([LabelDefinition(external_id=label_id, name=label_id)]) + + +def check_label_exists(client: CogniteClient, label_id: str): + """Check if a label exists in CDF given its external_id (label_id).""" + labels = client.labels.list(external_id_prefix=label_id) + return any(label.external_id == label_id for label in labels) + + +def create_annotations( + client: CogniteClient, + gauge_reading_result: dict, + file_id: int, + gauge_type: str, + bounding_box_label: str, + keypoint_label: str, +): + annotations = gauge_reading_result["items"][0]["predictions"][gauge_type + "GaugePredictions"] + annotations_list = [] + + for annotation in annotations: + print(f"ANNOTATION TYPE: {annotation}") + if gauge_type == "digital": + bounding_box = annotation["boundingBox"] + else: + keypoints = annotation["keypointCollection"]["keypoints"] + bounding_box = annotation["objectDetection"]["boundingBox"] + # Create bounding box annotation + bounding_box_annotation = Annotation( + annotation_type="images.ObjectDetection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={ + "label": bounding_box_label, + "boundingBox": bounding_box, + }, + ) + annotations_list.append(bounding_box_annotation) + + if gauge_type in 
["level", "dial"]: + # Create keypoint annotation + keypoint_names = list(keypoints.keys()) + keypoint_data = {} + for i in range(len(keypoint_names)): + keypoint_name = keypoint_names[i] + print(keypoint_name) + point = keypoints[keypoint_name]["point"] + print(point) + keypoint_data[keypoint_name] = {"point": point} + print(keypoint_data) + + keypoint_annotation = Annotation( + annotation_type="images.KeypointCollection", + status="suggested", + creating_user="cognite-functions", + creating_app="sdk", + creating_app_version="4.5.2", + annotated_resource_type="file", + annotated_resource_id=file_id, + data={"label": keypoint_label, "keypoints": keypoint_data}, + ) + annotations_list.append(keypoint_annotation) + + client.annotations.create(annotations_list) diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/common.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/common.py new file mode 100644 index 00000000..b07197dc --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/common.py @@ -0,0 +1,16 @@ +from dataclasses import asdict, dataclass + + +@dataclass +class Dataclass: + def to_dict(self, ignore_none=False): + if ignore_none: + return asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v}) + return asdict(self) + + +@dataclass +class Vec3f(Dataclass): + x: float + y: float + z: float diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/vision.py 
b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/vision.py new file mode 100644 index 00000000..570d0a2d --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/dataclass/vision.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Optional + +from common.dataclass.common import Dataclass + + +@dataclass +class File(Dataclass): + fileId: int + fileExternalId: Optional[str] = None + + +@dataclass +class RequestCustomModelPrediction(Dataclass): + items: list[File] + modelFile: File + threshold: float + + +@dataclass +class Vertex(Dataclass): + x: float + y: float + + +@dataclass +class Region(Dataclass): + shape: str # points, rectangle, polyline, polygon + vertices: list[Vertex] + + +@dataclass +class VisionAnnotation(Dataclass): + text: str + confidence: Optional[float] = None + region: Optional[Region] = None + + +@dataclass +class Item(Dataclass): + fileId: int + annotations: Optional[list[VisionAnnotation]] = None + fileExternalId: Optional[str] = None + width: Optional[float] = None + height: Optional[float] = None + + +@dataclass +class FailedBatchSchema(Dataclass): + errorMessage: Optional[str] = None + items: Optional[list[File]] = None + + +@dataclass +class ResponseCustomModelPrediction(Dataclass): + status: str # "Queued" "Running" "Completed" "Failed" + createdTime: int + startTime: int + statusTime: int + jobId: int + items: list[Item] + modelFile: File + threshold: Optional[float] = None + failedItems: Optional[list[FailedBatchSchema]] = None diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/utils.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/utils.py new file mode 100644 index 00000000..c3ce1ce9 --- /dev/null +++ 
b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/common/utils.py @@ -0,0 +1,17 @@ +import io + +from PIL import Image + + +def batch(iterable, n=1): + """Batch iterable.""" + len_iterable = len(iterable) + for ndx in range(0, len_iterable, n): + yield iterable[ndx : min(ndx + n, len_iterable)] + + +def image_to_byte_array(image: Image) -> bytes: + """Convert PIL image to byte array.""" + img_byte_arr = io.BytesIO() + image.save(img_byte_arr, format=image.format) + return img_byte_arr.getvalue() diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/handler.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/handler.py new file mode 100644 index 00000000..83a7ab72 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/handler.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +import logging +import tempfile +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +from cognite.client import CogniteClient +from cognite.client.data_classes import FileMetadataUpdate, LabelFilter +from cognite.extractorutils.uploader import TimeSeriesUploadQueue +from common.cdf_helpers import create_missing_labels + +IMAGE_HEIGHT = 512 +IMAGE_WIDTH = 640 + +logger = logging.getLogger(__name__) + + +def get_timestamp(file): + """Get timestamp from file.""" + timestamp = file.metadata.get("timestamp") + if not timestamp: + timestamp = file.source_created_time + if not timestamp: + # This definitely exists + timestamp = file.uploaded_time + return timestamp + + +def get_asset(client, file): + """Get asset from metadata field.""" + if file.metadata.get("asset_id", None) not in [None, "0", "None"]: + try: + asset_id = int(file.metadata["asset_id"]) + except Exception as e: + logger.exception(e) + return None + return client.assets.retrieve(asset_id) + + +def handle_failed_upload(client: 
CogniteClient, id: int, error_message: str, data: dict, metadata: dict | None = None): + """Log error message and update a file that has failed.""" + print(error_message) + + client.files.update( + FileMetadataUpdate(id=id) + .labels.remove(data["input_label"]) + .labels.add([data["output_label"], data["failed_label"]]) + .metadata.add( + {"error_message": error_message, **metadata} if metadata is not None else {"error_message": error_message} + ) + ) + + +def handle(data, client): + print("Start extracting data from the IR raw file.") + + if not {"input_label", "output_label", "success_label", "failed_label", "data_set_external_id"} <= data.keys(): + raise RuntimeError( + "Data should contain all keys: 'input_label', 'output_label', 'success_label', 'failed_label', 'data_set_external_id'." + ) + + data_set_id = client.data_sets.retrieve(external_id=data["data_set_external_id"]).id + + files = client.files.list( + labels=LabelFilter(contains_all=[data["input_label"]]), + limit=-1, + uploaded=True, + ) + + print(f"Number of files to process with label: {data['input_label']}: {len(files)}.") + + create_missing_labels(client=client, label_ids=[data["output_label"], data["success_label"], data["failed_label"]]) + + upload_queue = TimeSeriesUploadQueue( + client, + max_upload_interval=1, + create_missing=True, + max_queue_size=100, + ) + + for file in files: + if file.metadata.get("processed") == "false": + print(f"File with external id: {file.external_id} not processed. 
Skipping.") + continue + + ir_image_filename = "ir_image.jpg" + ir_temperature_filename = "ir_temperatures.csv" + ir_raw_filename = "ir_raw.raw" + + asset = get_asset(client, file) + + with tempfile.TemporaryDirectory(dir="/tmp") as directory: + ir_image_path = str(Path.cwd() / directory / ir_image_filename) + ir_temperature_path = str(Path.cwd() / directory / ir_temperature_filename) + ir_raw_path = str(Path.cwd() / directory / ir_raw_filename) + + # Download raw file to tmp path + client.files.download_to_path(path=ir_raw_path, id=file.id) + + # Extract temperatures from the raw file + temperatures_decikelvin = np.fromfile(file=ir_raw_path, dtype=np.uint16).byteswap() + + # Reshape the raw temperature array + temperatures_decikelvin = temperatures_decikelvin.reshape((IMAGE_HEIGHT, IMAGE_WIDTH)) + + # Convert degrees decikelvin to degrees celsius + temperatures_celsius = ((temperatures_decikelvin) / 10) - 273.15 + + # Save the temperatures to a temp csv file + np.savetxt(ir_temperature_path, temperatures_celsius, delimiter=",") + + # Save the image to the temp path + plt.imsave(ir_image_path, temperatures_celsius) + + # Upload image and temperature data to CDF + try: + res_image = client.files.upload( + path=ir_image_path, + external_id=f"ir_image_{file.name}_{file.external_id}_{file.uploaded_time}", + name=f"ir_image_{file.external_id}.jpg", + data_set_id=data_set_id, + mime_type="image/jpeg", + asset_ids=[asset.id], + metadata={ + "asset_id": file.metadata.get("asset_id", None), + "raw_file_id": file.id, + "raw_file_name": file.name, + }, + ) + file_update = FileMetadataUpdate(id=file.id).metadata.add({"ir_image_id": res_image.id}) + client.files.update(file_update) + + except Exception as e: + handle_failed_upload(client=client, id=file.id, error_message=str(e), data=data, metadata=file.metadata) + continue + + print(f"Uploaded IR image with ID: {res_image.id}.") + + try: + res_csv = client.files.upload( + path=ir_temperature_path, + 
external_id=f"temperatures_{file.name}_{file.external_id}_{file.uploaded_time}", + name=f"temperatures_{file.external_id}.csv", + data_set_id=data_set_id, + mime_type="text/csv", + asset_ids=[asset.id], + metadata={ + "asset_id": file.metadata.get("asset_id", None), + "raw_file_id": file.id, + "raw_file_name": file.name, + }, + ) + file_update = FileMetadataUpdate(id=file.id).metadata.add({"ir_temp_csv_id": res_csv.id}) + client.files.update(file_update) + + except Exception as e: + handle_failed_upload(client=client, id=file.id, error_message=str(e), data=data, metadata=file.metadata) + continue + + print(f"Uploaded temperature file with ID: {res_csv.id}.") + + # Write the minimum and maximum temperature to the corresponding timeseries + minimum_temperature = np.amin(temperatures_celsius) + maximum_temperature = np.amax(temperatures_celsius) + + # if "ts_external_id" in file.metadata: + if any("ts_external_id" in key for key in file.metadata): + ts_external_ids = list(value for key, value in file.metadata.items() if "ts_external_id" in key) + ts_external_ids = sorted(ts_external_ids) + + timestamp = int(get_timestamp(file)) + + for ts_eid in ts_external_ids: + temp = maximum_temperature if ts_eid.endswith("max") else minimum_temperature + eid = ts_eid + + print(f"Datapoint {temp} at timestamp {timestamp} written to timeseries {ts_eid}.") + + upload_queue.add_to_upload_queue( + datapoints=[(timestamp, temp)], + external_id=eid, + ) + + client.files.update( + FileMetadataUpdate(id=file.id) + .labels.remove(data["input_label"]) + .labels.add([data["output_label"], data["success_label"]]) + ) + + print(f"IR reading completed successfully for file with external id: {file.external_id}, and id: {file.id}.") + + upload_queue.upload() + return {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/requirements.txt b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/requirements.txt new file 
mode 100644 index 00000000..46238a8e --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_get_ir_data_from_ir_raw/requirements.txt @@ -0,0 +1,23 @@ +arrow==1.0.3 +certifi==2023.11.17 +chardet==4.0.0 +cognite-extractor-utils==1.4.2 +cognite-sdk==2.56.1 +dacite==1.8.1 +decorator==4.4.2 +idna==3.7 +matplotlib==3.5.3 +numpy==1.26.4 +oauthlib==3.1.0 +pandas==1.2.3 +prometheus-client==0.9.0 +psutil==5.8.0 +py==1.10.0 +python-dateutil==2.8.1 +pytz==2021.1 +PyYAML==5.4.1 +requests==2.32.0 +requests-oauthlib==1.3.1 +retry==0.9.2 +six==1.15.0 +urllib3==2.6.3 diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/README.md b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/README.md new file mode 100644 index 00000000..f0628da2 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/README.md @@ -0,0 +1,14 @@ +# Three sixty reading + +Set up capabilities for thressixty. + +| Capability type | Action | Scope | Description | +|-----------------|-------------------------------------|----------------|------------------------------------------------------------------------| +| Assets | `assets:read` | Data sets, All | Find asset tags of equipment the robot works with and view asset data. | +| Events | `events:read`, `events:write` | Data sets, All | View events in the canvas. | +| Files | `files:read`, `files:write` | Data sets, All | Allow users to upload images. | +| Projects | `projects:read`, `projects:list` | Data sets, All | Extract the projects the user has access to. | +| Labels | `label:read`, `label:write` | Data sets, All | Extract the projects the user has access to. | +| Robotics | `robotics:read` | Data sets, All | Get 3D alignment of robot. | +| Functions | `functions:write`, `functions:read` | Data sets, All | Create, call and schedule functions. | +| Sessions | `sessions:create` | Data sets, All | Call and schedule functions. 
| diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/__init__.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/__init__.py new file mode 100644 index 00000000..0dedd18f --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/__init__.py @@ -0,0 +1 @@ +"""Three sixty image processing.""" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/cognite_threesixty_images.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/cognite_threesixty_images.py new file mode 100644 index 00000000..6111c1c7 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/cognite_threesixty_images.py @@ -0,0 +1,284 @@ +"""360 image extractor base class.""" + +import io +import logging +import time +from dataclasses import asdict, dataclass +from enum import Enum +from typing import Any, Union + +import numpy as np +import py360convert +from cognite.client.data_classes import Event, FileMetadata, Label +from PIL import Image + +logger = logging.getLogger(__name__) + +EVENT_TYPE = "scan" +EVENT_SUB_TYPE = "terrestial" +CUBEMAP_RESOLUTION = 2048 + + +@dataclass +class VectorXYZ: + """Vector of floats: (x,y,z).""" + + x: float = 0 + y: float = 0 + z: float = 0 + + def to_string(self) -> str: + """Convert vector so string.""" + return f"{self.x:.4f}, {self.y:.4f}, {self.z:.4f}" + + +def translation_to_mm_str_with_offset(translation: VectorXYZ, translation_offset_mm: VectorXYZ, translation_unit: str): + """Transfrom translation to string in to mm with offset.""" + unit_clean = translation_unit.lower().replace(" ", "") + if unit_clean == "m": + scale = 1000 + elif unit_clean == "cm": + scale = 10 + elif unit_clean == "mm": + scale = 1 + else: + raise ValueError( + f"Translaiton unit not recognized: {translation_unit}. " 'Translation unit should be "m", "cm" or "mm".' 
+ ) + + return VectorXYZ( + x=(translation.x * scale - translation_offset_mm.x), + y=(translation.y * scale - translation_offset_mm.y), + z=(translation.z * scale - translation_offset_mm.z), + ).to_string() + + +def convert_rotation_angle(rotation_angle: str, rotation_angle_unit: str) -> str: + """Check rotation angle unit and convert to radians.""" + if rotation_angle_unit == "deg": + return rotation_angle + elif rotation_angle_unit == "rad": + return str(np.rad2deg(float(rotation_angle))) + else: + raise ValueError( + f"Rotation angle unit not recognized: {rotation_angle_unit}. " + 'Rotation angle unit should be "rad" or "deg".' + ) + + +@dataclass +class ThreesixtyImageMetadata: + """ThreesixtyImage metadata. The metadata of the 360 events.""" + + station_id: str + station_name: str + rotation_angle: str # Format: "14.837" + rotation_axis: str # Format: "0.0990, 0.0113, 0.9950" + translation: str # Format: "343109.0000, 83408.0000, 30860.0000" + timestamp: int # Format: milliseconds since epoch + site_id: str = "" + site_name: str = "" + + +@dataclass +class ThreesixtyImage: + """All information about the station.""" + + station_number: str + tran_unit: str + rot_angle_unit: str + images: dict[str, Union[str, np.ndarray]] + threesixty_image_metadata: ThreesixtyImageMetadata + + +@dataclass +class ImageWithFileMetadata: + """Image with file metadata.""" + + file_metadata: FileMetadata + content: bytes + + +class CogniteThreeSixtyImageExtractor: + """Base class for 360 image extractors. + + 360 images are represented by ThreesixtyImage objects. The extractor use the station objects + to create 360 files and events in CDF. 
+ """ + + def __init__(self, data_set_id: int, mime_type: str = "image/jpeg"): + """Initialize ThreeSixtyImageExtractor.""" + self.data_set_id: int = data_set_id + self.mime_type = mime_type + self.labels: list[Label] = [] + + class Faces(Enum): + left = 0 + front = 1 + right = 2 + back = 3 + top = 4 + bottom = 5 + + def create_threesixty_image( + self, + content: np.ndarray, + site_id: str, + site_name: str, + station_number: Any, + rotation_angle: str, + rotation_axis: VectorXYZ, + rotation_angle_unit: str, + translation: VectorXYZ, + translation_unit: str, + translation_offset_mm: VectorXYZ, + timestamp: int = 0, + ) -> tuple[Event, list[ImageWithFileMetadata]]: + """Append station measurement to station list for truview.""" + if timestamp == 0 or timestamp is None: + timestamp = int(time.time() * 1000) + if not isinstance(timestamp, int): + timestamp = int(timestamp) + station = ThreesixtyImage( + station_number=station_number, + rot_angle_unit="deg", + tran_unit="mm", + images=self._get_cubemap_images(content), + threesixty_image_metadata=ThreesixtyImageMetadata( + site_id=site_id, + site_name=site_name, + station_name=site_name + " " + station_number, + rotation_angle=convert_rotation_angle(rotation_angle, rotation_angle_unit), + rotation_axis=rotation_axis.to_string(), + translation=translation_to_mm_str_with_offset(translation, translation_offset_mm, translation_unit), + station_id=site_id + "-" + station_number, + timestamp=timestamp, + ), + ) + + event = self._create_cdf_events(station) + files = self._create_cdf_files(station) + return event, files + + def _create_cdf_events(self, three_sixty_image: ThreesixtyImage) -> Event: + """Create 360 image eventfrom + + One event per 360 image is created. 
+ """ + logger.info("Create events.") + event = Event( + three_sixty_image.threesixty_image_metadata.station_id + + str(three_sixty_image.threesixty_image_metadata.timestamp) + ) + event.metadata = asdict(three_sixty_image.threesixty_image_metadata) + event.data_set_id = self.data_set_id + event.description = "Scan position " + three_sixty_image.threesixty_image_metadata.station_name + event.type = EVENT_TYPE + event.subtype = EVENT_SUB_TYPE + event.start_time = three_sixty_image.threesixty_image_metadata.timestamp + return event + + def _image_to_byte_array(self, image: Image): + img_bytes = io.BytesIO() + image.save(img_bytes, format="PNG") + img_bytes = img_bytes.getvalue() # type: ignore + return img_bytes + + def _create_cdf_files( + self, three_sixty_image: ThreesixtyImage, resolution: int = 2048 + ) -> list[ImageWithFileMetadata]: + """Create 360 image files from three sixty image. + + Six files per 360 image is created, one file per face. + """ + logger.info("Create 360 files.") + files = [] + if three_sixty_image.threesixty_image_metadata.station_id: + for i, face in enumerate(self.Faces): + file_metadata = FileMetadata( + three_sixty_image.threesixty_image_metadata.station_id + + str(three_sixty_image.threesixty_image_metadata.timestamp) + + "-" + + str(resolution) + + "-" + + face.name + ) + file_metadata.labels = self.labels + metadata = {} + metadata["site_id"] = three_sixty_image.threesixty_image_metadata.site_id + metadata["site_name"] = three_sixty_image.threesixty_image_metadata.site_name + metadata["station_id"] = three_sixty_image.threesixty_image_metadata.station_id + metadata["station_name"] = three_sixty_image.threesixty_image_metadata.station_name + metadata["timestamp"] = str(three_sixty_image.threesixty_image_metadata.timestamp) + metadata["image_type"] = "cubemap" + metadata["image_resolution"] = str(resolution) + metadata["face"] = face.name + metadata["processed"] = "false" + file_metadata.metadata = metadata + file_metadata.name = ( + 
three_sixty_image.threesixty_image_metadata.station_name + + "-" + + str(three_sixty_image.threesixty_image_metadata.timestamp) # For avoiding duplicate filenames + + "-" + + face.name + + ".jpg" + ) + file_metadata.mime_type = self.mime_type + file_metadata.data_set_id = self.data_set_id + cubemap_img = three_sixty_image.images[face.name] + content = self._image_to_byte_array(Image.fromarray(cubemap_img)) + files.append(ImageWithFileMetadata(content=content, file_metadata=file_metadata)) + else: + logger.error("No station ID found.") + return files + + def _cube_dice2h(self, cube_dice): + w = cube_dice.shape[0] // 3 + assert cube_dice.shape[0] == w * 3 and cube_dice.shape[1] == w * 4 + cube_h = np.zeros((w, w * 6, cube_dice.shape[2]), dtype=cube_dice.dtype) + # Order: F R B L U D + sxy = [(1, 1), (2, 1), (3, 1), (0, 1), (1, 0), (1, 2)] + for i, (sx, sy) in enumerate(sxy): + face = cube_dice[(sy * w) : (sy + 1) * w, (sx * w) : (sx + 1) * w] + cube_h[:, (i * w) : (i + 1) * w] = face + return cube_h + + def _get_cubemap_images(self, content: Union[str, np.ndarray]) -> Union[dict[str, str], dict[str, np.ndarray]]: + """Create cubemap dict from equirectangular image. + + Args: + content: equirectangular + Returns: + cubemaps (dict[str,np.ndarray]: {: image}) + """ + if not isinstance(content, np.ndarray): + logger.error( + f"Unsupported input type: Equirectangular extractor" + f"only supports np.ndarray images. Got type {type(content)}" + ) + raise TypeError( + f"Unsupported input type: Equirectangular extractor " + f"only supports np.ndarray images. Got type {type(content)}" + ) + + cubemaps: dict[str, np.ndarray] = {} + try: + logger.info("Creating cubemap images from equirectangular image.") + im = py360convert.e2c(content, face_w=CUBEMAP_RESOLUTION) + cube_h = self._cube_dice2h(im) + cube_dict = py360convert.cube_h2dict(cube_h) + + # Translate keys from py360convert to TruView naming convention. 
+ dice_map = { + "F": "front", + "R": "right", + "B": "back", + "L": "left", + "U": "top", + "D": "bottom", + } + for key, im in cube_dict.items(): + cubemaps[dice_map[key]] = cube_dict[key] + except Exception as e: + raise Exception(f"Failed to create cubemap image from equirectangular image: {e}.") + return cubemaps diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/handler.py b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/handler.py new file mode 100644 index 00000000..e0e14298 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/handler.py @@ -0,0 +1,257 @@ +"""Gauge reader handler.""" + +import io +import json +import logging +import sys +from typing import Any, Optional + +import numpy as np +from bosdyn.client.math_helpers import Quat, SE3Pose +from cognite.client import CogniteClient, global_config +from cognite.client.data_classes import FileMetadata, FileMetadataList, FileMetadataUpdate, LabelFilter +from cognite.client.exceptions import CogniteDuplicatedError +from cognite_threesixty_images import CogniteThreeSixtyImageExtractor, VectorXYZ +from PIL import Image +from scipy.spatial.transform import Rotation + +logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=logging.BASIC_FORMAT) +logger = logging.getLogger(__name__) +CUBEMAP_RESOLUTION = 1024 +global_config.disable_gzip = True + + +def get_map_transform_from_map(client: CogniteClient, map_external_id: str) -> Optional[dict]: + """Get the transform of a map in the current project based on mission. 
We asume the frame has the root frame as parent.""" + project = client._config.project + map_request = {"items": [{"externalId": map_external_id}]} + + res = client.post(url=f"/api/v1/projects/{project}/robotics/maps/byids", json=map_request).json() + for map in res["items"]: + frame_external_id = map.get("frameExternalId") + assert frame_external_id, f"Map {map_external_id} does not have a frame associated with it." + # Create mapping + frame_request = {"items": [{"externalId": frame_external_id}]} + + res = client.post( + url=f"/api/v1/projects/{project}/robotics/frames/byids", + json=frame_request, + ).json() + + print(f"going to get frame {frame_external_id} from map {map_external_id}") + + for item in res["items"]: + if item.get("transform"): + print(f"found translation {item.get('transform').get('orientation').get('w')}") + return SE3Pose( + x=item.get("transform").get("translation").get("x"), + y=item.get("transform").get("translation").get("y"), + z=item.get("transform").get("translation").get("z"), + rot=Quat( + w=item.get("transform").get("orientation").get("w"), + x=item.get("transform").get("orientation").get("x"), + y=item.get("transform").get("orientation").get("y"), + z=item.get("transform").get("orientation").get("z"), + ), + ) + return None + + +def convert_metadata_to_se3_pose(external_id: str, metadata: dict[str, Any]) -> Optional[SE3Pose]: + """Convert metadata to SE3Pose.""" + x = metadata.get("waypoint_tform_body_x") + y = metadata.get("waypoint_tform_body_x") + z = metadata.get("waypoint_tform_body_x") + qx = metadata.get("waypoint_tform_body_qx") + qy = metadata.get("waypoint_tform_body_qy") + qz = metadata.get("waypoint_tform_body_qz") + qw = metadata.get("waypoint_tform_body_qw") + if not x or not y or not z or not qx or not qy or not qz or not qw: + logger.error( + f"Failed to process file {external_id}. Missing metadata field. 
Required metadata fields \ + are waypoint_tform_body_x, waypoint_tform_body_x, waypoint_tform_body_x, waypoint_tform_body_qx, \ + waypoint_tform_body_qy, waypoint_tform_body_qz, waypoint_tform_body_qw. File metadata keys: {metadata.keys()}" + ) + return None + waypoint_tform_body = SE3Pose( + x=float(x), y=float(y), z=float(z), rot=Quat(w=float(qw), x=float(qx), y=float(qy), z=float(qz)) + ) + return waypoint_tform_body + + +def get_waypoint_and_pose( + waypoint_id: str, client: CogniteClient +) -> tuple[Optional[dict[str, Any]], Optional[SE3Pose]]: + """Get a waypoint from the robotics api and return the waypoint and pose.""" + get_waypoints_body = {"items": [{"externalId": waypoint_id}]} + get_waypoints_response = client.post( + f"/api/v1/projects/{client.config.project}/robotics/waypoints/byids", json=get_waypoints_body + ) + waypoint = json.loads(get_waypoints_response.content)["items"] + if len(waypoint) != 1: + logger.error( + f"Failed to process threesixty image. Did not get exactly 1 waypoint with external id {waypoint_id}." 
+ ) + return None, None + waypoint_pos = waypoint[0].get("position") + waypoint_ori = waypoint[0].get("orientation") + ko_tform_waypoint = SE3Pose( + x=waypoint_pos["x"], + y=waypoint_pos["y"], + z=waypoint_pos["z"], + rot=Quat(w=waypoint_ori["w"], x=waypoint_ori["x"], y=waypoint_ori["y"], z=waypoint_ori["z"]), + ) + return waypoint[0], ko_tform_waypoint + + +def create_and_upload_360_files( + cognite_client: CogniteClient, + cognite_threesixty_image_extractor: CogniteThreeSixtyImageExtractor, + robot_pose: SE3Pose, + image: Image, + waypoint: dict[str, Any], + timestamp: int, +): + """Create Cogntie three sixty images.""" + rot = Rotation.from_quat( + [ + robot_pose.rotation.x, + robot_pose.rotation.y, + robot_pose.rotation.z, + robot_pose.rotation.w, + ] + ) + rot_vec = rot.as_rotvec() + rot_angle = np.linalg.norm(rot_vec) + rot_vec = rot_vec / rot_angle + + event, files = cognite_threesixty_image_extractor.create_threesixty_image( + content=np.array(image), + site_id=waypoint.get("mapExternalId", "default_location"), + site_name=waypoint.get("mapExternalId", "default_location"), + station_number=waypoint.get("externalId"), # TODO: make station id more readable + rotation_angle=str(rot_angle), + rotation_axis=VectorXYZ(float(rot_vec[0]), float(rot_vec[1]), float(rot_vec[2])), + rotation_angle_unit="rad", + translation=VectorXYZ(robot_pose.x, robot_pose.y, robot_pose.z), + translation_unit="m", + translation_offset_mm=VectorXYZ(0, 0, 0), + timestamp=timestamp, + ) + logger.info("Created event and files") + + # Upload event + try: + cognite_client.events.create([event]) + except CogniteDuplicatedError: + logger.warning(f"Event with external id {event.external_id} already exists. 
will update event.") + cognite_client.events.update([event]) + + logger.info(f"Created event in CDF with external id {event.external_id}") + # Upload files + for file in files: + cognite_client.files.upload_bytes( + content=file.content, + name=file.file_metadata.name, + data_set_id=file.file_metadata.data_set_id, + external_id=file.file_metadata.external_id, + source=file.file_metadata.source, + mime_type=file.file_metadata.mime_type, + metadata=file.file_metadata.metadata, + asset_ids=file.file_metadata.asset_ids, + labels=file.file_metadata.labels, + overwrite=True, + ) + logger.info(f"Created file in CDF with external id {file.file_metadata.external_id}") + logger.info("Completed uploading 360 image to CDF.") + + +def process_threesixty_files(files: FileMetadataList, client: CogniteClient, data_set_id: int): + """Process three sixty images.""" + cognite_threesixty_image_extractor = CogniteThreeSixtyImageExtractor(data_set_id=data_set_id) + + file: FileMetadata + for file in files: + try: + # Updating the file immediately so that the same file will not be processed again + client.files.update(FileMetadataUpdate(id=file.id).labels.remove("threesixty")) + + logger.info(f"Processing file with external id {file.external_id}, id {file.id}") + if not file.uploaded: + logger.error(f"file not upload not completed {file.external_id}") + continue + + # Get waypoint id from metadata + waypoint_id = file.metadata.get("waypoint_id") + print(f"Waypoint id: {waypoint_id}") + if not waypoint_id: + client.files.update(FileMetadataUpdate(id=file.id).labels.remove("threesixty")) + logger.error(f"Failed to process file {file.external_id}. 
No waypoint id in the metadata.") + continue + + # Calculate robot pose + waypoint_tform_body = convert_metadata_to_se3_pose(file.external_id, file.metadata) + if waypoint_tform_body is None: + continue + waypoint, ko_tform_waypoint = get_waypoint_and_pose(waypoint_id=waypoint_id, client=client) + if waypoint_tform_body is None or waypoint is None: + continue + robot_pose = ko_tform_waypoint * waypoint_tform_body + + print(f"waypoint : {waypoint}") + + site_alignement = get_map_transform_from_map( + client=client, map_external_id=str(waypoint.get("mapExternalId")) + ) + + site_pose = site_alignement * robot_pose + + # Download file + image_data = client.files.download_bytes(id=file.id) + try: + image = Image.open(io.BytesIO(image_data)) + except Exception: + logger.error(f"This CDF File does not seem to be an image. File ID: {file.id}") + continue + + # If images don't have a timestamp metadata field, default to the created time of the image file + image_timestamp = file.metadata.get("timestamp") + if image_timestamp is None: + image_timestamp = file.created_time + + # Create and upload 360 files + create_and_upload_360_files( + cognite_client=client, + cognite_threesixty_image_extractor=cognite_threesixty_image_extractor, + robot_pose=site_pose, + image=image, + waypoint=waypoint, + timestamp=image_timestamp, + ) + except Exception as e: + client.files.update(FileMetadataUpdate(id=file.id).labels.add("threesixty")) + logger.error(f"Failed to process file {file}. Error: {e}") + + +def handle(data, client): + """Three sixty image handle. 
.""" + logger.info("Start three sixty processing.") + + # Check that input contains data_set_external_id + if "data_set_external_id" not in data.keys(): + raise RuntimeError("Data should contain all keys: data_set_external_id") + data_set_id = client.data_sets.retrieve(external_id=data["data_set_external_id"]).id + + # Get all 360 images in the data set id with the label "threesixty" + # Changed default limit to 1 to process 1 file at a time as the upload_queue in cognite_threesixty_image uploads the whole queue + # on every file but after the first cycle fails as the first event is duplicated and the whole queue upload fails + files: FileMetadataList = client.files.list( + labels=LabelFilter(contains_all=["threesixty"]), + limit=25, + data_set_ids=[data_set_id], + uploaded=True, + ) + logger.info(f"Processing {len(files)} threesixty files.") + # Process three sixty files + process_threesixty_files(files=files, client=client, data_set_id=data_set_id) + return {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/requirements.txt b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/requirements.txt new file mode 100644 index 00000000..373de041 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/fn_threesixty/requirements.txt @@ -0,0 +1,7 @@ +bosdyn-client==3.3.2 +cognite-sdk==5.* +Pillow==12.1.1 +py360convert==0.1.0 +scipy==1.10.0 +six==1.16.0 +numpy==1.26.4 diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/robots.functions.Function.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/functions/robots.functions.Function.yaml new file mode 100644 index 00000000..6f53a76b --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/robots.functions.Function.yaml @@ -0,0 +1,51 @@ +- name: "Contextualize robot data" + externalId: "fn_contextualize_robot_data" + owner: "Robotics team" + description: "Contextualize data uploaded to CDF from robots." 
+ envVars: + # The two environment variables below are set by the Toolkit + ENV_TYPE: "${CDF_BUILD_TYPE}" + CDF_ENV: "${CDF_ENVIRON}" + runtime: "py39" + functionPath: "./handler.py" + # Data set id for the zip file with the code that is uploaded. + dataSetExternalId: "{{ robot_1_dataset_ext_id }}" + +- name: Gauge reading + externalId: "fn_gauge_reading" + owner: "Robotics team" + description: "Reads the gauge data from the robot." + envVars: + # The two environment variables below are set by the Toolkit + ENV_TYPE: "${CDF_BUILD_TYPE}" + CDF_ENV: "${CDF_ENVIRON}" + runtime: "py39" + functionPath: "./handler.py" + # Data set id for the zip file with the code that is uploaded. + dataSetExternalId: "{{ robot_1_dataset_ext_id }}" + +- name: Get IR data From IR raw + externalId: "fn_get_ir_data_from_ir_raw" + owner: "Robotics team" + description: "Extracts the IR data from the raw IR info." + envVars: + # The two environment variables below are set by the Toolkit + ENV_TYPE: "${CDF_BUILD_TYPE}" + CDF_ENV: "${CDF_ENVIRON}" + runtime: "py39" + functionPath: "./handler.py" + # Data set id for the zip file with the code that is uploaded. + dataSetExternalId: "{{ robot_1_dataset_ext_id }}" + +- name: Process 360 + externalId: "fn_threesixty" + owner: "Robotics team" + description: "Process 360 images into streetview" + envVars: + # The two environment variables below are set by the Toolkit + ENV_TYPE: "${CDF_BUILD_TYPE}" + CDF_ENV: "${CDF_ENVIRON}" + runtime: "py39" + functionPath: "./handler.py" + # Data set id for the zip file with the code that is uploaded. 
+ dataSetExternalId: "{{ robot_1_dataset_ext_id }}" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/functions/schedules.Schedule.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/functions/schedules.Schedule.yaml new file mode 100644 index 00000000..a783a348 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/functions/schedules.Schedule.yaml @@ -0,0 +1,94 @@ +- name: "contextualize_robot_data" + functionExternalId: "fn_contextualize_robot_data" + description: "Contextualizes the robot data and adds appropriate labels" + cronExpression: "* * * * *" + data: + gauge_context_label: robot_gauge + read_dial_gauge_label: read_dial_gauge + read_multiple_dial_gauges_label: read_multiple_dial_gauges + read_digital_gauge_label: read_digital_gauge + read_level_gauge_label: read_level_gauge + read_valve_label: read_valve + read_ir_raw_label: read_ir + spill_detection_label: unprocessed_spill_detection + data_set_external_id: "{{robot_1_dataset_ext_id}}" + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: "read_dial_gauge" + functionExternalId: "fn_gauge_reading" + description: "Reads the dial gauge data from the robot" + cronExpression: "* * * * *" + data: + gauge_type: dial # The gauge type to use in the API call. Currently, "analog", "digital" and "level" is supported. + input_label: read_dial_gauge # Label to list files by + output_label: gauge_reading # Label that is added to all files that the function has run on. + success_label: SUCCESS + failed_label: FAILED + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: read_level_gauge + functionExternalId: "fn_gauge_reading" + description: "Reads the level gauge data from the robot" + cronExpression: "* * * * *" + data: + gauge_type: level # The gauge type to use in the API call. Currently, "analog", "digital" and "level" is supported. 
+ input_label: read_level_gauge # Label to list files by + output_label: gauge_reading # Label that is added to all files that the function has run on. + success_label: SUCCESS + failed_label: FAILED + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: read_digital_gauge + functionExternalId: "fn_gauge_reading" + cronExpression: "* * * * *" + data: + gauge_type: digital # The gauge type to use in the API call. Currently, "analog", "digital" and "level" is supported. + input_label: read_digital_gauge # Label to list files by + output_label: gauge_reading # Label that is added to all files that the function has run on. + success_label: SUCCESS + failed_label: FAILED + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: read_valve + functionExternalId: "fn_gauge_reading" + description: "Reads the valve data from the robot" + cronExpression: "* * * * *" + data: + gauge_type: valve # The gauge type to use in the API call. Currently, "analog", "digital" and "level" is supported. + input_label: read_valve # Label to list files by + output_label: gauge_reading # Label that is added to all files that the function has run on. 
+ success_label: SUCCESS + failed_label: FAILED + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: get_ir_data + functionExternalId: "fn_get_ir_data_from_ir_raw" + cronExpression: "* * * * *" + data: + input_label: read_ir # Label to list files by + output_label: ir_finished # Label that is added to all files that the function has run on + success_label: SUCCESS_IR + failed_label: FAILED_IR + data_set_external_id: "{{robot_1_dataset_ext_id}}" + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" + +- name: threesixty + functionExternalId: "fn_threesixty" + cronExpression: "* * * * *" + data: + data_set_external_id: "{{robot_1_dataset_ext_id}}" + authentication: + clientId: "{{ run_function_client_id }}" + clientSecret: "{{ run_function_secret }}" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/module.toml b/modules/accelerators/inrobot/cdf_inrobot_common/module.toml new file mode 100644 index 00000000..73f25310 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/module.toml @@ -0,0 +1,5 @@ +[module] +title = "InRobot Common" +is_selected_by_default = true +id = "dp:acc:inrobot:cdf_inrobot_common" +package_id = "dp:inrobot" diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/acoustic_video.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/acoustic_video.RobotCapability.yaml new file mode 100644 index 00000000..099e1e9f --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/acoustic_video.RobotCapability.yaml @@ -0,0 +1,72 @@ +name: Acoustic video recording +externalId: acoustic_video +method: acoustic_video +description: Acoustic video recording +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/capabilities/acoustic_video + title: Acoustic video recording input + type: object + additionalProperties: false + 
properties: + method: + type: string + parameters: + type: object + properties: + duration: + type: number + maximum: 120.0 + minimum: 0.0 + frequencyFrom: + type: number + maximum: 42000.0 + minimum: 0.0 + frequencyRange: + type: number + maximum: 8000.0 + minimum: 2000.0 + opacity: + type: number + maximum: 100.0 + minimum: 0.0 + threshold: + type: number + maximum: 100.0 + minimum: 0.0 + required: + - duration + - frequencyFrom + - frequencyRange + required: + - method + - parameters +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/acoustic_video + type: object + properties: + uploadInstructions: + type: object + properties: + video: + type: object + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - video + required: + - uploadInstructions + additionalProperties: false diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/process_threesixty.DataPostProcessing.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/process_threesixty.DataPostProcessing.yaml new file mode 100644 index 00000000..04816464 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/process_threesixty.DataPostProcessing.yaml @@ -0,0 +1,23 @@ +method: process_threesixty +name: Process a 360 image +externalId: process_threesixty +description: Process a 360 image into a panoramic view +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_postprocessing/process_threesixty + title: Process threesixty input + type: object + properties: + image: + type: object + properties: + method: + type: string + parameters: + type: object + properties: {} + required: + - method + - parameters + additionalProperties: false + additionalProperties: false diff --git 
a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/pt_ir_video.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/pt_ir_video.RobotCapability.yaml new file mode 100644 index 00000000..1bcc1d82 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/pt_ir_video.RobotCapability.yaml @@ -0,0 +1,64 @@ +name: PT ir video recording +externalId: pt_ir_video +method: pt_ir_video +description: Pan, tilt, thermal camera for video recording +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/pt_ir_video + properties: + method: + type: string + parameters: + properties: + duration: + maximum: 120.0 + minimum: 0.0 + type: number + pan: + maximum: 180.0 + minimum: -180.0 + type: number + tilt: + maximum: 90.0 + minimum: -90.0 + type: number + zoom: + maximum: 100.0 + minimum: 0.0 + type: number + required: + - pan + - tilt + - duration + type: object + required: + - method + - parameters + title: Pan tilt thermal video recording input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/pt_ir_video + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + video: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz.RobotCapability.yaml new file mode 100644 index 00000000..1c4e8012 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz.RobotCapability.yaml @@ -0,0 +1,63 @@ +name: PTZ camera +externalId: ptz +method: ptz +description: Pan, 
tilt, zoom camera for image capture. +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/ptz + properties: + method: + type: string + parameters: + type: object + properties: + pan: + maximum: 180.0 + minimum: -180.0 + type: number + tilt: + maximum: 90.0 + minimum: -90.0 + type: number + zoom: + maximum: 100.0 + minimum: 0.0 + type: number + required: + - pan + - tilt + - zoom + required: + - method + - parameters + title: PTZ camera input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/ptz + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + image: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_ir.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_ir.RobotCapability.yaml new file mode 100644 index 00000000..624975db --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_ir.RobotCapability.yaml @@ -0,0 +1,80 @@ +name: PTZ thermal camera +externalId: ptz_ir +method: ptz_ir +description: Pan, tilt, zoom camera for thermal image capture. 
+inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/ptz_ir + properties: + method: + type: string + parameters: + type: object + properties: + pan: + maximum: 180.0 + minimum: -180.0 + type: number + tilt: + maximum: 90.0 + minimum: -90.0 + type: number + zoom: + maximum: 100.0 + minimum: 0.0 + type: number + required: + - pan + - tilt + - zoom + required: + - method + - parameters + title: PTZ thermal camera input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/ptz_ir + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + image: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + raw: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_video.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_video.RobotCapability.yaml new file mode 100644 index 00000000..6eecc7bc --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/ptz_video.RobotCapability.yaml @@ -0,0 +1,68 @@ +name: PTZ video recording +externalId: ptz_video +method: ptz_video +description: Pan, tilt, zoom camera for video recording +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/ptz_video + properties: + method: + type: string + parameters: + type: 
object + properties: + duration: + maximum: 120.0 + minimum: 0.0 + type: number + pan: + maximum: 180.0 + minimum: -180.0 + type: number + tilt: + maximum: 90.0 + minimum: -90.0 + type: number + zoom: + maximum: 100.0 + minimum: 0.0 + type: number + required: + - pan + - tilt + - zoom + - duration + required: + - method + - parameters + title: PTZ video recording input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/ptz_video + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + video: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_dial_gauge.DataPostProcessing.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_dial_gauge.DataPostProcessing.yaml new file mode 100644 index 00000000..74a2c8c0 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_dial_gauge.DataPostProcessing.yaml @@ -0,0 +1,31 @@ +method: read_dial_gauge +name: Read dial gauge +externalId: read_dial_gauge +description: Read dial gauge from an image using Cognite Vision gauge reader +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_postprocessing/read_dial_gauge + title: Read dial gauge input + type: object + properties: + image: + type: object + properties: + method: + type: string + parameters: + type: object + properties: + unit: + type: string + deadAngle: + type: number + minLevel: + type: number + maxLevel: + type: number + required: + - method + - parameters + additionalProperties: false + additionalProperties: false diff --git 
a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_digital_gauge.DataPostProcessing.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_digital_gauge.DataPostProcessing.yaml new file mode 100644 index 00000000..d104e392 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_digital_gauge.DataPostProcessing.yaml @@ -0,0 +1,35 @@ +method: read_digital_gauge +name: Read digital gauge +externalId: read_digital_gauge +description: Read digital gauge from an image using Cognite Vision gauge reader +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_postprocessing/read_digital_gauge + title: Read digital gauge input + type: object + properties: + image: + type: object + properties: + method: + type: string + parameters: + type: object + properties: + unit: + type: string + minLevel: + type: number + maxLevel: + type: number + minNumDigits: + type: integer + maxNumDigits: + type: integer + commaPos: + type: integer + required: + - method + - parameters + additionalProperties: false + additionalProperties: false diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_level_gauge.DataPostProcessing.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_level_gauge.DataPostProcessing.yaml new file mode 100644 index 00000000..9f25b660 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_level_gauge.DataPostProcessing.yaml @@ -0,0 +1,29 @@ +method: read_level_gauge +name: Read level gauge +externalId: read_level_gauge +description: Read level gauge from an image using Cognite Vision gauge reader +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_postprocessing/read_level_gauge + title: Read level gauge input + type: object + properties: + image: + type: object + properties: + method: + type: string + parameters: + type: object + properties: + unit: + type: string + 
minLevel: + type: number + maxLevel: + type: number + required: + - method + - parameters + additionalProperties: false + additionalProperties: false diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_valve.DataPostProcessing.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_valve.DataPostProcessing.yaml new file mode 100644 index 00000000..bf8aa761 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/read_valve.DataPostProcessing.yaml @@ -0,0 +1,23 @@ +method: read_valve +name: Read valve state +externalId: read_valve +description: Read valve state from an image using Cognite Vision valve reader +inputSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_postprocessing/read_valve + title: Read valve state input + type: object + properties: + image: + type: object + properties: + method: + type: string + parameters: + type: object + properties: {} + required: + - method + - parameters + additionalProperties: false + additionalProperties: false diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty.RobotCapability.yaml new file mode 100644 index 00000000..95c34657 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty.RobotCapability.yaml @@ -0,0 +1,47 @@ +name: 360 degree camera +externalId: threesixty +method: threesixty +description: Panoramic 360 degree image capture. 
+inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/threesixty + properties: + method: + type: string + parameters: + type: object + properties: {} + required: + - method + - parameters + title: 360 camera input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/threesixty + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + image: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty_video.RobotCapability.yaml b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty_video.RobotCapability.yaml new file mode 100644 index 00000000..94b2be71 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_common/robotics/threesixty_video.RobotCapability.yaml @@ -0,0 +1,47 @@ +name: 360 degree video +externalId: threesixty_video +method: threesixty_video +description: Panoramic 360 degree video recording. 
+inputSchema: + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: false + id: robotics/schemas/0.1.0/capabilities/threesixty_video + properties: + method: + type: string + parameters: + type: object + properties: {} + required: + - method + - parameters + title: 360 video input + type: object +dataHandlingSchema: + $schema: http://json-schema.org/draft-07/schema# + id: robotics/schemas/0.1.0/data_handling/threesixty_video + properties: + uploadInstructions: + type: object + additionalProperties: false + properties: + video: + type: object + additionalProperties: false + properties: + method: + const: uploadFile + type: string + parameters: + type: object + properties: + filenamePrefix: + type: string + required: + - filenamePrefix + required: + - method + - parameters + required: + - uploadInstructions + type: object diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/README.md b/modules/accelerators/inrobot/cdf_inrobot_location/README.md new file mode 100644 index 00000000..734f6231 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/README.md @@ -0,0 +1,33 @@ +# cdf_inrobot_location + +This module contains location specific configurations for InRobot. This is the default location. +To support multiple locations, copy this module and modify the configurations. Remember to +rename the module name to e.g. `inrobot_location_`. + +## Auth + +The module creates three groups that need three matching groups in the identity provider that the CDF +project is configured with. The groups are: + +- Admin role. This role has access to create 3D models, create assets, create new video rooms, etc. +- User role. This is the standard role for most users. These users can interact with the robot and see all data + associated with the robot. +- A robot user role - this is given to the robot and includes the ability to create labels, write to files, write to + FDM (checklists, checklist items). 
Any new robot in a given location must have a new user group. Similarly, + the same robot in a new location must have a new user group. + +All the users read from robot-specific data sets. This means that when a new robot is added, the dataset scopes +for the users must be updated as well. + +The source ids from the groups in the identity provider should be set in [./default.config.yaml](default.config.yaml). + +## Data models + +There are two spaces created in this module: one space for InRobot to store app data and one space for +data from source systems, like assets, activities/work orders etc. + +## Robotics + +Each location must have three associated robotics-specific resources created: a Map, a Frame, and a Location. These are +robotics-api specific concepts and are required for the robot to understand its environment. These require a threeD +model name, type (THREEDMODEL or POINTCLOUD) revisionId, and modelId in addition to the root asset external id. diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-admin.Group.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-admin.Group.yaml new file mode 100644 index 00000000..21cf6730 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-admin.Group.yaml @@ -0,0 +1,107 @@ +name: "gp_InRobot_Admin_{{first_location}}" +sourceId: "{{inrobot_admin_first_location_source_id}}" +metadata: + origin: "cdf-project-templates" +capabilities: + - assetsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - dataModelsAcl: + actions: + - READ + - WRITE + scope: + spaceIdScope: + spaceIds: + - "APM_Config" + - "cdf_core" + - "cdf_apm" + - "cdf_infield" + - "cdf_apps_shared" + - dataModelInstancesAcl: + actions: + - READ + - WRITE + - WRITE_PROPERTIES + scope: + spaceIdScope: + spaceIds: + - "cdf_apm" + - "cognite_app_data" + - "sp_{{first_location}}_source_data" + - "sp_{{first_location}}_app_data" + - eventsAcl: + actions: + - READ + scope: + datasetScope: + 
ids: + - "{{ robot_1_dataset_ext_id }}" + - filesAcl: + actions: + - WRITE + - READ + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - groupsAcl: + actions: + - CREATE + - READ + - UPDATE + - LIST + - DELETE + scope: + all: {} + - threedAcl: + actions: + - READ + - CREATE + - UPDATE + - DELETE + scope: + all: {} + - projectsAcl: + actions: + - READ + - LIST + scope: + all: {} + - roboticsAcl: + actions: + - READ + - CREATE + - UPDATE + - DELETE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - timeSeriesAcl: + actions: + - READ + scope: + all: {} + - functionsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - videoStreamingAcl: + actions: + - READ + - SUBSCRIBE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - videoStreamingAcl: + actions: + - WRITE + scope: + all: {} diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-users.Group.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-users.Group.yaml new file mode 100644 index 00000000..765c9b48 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/auth/inrobot-users.Group.yaml @@ -0,0 +1,94 @@ +name: "gp_InRobot_Users_{{first_location}}" +sourceId: "{{inrobot_users_first_location_source_id}}" +metadata: + origin: "cdf-project-templates" +capabilities: + - assetsAcl: + actions: + - READ + scope: + all: {} + - dataModelsAcl: + actions: + - READ + scope: + spaceIdScope: + spaceIds: + - "APM_Config" + - "cdf_apm" + - "cdf_infield" # for user info + - "cdf_core" + - "cdf_apps_shared" + + - dataModelInstancesAcl: + actions: + - READ + scope: + spaceIdScope: + spaceIds: + - "APM_Config" + - "cdf_apm" + - "cognite_app_data" + - "sp_{{first_location}}_source_data" + - "sp_{{first_location}}_app_data" + - dataModelInstancesAcl: + actions: + - WRITE + scope: + spaceIdScope: + spaceIds: + - "cognite_app_data" # This space stores user profile info + - eventsAcl: + actions: + - READ + scope: + 
datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - filesAcl: + actions: + - WRITE + - READ + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - groupsAcl: + actions: + - READ + scope: + all: {} + - threedAcl: + actions: + - READ + scope: + all: {} + - projectsAcl: + actions: + - READ + - LIST + scope: + all: {} + - roboticsAcl: + actions: + - READ + - CREATE + - UPDATE + - DELETE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - timeSeriesAcl: + actions: + - READ + scope: + all: {} + - videoStreamingAcl: + actions: + - READ + - SUBSCRIBE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/auth/robot-1.Group.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/auth/robot-1.Group.yaml new file mode 100644 index 00000000..74613b60 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/auth/robot-1.Group.yaml @@ -0,0 +1,77 @@ +name: "gp_Robot_{{first_location}}" +sourceId: "{{robot_1_first_location_source_id}}" +metadata: + origin: "cdf-project-templates" +capabilities: + - assetsAcl: + actions: + - READ + scope: + all: {} + - dataModelsAcl: + actions: + - READ + - WRITE + scope: + spaceIdScope: + spaceIds: + - "APM_Config" + - "cdf_core" + - "cdf_apm" + - "cdf_apps_shared" + - dataModelInstancesAcl: + actions: + - READ + scope: + spaceIdScope: + spaceIds: + - "APM_Config" + - "cognite_app_data" + - "cdf_apm" + - "sp_{{first_location}}_source_data" + - "sp_{{first_location}}_app_data" + - dataModelInstancesAcl: + actions: + - WRITE + scope: + spaceIdScope: + spaceIds: + - "cognite_app_data" + - "cdf_apm" + - "sp_{{first_location}}_source_data" + - "sp_{{first_location}}_app_data" + - filesAcl: + actions: + - READ + scope: + all: {} + - filesAcl: + actions: + - WRITE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - labelsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - roboticsAcl: + actions: + - 
READ + - CREATE + - UPDATE + - DELETE + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" + - videoStreamingAcl: + actions: + - PUBLISH + scope: + datasetScope: + ids: + - "{{ robot_1_dataset_ext_id }}" diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationAppData.space.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationAppData.space.yaml new file mode 100644 index 00000000..bbeb5b92 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationAppData.space.yaml @@ -0,0 +1,3 @@ +space: sp_{{first_location}}_app_data +name: sp:inrobot:{{first_location}}:app_data +description: Space for {{first_location}} App Data diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationSourceData.space.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationSourceData.space.yaml new file mode 100644 index 00000000..ac188818 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/data_models/locationSourceData.space.yaml @@ -0,0 +1,3 @@ +space: sp_{{ first_location}}_source_data +name: sp:inrobot:{{ first_location}}:source +description: Space for InRobot {{first_location}} Source Data diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/default.config.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/default.config.yaml new file mode 100644 index 00000000..d2417086 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/default.config.yaml @@ -0,0 +1,7 @@ +inrobot_admin_first_location_source_id: +inrobot_users_first_location_source_id: +robot_1_first_location_source_id: +three_d_model_name: +three_d_type: +three_d_model_id: +three_d_revision_id: diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/module.toml b/modules/accelerators/inrobot/cdf_inrobot_location/module.toml new file mode 100644 index 00000000..ee31fb5f --- /dev/null +++ 
b/modules/accelerators/inrobot/cdf_inrobot_location/module.toml @@ -0,0 +1,5 @@ +[module] +title = "InRobot Location" +is_selected_by_default = true +id = "dp:acc:inrobot:cdf_inrobot_location" +package_id = "dp:inrobot" \ No newline at end of file diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Frame.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Frame.yaml new file mode 100644 index 00000000..81a335c1 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Frame.yaml @@ -0,0 +1,15 @@ +- name: Root frame at the origin of location {{ first_root_asset_external_id }} + externalId: {{ first_root_asset_external_id }} +- name: Frame {{ three_d_model_name }} + externalId: frame_{{ three_d_model_name }} + transform: + parentFrameExternalId: {{ first_root_asset_external_id }} + translation: + x: 0.0 + y: 0.0 + z: 0.0 + orientation: + x: 0.0 + y: 0.0 + z: 0.0 + w: 1.0 diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Location.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Location.yaml new file mode 100644 index 00000000..95bbd229 --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Location.yaml @@ -0,0 +1,3 @@ +name: {{ first_root_asset_external_id }} +externalId: {{ first_root_asset_external_id }} +description: Robot location diff --git a/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Map.yaml b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Map.yaml new file mode 100644 index 00000000..4b287e9b --- /dev/null +++ b/modules/accelerators/inrobot/cdf_inrobot_location/robotics/root.Map.yaml @@ -0,0 +1,10 @@ +name: {{ three_d_model_name }} +externalId: {{ three_d_model_name }} +description: {{ three_d_type }} {{ three_d_model_name }} +mapType: {{ three_d_type }} +frameExternalId: frame_{{ three_d_model_name }} +data: + modelId: {{ three_d_model_id }} + revisionId: {{ 
three_d_revision_id }} +locationExternalId: "{{ first_root_asset_external_id }}" +scale: 1.0 diff --git a/modules/bootcamp/README.md b/modules/bootcamp/README.md new file mode 100644 index 00000000..5cd6027c --- /dev/null +++ b/modules/bootcamp/README.md @@ -0,0 +1,5 @@ +# Bootcamp module + +These are the initial configs and python functions used for the bootcamp. +[Docs](https://docs.cdf-bootcamp.cogniteapp.com/) +[Repo](https://github.com/cognitedata/cognite-data-fusion-bootcamp) diff --git a/modules/bootcamp/ice_cream_api/default.config.yaml b/modules/bootcamp/ice_cream_api/default.config.yaml new file mode 100644 index 00000000..980b05f0 --- /dev/null +++ b/modules/bootcamp/ice_cream_api/default.config.yaml @@ -0,0 +1,9 @@ +icapi_extractors_source_id: + +tokenUri: ${IDP_TOKEN_URL} +cdfProjectName: ${CDF_PROJECT} +scopes: ${IDP_SCOPES} +icapi_trigger_client_id: ${ICAPI_EXTRACTORS_CLIENT_ID} +icapi_trigger_client_secret: ${ICAPI_EXTRACTORS_CLIENT_SECRET} +icapi_extractors_client_id: ${ICAPI_EXTRACTORS_CLIENT_ID} +icapi_extractors_client_secret: ${ICAPI_EXTRACTORS_CLIENT_SECRET} \ No newline at end of file diff --git a/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/handler.py b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/handler.py new file mode 100644 index 00000000..9d90b771 --- /dev/null +++ b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/handler.py @@ -0,0 +1,168 @@ +from datetime import datetime, timedelta, timezone +from itertools import islice +from timeit import default_timer + +from cognite.client import CogniteClient +from cognite.client.data_classes import ExtractionPipelineRun +from cognite.client.data_classes.data_modeling import NodeId, ViewId +from cognite.client.data_classes.data_modeling.cdm.v1 import CogniteAsset, CogniteTimeSeries +from cognite.client.data_classes.filters import Prefix, ContainsAny + +from ice_cream_factory_api import IceCreamFactoryAPI + +from 
cognite.client.config import global_config +global_config.disable_pypi_version_check = True + +from itertools import islice + + +def batcher(iterable, batch_size): + iterator = iter(iterable) + while batch := list(islice(iterator, batch_size)): + yield batch + + +def get_time_series_for_site(client: CogniteClient, site): + this_site = site.lower() + sub_tree_root = client.data_modeling.instances.retrieve_nodes( + NodeId("icapi_dm_space", this_site), + node_cls=CogniteAsset + ) + + if not sub_tree_root: + print( + f"----No CogniteAssets in CDF for {site}!----\n" + f" Run the 'Create Cognite Asset Hierarchy' transformation!" + ) + return [] + + sub_tree_nodes = client.data_modeling.instances.list( + instance_type=CogniteAsset, + filter=Prefix(property=["cdf_cdm", "CogniteAsset/v1", "path"], value=sub_tree_root.path), + limit=None + ) + + if not sub_tree_nodes: + print( + f"----No CogniteTimeSeries in CDF for {site}!----\n" + f" Run the 'Contextualize Timeseries and Assets' transformation!" 
+ ) + return [] + + value_list = [{"space": node.space, "externalId": node.external_id} for node in sub_tree_nodes] + + time_series = [ + client.data_modeling.instances.search( + view=ViewId("cdf_cdm", "CogniteTimeSeries", "v1"), + instance_type=CogniteTimeSeries, + filter=ContainsAny(property=["cdf_cdm", "CogniteTimeSeries/v1", "assets"], values=batch), + limit=None + ) + for batch in batcher(value_list, 20) + ] + + # Combine list of batch results into a single NodeList + time_series = [node for nodelist in time_series for node in nodelist] + + if not time_series: + print("No CogniteTimeSeries in the CogniteCore Data Model (cdf_cdm Space)") + + time_series = [ + item for item in time_series + if any(substring in item.external_id for substring in ["planned_status", "good"]) + ] + + return time_series + + +def report_ext_pipe(client: CogniteClient, status, message=None): + ext_pipe_run = ExtractionPipelineRun( + extpipe_external_id="ep_icapi_datapoints", + status=status, + message=message + ) + + client.extraction_pipelines.runs.create(run=ext_pipe_run) + +def handle(client: CogniteClient = None, data=None): + report_ext_pipe(client, "seen") + + sites = None + backfill = None + hours = None + max_hours = 336 + + if data: + sites = data.get("sites") + backfill = data.get("backfill") + hours = data.get("hours") + + if hours and hours > max_hours: + print(f"{hours} > {max_hours}! 
The Ice Cream API can't serve more than {max_hours} hours of datapoints, setting hours to max") + hours = max_hours + + all_sites = [ + "Houston", + "Oslo", + "Kuala_Lumpur", + "Hannover", + "Nuremberg", + "Marseille", + "Sao_Paulo", + "Chicago", + "Rotterdam", + "London", + ] + + sites = sites or all_sites + backfill = backfill or True + hours = hours or max_hours + + now = datetime.now(timezone.utc).timestamp() * 1000 + increment = timedelta(hours=hours).total_seconds() * 1000 + + ice_cream_api = IceCreamFactoryAPI(base_url="https://ice-cream-factory.inso-internal.cognite.ai") + + try: + for site in sites: + print(f"Getting Data Points for {site}") + big_start = default_timer() + + time_series = get_time_series_for_site(client, site) + + latest_dps = { + dp.external_id: dp.timestamp + for dp in client.time_series.data.retrieve_latest( + external_id=[ts.external_id for ts in time_series], + ignore_unknown_ids=True + ) + } if not backfill else None + + to_insert = [] + for ts in time_series: + # figure out the window of datapoints to pull for this Time Series + latest = latest_dps[ts.external_id][0] if not backfill and latest_dps.get(ts.external_id) else None + + start = latest if latest else now - increment + end = now + + dps_list = ice_cream_api.get_datapoints(timeseries_ext_id=ts.external_id, start=start, end=end) + + for dp_dict in dps_list: + dp_dict["instance_id"] = NodeId(space="icapi_dm_space", external_id=dp_dict["instance_id"]) + + to_insert.extend(dps_list) + + if len(to_insert) > 50: + client.time_series.data.insert_multiple(datapoints=to_insert) + to_insert = [] + + if to_insert: + client.time_series.data.insert_multiple(datapoints=to_insert) + print(f" {hours}h of Datapoints took {default_timer() - big_start:.2f} seconds") + else: + print(f" No TimeSeries, for {hours}h of Datapoints took {default_timer() - big_start:.2f} seconds") + + report_ext_pipe(client, "success") + except Exception as e: + report_ext_pipe(client, "fail", e) \ No newline at end 
of file diff --git a/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/ice_cream_factory_api.py b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/ice_cream_factory_api.py new file mode 100644 index 00000000..f0607754 --- /dev/null +++ b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/ice_cream_factory_api.py @@ -0,0 +1,61 @@ +from typing import Dict, Union + +import orjson +from requests import Response, Session, adapters + + +class IceCreamFactoryAPI: + """Class for Ice Cream Factory API.""" + + def __init__(self, base_url: str): + self.base_url = base_url + self.adapter = adapters.HTTPAdapter(max_retries=3) + self.session = Session() + self.session.mount("https://", self.adapter) + + def get_response( + self, headers: Dict[str, str], url_suffix: str, params: Dict[str, Union[str, int, float]] = {} + ) -> Response: + """ + Get response from API. + + Args: + headers: request header + url_suffix: string to add to base url + params: query parameters + """ + + response = self.session.get(f"{self.base_url}/{url_suffix}", headers=headers, timeout=40, params=params) + response.raise_for_status() + return response + + def get_datapoints(self, timeseries_ext_id: str, start: Union[str, int, float], end: Union[str, int, float]): + """ + Get datapoints for a timeseries external id. This will also return datapoints for an associated timeseries + + (e.g. request for external id "HPM2C561:planned_status" will return datapoints for "HPM2C561:planned_status" AND + "HPM2C561:status". Similar, request for timeseries with external id "HPM2C561:count" will return datapoints for + "HPM2C561:count" AND ""HPM2C561:good"). 
+ + Args: + timeseries_ext_id: external id of timeseries to get datapoints for + start: start for datapoints (UNIX timestamp (int, float) or string with format 'YYYY-MM-DD HH:MM') + end: end for datapoints (UNIX timestamp (int, float) or string with format 'YYYY-MM-DD HH:MM') + """ + params = {"start": start, "end": end, "external_id": timeseries_ext_id} + response = self.get_response(headers={}, url_suffix="datapoints/oee", params=params) + + response_dict = orjson.loads(response.content) + + response_dict = [ + { + "instance_id": ts, + "datapoints": [ + # convert timestamp to ms (*1000) for CDF uploads + {"timestamp": dp[0] * 1000, "value": dp[1]} + for dp in dps + ] + } for ts, dps in response_dict.items() if len(dps) > 1 + ] + + return response_dict \ No newline at end of file diff --git a/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/requirements.txt b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/requirements.txt new file mode 100644 index 00000000..636ad284 --- /dev/null +++ b/modules/bootcamp/ice_cream_api/functions/icapi_datapoints_extractor/requirements.txt @@ -0,0 +1,3 @@ +cognite-sdk +orjson >= 3.10.5 +guppy3 \ No newline at end of file diff --git a/modules/bootcamp/ice_cream_api/module.toml b/modules/bootcamp/ice_cream_api/module.toml new file mode 100644 index 00000000..6a8c107e --- /dev/null +++ b/modules/bootcamp/ice_cream_api/module.toml @@ -0,0 +1,5 @@ +[module] +title = "Bootcamp" +is_selected_by_default = true +id = "dp:bootcamp:ice_cream_api" +package_id = "dp:bootcamp" \ No newline at end of file diff --git a/modules/bootcamp/use_cases/oee/default.config.yaml b/modules/bootcamp/use_cases/oee/default.config.yaml new file mode 100644 index 00000000..fc053715 --- /dev/null +++ b/modules/bootcamp/use_cases/oee/default.config.yaml @@ -0,0 +1,6 @@ +data_pipeline_oee_source_id: + +uc_oee_ds_external_id: ds_uc_oee +oee_space_name: oee_ts_space +data_pipeline_oee_client_id: ${DATA_PIPELINE_OEE_CLIENT_ID} 
+data_pipeline_oee_client_secret: ${DATA_PIPELINE_OEE_CLIENT_SECRET} \ No newline at end of file diff --git a/modules/bootcamp/use_cases/oee/functions/oee_timeseries/handler.py b/modules/bootcamp/use_cases/oee/functions/oee_timeseries/handler.py new file mode 100644 index 00000000..1eae5308 --- /dev/null +++ b/modules/bootcamp/use_cases/oee/functions/oee_timeseries/handler.py @@ -0,0 +1,229 @@ +from concurrent.futures import ThreadPoolExecutor +from itertools import islice +from datetime import timedelta +from typing import Any, Dict + +from cognite.client import CogniteClient +from cognite.client.data_classes.data_modeling import NodeId, ViewId +from cognite.client.data_classes.data_modeling.cdm.v1 import CogniteAsset, CogniteTimeSeries, CogniteTimeSeriesApply +from cognite.client.data_classes.filters import Prefix, ContainsAny +from cognite.client.exceptions import CogniteNotFoundError + +import numpy as np + +from cognite.client.config import global_config +global_config.disable_pypi_version_check = True + + +def batcher(iterable, batch_size): + iterator = iter(iterable) + while batch := list(islice(iterator, batch_size)): + yield batch + + +def get_time_series_for_site(client: CogniteClient, site, space): + this_site = site.lower() + sub_tree_root = client.data_modeling.instances.retrieve_nodes( + NodeId(space, this_site), + node_cls=CogniteAsset + ) + + if not sub_tree_root: + print( + f"----No CogniteAssets in CDF for {site}!----\n" + f" Run the 'Create Cognite Asset Hierarchy' transformation!" + ) + return + + sub_tree_nodes = client.data_modeling.instances.list( + instance_type=CogniteAsset, + filter=Prefix(property=["cdf_cdm", "CogniteAsset/v1", "path"], value=sub_tree_root.path), + limit=None + ) + + if not sub_tree_nodes: + print( + f"----No CogniteTimeSeries in CDF for {site}!----\n" + f" Run the 'Contextualize Timeseries and Assets' transformation!" 
+ ) + return + + value_list = [{"space": node.space, "externalId": node.external_id} for node in sub_tree_nodes] + + time_series = [ + client.data_modeling.instances.search( + view=ViewId("cdf_cdm", "CogniteTimeSeries", "v1"), + instance_type=CogniteTimeSeries, + space=space, + filter=ContainsAny(property=["cdf_cdm", "CogniteTimeSeries/v1", "assets"], values=batch), + limit=None + ) + for batch in batcher(value_list, 20) + ] + + # Combine list of batch results into a single NodeList + time_series = [node for nodelist in time_series for node in nodelist] + + if not time_series: + print("No CogniteTimeSeries in the CogniteCore Data Model (cdf_cdm Space)") + + return time_series + +def handle(client: CogniteClient, data: Dict[str, Any] = {}) -> None: + lookback_minutes = None + sites = None + + if data: + lookback_minutes = timedelta(minutes=data.get("lookback_minutes", 60)).total_seconds() * 1000 + sites = data.get("sites") + + all_sites = [ + "Houston", + "Oslo", + "Kuala_Lumpur", + "Hannover", + "Nuremberg", + "Marseille", + "Sao_Paulo", + "Chicago", + "Rotterdam", + "London", + ] + + lookback_minutes = lookback_minutes or timedelta(minutes=60).total_seconds() * 1000 + sites = sites or all_sites + + print(f"Processing datapoints for these sites: {sites}") + with ThreadPoolExecutor(max_workers=10) as executor: + futures = [executor.submit(process_site, client, lookback_minutes, site) for site in sites] + for f in futures: + f.result() + +def process_site(client, lookback_minutes, site): + oee_space = "oee_ts_space" + source_space = "icapi_dm_space" + + timeseries = get_time_series_for_site(client, site, source_space) + asset_eids = list(set([item.external_id.split(sep=":")[0] for item in timeseries])) + instance_ids = [NodeId(space=source_space, external_id=ts.external_id) for ts in timeseries] + all_latest_dps = client.time_series.data.retrieve_latest(instance_id=instance_ids) + + # Organize latest datapoints by equipment for alignment + assets_dps = { + 
external_id: [latest_dp for latest_dp in all_latest_dps if external_id in latest_dp.instance_id.external_id] + for external_id in asset_eids + } + + for asset, latest_dps in assets_dps.items(): + print(f"Calculating OEE for {asset}") + count_node = f"NodeId({source_space}, {asset}:count)" + good_node = f"NodeId({source_space}, {asset}:good)" + status_node = f"NodeId({source_space}, {asset}:status)" + planned_status_node = f"NodeId({source_space}, {asset}:planned_status)" + + end = min([dp.timestamp[0] for dp in latest_dps if latest_dps and dp.timestamp], default=None) + + if end: + dps_df = client.time_series.data.retrieve_dataframe( + instance_id=[dp.instance_id for dp in latest_dps], + start=end - lookback_minutes, + end=end, + aggregates=["sum"], + granularity="1m", + include_aggregate_name=False, + limit=None + ) + + # Frontfill because "planned_status" and "status" only have datapoints when the value changes + dps_df = dps_df.ffill() + + # Fill the rest with the opposite + try: + first_valid_value = dps_df[planned_status_node].loc[dps_df[planned_status_node].first_valid_index()] + except Exception as e: + print(f"Failed to find datapoints for {planned_status_node}, {e}") + continue + + backfill_value = 1.0 if first_valid_value == 0.0 else 0.0 + dps_df[planned_status_node] = dps_df[planned_status_node].fillna(value=backfill_value) + + # Same for status + first_valid_value = dps_df[status_node].loc[dps_df[status_node].first_valid_index()] + backfill_value = 1.0 if first_valid_value == 0.0 else 0.0 + dps_df[status_node] = dps_df[status_node].fillna(value=backfill_value) + + count_dps = dps_df[count_node] + good_dps = dps_df[good_node] + status_dps = dps_df[status_node] + planned_status_dps = dps_df[planned_status_node] + + total_items = len(count_dps) + + if ( + total_items != len(good_dps) + or total_items != len(status_dps) + or total_items != len(planned_status_dps) + ): + # We expect ALL dependent timeseries to have the exact same number of datapoints + # for 
the specified time range for the calculation to execute. + print( + f"""{asset}: Unable to retrieve datapoints for all required OEE timeseries (count, good, status, planned_status) + between {end - lookback_minutes} and {end}. Ensure that data is available for the time range specified.""" + ) + + # Calculate the components of OEE + off_spec_node = f"{asset}:off_spec" + quality_node = f"{asset}:quality" + performance_node = f"{asset}:performance" + availability_node = f"{asset}:availability" + oee_node = f"{asset}:oee" + + dps_df[off_spec_node] = count_dps - good_dps + dps_df[quality_node] = good_dps / count_dps + dps_df[performance_node] = (count_dps / status_dps) / (60.0 / 3.0) + dps_df[availability_node] = status_dps / planned_status_dps + + dps_df[oee_node] = dps_df[quality_node] * dps_df[performance_node] * dps_df[availability_node] + + # Fill in the divide by zeros + dps_df = dps_df.fillna(value=0.0) + dps_df = dps_df.replace([np.inf, -np.inf], 0.0) + + # Drop input timeseries + dps_df = dps_df.drop(columns=[count_node, good_node, status_node, planned_status_node]) + + to_insert = [ + { + "instance_id": NodeId(space="oee_ts_space", external_id=external_id), + "datapoints": list(zip(dps_df[external_id].index, dps_df[external_id])) + } + for external_id in dps_df.columns + ] + + try: + client.time_series.data.insert_multiple(to_insert) + except CogniteNotFoundError as e: + # Create the missing oee timeseries since they don't exist + ts_to_create = [] + for node_id in e.not_found: + print(f"Creating CogniteTimeSeries {node_id}") + + external_id = node_id["instanceId"]["externalId"] + + # change external_id to a readable name + # Ex: "OSLPROFILTRASYS185:off_spec" to "OSLPROFILTRASYS185 Off Spec" + name = external_id.split(":") + name[-1] = name[-1].replace("_", " ").title() + + ts_to_create.append( + CogniteTimeSeriesApply( + space=oee_space, + external_id=external_id, + name=" ".join(name), + is_step=False, + time_series_type="numeric", + ) + ) + + 
client.data_modeling.instances.apply(ts_to_create) + client.time_series.data.insert_multiple(to_insert) diff --git a/modules/bootcamp/use_cases/oee/functions/oee_timeseries/requirements.txt b/modules/bootcamp/use_cases/oee/functions/oee_timeseries/requirements.txt new file mode 100644 index 00000000..a75330a0 --- /dev/null +++ b/modules/bootcamp/use_cases/oee/functions/oee_timeseries/requirements.txt @@ -0,0 +1,2 @@ +pandas +cognite-sdk \ No newline at end of file diff --git a/modules/bootcamp/use_cases/oee/module.toml b/modules/bootcamp/use_cases/oee/module.toml new file mode 100644 index 00000000..13dfd008 --- /dev/null +++ b/modules/bootcamp/use_cases/oee/module.toml @@ -0,0 +1,5 @@ +[module] +title = "Bootcamp" +is_selected_by_default = true +id = "dp:bootcamp:use_cases:oee" +package_id = "dp:bootcamp" \ No newline at end of file diff --git a/modules/common/cdf_auth_readwrite_all/README.md b/modules/common/cdf_auth_readwrite_all/README.md new file mode 100644 index 00000000..16091fc1 --- /dev/null +++ b/modules/common/cdf_auth_readwrite_all/README.md @@ -0,0 +1,23 @@ +# Module: cdf_auth_readwrite_all + +This module contains two groups that are used to grant access to all resources in a CDF project. This +should **never** be used for production projects, as it grants read-write access to all resources in the project. +However, it is useful for sandbox projects to allow members of the `gp_admin_read_write` group to have full access. +It can also be used for demo projects where you want to give full read access `gp_admin_readonly` access to +all resources to a group of users. + +## Managed resources + +This module manages the following resources: + +1. a group with read-write access (`gp_admin_read_write`) to everything in a CDF project. +2. a group with read-only access `gp_admin_readonly` (for viewing configurations from UI). 
+ +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|---------------------|------------------------------------------------------------------------------------------------------| +| readwrite_source_id | The source ID of the group that should be granted read-write access to all resources in the project. | +| readonly_source_id | The source ID of the group that should be granted read-only access to all resources in the project. | diff --git a/modules/common/cdf_auth_readwrite_all/auth/admin.readonly.group.yaml b/modules/common/cdf_auth_readwrite_all/auth/admin.readonly.group.yaml new file mode 100644 index 00000000..94de3b5f --- /dev/null +++ b/modules/common/cdf_auth_readwrite_all/auth/admin.readonly.group.yaml @@ -0,0 +1,265 @@ +name: 'gp_admin_readonly' +sourceId: '{{readonly_source_id}}' +metadata: + origin: "cognite-toolkit" +capabilities: + - projectsAcl: + actions: + - LIST + - READ + scope: + all: {} + - analyticsAcl: + actions: + - READ + - LIST + scope: + all: {} + - annotationsAcl: + actions: + - READ + scope: + all: {} + - appConfigAcl: + actions: + - READ + scope: + all: {} + - assetsAcl: + actions: + - READ + scope: + all: {} + - auditlogAcl: + actions: + - READ + scope: + all: {} + - dataModelInstancesAcl: + actions: + - READ + scope: + all: {} + - dataModelsAcl: + actions: + - READ + scope: + all: {} + - datasetsAcl: + actions: + - READ + scope: + all: {} + - diagramParsingAcl: + actions: + - READ + scope: + all: {} + - digitalTwinAcl: + actions: + - READ + scope: + all: {} + - documentFeedbackAcl: + actions: + - READ + scope: + all: {} + - documentPipelinesAcl: + actions: + - READ + scope: + all: {} + - entitymatchingAcl: + actions: + - READ + scope: + all: {} + - eventsAcl: + actions: + - READ + scope: + all: {} + - extractionConfigsAcl: + actions: + - READ + scope: + all: {} + - extractionPipelinesAcl: + actions: + - READ + scope: + all: {} + - extractionRunsAcl: + actions: + - READ + 
scope: + all: {} + - filePipelinesAcl: + actions: + - READ + scope: + all: {} + - filesAcl: + actions: + - READ + scope: + all: {} + - functionsAcl: + actions: + - READ + scope: + all: {} + - geospatialAcl: + actions: + - READ + scope: + all: {} + - geospatialCrsAcl: + actions: + - READ + scope: + all: {} + - groupsAcl: + actions: + - READ + - LIST + scope: + all: {} + - hostedExtractorsAcl: + actions: + - READ + scope: + all: {} + - labelsAcl: + actions: + - READ + scope: + all: {} + - locationFiltersAcl: + actions: + - READ + scope: + all: {} + - monitoringTasksAcl: + actions: + - READ + scope: + all: {} + - notificationsAcl: + actions: + - READ + scope: + all: {} + - pipelinesAcl: + actions: + - READ + scope: + all: {} + - postgresGatewayAcl: + actions: + - READ + scope: + all: {} + - rawAcl: + actions: + - READ + - LIST + scope: + all: {} + - relationshipsAcl: + actions: + - READ + scope: + all: {} + - roboticsAcl: + actions: + - READ + scope: + all: {} + - sapWritebackAcl: + actions: + - READ + scope: + all: {} + - sapWritebackRequestsAcl: + actions: + - LIST + scope: + all: {} + - scheduledCalculationsAcl: + actions: + - READ + scope: + all: {} + - securityCategoriesAcl: + actions: + - LIST + - MEMBEROF + scope: + all: {} + - seismicAcl: + actions: + - READ + scope: + all: {} + - sequencesAcl: + actions: + - READ + scope: + all: {} + - sessionsAcl: + actions: + - LIST + scope: + all: {} + - templateGroupsAcl: + actions: + - READ + scope: + all: {} + - templateInstancesAcl: + actions: + - READ + scope: + all: {} + - threedAcl: + actions: + - READ + scope: + all: {} + - timeSeriesAcl: + actions: + - READ + scope: + all: {} + - timeSeriesSubscriptionsAcl: + actions: + - READ + scope: + all: {} + - transformationsAcl: + actions: + - READ + scope: + all: {} + - typesAcl: + actions: + - READ + scope: + all: {} + - visionModelAcl: + actions: + - READ + scope: + all: {} + - wellsAcl: + actions: + - READ + scope: + all: {} + - workflowOrchestrationAcl: + actions: + - 
READ + scope: + all: {} diff --git a/modules/common/cdf_auth_readwrite_all/auth/admin.readwrite.group.yaml b/modules/common/cdf_auth_readwrite_all/auth/admin.readwrite.group.yaml new file mode 100644 index 00000000..b189417e --- /dev/null +++ b/modules/common/cdf_auth_readwrite_all/auth/admin.readwrite.group.yaml @@ -0,0 +1,329 @@ +name: 'gp_admin_read_write' +sourceId: '{{readwrite_source_id}}' +metadata: + origin: "cognite-toolkit" +capabilities: + - projectsAcl: + actions: + - LIST + - READ + - UPDATE + scope: + all: {} + - analyticsAcl: + actions: + - READ + - EXECUTE + - LIST + scope: + all: {} + - annotationsAcl: + actions: + - WRITE + - REVIEW + - READ + - SUGGEST + scope: + all: {} + - appConfigAcl: + actions: + - WRITE + - READ + scope: + all: {} + - assetsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - auditlogAcl: + actions: + - READ + scope: + all: {} + - dataModelInstancesAcl: + actions: + - WRITE_PROPERTIES + - WRITE + - READ + scope: + all: {} + - dataModelsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - datasetsAcl: + actions: + - WRITE + - OWNER + - READ + scope: + all: {} + - diagramParsingAcl: + actions: + - WRITE + - READ + scope: + all: { } + - digitalTwinAcl: + actions: + - WRITE + - READ + scope: + all: {} + - documentFeedbackAcl: + actions: + - CREATE + - READ + - DELETE + scope: + all: {} + - documentPipelinesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - entitymatchingAcl: + actions: + - WRITE + - READ + scope: + all: {} + - eventsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - extractionConfigsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - extractionPipelinesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - extractionRunsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - filePipelinesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - filesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - functionsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - 
geospatialAcl: + actions: + - WRITE + - READ + scope: + all: {} + - geospatialCrsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - groupsAcl: + actions: + - CREATE + - UPDATE + - READ + - LIST + - DELETE + scope: + all: {} + - hostedExtractorsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - labelsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - locationFiltersAcl: + actions: + - WRITE + - READ + scope: + all: {} + - monitoringTasksAcl: + actions: + - WRITE + - READ + scope: + all: {} + - notificationsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - pipelinesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - postgresGatewayAcl: + actions: + - WRITE + - READ + scope: + all: {} + - rawAcl: + actions: + - LIST + - WRITE + - READ + scope: + all: {} + - relationshipsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - roboticsAcl: + actions: + - CREATE + - UPDATE + - READ + - DELETE + scope: + all: {} + - sapWritebackAcl: + actions: + - WRITE + - READ + scope: + all: {} + - sapWritebackRequestsAcl: + actions: + - WRITE + - LIST + scope: + all: {} + - scheduledCalculationsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - securityCategoriesAcl: + actions: + - CREATE + - MEMBEROF + - UPDATE + - LIST + - DELETE + scope: + all: {} + - seismicAcl: + actions: + - WRITE + - READ + scope: + all: {} + - sequencesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - sessionsAcl: + actions: + - CREATE + - LIST + - DELETE + scope: + all: {} + - templateGroupsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - templateInstancesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - threedAcl: + actions: + - CREATE + - UPDATE + - READ + - DELETE + scope: + all: {} + - timeSeriesAcl: + actions: + - WRITE + - READ + scope: + all: {} + - timeSeriesSubscriptionsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - transformationsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - typesAcl: + actions: + - WRITE + - 
READ + scope: + all: {} + - visionModelAcl: + actions: + - WRITE + - READ + scope: + all: {} + - wellsAcl: + actions: + - WRITE + - READ + scope: + all: {} + - workflowOrchestrationAcl: + actions: + - WRITE + - READ + scope: + all: {} diff --git a/modules/common/cdf_auth_readwrite_all/default.config.yaml b/modules/common/cdf_auth_readwrite_all/default.config.yaml new file mode 100644 index 00000000..a5bf9eb8 --- /dev/null +++ b/modules/common/cdf_auth_readwrite_all/default.config.yaml @@ -0,0 +1,2 @@ +readwrite_source_id: +readonly_source_id: \ No newline at end of file diff --git a/modules/common/cdf_auth_readwrite_all/module.toml b/modules/common/cdf_auth_readwrite_all/module.toml new file mode 100644 index 00000000..eef82339 --- /dev/null +++ b/modules/common/cdf_auth_readwrite_all/module.toml @@ -0,0 +1,4 @@ +[module] +title = "Admin group for CI/CD" +id = "dp:common:cdf_auth_readwrite_all" +package_id = "dp:common" \ No newline at end of file diff --git a/modules/custom/my_module/auth/.gitkeep b/modules/custom/my_module/auth/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/modules/custom/my_module/default.config.yaml b/modules/custom/my_module/default.config.yaml new file mode 100644 index 00000000..30f5d73e --- /dev/null +++ b/modules/custom/my_module/default.config.yaml @@ -0,0 +1 @@ +my_var: my_value \ No newline at end of file diff --git a/modules/custom/my_module/module.toml b/modules/custom/my_module/module.toml new file mode 100644 index 00000000..5ce057e6 --- /dev/null +++ b/modules/custom/my_module/module.toml @@ -0,0 +1,4 @@ +[module] +title = "Empty module" +id = "dp:custom:my_module" +package_id = "dp:custom" \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension/README.md b/modules/models/cdf_process_industry_extension/README.md deleted file mode 100644 index 61232429..00000000 --- a/modules/models/cdf_process_industry_extension/README.md +++ /dev/null @@ -1,203 +0,0 @@ -# CDF Process Industry 
Extension Module - -This module provides a minimal extension of the Cognite Process Industry data model, adding organization-specific views and containers that extend the standard CDM (Cognite Data Model) for process industry use cases. - -## Why Use This Module? - -**Extend the Standard Data Model for Your Organization** - -The Cognite CDM provides foundational types, but industrial deployments often need organization-specific extensions. This module delivers **production-ready data model extensions** that follow best practices while allowing customization. - -**Key Benefits:** - -- 🏗️ **CDM Compatible**: Extends standard Cognite Process Industry types -- 🎯 **Organization Prefixed**: Custom views with your organization prefix -- 📊 **Complete Entity Coverage**: Assets, Equipment, TimeSeries, Files, Activities, and more -- 🔧 **Easy Customization**: Add custom properties to containers -- 📈 **Enterprise Ready**: Includes Maintenance Orders, Operations, and Notifications - -**Time & Cost Savings:** - -- **Development Time**: Pre-built containers and views save weeks of data modeling -- **Best Practices**: Follows Cognite's recommended patterns for extensions -- **Consistency**: Standardized naming conventions across all entity types - -## 🎯 Overview - -The CDF Process Industry Extension module provides: -- **Organization-prefixed views** for all major entity types -- **Custom containers** with extension properties -- **Enterprise data model** combining all views -- **Schema space** for model organization - -## 🏗️ Module Architecture - -``` -cdf_process_industry_extension/ -├── 📁 data_modeling/ # Data model definitions -│ ├── 📁 containers/ # Container definitions -│ │ ├── 📄 Activity.Container.yaml -│ │ ├── 📄 Asset.Container.yaml -│ │ ├── 📄 Equipment.Container.yaml -│ │ ├── 📄 File.Container.yaml -│ │ ├── 📄 MaintenanceOrder.Container.yaml -│ │ ├── 📄 Notification.Container.yaml -│ │ ├── 📄 Operation.Container.yaml -│ │ ├── 📄 Reportable.Container.yaml -│ │ └── 📄 
TimeSeries.Container.yaml -│ ├── 📁 views/ # View definitions -│ │ ├── 📄 Activity.view.yaml -│ │ ├── 📄 Asset.view.yaml -│ │ ├── 📄 Equipment.view.yaml -│ │ ├── 📄 File.view.yaml -│ │ ├── 📄 MaintenanceOrder.view.yaml -│ │ ├── 📄 Notification.view.yaml -│ │ ├── 📄 Operation.view.yaml -│ │ ├── 📄 Reportable.view.yaml -│ │ └── 📄 TimeSeries.view.yaml -│ ├── 📄 enterprise.datamodel.yaml # Combined data model -│ └── 📄 schema.space.yaml # Schema space definition -├── 📄 default.config.yaml # Module configuration -└── 📄 module.toml # Module metadata -``` - -## 🚀 Core Components - -### Entity Views - -| View | Base CDM Type | Description | -|------|---------------|-------------| -| `{ORG}Asset` | CogniteAsset | Physical assets with custom properties | -| `{ORG}Equipment` | CogniteEquipment | Equipment items with extensions | -| `{ORG}TimeSeries` | CogniteTimeSeries | Time series data with custom metadata | -| `{ORG}File` | CogniteFile | Documents and files with extensions | -| `{ORG}Activity` | CogniteActivity | Activities and work items | -| `{ORG}MaintenanceOrder` | Custom | Maintenance work orders | -| `{ORG}Operation` | Custom | Work order operations | -| `{ORG}Notification` | Custom | Maintenance notifications | -| `{ORG}Reportable` | Custom | Reportable entities | - -### Enterprise Data Model - -The `{ORG}ProcessIndustries` data model combines all views into a single queryable model, including: -- All organization-specific views -- CDM reference types (CogniteSourceSystem, CogniteUnit, etc.) -- Asset/Equipment type classifications - -## 🔧 Configuration - -### Module Configuration (`default.config.yaml`) - -```yaml -schemaSpace: sp_enterprise_process_industry # Space for schema definitions -organization: ORG # Organization prefix for views -datamodelVersion: v1.0 # Data model version -``` - -## 🏃‍♂️ Getting Started - -### 1. Prerequisites - -- CDF project with data modeling capabilities -- Admin permissions to create spaces and data models - -### 2. 
Configure the Module - -Update your `config..yaml` under the module variables section: - -```yaml -variables: - modules: - cdf_process_industry_extension: - schemaSpace: sp_enterprise_process_industry - organization: YOUR_ORG # Organization prefix for views (e.g., "ACME") - datamodelVersion: v1.0 # Data model version -``` - -### 3. Deploy the Module - -```bash -# Deploy using CDF Toolkit -cdf deploy --env your-environment - -# Verify deployment -cdf data-models list -``` - -### 4. Customize Containers - -Add custom properties to containers as needed: - -```yaml -# Example: Adding custom property to Asset container -properties: - customField: - type: - type: primitive - primitive: string - description: Organization-specific field -``` - -## 📊 Data Model Structure - -```mermaid -graph TD - subgraph "Organization Views" - A[ORGAsset] - B[ORGEquipment] - C[ORGTimeSeries] - D[ORGFile] - E[ORGActivity] - F[ORGMaintenanceOrder] - G[ORGOperation] - H[ORGNotification] - end - - subgraph "CDM Base Types" - I[CogniteAsset] - J[CogniteEquipment] - K[CogniteTimeSeries] - L[CogniteFile] - M[CogniteActivity] - end - - A --> I - B --> J - C --> K - D --> L - E --> M - - subgraph "ORGProcessIndustries Data Model" - N[Combined Model] - end - - A --> N - B --> N - C --> N - D --> N - E --> N - F --> N - G --> N - H --> N -``` - -## 🎯 Use Cases - -### Enterprise Data Modeling -- **Standardization**: Consistent data model across all sites -- **Extensions**: Add organization-specific properties -- **Governance**: Centralized schema management - -### Integration -- **Source Systems**: Map SAP, PI, and other sources to views -- **Applications**: Build apps against the enterprise model -- **Analytics**: Query unified data across entity types - -## 📚 Dependencies - -This module should be deployed **before** source system modules that populate data into these views. - -## 📄 License - -This module is part of the Cognite Templates repository and follows the same licensing terms. 
- diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Reportable.Container.yaml b/modules/models/cdf_process_industry_extension/data_modeling/containers/Reportable.Container.yaml deleted file mode 100644 index 910a4438..00000000 --- a/modules/models/cdf_process_industry_extension/data_modeling/containers/Reportable.Container.yaml +++ /dev/null @@ -1,42 +0,0 @@ -space: {{ schemaSpace }} -description: Container for common reporting attributes -externalId: {{ organization }}Reportable -constraints: {} -indexes: {} -name: {{ organization }}Reportable -properties: - sysSite: - autoIncrement: false - immutable: false - nullable: true - type: - collation: ucs_basic - list: false - type: text - sysUnit: - autoIncrement: false - immutable: false - nullable: true - type: - collation: ucs_basic - list: false - type: text - sysTagsFound: - autoIncrement: false - immutable: false - nullable: true - type: - collation: ucs_basic - list: true - maxListSize: 1200 - type: text - sysTagsLinked: - autoIncrement: false - immutable: false - nullable: true - type: - collation: ucs_basic - list: true - maxListSize: 1200 - type: text -usedFor: node \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension/data_modeling/enterprise.datamodel.yaml b/modules/models/cdf_process_industry_extension/data_modeling/enterprise.datamodel.yaml deleted file mode 100644 index 8c52e3a0..00000000 --- a/modules/models/cdf_process_industry_extension/data_modeling/enterprise.datamodel.yaml +++ /dev/null @@ -1,70 +0,0 @@ -space: {{ schemaSpace }} -externalId: {{ organization }}ProcessIndustries -name: {{ organization }} enterprise model -description: {{ organization }} enterprise model -version: {{ datamodelVersion }} -views: -- space: {{ schemaSpace }} - externalId: {{ organization }}MaintenanceOrder - version: {{ datamodelVersion }} - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Operation - version: {{ datamodelVersion }} 
- type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Notification - version: {{ datamodelVersion }} - type: view -- space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 - type: view -- space: cdf_cdm - externalId: CogniteSourceable - version: v1 - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Asset - version: {{ datamodelVersion }} - type: view -- space: cdf_cdm - externalId: CogniteAssetClass - version: v1 - type: view -- space: cdf_cdm - externalId: CogniteAssetType - version: v1 - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Equipment - version: {{ datamodelVersion }} - type: view -- space: cdf_cdm - externalId: CogniteEquipmentType - version: v1 - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}File - version: {{ datamodelVersion }} - type: view -- space: cdf_cdm - externalId: CogniteFileCategory - version: v1 - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Activity - version: {{ datamodelVersion }} - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}TimeSeries - version: {{ datamodelVersion }} - type: view -- space: cdf_cdm - externalId: CogniteUnit - version: v1 - type: view -- space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - version: {{ datamodelVersion }} - type: view diff --git a/modules/models/cdf_process_industry_extension/data_modeling/schema.space.yaml b/modules/models/cdf_process_industry_extension/data_modeling/schema.space.yaml deleted file mode 100644 index ffa411e6..00000000 --- a/modules/models/cdf_process_industry_extension/data_modeling/schema.space.yaml +++ /dev/null @@ -1 +0,0 @@ -space: {{ schemaSpace}} diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Reportable.view.yaml b/modules/models/cdf_process_industry_extension/data_modeling/views/Reportable.view.yaml deleted file mode 100644 index bb3edfd9..00000000 --- 
a/modules/models/cdf_process_industry_extension/data_modeling/views/Reportable.view.yaml +++ /dev/null @@ -1,72 +0,0 @@ -space: {{ schemaSpace }} -externalId: {{ organization }}Reportable -name: Reportable -description: Represents a reportable entity. - -version: {{ datamodelVersion }} -properties: - name: - container: - space: cdf_cdm - externalId: CogniteDescribable - type: container - containerPropertyIdentifier: name - description: Name of the instance - sourceId: - container: - space: cdf_cdm - externalId: CogniteSourceable - type: container - containerPropertyIdentifier: sourceId - description: Identifier from the source system - sourceContext: - container: - space: cdf_cdm - externalId: CogniteSourceable - type: container - containerPropertyIdentifier: sourceContext - description: Context of the source id. For systems where the sourceId is globally unique, the sourceContext is expected to not be set. - source: - container: - space: cdf_cdm - externalId: CogniteSourceable - type: container - containerPropertyIdentifier: source - description: Direct relation to a source system - source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 - type: view - sysSite: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysSite - description: Site (sys) - name: Site (sys) - sysUnit: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysUnit - description: Unit (sys) - name: Unit (sys) - sysTagsFound: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsFound - description: Tags found (sys) - name: Tags found (sys) - sysTagsLinked: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsLinked - description: Tags linked (sys) - 
name: Tags linked (sys) diff --git a/modules/models/cdf_process_industry_extension/default.config.yaml b/modules/models/cdf_process_industry_extension/default.config.yaml deleted file mode 100644 index 9054dba0..00000000 --- a/modules/models/cdf_process_industry_extension/default.config.yaml +++ /dev/null @@ -1,3 +0,0 @@ -schemaSpace: sp_enterprise_process_industry -organization: ORG -datamodelVersion: v1.0 diff --git a/modules/models/cdf_process_industry_extension/module.toml b/modules/models/cdf_process_industry_extension/module.toml deleted file mode 100644 index e50be7e6..00000000 --- a/modules/models/cdf_process_industry_extension/module.toml +++ /dev/null @@ -1,5 +0,0 @@ -[module] -title = "Example of Minimum Extension of the Cognite Process Industry Model" -is_selected_by_default = false -id = "dp:models:cdf_process_industry_extension" -package_id = "dp:quickstart" diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/360Image.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/360Image.Container.yaml new file mode 100644 index 00000000..744242ba --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/360Image.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360Image +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageAnnotation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageAnnotation.Container.yaml new file mode 100644 index 00000000..ce56d752 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageAnnotation.Container.yaml @@ -0,0 +1,14 @@ +space: {{ schemaSpace }} +externalId: {{ organization 
}}360ImageAnnotation +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} +usedFor: edge \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageCollection.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageCollection.Container.yaml new file mode 100644 index 00000000..91889488 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageCollection.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360ImageCollection +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageModel.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageModel.Container.yaml new file mode 100644 index 00000000..740ae41a --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageModel.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360ImageModel +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageStation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageStation.Container.yaml new file mode 100644 index 00000000..75dfb43a --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/360ImageStation.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ 
organization }}360ImageStation +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/3DModel.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DModel.Container.yaml new file mode 100644 index 00000000..dbd56ea9 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DModel.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DModel +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/3DObject.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DObject.Container.yaml new file mode 100644 index 00000000..5cc12702 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DObject.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DObject +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/3DRevision.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DRevision.Container.yaml new file mode 100644 index 00000000..5b72a014 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DRevision.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DRevision +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + 
immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/3DTransformation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DTransformation.Container.yaml new file mode 100644 index 00000000..9e276c14 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/3DTransformation.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DTransformation +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Activity.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Activity.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/Activity.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/Activity.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Annotation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Annotation.Container.yaml new file mode 100644 index 00000000..34bc6fa6 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Annotation.Container.yaml @@ -0,0 +1,14 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Annotation +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} +usedFor: edge \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Asset.Container.yaml 
b/modules/models/cdf_process_industry_extension_full/data_models/containers/Asset.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/Asset.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/Asset.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetClass.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetClass.Container.yaml new file mode 100644 index 00000000..11def737 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetClass.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}AssetClass +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetType.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetType.Container.yaml new file mode 100644 index 00000000..0b8a28d3 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/AssetType.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}AssetType +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/CADModel.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADModel.Container.yaml new file mode 100644 index 00000000..dc2f8ab0 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADModel.Container.yaml @@ -0,0 +1,13 @@ +space: {{ 
schemaSpace }} +externalId: {{ organization }}CADModel +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/CADNode.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADNode.Container.yaml new file mode 100644 index 00000000..a7bd7621 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADNode.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CADNode +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/CADRevision.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADRevision.Container.yaml new file mode 100644 index 00000000..0565dcc2 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/CADRevision.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CADRevision +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/CubeMap.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/CubeMap.Container.yaml new file mode 100644 index 00000000..fad774b1 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/CubeMap.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CubeMap +properties: + UUID: + type: + list: false + collation: 
ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Describable.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Describable.Container.yaml new file mode 100644 index 00000000..62651fff --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Describable.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Describable +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/DiagramAnnotation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/DiagramAnnotation.Container.yaml new file mode 100644 index 00000000..eeb8f926 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/DiagramAnnotation.Container.yaml @@ -0,0 +1,14 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}DiagramAnnotation +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} +usedFor: edge diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Equipment.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Equipment.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/Equipment.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/Equipment.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/EquipmentType.Container.yaml 
b/modules/models/cdf_process_industry_extension_full/data_models/containers/EquipmentType.Container.yaml new file mode 100644 index 00000000..76a87ca3 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/EquipmentType.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}EquipmentType +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/File.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/File.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/File.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/File.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/FileCategory.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/FileCategory.Container.yaml new file mode 100644 index 00000000..c356b613 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/FileCategory.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}FileCategory +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/MaintenanceOrder.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/MaintenanceOrder.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/MaintenanceOrder.Container.yaml rename to 
modules/models/cdf_process_industry_extension_full/data_models/containers/MaintenanceOrder.Container.yaml diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Notification.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Notification.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/Notification.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/Notification.Container.yaml diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/Operation.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Operation.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/Operation.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/Operation.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudModel.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudModel.Container.yaml new file mode 100644 index 00000000..a7b64ce6 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudModel.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudModel +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudRevision.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudRevision.Container.yaml new file mode 100644 index 00000000..38edad78 --- /dev/null +++ 
b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudRevision.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudRevision +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudVolume.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudVolume.Container.yaml new file mode 100644 index 00000000..9015db1b --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/PointCloudVolume.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudVolume +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Schedulable.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Schedulable.Container.yaml new file mode 100644 index 00000000..2eea9167 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Schedulable.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Schedulable +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/SourceSystem.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/SourceSystem.Container.yaml new file mode 100644 index 00000000..18e97311 --- /dev/null +++ 
b/modules/models/cdf_process_industry_extension_full/data_models/containers/SourceSystem.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}SourceSystem +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Sourceable.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Sourceable.Container.yaml new file mode 100644 index 00000000..32adf0d2 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Sourceable.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Sourceable +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension/data_modeling/containers/TimeSeries.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/TimeSeries.Container.yaml similarity index 100% rename from modules/models/cdf_process_industry_extension/data_modeling/containers/TimeSeries.Container.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/containers/TimeSeries.Container.yaml diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Unit.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Unit.Container.yaml new file mode 100644 index 00000000..ebed2b84 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Unit.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Unit +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: 
true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/containers/Visualizable.Container.yaml b/modules/models/cdf_process_industry_extension_full/data_models/containers/Visualizable.Container.yaml new file mode 100644 index 00000000..0c441584 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/containers/Visualizable.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Visualizable +properties: + UUID: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false +constraints: {} +indexes: {} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/enterprise.datamodel.yaml b/modules/models/cdf_process_industry_extension_full/data_models/enterprise.datamodel.yaml new file mode 100644 index 00000000..82fc7a74 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/enterprise.datamodel.yaml @@ -0,0 +1,150 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}ProcessIndustries +name: {{ organization }} enterprise model +description: {{ organization }} enterprise model +version: {{ datamodelVersion }} +views: +- space: {{ schemaSpace }} + externalId: {{ organization }}MaintenanceOrder + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Operation + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Notification + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Describable + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Sourceable + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} + type: view +- 
space: {{ schemaSpace }} + externalId: {{ organization }}Schedulable + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Visualizable + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}3DTransformation + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}CubeMap + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}CADModel + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}3DRevision + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudModel + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}360ImageModel + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}CADRevision + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudRevision + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}360ImageCollection + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}CADNode + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudVolume + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}360Image + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}360ImageStation + version: {{ 
datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}360ImageAnnotation + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Asset + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}AssetClass + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}AssetType + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Equipment + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}EquipmentType + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}FileCategory + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Activity + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}TimeSeries + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Annotation + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}DiagramAnnotation + version: {{ datamodelVersion }} + type: view +- space: {{ schemaSpace }} + externalId: {{ organization }}Unit + version: {{ datamodelVersion }} + type: view diff --git a/modules/models/cdf_process_industry_extension_full/data_models/schema.space.yaml b/modules/models/cdf_process_industry_extension_full/data_models/schema.space.yaml new file mode 100644 index 00000000..2ac976a5 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/schema.space.yaml @@ -0,0 +1 @@ +space: {{ schemaSpace }} diff --git 
a/modules/models/cdf_process_industry_extension_full/data_models/views/360Image.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/360Image.view.yaml new file mode 100644 index 00000000..f25852a6 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/360Image.view.yaml @@ -0,0 +1,119 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360Image +description: '' +implements: +- space: cdf_cdm + externalId: Cognite360Image + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + collection360: + container: + space: cdf_cdm_3d + externalId: Cognite360Image + type: container + containerPropertyIdentifier: collection360 + description: Direct relation to Cognite360ImageCollection + source: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageCollection + version: {{ datamodelVersion }} + type: view + station360: + container: + space: cdf_cdm_3d + externalId: Cognite360Image + type: container + containerPropertyIdentifier: group3d + description: Direct relation to Cognite3DGroup instance that groups different + Cognite360Image instances to the same station + source: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageStation + version: {{ datamodelVersion }} + type: view + front: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: front + description: Direct relation to a file holding the front projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + back: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: back + description: Direct relation to a file holding the back projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + left: + container: + space: 
cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: left + description: Direct relation to a file holding the left projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + right: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: right + description: Direct relation to a file holding the right projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + top: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: top + description: Direct relation to a file holding the top projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + bottom: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: bottom + description: Direct relation to a file holding the bottom projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}360Image + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageAnnotation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageAnnotation.view.yaml new file mode 100644 index 00000000..065db262 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageAnnotation.view.yaml @@ -0,0 +1,28 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360ImageAnnotation +description: '' +implements: +- space: cdf_cdm + externalId: 
Cognite360ImageAnnotation + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + source: + container: + space: cdf_cdm + externalId: CogniteSourceable + type: container + containerPropertyIdentifier: source + description: Direct relation to a source system + source: + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageAnnotation + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageCollection.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageCollection.view.yaml new file mode 100644 index 00000000..3cd7c6e5 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageCollection.view.yaml @@ -0,0 +1,39 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360ImageCollection +description: Represents a logical collection of Cognite360Image instances +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DRevision + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: Image360 +implements: +- space: cdf_cdm + externalId: Cognite360ImageCollection + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + model3D: + container: + space: cdf_cdm_3d + externalId: Cognite3DRevision + type: container + containerPropertyIdentifier: model3D + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageCollection + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageModel.view.yaml 
b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageModel.view.yaml new file mode 100644 index 00000000..0797189a --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageModel.view.yaml @@ -0,0 +1,55 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}360ImageModel +description: Navigational aid for traversing Cognite360ImageModel instances +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DModel + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: Image360 +implements: +- space: cdf_cdm + externalId: Cognite360ImageModel + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + collections: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageCollection + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DRevision + version: {{ datamodelVersion }} + type: view + identifier: model3D + description: List Cognite360Image collections for this instance + connectionType: multi_reverse_direct_relation + thumbnail: + container: + space: cdf_cdm_3d + externalId: Cognite3DModel + type: container + containerPropertyIdentifier: thumbnail + description: Thumbnail of the 3D model + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageModel + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageStation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageStation.view.yaml new file mode 100644 index 00000000..4e38ee2a --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/360ImageStation.view.yaml @@ -0,0 +1,29 @@ +space: 
{{ schemaSpace }} +externalId: {{ organization }}360ImageStation +description: A way to group images across collections. Used for creating visual scan + history +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DGroup + - equals: + property: + - cdf_cdm_3d + - Cognite3DGroup + - groupType + value: Station360 +implements: +- space: cdf_cdm + externalId: Cognite360ImageStation + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}360ImageStation + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/3DModel.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/3DModel.view.yaml new file mode 100644 index 00000000..7562d795 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/3DModel.view.yaml @@ -0,0 +1,29 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DModel +description: Groups revisions of 3D data of various kinds together (CAD, PointCloud, + Image360) +implements: +- space: cdf_cdm + externalId: Cognite3DModel + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + thumbnail: + container: + space: cdf_cdm_3d + externalId: Cognite3DModel + type: container + containerPropertyIdentifier: thumbnail + description: Thumbnail of the 3D model + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/3DObject.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/3DObject.view.yaml new file mode 100644 index 00000000..31f70a61 --- /dev/null +++ 
b/modules/models/cdf_process_industry_extension_full/data_models/views/3DObject.view.yaml @@ -0,0 +1,81 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DObject +description: This is the virtual position representation of an object in the physical + world, connecting an asset to one or more 3D resources +implements: +- space: cdf_cdm + externalId: Cognite3DObject + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + asset: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}Asset + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}Asset + version: {{ datamodelVersion }} + type: view + identifier: object3D + description: Asset that is tied to this 3D object + connectionType: single_reverse_direct_relation + cadNodes: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADNode + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADNode + version: {{ datamodelVersion }} + type: view + identifier: object3D + description: List of up to 1000 CADNodes that represents the connected CogniteAsset + connectionType: multi_reverse_direct_relation + images360: + type: + space: cdf_cdm + externalId: image-360-annotation + source: + space: {{ schemaSpace }} + externalId: {{ organization }}360Image + version: {{ datamodelVersion }} + type: view + direction: outwards + description: Edge connection to Cognite360Image annotations that represents the + connected CogniteAsset + edgeSource: + space: cdf_cdm + externalId: Cognite360ImageAnnotation + version: v1 + type: view + connectionType: multi_edge_connection + pointCloudVolumes: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudVolume + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudVolume + version: {{ 
datamodelVersion }} + type: view + identifier: object3D + description: List of up to 1000 PointCloudVolumes that represents the connected + CogniteAsset + connectionType: multi_reverse_direct_relation + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/3DRevision.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/3DRevision.view.yaml new file mode 100644 index 00000000..00f4c882 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/3DRevision.view.yaml @@ -0,0 +1,30 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DRevision +description: 'Shared revision information for various 3D data types. Normally not + used directly, but through CognitePointCloudRevision, Image360Collection or CogniteCADRevision + + ' +implements: +- space: cdf_cdm + externalId: Cognite3DRevision + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + model3D: + container: + space: cdf_cdm_3d + externalId: Cognite3DRevision + type: container + containerPropertyIdentifier: model3D + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}3DRevision + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/3DTransformation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/3DTransformation.view.yaml new file mode 100644 index 00000000..cccdb044 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/3DTransformation.view.yaml @@ -0,0 +1,23 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}3DTransformation +description: 'The 
Cognite3DTransformation object defines a comprehensive 3D transformation, + enabling precise adjustments to an object''s position, orientation, and size in + the 3D coordinate system. It allows for the translation of objects along the three + spatial axes, rotation around these axes using Euler angles, and scaling along each + axis to modify the object''s dimensions. The object''s transformation is defined + in "CDF space", a coordinate system where the positive Z axis is the up direction + + ' +implements: +- space: cdf_cdm + externalId: Cognite3DTransformation + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}3DTransformation + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Activity.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Activity.view.yaml similarity index 65% rename from modules/models/cdf_process_industry_extension/data_modeling/views/Activity.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/Activity.view.yaml index 36348cc0..f110b7a7 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/Activity.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Activity.view.yaml @@ -57,9 +57,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: @@ -67,35 +67,3 @@ properties: externalId: {{ organization }}Activity type: container containerPropertyIdentifier: UUID - sysSite: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: 
sysSite - description: Site (sys) - name: Site (sys) - sysUnit: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysUnit - description: Unit (sys) - name: Unit (sys) - sysTagsFound: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsFound - description: Tags found (sys) - name: Tags found (sys) - sysTagsLinked: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsLinked - description: Tags linked (sys) - name: Tags linked (sys) \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Annotation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Annotation.view.yaml new file mode 100644 index 00000000..e71c9b5d --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Annotation.view.yaml @@ -0,0 +1,28 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Annotation +description: Annotation represents contextualization results or links +implements: +- space: cdf_cdm + externalId: CogniteAnnotation + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + source: + container: + space: cdf_cdm + externalId: CogniteSourceable + type: container + containerPropertyIdentifier: source + description: Direct relation to a source system + source: + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Annotation + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Asset.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Asset.view.yaml 
similarity index 91% rename from modules/models/cdf_process_industry_extension/data_modeling/views/Asset.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/Asset.view.yaml index dffaa941..a54a9648 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/Asset.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Asset.view.yaml @@ -61,9 +61,9 @@ properties: name: Asset class description: Specifies the class of the asset. It's a direct relation to CogniteAssetClass. source: - space: cdf_cdm - externalId: CogniteAssetClass - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}AssetClass + version: {{ datamodelVersion }} type: view type: container: @@ -74,9 +74,9 @@ properties: name: Asset type description: Specifies the type of the asset. It's a direct relation to CogniteAssetType. source: - space: cdf_cdm - externalId: CogniteAssetType - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}AssetType + version: {{ datamodelVersion }} type: view files: source: @@ -167,9 +167,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view object3D: container: @@ -178,6 +178,11 @@ properties: type: container containerPropertyIdentifier: object3D description: Direct relation to an Object3D instance representing the 3D resource + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + version: {{ datamodelVersion }} + type: view UUID: container: space: {{ schemaSpace }} diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/AssetClass.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/AssetClass.view.yaml new file mode 100644 index 
00000000..1d44b1a8 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/AssetClass.view.yaml @@ -0,0 +1,17 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}AssetClass +name: Asset class +description: Represents the class of an asset. +implements: +- space: cdf_cdm + externalId: CogniteAssetClass + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}AssetClass + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/AssetType.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/AssetType.view.yaml new file mode 100644 index 00000000..edbe08a2 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/AssetType.view.yaml @@ -0,0 +1,31 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}AssetType +name: Asset type +description: Represents the type of an asset. +implements: +- space: cdf_cdm + externalId: CogniteAssetType + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + assetClass: + container: + space: cdf_cdm + externalId: CogniteAssetType + type: container + containerPropertyIdentifier: assetClass + name: Asset class + description: Specifies the class the type belongs to. It's a direct relation to + CogniteAssetClass. 
+ source: + space: {{ schemaSpace }} + externalId: {{ organization }}AssetClass + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}AssetType + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/CADModel.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/CADModel.view.yaml new file mode 100644 index 00000000..b4109d7e --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/CADModel.view.yaml @@ -0,0 +1,55 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CADModel +description: Navigational aid for traversing CogniteCADModel instances +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DModel + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: CAD +implements: +- space: cdf_cdm + externalId: CogniteCADModel + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + revisions: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADRevision + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DRevision + version: {{ datamodelVersion }} + type: view + identifier: model3D + description: List of revisions for this CAD model + connectionType: multi_reverse_direct_relation + thumbnail: + container: + space: cdf_cdm_3d + externalId: Cognite3DModel + type: container + containerPropertyIdentifier: thumbnail + description: Thumbnail of the 3D model + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}CADModel + type: container + containerPropertyIdentifier: UUID diff --git 
a/modules/models/cdf_process_industry_extension_full/data_models/views/CADNode.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/CADNode.view.yaml new file mode 100644 index 00000000..41a03344 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/CADNode.view.yaml @@ -0,0 +1,53 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CADNode +description: Represents nodes from the 3D model that have been contextualized +implements: +- space: cdf_cdm + externalId: CogniteCADNode + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + object3D: + container: + space: cdf_cdm_3d + externalId: CogniteCADNode + type: container + containerPropertyIdentifier: object3D + description: Direct relation to object3D grouping for this node + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + version: {{ datamodelVersion }} + type: view + model3D: + container: + space: cdf_cdm_3d + externalId: CogniteCADNode + type: container + containerPropertyIdentifier: model3D + description: Direct relation to Cognite3DModel + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADModel + version: {{ datamodelVersion }} + type: view + revisions: + container: + space: cdf_cdm_3d + externalId: CogniteCADNode + type: container + containerPropertyIdentifier: revisions + description: List of direct relations to instances of Cognite3DRevision which + this CogniteCADNode exists in. 
+ source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADRevision + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}CADNode + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/CADRevision.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/CADRevision.view.yaml new file mode 100644 index 00000000..c8f6657a --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/CADRevision.view.yaml @@ -0,0 +1,39 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CADRevision +description: '' +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DRevision + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: CAD +implements: +- space: cdf_cdm + externalId: CogniteCADRevision + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + model3D: + container: + space: cdf_cdm_3d + externalId: Cognite3DRevision + type: container + containerPropertyIdentifier: model3D + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}CADRevision + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/CubeMap.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/CubeMap.view.yaml new file mode 100644 index 00000000..92b6b6a0 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/CubeMap.view.yaml @@ -0,0 +1,97 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}CubeMap +description: 'The cube map holds references to 6 images in used to visually represent + the surrounding environment + + ' 
+implements: +- space: cdf_cdm + externalId: CogniteCubeMap + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + front: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: front + description: Direct relation to a file holding the front projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + back: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: back + description: Direct relation to a file holding the back projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + left: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: left + description: Direct relation to a file holding the left projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + right: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: right + description: Direct relation to a file holding the right projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + top: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: top + description: Direct relation to a file holding the top projection of the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + bottom: + container: + space: cdf_cdm_3d + externalId: CogniteCubeMap + type: container + containerPropertyIdentifier: bottom + description: Direct relation to a file holding the bottom projection of 
the cube + map + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}CubeMap + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Describable.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Describable.view.yaml new file mode 100644 index 00000000..948f5acb --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Describable.view.yaml @@ -0,0 +1,19 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Describable +description: 'The describable core concept is used as a standard way of holding the + bare minimum of information about the instance + + ' +implements: +- space: cdf_cdm + externalId: CogniteDescribable + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Describable + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/DiagramAnnotation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/DiagramAnnotation.view.yaml new file mode 100644 index 00000000..5f3874a8 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/DiagramAnnotation.view.yaml @@ -0,0 +1,28 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}DiagramAnnotation +description: Annotation for diagrams +implements: +- space: cdf_cdm + externalId: CogniteDiagramAnnotation + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + source: + container: + space: cdf_cdm + externalId: CogniteSourceable + type: container + containerPropertyIdentifier: source + description: Direct relation to a source system + source: + space: {{ schemaSpace 
}} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}DiagramAnnotation + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Equipment.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Equipment.view.yaml similarity index 91% rename from modules/models/cdf_process_industry_extension/data_modeling/views/Equipment.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/Equipment.view.yaml index ec3a05b1..1026dc70 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/Equipment.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Equipment.view.yaml @@ -31,9 +31,9 @@ properties: name: Equipment type description: Specifies the type of the equipment. It's a direct relation to CogniteEquipmentType. 
source: - space: cdf_cdm - externalId: CogniteEquipmentType - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}EquipmentType + version: {{ datamodelVersion }} type: view files: container: @@ -88,9 +88,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/EquipmentType.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/EquipmentType.view.yaml new file mode 100644 index 00000000..27e5f345 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/EquipmentType.view.yaml @@ -0,0 +1,17 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}EquipmentType +name: Equipment type +description: Represents the type of equipment. 
+implements: +- space: cdf_cdm + externalId: CogniteEquipmentType + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}EquipmentType + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/File.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/File.view.yaml similarity index 88% rename from modules/models/cdf_process_industry_extension/data_modeling/views/File.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/File.view.yaml index 1f686636..4feded92 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/File.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/File.view.yaml @@ -32,9 +32,9 @@ properties: description: Specifies the detected category the file belongs to. It's a direct relation to an instance of CogniteFileCategory. 
source: - space: cdf_cdm - externalId: CogniteFileCategory - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}FileCategory + version: {{ datamodelVersion }} type: view equipment: source: @@ -60,9 +60,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/FileCategory.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/FileCategory.view.yaml new file mode 100644 index 00000000..3ed0a964 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/FileCategory.view.yaml @@ -0,0 +1,18 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}FileCategory +name: File category +description: Represents the categories of files as determined by contextualization + or categorization. 
+implements: +- space: cdf_cdm + externalId: CogniteFileCategory + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}FileCategory + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/MaintenanceOrder.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/MaintenanceOrder.view.yaml similarity index 75% rename from modules/models/cdf_process_industry_extension/data_modeling/views/MaintenanceOrder.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/MaintenanceOrder.view.yaml index f531e11e..5033fb4b 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/MaintenanceOrder.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/MaintenanceOrder.view.yaml @@ -88,9 +88,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: @@ -98,35 +98,3 @@ properties: externalId: {{ organization }}MaintenanceOrder type: container containerPropertyIdentifier: UUID - sysSite: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysSite - description: Site (sys) - name: Site (sys) - sysUnit: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysUnit - description: Unit (sys) - name: Unit (sys) - sysTagsFound: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsFound - description: Tags found (sys) - name: 
Tags found (sys) - sysTagsLinked: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsLinked - description: Tags linked (sys) - name: Tags linked (sys) \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Notification.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Notification.view.yaml similarity index 91% rename from modules/models/cdf_process_industry_extension/data_modeling/views/Notification.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/Notification.view.yaml index 0241751a..ddf9f49f 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/Notification.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Notification.view.yaml @@ -1,6 +1,6 @@ space: {{ schemaSpace }} externalId: {{ organization }}Notification -name: Notification +name: Notification``` description: A formal record to report maintenance issues, defects, or requests, starting the process for planning and running maintenance activities. 
implements: @@ -44,9 +44,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/Operation.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Operation.view.yaml similarity index 73% rename from modules/models/cdf_process_industry_extension/data_modeling/views/Operation.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/Operation.view.yaml index 658e026a..3b4bb1b5 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/Operation.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Operation.view.yaml @@ -84,9 +84,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: @@ -94,35 +94,3 @@ properties: externalId: {{ organization }}Operation type: container containerPropertyIdentifier: UUID - sysSite: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysSite - description: Site (sys) - name: Site (sys) - sysUnit: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysUnit - description: Unit (sys) - name: Unit (sys) - sysTagsFound: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsFound - description: Tags found (sys) - name: Tags 
found (sys) - sysTagsLinked: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsLinked - description: Tags linked (sys) - name: Tags linked (sys) \ No newline at end of file diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudModel.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudModel.view.yaml new file mode 100644 index 00000000..5195c96d --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudModel.view.yaml @@ -0,0 +1,55 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudModel +description: Navigational aid for traversing CognitePointCloudModel instances +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DModel + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: PointCloud +implements: +- space: cdf_cdm + externalId: CognitePointCloudModel + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + revisions: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudRevision + version: {{ datamodelVersion }} + type: view + through: + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DRevision + version: {{ datamodelVersion }} + type: view + identifier: model3D + description: List of revisions for this PointCloud model + connectionType: multi_reverse_direct_relation + thumbnail: + container: + space: cdf_cdm_3d + externalId: Cognite3DModel + type: container + containerPropertyIdentifier: thumbnail + description: Thumbnail of the 3D model + source: + space: {{ schemaSpace }} + externalId: {{ organization }}File + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudModel + type: container + containerPropertyIdentifier: UUID diff --git 
a/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudRevision.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudRevision.view.yaml new file mode 100644 index 00000000..af813ba2 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudRevision.view.yaml @@ -0,0 +1,39 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudRevision +description: Navigational aid for traversing CognitePointCloudRevision instances +filter: + and: + - hasData: + - type: container + space: cdf_cdm_3d + externalId: Cognite3DRevision + - equals: + property: + - cdf_cdm_3d + - Cognite3DModel + - type + value: PointCloud +implements: +- space: cdf_cdm + externalId: CognitePointCloudRevision + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + model3D: + container: + space: cdf_cdm_3d + externalId: Cognite3DRevision + type: container + containerPropertyIdentifier: model3D + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DModel + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudRevision + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudVolume.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudVolume.view.yaml new file mode 100644 index 00000000..0fb8d614 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/PointCloudVolume.view.yaml @@ -0,0 +1,52 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}PointCloudVolume +description: PointCloud volume definition +implements: +- space: cdf_cdm + externalId: CognitePointCloudVolume + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + object3D: + container: + space: cdf_cdm_3d + externalId: 
CognitePointCloudVolume + type: container + containerPropertyIdentifier: object3D + description: Direct relation to object3D grouping for this node + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + version: {{ datamodelVersion }} + type: view + model3D: + container: + space: cdf_cdm_3d + externalId: CognitePointCloudVolume + type: container + containerPropertyIdentifier: model3D + description: Direct relation to Cognite3DModel instance + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADModel + version: {{ datamodelVersion }} + type: view + revisions: + container: + space: cdf_cdm_3d + externalId: CognitePointCloudVolume + type: container + containerPropertyIdentifier: revisions + description: List of direct relations to revision information + source: + space: {{ schemaSpace }} + externalId: {{ organization }}CADRevision + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}PointCloudVolume + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Schedulable.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Schedulable.view.yaml new file mode 100644 index 00000000..196f407d --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Schedulable.view.yaml @@ -0,0 +1,17 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Schedulable +description: CogniteSchedulable represents the metadata about when an activity (or + similar) starts and ends. 
+implements: +- space: cdf_cdm + externalId: CogniteSchedulable + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Schedulable + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/SourceSystem.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/SourceSystem.view.yaml new file mode 100644 index 00000000..55b90ada --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/SourceSystem.view.yaml @@ -0,0 +1,17 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}SourceSystem +description: The CogniteSourceSystem core concept is used to standardize the way source + system is stored. +implements: +- space: cdf_cdm + externalId: CogniteSourceSystem + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Sourceable.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Sourceable.view.yaml new file mode 100644 index 00000000..45d93817 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Sourceable.view.yaml @@ -0,0 +1,27 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Sourceable +implements: +- space: cdf_cdm + externalId: CogniteSourceable + version: {{ datamodelVersion }} + type: view +version: {{ datamodelVersion }} +properties: + source: + container: + space: cdf_cdm + externalId: CogniteSourceable + type: container + containerPropertyIdentifier: source + description: Direct relation to a source system + source: + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ 
datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Sourceable + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension/data_modeling/views/TimeSeries.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/TimeSeries.view.yaml similarity index 67% rename from modules/models/cdf_process_industry_extension/data_modeling/views/TimeSeries.view.yaml rename to modules/models/cdf_process_industry_extension_full/data_models/views/TimeSeries.view.yaml index 8f5e25c6..3f7f7235 100644 --- a/modules/models/cdf_process_industry_extension/data_modeling/views/TimeSeries.view.yaml +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/TimeSeries.view.yaml @@ -1,7 +1,7 @@ space: {{ schemaSpace }} externalId: {{ organization }}TimeSeries name: Time series -description: Represents a series of data points in time order. +description: Represents a series of data points in time order." implements: - space: cdf_cdm externalId: CogniteTimeSeries @@ -18,9 +18,9 @@ properties: name: Unit description: The unit of the time series. 
source: - space: cdf_cdm - externalId: CogniteUnit - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}Unit + version: {{ datamodelVersion }} type: view assets: container: @@ -73,9 +73,9 @@ properties: containerPropertyIdentifier: source description: Direct relation to a source system source: - space: cdf_cdm - externalId: CogniteSourceSystem - version: v1 + space: {{ schemaSpace }} + externalId: {{ organization }}SourceSystem + version: {{ datamodelVersion }} type: view UUID: container: @@ -83,35 +83,3 @@ properties: externalId: {{ organization }}TimeSeries type: container containerPropertyIdentifier: UUID - sysSite: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysSite - description: Site (sys) - name: Site (sys) - sysUnit: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysUnit - description: Unit (sys) - name: Unit (sys) - sysTagsFound: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsFound - description: Tags found (sys) - name: Tags found (sys) - sysTagsLinked: - container: - space: {{ schemaSpace }} - externalId: {{ organization }}Reportable - type: container - containerPropertyIdentifier: sysTagsLinked - description: Tags linked (sys) - name: Tags linked (sys) diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Unit.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Unit.view.yaml new file mode 100644 index 00000000..a388001b --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Unit.view.yaml @@ -0,0 +1,16 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Unit +description: Represents a single unit of measurement +implements: +- space: cdf_cdm + externalId: CogniteUnit + 
version: v1 + type: view +version: {{ datamodelVersion }} +properties: + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Unit + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/data_models/views/Visualizable.view.yaml b/modules/models/cdf_process_industry_extension_full/data_models/views/Visualizable.view.yaml new file mode 100644 index 00000000..12cc9956 --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/data_models/views/Visualizable.view.yaml @@ -0,0 +1,29 @@ +space: {{ schemaSpace }} +externalId: {{ organization }}Visualizable +description: CogniteVisualizable defines the standard way to reference a related 3D + resource +implements: +- space: cdf_cdm + externalId: CogniteVisualizable + version: v1 + type: view +version: {{ datamodelVersion }} +properties: + object3D: + container: + space: cdf_cdm + externalId: CogniteVisualizable + type: container + containerPropertyIdentifier: object3D + description: Direct relation to an Object3D instance representing the 3D resource + source: + space: {{ schemaSpace }} + externalId: {{ organization }}3DObject + version: {{ datamodelVersion }} + type: view + UUID: + container: + space: {{ schemaSpace }} + externalId: {{ organization }}Visualizable + type: container + containerPropertyIdentifier: UUID diff --git a/modules/models/cdf_process_industry_extension_full/default.config.yaml b/modules/models/cdf_process_industry_extension_full/default.config.yaml new file mode 100644 index 00000000..87a931da --- /dev/null +++ b/modules/models/cdf_process_industry_extension_full/default.config.yaml @@ -0,0 +1,3 @@ +organization: ORG +schemaSpace: sp_enterprise_process_industry_full +datamodelVersion: v1.0 diff --git a/modules/models/cdf_process_industry_extension_full/module.toml b/modules/models/cdf_process_industry_extension_full/module.toml new file mode 100644 index 00000000..6a33fa10 --- /dev/null +++ 
b/modules/models/cdf_process_industry_extension_full/module.toml @@ -0,0 +1,5 @@ +[module] +title = "Example of CogniteProcessIndustry Extension Model" +is_selected_by_default = false +id = "dp:models:cdf_process_industry_extension_full" +package_id = "dp:models" \ No newline at end of file diff --git a/modules/models/cdf_scene/data_models/containers/scene_Cdf3dRevisionProperties.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_Cdf3dRevisionProperties.Container.yaml new file mode 100644 index 00000000..fe004bd2 --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_Cdf3dRevisionProperties.Container.yaml @@ -0,0 +1,13 @@ +space: {{ schemaSpace }} +externalId: Cdf3dRevisionProperties +name: Cdf3dRevisionProperties +usedFor: edge +properties: + revisionId: + type: + list: false + type: int64 + immutable: false + nullable: false + autoIncrement: false + name: revisionId diff --git a/modules/models/cdf_scene/data_models/containers/scene_EnvironmentMap.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_EnvironmentMap.Container.yaml new file mode 100644 index 00000000..65242bda --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_EnvironmentMap.Container.yaml @@ -0,0 +1,12 @@ +space: {{ schemaSpace }} +externalId: EnvironmentMap +name: EnvironmentMap +properties: + isSpherical: + type: + list: false + type: boolean + immutable: false + nullable: false + autoIncrement: false + name: isSpherical diff --git a/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionProperties.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionProperties.Container.yaml new file mode 100644 index 00000000..5ec14c37 --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionProperties.Container.yaml @@ -0,0 +1,23 @@ +space: {{ schemaSpace }} +externalId: Image360CollectionProperties +name: Image360CollectionProperties +usedFor: 
edge +properties: + image360CollectionExternalId: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: false + autoIncrement: false + name: image360CollectionExternalId + image360CollectionSpace: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: false + autoIncrement: false + name: image360CollectionSpace diff --git a/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionScene.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionScene.Container.yaml new file mode 100644 index 00000000..bde8fab3 --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_Image360CollectionScene.Container.yaml @@ -0,0 +1,12 @@ +space: {{ schemaSpace }} +externalId: Image360CollectionScene +name: Image360CollectionScene +properties: + model3d: + type: + list: false + type: direct + immutable: false + nullable: true + autoIncrement: false + name: model3d diff --git a/modules/models/cdf_scene/data_models/containers/scene_SceneConfiguration.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_SceneConfiguration.Container.yaml new file mode 100644 index 00000000..3e3ca35e --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_SceneConfiguration.Container.yaml @@ -0,0 +1,152 @@ +space: {{ schemaSpace }} +externalId: SceneConfiguration +name: SceneConfiguration +properties: + name: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: false + autoIncrement: false + name: name + description: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false + name: description + cameraTranslationX: + type: + list: false + type: float64 + immutable: false + nullable: false + autoIncrement: false + name: cameraTranslationX + cameraTranslationY: + type: + list: false + type: float64 + immutable: false + nullable: false + 
autoIncrement: false + name: cameraTranslationY + cameraTranslationZ: + type: + list: false + type: float64 + immutable: false + nullable: false + autoIncrement: false + name: cameraTranslationZ + cameraEulerRotationX: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: cameraEulerRotationX + cameraEulerRotationY: + type: + list: false + type: float64 + immutable: false + nullable: false + autoIncrement: false + name: cameraEulerRotationY + cameraEulerRotationZ: + type: + list: false + type: float64 + immutable: false + nullable: false + autoIncrement: false + name: cameraEulerRotationZ + cameraTargetX: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: cameraTargetX + cameraTargetY: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: cameraTargetY + cameraTargetZ: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: cameraTargetZ + latitude: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: latitude + longitude: + type: + list: false + type: float64 + immutable: false + nullable: true + autoIncrement: false + name: longitude + createdBy: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false + name: createdBy + updatedAt: + type: + list: false + type: timestamp + immutable: false + nullable: true + autoIncrement: false + name: updatedAt + updatedBy: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false + name: updatedBy + thumbnailFile: + type: + list: false + type: file + immutable: false + nullable: true + autoIncrement: false + name: file + skybox: + type: + list: false + type: direct + immutable: false + nullable: true + autoIncrement: false + name: skybox diff --git 
a/modules/models/cdf_scene/data_models/containers/scene_TexturedMap.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_TexturedMap.Container.yaml new file mode 100644 index 00000000..9008c8cc --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_TexturedMap.Container.yaml @@ -0,0 +1,21 @@ +space: {{ schemaSpace }} +externalId: TexturedMap +name: TexturedMap +properties: + label: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: false + autoIncrement: false + name: label + file: + type: + list: false + type: file + immutable: false + nullable: true + autoIncrement: false + name: file diff --git a/modules/models/cdf_scene/data_models/containers/scene_TexturedPlane.Container.yaml b/modules/models/cdf_scene/data_models/containers/scene_TexturedPlane.Container.yaml new file mode 100644 index 00000000..c7b0b205 --- /dev/null +++ b/modules/models/cdf_scene/data_models/containers/scene_TexturedPlane.Container.yaml @@ -0,0 +1,29 @@ +space: {{ schemaSpace }} +externalId: TexturedPlane +name: TexturedPlane +properties: + wrapping: + type: + list: false + collation: ucs_basic + type: text + immutable: false + nullable: true + autoIncrement: false + name: wrapping + repeatU: + type: + list: false + type: int64 + immutable: false + nullable: true + autoIncrement: false + name: repeatU + repeatV: + type: + list: false + type: int64 + immutable: false + nullable: true + autoIncrement: false + name: repeatV diff --git a/modules/models/cdf_scene/data_models/scene.Space.yaml b/modules/models/cdf_scene/data_models/scene.Space.yaml new file mode 100644 index 00000000..3d642bd7 --- /dev/null +++ b/modules/models/cdf_scene/data_models/scene.Space.yaml @@ -0,0 +1,3 @@ +space: {{ schemaSpace }} +description: Space for storing 3D scene data model and instances +name: {{ schemaSpace }} diff --git a/modules/models/cdf_scene/data_models/scene_data_model.DataModel.yaml 
b/modules/models/cdf_scene/data_models/scene_data_model.DataModel.yaml new file mode 100644 index 00000000..18c64a61 --- /dev/null +++ b/modules/models/cdf_scene/data_models/scene_data_model.DataModel.yaml @@ -0,0 +1,66 @@ +space: {{ schemaSpace }} +externalId: scene_data_model +name: SceneConfiguration +description: This is the data model used for storing 3D scenes +version: '1' +views: +- space: {{ schemaSpace }} + externalId: SceneConfiguration + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: Image360CollectionProperties + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: RevisionProperties + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: EnvironmentMap + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: TexturedMap + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: TexturedPlane + version: v1 + type: view +- space: {{ schemaSpace }} + externalId: Image360CollectionScene + version: v1 + type: view +- space: cdf_3d_schema + externalId: Cdf3dModel + version: '1' + type: view +- space: cdf_3d_schema + externalId: Cdf3dEntity + version: '1' + type: view +- space: cdf_3d_schema + externalId: Cdf3dConnectionProperties + version: '1' + type: view +- space: cdf_360_image_schema + externalId: Image360 + version: v1 + type: view +- space: cdf_360_image_schema + externalId: Image360Collection + version: v1 + type: view +- space: cdf_360_image_schema + externalId: Station360 + version: v1 + type: view +- space: cdf_3d_schema + externalId: Transformation3d + version: v1 + type: view +- space: cdf_3d_schema + externalId: CubeMap + version: v1 + type: view diff --git a/modules/models/cdf_scene/data_models/views/EnvironmentMap.View.yaml b/modules/models/cdf_scene/data_models/views/EnvironmentMap.View.yaml new file mode 100644 index 00000000..e1fab19a --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/EnvironmentMap.View.yaml @@ -0,0 +1,28 @@ +space: {{ schemaSpace }} 
+externalId: EnvironmentMap +name: EnvironmentMap +implements: +- space: {{ schemaSpace }} + externalId: TexturedMap + version: v1 + type: view +version: v1 +properties: + label: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: label + file: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: file + isSpherical: + container: + space: {{ schemaSpace }} + externalId: EnvironmentMap + type: container + containerPropertyIdentifier: isSpherical diff --git a/modules/models/cdf_scene/data_models/views/Image360CollectionProperties.View.yaml b/modules/models/cdf_scene/data_models/views/Image360CollectionProperties.View.yaml new file mode 100644 index 00000000..ecde3f7d --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/Image360CollectionProperties.View.yaml @@ -0,0 +1,22 @@ +space: {{ schemaSpace }} +externalId: Image360CollectionProperties +name: Image360CollectionProperties +implements: +- space: cdf_3d_schema + externalId: Transformation3d + version: v1 + type: view +version: v1 +properties: + image360CollectionExternalId: + container: + space: {{ schemaSpace }} + externalId: Image360CollectionProperties + type: container + containerPropertyIdentifier: image360CollectionExternalId + image360CollectionSpace: + container: + space: {{ schemaSpace }} + externalId: Image360CollectionProperties + type: container + containerPropertyIdentifier: image360CollectionSpace diff --git a/modules/models/cdf_scene/data_models/views/Image360CollectionScene.View.yaml b/modules/models/cdf_scene/data_models/views/Image360CollectionScene.View.yaml new file mode 100644 index 00000000..97aa2e82 --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/Image360CollectionScene.View.yaml @@ -0,0 +1,20 @@ +space: {{ schemaSpace }} +externalId: Image360CollectionScene +implements: +- space: cdf_360_image_schema + externalId: Image360Collection + version: 
v1 + type: view +version: v1 +properties: + model3d: + container: + space: {{ schemaSpace }} + externalId: Image360CollectionScene + type: container + containerPropertyIdentifier: model3d + source: + space: cdf_3d_schema + externalId: Cdf3dModel + version: '1' + type: view diff --git a/modules/models/cdf_scene/data_models/views/RevisionProperties.View.yaml b/modules/models/cdf_scene/data_models/views/RevisionProperties.View.yaml new file mode 100644 index 00000000..f08c006a --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/RevisionProperties.View.yaml @@ -0,0 +1,16 @@ +space: {{ schemaSpace }} +externalId: RevisionProperties +name: Cdf3dRevisionProperties +implements: +- space: cdf_3d_schema + externalId: Transformation3d + version: v1 + type: view +version: v1 +properties: + revisionId: + container: + space: {{ schemaSpace }} + externalId: Cdf3dRevisionProperties + type: container + containerPropertyIdentifier: revisionId diff --git a/modules/models/cdf_scene/data_models/views/SceneConfiguration.View.yaml b/modules/models/cdf_scene/data_models/views/SceneConfiguration.View.yaml new file mode 100644 index 00000000..e7a1d2ee --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/SceneConfiguration.View.yaml @@ -0,0 +1,166 @@ +space: {{ schemaSpace }} +externalId: SceneConfiguration +name: SceneConfiguration +version: v1 +properties: + name: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: name + description: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: description + cameraTranslationX: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTranslationX + cameraTranslationY: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTranslationY + 
cameraTranslationZ: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTranslationZ + cameraEulerRotationX: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraEulerRotationX + cameraEulerRotationY: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraEulerRotationY + cameraEulerRotationZ: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraEulerRotationZ + cameraTargetX: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTargetX + cameraTargetY: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTargetY + cameraTargetZ: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: cameraTargetZ + latitude: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: latitude + longitude: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: longitude + thumbnailFile: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: thumbnailFile + createdBy: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: createdBy + updatedAt: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: updatedAt + updatedBy: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: updatedBy + 
skybox: + container: + space: {{ schemaSpace }} + externalId: SceneConfiguration + type: container + containerPropertyIdentifier: skybox + source: + space: {{ schemaSpace }} + externalId: EnvironmentMap + version: v1 + type: view + model3ds: + type: + space: {{ schemaSpace }} + externalId: SceneConfiguration.model3ds + source: + space: cdf_3d_schema + externalId: Cdf3dModel + version: '1' + type: view + direction: outwards + edgeSource: + space: {{ schemaSpace }} + externalId: RevisionProperties + version: v1 + type: view + connectionType: multi_edge_connection + images360Collections: + type: + space: {{ schemaSpace }} + externalId: SceneConfiguration.images360Collections + source: + space: cdf_3d_schema + externalId: Cdf3dModel + version: '1' + type: view + direction: outwards + edgeSource: + space: {{ schemaSpace }} + externalId: Image360CollectionProperties + version: v1 + type: view + connectionType: multi_edge_connection + texturedGroundPlanes: + type: + space: {{ schemaSpace }} + externalId: SceneConfiguration.texturedGroundPlanes + source: + space: {{ schemaSpace }} + externalId: TexturedPlane + version: v1 + type: view + direction: outwards + edgeSource: + space: cdf_3d_schema + externalId: Transformation3d + version: v1 + type: view + connectionType: multi_edge_connection diff --git a/modules/models/cdf_scene/data_models/views/TexturedMap.View.yaml b/modules/models/cdf_scene/data_models/views/TexturedMap.View.yaml new file mode 100644 index 00000000..be3c428a --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/TexturedMap.View.yaml @@ -0,0 +1,17 @@ +space: {{ schemaSpace }} +externalId: TexturedMap +name: TexturedMap +version: v1 +properties: + label: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: label + file: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: file diff --git 
a/modules/models/cdf_scene/data_models/views/TexturedPlane.View.yaml b/modules/models/cdf_scene/data_models/views/TexturedPlane.View.yaml new file mode 100644 index 00000000..6f4fc696 --- /dev/null +++ b/modules/models/cdf_scene/data_models/views/TexturedPlane.View.yaml @@ -0,0 +1,40 @@ +space: {{ schemaSpace }} +externalId: TexturedPlane +name: TexturedPlane +implements: +- space: {{ schemaSpace }} + externalId: TexturedMap + version: v1 + type: view +version: v1 +properties: + label: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: label + file: + container: + space: {{ schemaSpace }} + externalId: TexturedMap + type: container + containerPropertyIdentifier: file + wrapping: + container: + space: {{ schemaSpace }} + externalId: TexturedPlane + type: container + containerPropertyIdentifier: wrapping + repeatU: + container: + space: {{ schemaSpace }} + externalId: TexturedPlane + type: container + containerPropertyIdentifier: repeatU + repeatV: + container: + space: {{ schemaSpace }} + externalId: TexturedPlane + type: container + containerPropertyIdentifier: repeatV diff --git a/modules/models/cdf_scene/default.config.yaml b/modules/models/cdf_scene/default.config.yaml new file mode 100644 index 00000000..6c346212 --- /dev/null +++ b/modules/models/cdf_scene/default.config.yaml @@ -0,0 +1 @@ +schemaSpace: scene diff --git a/modules/models/cdf_scene/module.toml b/modules/models/cdf_scene/module.toml new file mode 100644 index 00000000..47423c66 --- /dev/null +++ b/modules/models/cdf_scene/module.toml @@ -0,0 +1,4 @@ +[module] +title = "(Alpha) Scene Data Model" +id = "cdf_scene" +package_id = "dp:models" \ No newline at end of file diff --git a/modules/packages.toml b/modules/packages.toml index 468e741e..5eb963b4 100644 --- a/modules/packages.toml +++ b/modules/packages.toml @@ -34,9 +34,10 @@ canCherryPick = true modules = [ "models/rmdm_v1", "models/isa_manufacturing_extension", - 
"models/cdf_process_industry_extension", + "models/cdf_process_industry_extension_full", "models/qs_enterprise_dm", "models/cfihos_oil_and_gas_extension", + "models/cdf_scene", ] [packages.atlas_ai] @@ -117,3 +118,52 @@ modules = [ "tools/apps/qualitizer" ] +[packages.industrial_tools] +id = "dp:industrial_tools" +title = "Industrial Tools" +description = "Data exploration and analytics tools" +canCherryPick = true +modules = [ + "accelerators/industrial_tools/cdf_search", + "accelerators/industrial_tools/cdf_location_filter_datamodel_based", + "accelerators/industrial_tools/cdf_location_filter_asset_centric", +] + +[packages.inrobot] +id = "dp:inrobot" +title = "InRobot" +description = "Robotics inspection data integration" +canCherryPick = true +modules = [ + "accelerators/inrobot/cdf_inrobot_common", + "accelerators/inrobot/cdf_inrobot_location", +] + +[packages.bootcamp] +id = "dp:bootcamp" +title = "Bootcamp" +description = "Initial configurations for the Cognite Data Fusion Bootcamp" +canCherryPick = false +modules = [ + "bootcamp/ice_cream_api", + "bootcamp/use_cases/oee", +] + +[packages.custom] +id = "dp:custom" +title = "Custom" +description = "Start with an empty module" +canCherryPick = false +modules = [ + "custom/my_module", +] + +[packages.auth_readwrite_all] +id = "dp:common" +title = "Admin group for CI/CD with read-write and read-only access" +description = "Admin group for CI/CD with read-write and read-only access" +canCherryPick = false +modules = [ + "common/cdf_auth_readwrite_all", +] + diff --git a/packages.zip b/packages.zip new file mode 100644 index 00000000..aa07f815 Binary files /dev/null and b/packages.zip differ