from labelbox.schema.slice import CatalogSlice
from labelbox.schema.queue_mode import QueueMode

-from labelbox.schema.media_type import MediaType
+from labelbox.schema.media_type import MediaType, get_media_type_validation_error

logger = logging.getLogger(__name__)

@@ -853,15 +853,18 @@ def rootSchemaPayloadToFeatureSchema(client, payload):
            rootSchemaPayloadToFeatureSchema,
            ['rootSchemaNodes', 'nextCursor'])

-    def create_ontology_from_feature_schemas(self, name,
-                                             feature_schema_ids) -> Ontology:
+    def create_ontology_from_feature_schemas(self,
+                                             name,
+                                             feature_schema_ids,
+                                             media_type=None) -> Ontology:
        """
        Creates an ontology from a list of feature schema ids

        Args:
            name (str): Name of the ontology
            feature_schema_ids (List[str]): List of feature schema ids corresponding to
                top level tools and classifications to include in the ontology
+            media_type (MediaType or None): Media type of a new ontology
        Returns:
            The created Ontology
        """
@@ -891,9 +894,9 @@ def create_ontology_from_feature_schemas(self, name,
                    "Neither `tool` or `classification` found in the normalized feature schema"
                )
        normalized = {'tools': tools, 'classifications': classifications}
-        return self.create_ontology(name, normalized)
+        return self.create_ontology(name, normalized, media_type)

-    def create_ontology(self, name, normalized) -> Ontology:
+    def create_ontology(self, name, normalized, media_type=None) -> Ontology:
        """
        Creates an ontology from normalized data
            >>> normalized = {"tools" : [{'tool': 'polygon', 'name': 'cat', 'color': 'black'}], "classifications" : []}
@@ -910,13 +913,27 @@ def create_ontology(self, name, normalized) -> Ontology:
        Args:
            name (str): Name of the ontology
            normalized (dict): A normalized ontology payload. See above for details.
+            media_type (MediaType or None): Media type of a new ontology
        Returns:
            The created Ontology
        """
+
+        if media_type:
+            if MediaType.is_supported(media_type):
+                media_type = media_type.value
+            else:
+                raise get_media_type_validation_error(media_type)
+
        query_str = """mutation upsertRootSchemaNodePyApi($data: UpsertOntologyInput!){
            upsertOntology(data: $data){ %s }
        } """ % query.results_query_part(Entity.Ontology)
-        params = {'data': {'name': name, 'normalized': json.dumps(normalized)}}
+        params = {
+            'data': {
+                'name': name,
+                'normalized': json.dumps(normalized),
+                'mediaType': media_type
+            }
+        }
        res = self.execute(query_str, params)
        return Entity.Ontology(self, res['upsertOntology'])

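A minimal usage sketch of the new media_type argument; the API key, ontology name, and feature schema id below are placeholders, and MediaType.Image is only one of the values accepted by MediaType.is_supported:

    >>> from labelbox import Client
    >>> from labelbox.schema.media_type import MediaType
    >>> client = Client(api_key="<YOUR_API_KEY>")
    >>> normalized = {"tools": [{'tool': 'polygon', 'name': 'cat', 'color': 'black'}], "classifications": []}
    >>> ontology = client.create_ontology("cat ontology", normalized, media_type=MediaType.Image)
    >>> # media_type is forwarded unchanged by create_ontology_from_feature_schemas
    >>> ontology = client.create_ontology_from_feature_schemas("cat ontology", ["<FEATURE_SCHEMA_ID>"], media_type=MediaType.Image)
    >>> # an unsupported value raises the error built by get_media_type_validation_error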
@@ -1035,9 +1052,9 @@ def _format_failed_rows(rows: Dict[str, str],
        )

        # Start assign global keys to data rows job
-        query_str = """mutation assignGlobalKeysToDataRowsPyApi($globalKeyDataRowLinks: [AssignGlobalKeyToDataRowInput!]!) {
-            assignGlobalKeysToDataRows(data: {assignInputs: $globalKeyDataRowLinks}) {
-                jobId
+        query_str = """mutation assignGlobalKeysToDataRowsPyApi($globalKeyDataRowLinks: [AssignGlobalKeyToDataRowInput!]!) {
+            assignGlobalKeysToDataRows(data: {assignInputs: $globalKeyDataRowLinks}) {
+                jobId
            }
        }
        """
@@ -1172,7 +1189,7 @@ def _format_failed_rows(rows: List[str],

        # Query string for retrieving job status and result, if job is done
        result_query_str = """query getDataRowsForGlobalKeysResultPyApi($jobId: ID!) {
-            dataRowsForGlobalKeysResult(jobId: {id: $jobId}) { data {
+            dataRowsForGlobalKeysResult(jobId: {id: $jobId}) { data {
                fetchedDataRows { id }
                notFoundGlobalKeys
                accessDeniedGlobalKeys
@@ -1246,8 +1263,8 @@ def clear_global_keys(
            'Results' contains a list global keys that were successfully cleared.

-            'Errors' contains a list of global_keys correspond to the data rows that could not be
-            modified, accessed by the user, or not found.
+            'Errors' contains a list of global_keys correspond to the data rows that could not be
+            modified, accessed by the user, or not found.
        Examples:
            >>> job_result = client.get_data_row_ids_for_global_keys(["key1","key2"])
            >>> print(job_result['status'])
@@ -1271,7 +1288,7 @@ def _format_failed_rows(rows: List[str],

        # Query string for retrieving job status and result, if job is done
        result_query_str = """query clearGlobalKeysResultPyApi($jobId: ID!) {
-            clearGlobalKeysResult(jobId: {id: $jobId}) { data {
+            clearGlobalKeysResult(jobId: {id: $jobId}) { data {
                clearedGlobalKeys
                failedToClearGlobalKeys
                notFoundGlobalKeys