SUPPLYCHAIN

CreateBillOfMaterialsImportJob

valid {
    input.Body.s3uri == STRING
    input.Body.clientToken == STRING
    input.ReqMap.instanceId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
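
For illustration only, a hypothetical input document that would satisfy this rule is sketched below; every value (bucket URI, tokens, account, region) is invented and not taken from the source.

example_CreateBillOfMaterialsImportJob_input := {
    "Body": {
        "s3uri": "s3://example-bucket/bom/import.csv",           # assumed example URI
        "clientToken": "11111111-2222-3333-4444-555555555555"    # assumed idempotency token
    },
    "ReqMap": {
        "instanceId": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}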

CreateDataIntegrationFlow

enum_DataIntegrationFlowFileType := [ "CSV", "PARQUET", "JSON" ]
enum_DataIntegrationFlowLoadType := [ "INCREMENTAL", "REPLACE" ]
enum_DataIntegrationFlowSourceType := [ "S3", "DATASET" ]
enum_DataIntegrationFlowTargetType := [ "S3", "DATASET" ]
enum_DataIntegrationFlowTransformationType := [ "SQL", "NONE" ]

valid {
    input.Body.sources[_].sourceType == enum_DataIntegrationFlowSourceType[_]
    input.Body.sources[_].sourceName == STRING
    input.Body.sources[_].s3Source.bucketName == STRING
    input.Body.sources[_].s3Source.prefix == STRING
    input.Body.sources[_].s3Source.options.fileType == enum_DataIntegrationFlowFileType[_]
    input.Body.sources[_].datasetSource.datasetIdentifier == STRING
    input.Body.sources[_].datasetSource.options.loadType == enum_DataIntegrationFlowLoadType[_]
    input.Body.sources[_].datasetSource.options.dedupeRecords == BOOLEAN
    input.Body.transformation.transformationType == enum_DataIntegrationFlowTransformationType[_]
    input.Body.transformation.sqlTransformation.query == STRING
    input.Body.target.targetType == enum_DataIntegrationFlowTargetType[_]
    input.Body.target.s3Target.bucketName == STRING
    input.Body.target.s3Target.prefix == STRING
    input.Body.target.s3Target.options.fileType == enum_DataIntegrationFlowFileType[_]
    input.Body.target.datasetTarget.datasetIdentifier == STRING
    input.Body.target.datasetTarget.options.loadType == enum_DataIntegrationFlowLoadType[_]
    input.Body.target.datasetTarget.options.dedupeRecords == BOOLEAN
    input.Body.tags.STRING == STRING
    input.ReqMap.instanceId == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
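
A hypothetical input for an S3-to-dataset flow with a SQL transformation is sketched below. Only the source/target members matching the chosen sourceType and targetType are populated, which is an assumption about how these unions are used; the bucket, flow name, tags, and datasetIdentifier format are all invented.

example_CreateDataIntegrationFlow_input := {
    "Body": {
        "sources": [{
            "sourceType": "S3",
            "sourceName": "orders_source",
            "s3Source": {
                "bucketName": "example-bucket",
                "prefix": "orders/",
                "options": { "fileType": "CSV" }
            }
        }],
        "transformation": {
            "transformationType": "SQL",
            "sqlTransformation": { "query": "SELECT * FROM orders_source" }
        },
        "target": {
            "targetType": "DATASET",
            "datasetTarget": {
                # illustrative identifier, format assumed
                "datasetIdentifier": "arn:aws:scn:us-east-1:123456789012:instance/aaaa/namespaces/default/datasets/orders",
                "options": { "loadType": "REPLACE", "dedupeRecords": true }
            }
        },
        "tags": { "env": "test" }
    },
    "ReqMap": {
        "instanceId": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
        "name": "orders-flow"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}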

CreateDataLakeDataset

enum_DataLakeDatasetSchemaFieldType := [ "INT", "DOUBLE", "STRING", "TIMESTAMP" ]

valid {
    input.Body.schema.name == STRING
    input.Body.schema.fields[_].name == STRING
    input.Body.schema.fields[_].type == enum_DataLakeDatasetSchemaFieldType[_]
    input.Body.schema.fields[_].isRequired == BOOLEAN
    input.Body.description == STRING
    input.Body.tags.STRING == STRING
    input.ReqMap.instanceId == STRING
    input.ReqMap.namespace == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
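
A hypothetical input satisfying this rule might look like the following; the namespace, dataset name, and field names are invented for illustration, and the field types are drawn from enum_DataLakeDatasetSchemaFieldType.

example_CreateDataLakeDataset_input := {
    "Body": {
        "schema": {
            "name": "OrderLineSchema",
            "fields": [
                { "name": "order_id",   "type": "STRING",    "isRequired": true },
                { "name": "quantity",   "type": "INT",       "isRequired": true },
                { "name": "unit_price", "type": "DOUBLE",    "isRequired": false },
                { "name": "updated_at", "type": "TIMESTAMP", "isRequired": false }
            ]
        },
        "description": "Example order line dataset",
        "tags": { "env": "test" }
    },
    "ReqMap": {
        "instanceId": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
        "namespace": "default",
        "name": "order_line"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}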

CreateInstance

valid {
    input.Body.instanceName == STRING
    input.Body.instanceDescription == STRING
    input.Body.kmsKeyArn == STRING
    input.Body.tags.STRING == STRING
    input.Body.clientToken == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
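
Note that this rule has no ReqMap path parameters; a hypothetical input (name, description, KMS key ARN, and token all invented) might be:

example_CreateInstance_input := {
    "Body": {
        "instanceName": "example-instance",
        "instanceDescription": "Sandbox instance for testing",
        "kmsKeyArn": "arn:aws:kms:us-east-1:123456789012:key/11111111-2222-3333-4444-555555555555",
        "tags": { "env": "test" },
        "clientToken": "11111111-2222-3333-4444-555555555555"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}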

DeleteDataIntegrationFlow

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
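
The Delete* and Get* rules in this section all share this path-parameter-only shape; a hypothetical input for this one (all values invented) would be:

example_DeleteDataIntegrationFlow_input := {
    "ReqMap": {
        "instanceId": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
        "name": "orders-flow"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}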

DeleteDataLakeDataset

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.namespace == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

DeleteInstance

valid {
    input.ReqMap.instanceId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

GetBillOfMaterialsImportJob

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.jobId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

GetDataIntegrationFlow

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

GetDataLakeDataset

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.namespace == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

GetInstance

valid {
    input.ReqMap.instanceId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

ListDataIntegrationFlows

valid {
    input.ReqMap.instanceId == STRING
    input.Qs.nextToken == STRING
    input.Qs.maxResults == INTEGER
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

ListDataLakeDatasets

valid {
    input.ReqMap.instanceId == STRING
    input.ReqMap.namespace == STRING
    input.Qs.nextToken == STRING
    input.Qs.maxResults == INTEGER
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

ListInstances

enum_InstanceState := [ "Initializing", "Active", "CreateFailed", "DeleteFailed", "Deleting", "Deleted" ]

valid {
    input.Qs.nextToken == STRING
    input.Qs.maxResults == INTEGER
    input.Qs.instanceNameFilter[_] == STRING
    input.Qs.instanceStateFilter[_] == enum_InstanceState[_]
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
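
A hypothetical input showing the query-string pagination and filter parameters (token, name, and counts invented; state values drawn from enum_InstanceState):

example_ListInstances_input := {
    "Qs": {
        "nextToken": "token-from-previous-page",             # assumed opaque pagination token
        "maxResults": 10,
        "instanceNameFilter": ["example-instance"],
        "instanceStateFilter": ["Active", "Initializing"]    # values from enum_InstanceState
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}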

ListTagsForResource

valid {
    input.ReqMap.resourceArn == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

SendDataIntegrationEvent

enum_DataIntegrationEventType := [
    "scn.data.forecast",
    "scn.data.inventorylevel",
    "scn.data.inboundorder",
    "scn.data.inboundorderline",
    "scn.data.inboundorderlineschedule",
    "scn.data.outboundorderline",
    "scn.data.outboundshipment",
    "scn.data.processheader",
    "scn.data.processoperation",
    "scn.data.processproduct",
    "scn.data.reservation",
    "scn.data.shipment",
    "scn.data.shipmentstop",
    "scn.data.shipmentstoporder",
    "scn.data.supplyplan"
]

valid {
    input.Body.eventType == enum_DataIntegrationEventType[_]
    input.Body.data == STRING
    input.Body.eventGroupId == STRING
    input.Body.eventTimestamp == TIMESTAMP
    input.Body.clientToken == STRING
    input.ReqMap.instanceId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
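
A hypothetical input is sketched below; the JSON payload carried in data and the epoch-seconds representation of eventTimestamp are assumptions for illustration, and the group id, token, and instance id are invented.

example_SendDataIntegrationEvent_input := {
    "Body": {
        "eventType": "scn.data.inventorylevel",                           # one of enum_DataIntegrationEventType
        "data": "{\"snapshotDate\":\"2024-01-01\",\"quantity\":100}",     # assumed JSON payload, passed as a string
        "eventGroupId": "inventory-site-001",
        "eventTimestamp": 1700000000,                                     # TIMESTAMP placeholder; epoch seconds assumed here
        "clientToken": "11111111-2222-3333-4444-555555555555"
    },
    "ReqMap": {
        "instanceId": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}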

TagResource

valid {
    input.Body.tags.STRING == STRING
    input.ReqMap.resourceArn == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}
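
The tags.STRING == STRING pattern used throughout this section reads as a map of string keys to string values; a hypothetical input making that concrete (ARN format and tag values invented):

example_TagResource_input := {
    "Body": {
        "tags": { "env": "test", "owner": "data-team" }
    },
    "ReqMap": {
        # illustrative ARN, format assumed
        "resourceArn": "arn:aws:scn:us-east-1:123456789012:instance/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
    },
    "ProviderMetadata": {
        "Account": "123456789012",
        "AccessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "Region": "us-east-1"
    }
}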

UntagResource

valid {
    input.ReqMap.resourceArn == STRING
    input.Qs.tagKeys[_] == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

UpdateDataIntegrationFlow

enum_DataIntegrationFlowFileType := [ "CSV", "PARQUET", "JSON" ]
enum_DataIntegrationFlowLoadType := [ "INCREMENTAL", "REPLACE" ]
enum_DataIntegrationFlowSourceType := [ "S3", "DATASET" ]
enum_DataIntegrationFlowTargetType := [ "S3", "DATASET" ]
enum_DataIntegrationFlowTransformationType := [ "SQL", "NONE" ]

valid {
    input.Body.sources[_].sourceType == enum_DataIntegrationFlowSourceType[_]
    input.Body.sources[_].sourceName == STRING
    input.Body.sources[_].s3Source.bucketName == STRING
    input.Body.sources[_].s3Source.prefix == STRING
    input.Body.sources[_].s3Source.options.fileType == enum_DataIntegrationFlowFileType[_]
    input.Body.sources[_].datasetSource.datasetIdentifier == STRING
    input.Body.sources[_].datasetSource.options.loadType == enum_DataIntegrationFlowLoadType[_]
    input.Body.sources[_].datasetSource.options.dedupeRecords == BOOLEAN
    input.Body.transformation.transformationType == enum_DataIntegrationFlowTransformationType[_]
    input.Body.transformation.sqlTransformation.query == STRING
    input.Body.target.targetType == enum_DataIntegrationFlowTargetType[_]
    input.Body.target.s3Target.bucketName == STRING
    input.Body.target.s3Target.prefix == STRING
    input.Body.target.s3Target.options.fileType == enum_DataIntegrationFlowFileType[_]
    input.Body.target.datasetTarget.datasetIdentifier == STRING
    input.Body.target.datasetTarget.options.loadType == enum_DataIntegrationFlowLoadType[_]
    input.Body.target.datasetTarget.options.dedupeRecords == BOOLEAN
    input.ReqMap.instanceId == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

UpdateDataLakeDataset

valid {
    input.Body.description == STRING
    input.ReqMap.instanceId == STRING
    input.ReqMap.namespace == STRING
    input.ReqMap.name == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}

UpdateInstance

valid {
    input.Body.instanceName == STRING
    input.Body.instanceDescription == STRING
    input.ReqMap.instanceId == STRING
    input.ProviderMetadata.Account == STRING
    input.ProviderMetadata.AccessKeyId == STRING
    input.ProviderMetadata.Region == STRING
}