Batch.fromJson constructor

Batch.fromJson(
  Map json_
)

Implementation

Batch.fromJson(core.Map json_)
    : this(
        createTime: json_.containsKey('createTime')
            ? json_['createTime'] as core.String
            : null,
        creator: json_.containsKey('creator')
            ? json_['creator'] as core.String
            : null,
        environmentConfig: json_.containsKey('environmentConfig')
            ? EnvironmentConfig.fromJson(json_['environmentConfig']
                as core.Map<core.String, core.dynamic>)
            : null,
        labels: json_.containsKey('labels')
            ? (json_['labels'] as core.Map<core.String, core.dynamic>).map(
                (key, value) => core.MapEntry(
                  key,
                  value as core.String,
                ),
              )
            : null,
        name: json_.containsKey('name') ? json_['name'] as core.String : null,
        operation: json_.containsKey('operation')
            ? json_['operation'] as core.String
            : null,
        pysparkBatch: json_.containsKey('pysparkBatch')
            ? PySparkBatch.fromJson(
                json_['pysparkBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        runtimeConfig: json_.containsKey('runtimeConfig')
            ? RuntimeConfig.fromJson(
                json_['runtimeConfig'] as core.Map<core.String, core.dynamic>)
            : null,
        runtimeInfo: json_.containsKey('runtimeInfo')
            ? RuntimeInfo.fromJson(
                json_['runtimeInfo'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkBatch: json_.containsKey('sparkBatch')
            ? SparkBatch.fromJson(
                json_['sparkBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkRBatch: json_.containsKey('sparkRBatch')
            ? SparkRBatch.fromJson(
                json_['sparkRBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        sparkSqlBatch: json_.containsKey('sparkSqlBatch')
            ? SparkSqlBatch.fromJson(
                json_['sparkSqlBatch'] as core.Map<core.String, core.dynamic>)
            : null,
        state:
            json_.containsKey('state') ? json_['state'] as core.String : null,
        stateHistory: json_.containsKey('stateHistory')
            ? (json_['stateHistory'] as core.List)
                .map((value) => StateHistory.fromJson(
                    value as core.Map<core.String, core.dynamic>))
                .toList()
            : null,
        stateMessage: json_.containsKey('stateMessage')
            ? json_['stateMessage'] as core.String
            : null,
        stateTime: json_.containsKey('stateTime')
            ? json_['stateTime'] as core.String
            : null,
        uuid: json_.containsKey('uuid') ? json_['uuid'] as core.String : null,
      );
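
Example

The constructor reads each field only when its key is present in json_, delegating nested messages such as environmentConfig and runtimeConfig to their own fromJson constructors; absent keys leave the corresponding field null. Below is a minimal usage sketch, not part of the generated documentation: it assumes Batch is exported by the generated Dataproc client (for example package:googleapis/dataproc/v1.dart, shown here as an illustrative import) and that the JSON object comes from jsonDecode or an API response.

import 'dart:convert';

// Assumed import for the generated Batch class; adjust to your package layout.
// import 'package:googleapis/dataproc/v1.dart';

void main() {
  const raw = '''
  {
    "name": "projects/my-project/locations/us-central1/batches/example-batch",
    "state": "SUCCEEDED",
    "createTime": "2024-01-01T00:00:00Z",
    "labels": {"team": "data-eng"}
  }
  ''';

  // jsonDecode returns a Map<String, dynamic> for a JSON object,
  // which satisfies the core.Map parameter of Batch.fromJson.
  final batch = Batch.fromJson(jsonDecode(raw) as Map<String, dynamic>);

  print(batch.name);            // the batch resource name
  print(batch.state);           // SUCCEEDED
  print(batch.labels?['team']); // data-eng
  print(batch.creator);         // null, since "creator" is absent from the input
}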