error-conditions.json
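The file below is Spark's error-conditions registry: each entry maps an error condition name to a "message" template (one array element per output line), an optional "subClass" map of more specific variants, and an ANSI "sqlState". In the upstream Spark sources the templates carry <parameter> placeholders, which did not survive this rendering, so many messages show gaps. A minimal sketch of how such a registry can be consumed, in Python, assuming a local well-formed copy named error-conditions.json with intact <parameter> tokens; the helper format_error is illustrative, not a Spark API:

import json
import re

def format_error(conditions, name, params, sub=None):
    # Select the condition entry and, optionally, one of its sub-classes.
    entry = conditions[name]
    lines = list(entry["message"])
    if sub is not None:
        lines += entry["subClass"][sub]["message"]
    text = "\n".join(lines)
    # Fill <param> tokens as used by the upstream templates (assumed syntax;
    # the placeholders were stripped in this rendering).
    text = re.sub(r"<(\w+)>", lambda m: str(params.get(m.group(1), m.group(0))), text)
    label = name if sub is None else name + "." + sub
    state = entry.get("sqlState")
    return "[" + label + "] " + text + (" SQLSTATE: " + state if state else "")

with open("error-conditions.json") as f:
    conditions = json.load(f)

print(format_error(conditions, "DIVIDE_BY_ZERO", {"config": '"spark.sql.ansi.enabled"'}))
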
{
  "AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION" : {
    "message" : [
      "Non-deterministic expression  should not appear in the arguments of an aggregate function."
    ],
    "sqlState" : "42845"
  },
  "ALL_PARAMETERS_MUST_BE_NAMED" : {
    "message" : [
      "Using name parameterized queries requires all parameters to be named. Parameters missing names: ."
    ],
    "sqlState" : "07001"
  },
  "ALL_PARTITION_COLUMNS_NOT_ALLOWED" : {
    "message" : [
      "Cannot use all columns for partition columns."
    ],
    "sqlState" : "KD005"
  },
  "ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : {
    "message" : [
      "ALTER TABLE  column  specifies descriptor \"\" more than once, which is invalid."
    ],
    "sqlState" : "42710"
  },
  "AMBIGUOUS_ALIAS_IN_NESTED_CTE" : {
    "message" : [
      "Name  is ambiguous in nested CTE.",
      "Please set  to \"CORRECTED\" so that name defined in inner CTE takes precedence. If set it to \"LEGACY\", outer CTE definitions will take precedence.",
      "See '/sql-migration-guide.html#query-engine'."
    ],
    "sqlState" : "42KD0"
  },
  "AMBIGUOUS_COLUMN_OR_FIELD" : {
    "message" : [
      "Column or field  is ambiguous and has  matches."
    ],
    "sqlState" : "42702"
  },
  "AMBIGUOUS_COLUMN_REFERENCE" : {
    "message" : [
      "Column  is ambiguous. It's because you joined several DataFrame together, and some of these DataFrames are the same.",
      "This column points to one of the DataFrames but Spark is unable to figure out which one.",
      "Please alias the DataFrames with different names via `DataFrame.alias` before joining them,",
      "and specify the column using qualified name, e.g. `df.alias(\"a\").join(df.alias(\"b\"), col(\"a.id\") > col(\"b.id\"))`."
    ],
    "sqlState" : "42702"
  },
  "AMBIGUOUS_LATERAL_COLUMN_ALIAS" : {
    "message" : [
      "Lateral column alias  is ambiguous and has  matches."
    ],
    "sqlState" : "42702"
  },
  "AMBIGUOUS_REFERENCE" : {
    "message" : [
      "Reference  is ambiguous, could be: ."
    ],
    "sqlState" : "42704"
  },
  "AMBIGUOUS_REFERENCE_TO_FIELDS" : {
    "message" : [
      "Ambiguous reference to the field . It appears  times in the schema."
    ],
    "sqlState" : "42000"
  },
  "ARITHMETIC_OVERFLOW" : {
    "message" : [
      ". If necessary set  to \"false\" to bypass this error."
    ],
    "sqlState" : "22003"
  },
  "ASSIGNMENT_ARITY_MISMATCH" : {
    "message" : [
      "The number of columns or variables assigned or aliased:  does not match the number of source expressions: ."
    ],
    "sqlState" : "42802"
  },
  "AS_OF_JOIN" : {
    "message" : [
      "Invalid as-of join."
    ],
    "subClass" : {
      "TOLERANCE_IS_NON_NEGATIVE" : {
        "message" : [
          "The input argument `tolerance` must be non-negative."
        ]
      },
      "TOLERANCE_IS_UNFOLDABLE" : {
        "message" : [
          "The input argument `tolerance` must be a constant."
        ]
      },
      "UNSUPPORTED_DIRECTION" : {
        "message" : [
          "Unsupported as-of join direction ''. Supported as-of join direction include: ."
        ]
      }
    },
    "sqlState" : "42604"
  },
  "AVRO_INCOMPATIBLE_READ_TYPE" : {
    "message" : [
      "Cannot convert Avro  to SQL  because the original encoded data type is , however you're trying to read the field as , which would lead to an incorrect answer.",
      "To allow reading this field, enable the SQL configuration: \"spark.sql.legacy.avro.allowIncompatibleSchema\"."
    ],
    "sqlState" : "22KD3"
  },
  "AVRO_NOT_LOADED_SQL_FUNCTIONS_UNUSABLE" : {
    "message" : [
      "Cannot call the  SQL function because the Avro data source is not loaded.",
      "Please restart your job or session with the 'spark-avro' package loaded, such as by using the --packages argument on the command line, and then retry your query or command again."
    ],
    "sqlState" : "22KD3"
  },
  "BATCH_METADATA_NOT_FOUND" : {
    "message" : [
      "Unable to find batch ."
    ],
    "sqlState" : "42K03"
  },
  "BINARY_ARITHMETIC_OVERFLOW" : {
    "message" : [
      "   caused overflow."
    ],
    "sqlState" : "22003"
  },
  "BOOLEAN_STATEMENT_WITH_EMPTY_ROW" : {
    "message" : [
      "Boolean statement  is invalid. Expected single row with a value of the BOOLEAN type, but got an empty row."
    ],
    "sqlState" : "21000"
  },
  "CALL_ON_STREAMING_DATASET_UNSUPPORTED" : {
    "message" : [
      "The method  can not be called on streaming Dataset/DataFrame."
    ],
    "sqlState" : "42KDE"
  },
  "CANNOT_ALTER_COLLATION_BUCKET_COLUMN" : {
    "message" : [
      "ALTER TABLE (ALTER|CHANGE) COLUMN cannot change collation of type/subtypes of bucket columns, but found the bucket column  in the table ."
    ],
    "sqlState" : "428FR"
  },
  "CANNOT_ALTER_PARTITION_COLUMN" : {
    "message" : [
      "ALTER TABLE (ALTER|CHANGE) COLUMN is not supported for partition columns, but found the partition column  in the table ."
    ],
    "sqlState" : "428FR"
  },
  "CANNOT_ASSIGN_EVENT_TIME_COLUMN_WITHOUT_WATERMARK" : {
    "message" : [
      "Watermark needs to be defined to reassign event time column. Failed to find watermark definition in the streaming query."
    ],
    "sqlState" : "42611"
  },
  "CANNOT_CAST_DATATYPE" : {
    "message" : [
      "Cannot cast  to ."
    ],
    "sqlState" : "42846"
  },
  "CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE" : {
    "message" : [
      "Cannot convert Protobuf  to SQL  because schema is incompatible (protobufType = , sqlType = )."
    ],
    "sqlState" : "42846"
  },
  "CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE" : {
    "message" : [
      "Unable to convert  of Protobuf to SQL type ."
    ],
    "sqlState" : "42846"
  },
  "CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : {
    "message" : [
      "Cannot convert SQL  to Protobuf  because schema is incompatible (protobufType = , sqlType = )."
    ],
    "sqlState" : "42846"
  },
  "CANNOT_CONVERT_SQL_VALUE_TO_PROTOBUF_ENUM_TYPE" : {
    "message" : [
      "Cannot convert SQL  to Protobuf  because  is not in defined values for enum: ."
    ],
    "sqlState" : "42846"
  },
  "CANNOT_CREATE_DATA_SOURCE_TABLE" : {
    "message" : [
      "Failed to create data source table :"
    ],
    "subClass" : {
      "EXTERNAL_METADATA_UNSUPPORTED" : {
        "message" : [
          "provider '' does not support external metadata but a schema is provided. Please remove the schema when creating the table."
        ]
      }
    },
    "sqlState" : "42KDE"
  },
  "CANNOT_DECODE_URL" : {
    "message" : [
      "The provided URL cannot be decoded: . Please ensure that the URL is properly formatted and try again."
    ],
    "sqlState" : "22546"
  },
  "CANNOT_INVOKE_IN_TRANSFORMATIONS" : {
    "message" : [
      "Dataset transformations and actions can only be invoked by the driver, not inside of other Dataset transformations; for example, dataset1.map(x => dataset2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the dataset1.map transformation. For more information, see SPARK-28702."
    ],
    "sqlState" : "0A000"
  },
  "CANNOT_LOAD_FUNCTION_CLASS" : {
    "message" : [
      "Cannot load class  when registering the function , please make sure it is on the classpath."
    ],
    "sqlState" : "46103"
  },
  "CANNOT_LOAD_PROTOBUF_CLASS" : {
    "message" : [
      "Could not load Protobuf class with name . ."
    ],
    "sqlState" : "42K03"
  },
  "CANNOT_LOAD_STATE_STORE" : {
    "message" : [
      "An error occurred during loading state."
    ],
    "subClass" : {
      "CANNOT_READ_CHECKPOINT" : {
        "message" : [
          "Cannot read RocksDB checkpoint metadata. Expected , but found ."
        ]
      },
      "CANNOT_READ_DELTA_FILE_KEY_SIZE" : {
        "message" : [
          "Error reading delta file  of : key size cannot be ."
        ]
      },
      "CANNOT_READ_DELTA_FILE_NOT_EXISTS" : {
        "message" : [
          "Error reading delta file  of :  does not exist."
        ]
      },
      "CANNOT_READ_MISSING_SNAPSHOT_FILE" : {
        "message" : [
          "Error reading snapshot file  of :  does not exist."
        ]
      },
      "CANNOT_READ_SNAPSHOT_FILE_KEY_SIZE" : {
        "message" : [
          "Error reading snapshot file  of : key size cannot be ."
        ]
      },
      "CANNOT_READ_SNAPSHOT_FILE_VALUE_SIZE" : {
        "message" : [
          "Error reading snapshot file  of : value size cannot be ."
        ]
      },
      "CANNOT_READ_STREAMING_STATE_FILE" : {
        "message" : [
          "Error reading streaming state file of  does not exist. If the stream job is restarted with a new or updated state operation, please create a new checkpoint location or clear the existing checkpoint location."
        ]
      },
      "HDFS_STORE_PROVIDER_OUT_OF_MEMORY" : {
        "message" : [
          "Could not load HDFS state store with id  because of an out of memory exception."
        ]
      },
      "INVALID_CHANGE_LOG_READER_VERSION" : {
        "message" : [
          "The change log reader version cannot be ."
        ]
      },
      "INVALID_CHANGE_LOG_WRITER_VERSION" : {
        "message" : [
          "The change log writer version cannot be ."
        ]
      },
      "ROCKSDB_STORE_PROVIDER_OUT_OF_MEMORY" : {
        "message" : [
          "Could not load RocksDB state store with id  because of an out of memory exception."
        ]
      },
      "SNAPSHOT_PARTITION_ID_NOT_FOUND" : {
        "message" : [
          "Partition id  not found for state of operator  at ."
        ]
      },
      "UNCATEGORIZED" : {
        "message" : [
          ""
        ]
      },
      "UNEXPECTED_FILE_SIZE" : {
        "message" : [
          "Copied  to , expected  bytes, found  bytes."
        ]
      },
      "UNEXPECTED_VERSION" : {
        "message" : [
          "Version cannot be  because it is less than 0."
        ]
      },
      "UNRELEASED_THREAD_ERROR" : {
        "message" : [
          ": RocksDB instance could not be acquired by  for operationType= as it was not released by  after  ms.",
          "Thread holding the lock has trace: "
        ]
      }
    },
    "sqlState" : "58030"
  },
  "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
    "message" : [
      "Failed to merge incompatible data types  and . Please check the data types of the columns being merged and ensure that they are compatible. If necessary, consider casting the columns to compatible data types before attempting the merge."
    ],
    "sqlState" : "42825"
  },
  "CANNOT_MERGE_SCHEMAS" : {
    "message" : [
      "Failed merging schemas:",
      "Initial schema:",
      "",
      "Schema that cannot be merged with the initial schema:",
      "."
    ],
    "sqlState" : "42KD9"
  },
  "CANNOT_MODIFY_CONFIG" : {
    "message" : [
      "Cannot modify the value of the Spark config: .",
      "See also '/sql-migration-guide.html#ddl-statements'."
    ],
    "sqlState" : "46110"
  },
  "CANNOT_PARSE_DECIMAL" : {
    "message" : [
      "Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators."
    ],
    "sqlState" : "22018"
  },
  "CANNOT_PARSE_INTERVAL" : {
    "message" : [
      "Unable to parse . Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format. If the issue persists, please double check that the input value is not null or empty and try again."
    ],
    "sqlState" : "22006"
  },
  "CANNOT_PARSE_JSON_FIELD" : {
    "message" : [
      "Cannot parse the field name  and the value  of the JSON token type  to target Spark data type ."
    ],
    "sqlState" : "2203G"
  },
  "CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : {
    "message" : [
      "Error parsing descriptor bytes into Protobuf FileDescriptorSet."
    ],
    "sqlState" : "22018"
  },
  "CANNOT_PARSE_TIMESTAMP" : {
    "message" : [
      ". If necessary set  to \"false\" to bypass this error."
    ],
    "sqlState" : "22007"
  },
  "CANNOT_RECOGNIZE_HIVE_TYPE" : {
    "message" : [
      "Cannot recognize hive type string: , column: . The specified data type for the field cannot be recognized by Spark SQL. Please check the data type of the specified field and ensure that it is a valid Spark SQL data type. Refer to the Spark SQL documentation for a list of valid data types and their format. If the data type is correct, please ensure that you are using a supported version of Spark SQL."
    ],
    "sqlState" : "429BB"
  },
  "CANNOT_RENAME_ACROSS_SCHEMA" : {
    "message" : [
      "Renaming a  across schemas is not allowed."
    ],
    "sqlState" : "0AKD0"
  },
  "CANNOT_RESOLVE_DATAFRAME_COLUMN" : {
    "message" : [
      "Cannot resolve dataframe column . It's probably because of illegal references like `df1.select(df2.col(\"a\"))`."
    ],
    "sqlState" : "42704"
  },
  "CANNOT_RESOLVE_STAR_EXPAND" : {
    "message" : [
      "Cannot resolve .* given input columns . Please check that the specified table or struct exists and is accessible in the input columns."
    ],
    "sqlState" : "42704"
  },
  "CANNOT_RESTORE_PERMISSIONS_FOR_PATH" : {
    "message" : [
      "Failed to set permissions on created path  back to ."
    ],
    "sqlState" : "58030"
  },
  "CANNOT_UPDATE_FIELD" : {
    "message" : [
      "Cannot update  field  type:"
    ],
    "subClass" : {
      "ARRAY_TYPE" : {
        "message" : [
          "Update the element by updating .element."
        ]
      },
      "INTERVAL_TYPE" : {
        "message" : [
          "Update an interval by updating its fields."
        ]
      },
      "MAP_TYPE" : {
        "message" : [
          "Update a map by updating .key or .value."
        ]
      },
      "STRUCT_TYPE" : {
        "message" : [
          "Update a struct by updating its fields."
        ]
      },
      "USER_DEFINED_TYPE" : {
        "message" : [
          "Update a UserDefinedType[] by updating its fields."
        ]
      }
    },
    "sqlState" : "0A000"
  },
  "CANNOT_UP_CAST_DATATYPE" : {
    "message" : [
      "Cannot up cast  from  to .",
      "
" ], "sqlState" : "42846" }, "CANNOT_USE_KRYO" : { "message" : [ "Cannot load Kryo serialization codec. Kryo serialization cannot be used in the Spark Connect client. Use Java serialization, provide a custom Codec, or use Spark Classic instead." ], "sqlState" : "22KD3" }, "CANNOT_WRITE_STATE_STORE" : { "message" : [ "Error writing state store files for provider ." ], "subClass" : { "CANNOT_COMMIT" : { "message" : [ "Cannot perform commit during state checkpoint." ] } }, "sqlState" : "58030" }, "CAST_INVALID_INPUT" : { "message" : [ "The value of the type cannot be cast to because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead." ], "sqlState" : "22018" }, "CAST_OVERFLOW" : { "message" : [ "The value of the type cannot be cast to due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead." ], "sqlState" : "22003" }, "CAST_OVERFLOW_IN_TABLE_INSERT" : { "message" : [ "Fail to assign a value of type to the type column or variable due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead." ], "sqlState" : "22003" }, "CATALOG_NOT_FOUND" : { "message" : [ "The catalog not found. Consider to set the SQL config to a catalog plugin." ], "sqlState" : "42P08" }, "CHECKPOINT_RDD_BLOCK_ID_NOT_FOUND" : { "message" : [ "Checkpoint block not found!", "Either the executor that originally checkpointed this partition is no longer alive, or the original RDD is unpersisted.", "If this problem persists, you may consider using `rdd.checkpoint()` instead, which is slower than local checkpointing but more fault-tolerant." ], "sqlState" : "56000" }, "CLASS_NOT_OVERRIDE_EXPECTED_METHOD" : { "message" : [ " must override either or ." ], "sqlState" : "38000" }, "CLASS_UNSUPPORTED_BY_MAP_OBJECTS" : { "message" : [ "`MapObjects` does not support the class as resulting collection." ], "sqlState" : "0A000" }, "CLUSTERING_COLUMNS_MISMATCH" : { "message" : [ "Specified clustering does not match that of the existing table .", "Specified clustering columns: [].", "Existing clustering columns: []." ], "sqlState" : "42P10" }, "CLUSTERING_NOT_SUPPORTED" : { "message" : [ "'' does not support clustering." ], "sqlState" : "42000" }, "CODEC_NOT_AVAILABLE" : { "message" : [ "The codec is not available." ], "subClass" : { "WITH_AVAILABLE_CODECS_SUGGESTION" : { "message" : [ "Available codecs are ." ] }, "WITH_CONF_SUGGESTION" : { "message" : [ "Consider to set the config to ." ] } }, "sqlState" : "56038" }, "CODEC_SHORT_NAME_NOT_FOUND" : { "message" : [ "Cannot find a short name for the codec ." ], "sqlState" : "42704" }, "COLLATION_INVALID_NAME" : { "message" : [ "The value does not represent a correct collation name. Suggested valid collation names: []." ], "sqlState" : "42704" }, "COLLATION_INVALID_PROVIDER" : { "message" : [ "The value does not represent a correct collation provider. Supported providers are: []." ], "sqlState" : "42704" }, "COLLATION_MISMATCH" : { "message" : [ "Could not determine which collation to use for string functions and operators." ], "subClass" : { "EXPLICIT" : { "message" : [ "Error occurred due to the mismatch between explicit collations: []. Decide on a single explicit collation and remove others." ] }, "IMPLICIT" : { "message" : [ "Error occurred due to the mismatch between multiple implicit non-default collations. Use COLLATE function to set the collation explicitly." 
] } }, "sqlState" : "42P21" }, "COLLECTION_SIZE_LIMIT_EXCEEDED" : { "message" : [ "Can't create array with elements which exceeding the array size limit ," ], "subClass" : { "FUNCTION" : { "message" : [ "unsuccessful try to create arrays in the function ." ] }, "INITIALIZE" : { "message" : [ "cannot initialize an array with specified parameters." ] }, "PARAMETER" : { "message" : [ "the value of parameter(s) in the function is invalid." ] } }, "sqlState" : "54000" }, "COLUMN_ALIASES_NOT_ALLOWED" : { "message" : [ "Column aliases are not allowed in ." ], "sqlState" : "42601" }, "COLUMN_ALREADY_EXISTS" : { "message" : [ "The column already exists. Choose another name or rename the existing column." ], "sqlState" : "42711" }, "COLUMN_NOT_DEFINED_IN_TABLE" : { "message" : [ " column is not defined in table , defined table columns are: ." ], "sqlState" : "42703" }, "COLUMN_NOT_FOUND" : { "message" : [ "The column cannot be found. Verify the spelling and correctness of the column name according to the SQL config ." ], "sqlState" : "42703" }, "COMPARATOR_RETURNS_NULL" : { "message" : [ "The comparator has returned a NULL for a comparison between and .", "It should return a positive integer for \"greater than\", 0 for \"equal\" and a negative integer for \"less than\".", "To revert to deprecated behavior where NULL is treated as 0 (equal), you must set \"spark.sql.legacy.allowNullComparisonResultInArraySort\" to \"true\"." ], "sqlState" : "22004" }, "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT" : { "message" : [ "Cannot process input data types for the expression: ." ], "subClass" : { "MISMATCHED_TYPES" : { "message" : [ "All input types must be the same except nullable, containsNull, valueContainsNull flags, but found the input types ." ] }, "NO_INPUTS" : { "message" : [ "The collection of input data types must not be empty." ] } }, "sqlState" : "42K09" }, "CONCURRENT_QUERY" : { "message" : [ "Another instance of this query was just started by a concurrent session." ], "sqlState" : "0A000" }, "CONCURRENT_STREAM_LOG_UPDATE" : { "message" : [ "Concurrent update to the log. Multiple streaming jobs detected for .", "Please make sure only one streaming job runs on a specific checkpoint location at a time." ], "sqlState" : "40000" }, "CONFLICTING_PARTITION_COLUMN_NAMES" : { "message" : [ "Conflicting partition column names detected:", "", "For partitioned table directories, data files should only live in leaf directories.", "And directories at the same level should have the same partition column name.", "Please check the following directories for unexpected files or inconsistent partition column names:", "" ], "sqlState" : "KD009" }, "CONNECT" : { "message" : [ "Generic Spark Connect error." ], "subClass" : { "INTERCEPTOR_CTOR_MISSING" : { "message" : [ "Cannot instantiate GRPC interceptor because is missing a default constructor without arguments." ] }, "INTERCEPTOR_RUNTIME_ERROR" : { "message" : [ "Error instantiating GRPC interceptor: " ] }, "PLUGIN_CTOR_MISSING" : { "message" : [ "Cannot instantiate Spark Connect plugin because is missing a default constructor without arguments." ] }, "PLUGIN_RUNTIME_ERROR" : { "message" : [ "Error instantiating Spark Connect plugin: " ] }, "SESSION_NOT_SAME" : { "message" : [ "Both Datasets must belong to the same SparkSession." ] } }, "sqlState" : "56K00" }, "CONVERSION_INVALID_INPUT" : { "message" : [ "The value () cannot be converted to because it is malformed. Correct the value as per the syntax, or change its format. 
Use to tolerate malformed input and return NULL instead." ], "sqlState" : "22018" }, "CREATE_PERMANENT_VIEW_WITHOUT_ALIAS" : { "message" : [ "Not allowed to create the permanent view without explicitly assigning an alias for the expression ." ], "sqlState" : "0A000" }, "CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : { "message" : [ "CREATE TABLE column specifies descriptor \"\" more than once, which is invalid." ], "sqlState" : "42710" }, "CREATE_VIEW_COLUMN_ARITY_MISMATCH" : { "message" : [ "Cannot create view , the reason is" ], "subClass" : { "NOT_ENOUGH_DATA_COLUMNS" : { "message" : [ "not enough data columns:", "View columns: .", "Data columns: ." ] }, "TOO_MANY_DATA_COLUMNS" : { "message" : [ "too many data columns:", "View columns: .", "Data columns: ." ] } }, "sqlState" : "21S01" }, "DATATYPE_MISMATCH" : { "message" : [ "Cannot resolve due to data type mismatch:" ], "subClass" : { "ARRAY_FUNCTION_DIFF_TYPES" : { "message" : [ "Input to should have been followed by a value with same element type, but it's [, ]." ] }, "BINARY_ARRAY_DIFF_TYPES" : { "message" : [ "Input to function should have been two with same element type, but it's [, ]." ] }, "BINARY_OP_DIFF_TYPES" : { "message" : [ "the left and right operands of the binary operator have incompatible types ( and )." ] }, "BINARY_OP_WRONG_TYPE" : { "message" : [ "the binary operator requires the input type , not ." ] }, "BLOOM_FILTER_BINARY_OP_WRONG_TYPE" : { "message" : [ "The Bloom filter binary input to should be either a constant value or a scalar subquery expression, but it's ." ] }, "BLOOM_FILTER_WRONG_TYPE" : { "message" : [ "Input to function should have been followed by value with , but it's []." ] }, "CANNOT_CONVERT_TO_JSON" : { "message" : [ "Unable to convert column of type to JSON." ] }, "CANNOT_DROP_ALL_FIELDS" : { "message" : [ "Cannot drop all fields in struct." ] }, "CAST_WITHOUT_SUGGESTION" : { "message" : [ "cannot cast to ." ] }, "CAST_WITH_CONF_SUGGESTION" : { "message" : [ "cannot cast to with ANSI mode on.", "If you have to cast to , you can set as ." ] }, "CAST_WITH_FUNC_SUGGESTION" : { "message" : [ "cannot cast to .", "To convert values from to , you can use the functions instead." ] }, "CREATE_MAP_KEY_DIFF_TYPES" : { "message" : [ "The given keys of function should all be the same type, but they are ." ] }, "CREATE_MAP_VALUE_DIFF_TYPES" : { "message" : [ "The given values of function should all be the same type, but they are ." ] }, "CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING" : { "message" : [ "Only foldable `STRING` expressions are allowed to appear at odd position, but they are ." ] }, "DATA_DIFF_TYPES" : { "message" : [ "Input to should all be the same type, but it's ." ] }, "FILTER_NOT_BOOLEAN" : { "message" : [ "Filter expression of type is not a boolean." ] }, "HASH_MAP_TYPE" : { "message" : [ "Input to the function cannot contain elements of the \"MAP\" type. In Spark, same maps may have different hashcode, thus hash expressions are prohibited on \"MAP\" elements. To restore previous behavior set \"spark.sql.legacy.allowHashOnMapType\" to \"true\"." ] }, "HASH_VARIANT_TYPE" : { "message" : [ "Input to the function cannot contain elements of the \"VARIANT\" type yet." ] }, "INPUT_SIZE_NOT_ONE" : { "message" : [ "Length of should be 1." ] }, "INVALID_ARG_VALUE" : { "message" : [ "The value must to be a literal of , but got ." ] }, "INVALID_JSON_MAP_KEY_TYPE" : { "message" : [ "Input schema can only contain STRING as a key type for a MAP." 
] }, "INVALID_JSON_SCHEMA" : { "message" : [ "Input schema must be a struct, an array, a map or a variant." ] }, "INVALID_MAP_KEY_TYPE" : { "message" : [ "The key of map cannot be/contain ." ] }, "INVALID_ORDERING_TYPE" : { "message" : [ "The does not support ordering on type ." ] }, "INVALID_ROW_LEVEL_OPERATION_ASSIGNMENTS" : { "message" : [ "" ] }, "INVALID_XML_MAP_KEY_TYPE" : { "message" : [ "Input schema can only contain STRING as a key type for a MAP." ] }, "IN_SUBQUERY_DATA_TYPE_MISMATCH" : { "message" : [ "The data type of one or more elements in the left hand side of an IN subquery is not compatible with the data type of the output of the subquery. Mismatched columns: [], left side: [], right side: []." ] }, "IN_SUBQUERY_LENGTH_MISMATCH" : { "message" : [ "The number of columns in the left hand side of an IN subquery does not match the number of columns in the output of subquery. Left hand side columns(length: ): [], right hand side columns(length: ): []." ] }, "MAP_CONCAT_DIFF_TYPES" : { "message" : [ "The should all be of type map, but it's ." ] }, "MAP_FUNCTION_DIFF_TYPES" : { "message" : [ "Input to should have been followed by a value with same key type, but it's [, ]." ] }, "MAP_ZIP_WITH_DIFF_TYPES" : { "message" : [ "Input to the should have been two maps with compatible key types, but it's [, ]." ] }, "NON_FOLDABLE_INPUT" : { "message" : [ "the input should be a foldable expression; however, got ." ] }, "NON_STRING_TYPE" : { "message" : [ "all arguments of the function must be strings." ] }, "NULL_TYPE" : { "message" : [ "Null typed values cannot be used as arguments of ." ] }, "PARAMETER_CONSTRAINT_VIOLATION" : { "message" : [ "The () must be the ()." ] }, "RANGE_FRAME_INVALID_TYPE" : { "message" : [ "The data type used in the order specification does not match the data type which is used in the range frame." ] }, "RANGE_FRAME_MULTI_ORDER" : { "message" : [ "A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: ." ] }, "RANGE_FRAME_WITHOUT_ORDER" : { "message" : [ "A range window frame cannot be used in an unordered window specification." ] }, "SEQUENCE_WRONG_INPUT_TYPES" : { "message" : [ " uses the wrong parameter type. The parameter type must conform to:", "1. The start and stop expressions must resolve to the same type.", "2. If start and stop expressions resolve to the type, then the step expression must resolve to the type.", "3. Otherwise, if start and stop expressions resolve to the type, then the step expression must resolve to the same type." ] }, "SPECIFIED_WINDOW_FRAME_DIFF_TYPES" : { "message" : [ "Window frame bounds and do not have the same type: <> ." ] }, "SPECIFIED_WINDOW_FRAME_INVALID_BOUND" : { "message" : [ "Window frame upper bound does not follow the lower bound ." ] }, "SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE" : { "message" : [ "The data type of the bound does not match the expected data type ." ] }, "SPECIFIED_WINDOW_FRAME_WITHOUT_FOLDABLE" : { "message" : [ "Window frame bound is not a literal." ] }, "SPECIFIED_WINDOW_FRAME_WRONG_COMPARISON" : { "message" : [ "The lower bound of a window frame must be to the upper bound." ] }, "STACK_COLUMN_DIFF_TYPES" : { "message" : [ "The data type of the column () do not have the same type: () <> ()." ] }, "TYPE_CHECK_FAILURE_WITH_HINT" : { "message" : [ "." ] }, "UNEXPECTED_CLASS_TYPE" : { "message" : [ "class not found." ] }, "UNEXPECTED_INPUT_TYPE" : { "message" : [ "The parameter requires the type, however has the type ." 
] }, "UNEXPECTED_NULL" : { "message" : [ "The must not be null." ] }, "UNEXPECTED_RETURN_TYPE" : { "message" : [ "The requires return type, but the actual is type." ] }, "UNEXPECTED_STATIC_METHOD" : { "message" : [ "cannot find a static method that matches the argument types in ." ] }, "UNSUPPORTED_INPUT_TYPE" : { "message" : [ "The input of can't be type data." ] }, "UNSUPPORTED_UDF_INPUT_TYPE" : { "message" : [ "UDFs do not support '' as an input data type." ] }, "UNSUPPORTED_UDF_OUTPUT_TYPE" : { "message" : [ "UDFs do not support '' as an output data type." ] }, "VALUE_OUT_OF_RANGE" : { "message" : [ "The must be between (current value = )." ] }, "WRONG_NUM_ARG_TYPES" : { "message" : [ "The expression requires argument types but the actual number is ." ] }, "WRONG_NUM_ENDPOINTS" : { "message" : [ "The number of endpoints must be >= 2 to construct intervals but the actual number is ." ] } }, "sqlState" : "42K09" }, "DATATYPE_MISSING_SIZE" : { "message" : [ "DataType requires a length parameter, for example (10). Please specify the length." ], "sqlState" : "42K01" }, "DATA_SOURCE_ALREADY_EXISTS" : { "message" : [ "Data source '' already exists. Please choose a different name for the new data source." ], "sqlState" : "42710" }, "DATA_SOURCE_EXTERNAL_ERROR" : { "message" : [ "Encountered error when saving to external data source." ], "sqlState" : "KD00F" }, "DATA_SOURCE_NOT_EXIST" : { "message" : [ "Data source '' not found. Please make sure the data source is registered." ], "sqlState" : "42704" }, "DATA_SOURCE_NOT_FOUND" : { "message" : [ "Failed to find the data source: . Make sure the provider name is correct and the package is properly registered and compatible with your Spark version." ], "sqlState" : "42K02" }, "DATA_SOURCE_TABLE_SCHEMA_MISMATCH" : { "message" : [ "The schema of the data source table does not match the expected schema. If you are using the DataFrameReader.schema API or creating a table, avoid specifying the schema.", "Data Source schema: ", "Expected schema: " ], "sqlState" : "42K03" }, "DATETIME_OVERFLOW" : { "message" : [ "Datetime operation overflow: ." ], "sqlState" : "22008" }, "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : { "message" : [ "Decimal precision exceeds max precision ." ], "sqlState" : "22003" }, "DEFAULT_DATABASE_NOT_EXISTS" : { "message" : [ "Default database does not exist, please create it first or change default database to ``." ], "sqlState" : "42704" }, "DEFAULT_PLACEMENT_INVALID" : { "message" : [ "A DEFAULT keyword in a MERGE, INSERT, UPDATE, or SET VARIABLE command could not be directly assigned to a target column because it was part of an expression.", "For example: `UPDATE SET c1 = DEFAULT` is allowed, but `UPDATE T SET c1 = DEFAULT + 1` is not allowed." ], "sqlState" : "42608" }, "DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : { "message" : [ "Distinct window functions are not supported: ." ], "sqlState" : "0A000" }, "DIVIDE_BY_ZERO" : { "message" : [ "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." ], "sqlState" : "22012" }, "DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT" : { "message" : [ "Duplicated field names in Arrow Struct are not allowed, got ." ], "sqlState" : "42713" }, "DUPLICATED_MAP_KEY" : { "message" : [ "Duplicate map key was found, please check the input data.", "If you want to remove the duplicated keys, you can set to \"LAST_WIN\" so that the key inserted at last takes precedence." 
], "sqlState" : "23505" }, "DUPLICATED_METRICS_NAME" : { "message" : [ "The metric name is not unique: . The same name cannot be used for metrics with different results.", "However multiple instances of metrics with with same result and name are allowed (e.g. self-joins)." ], "sqlState" : "42710" }, "DUPLICATE_ASSIGNMENTS" : { "message" : [ "The columns or variables appear more than once as assignment targets." ], "sqlState" : "42701" }, "DUPLICATE_CLAUSES" : { "message" : [ "Found duplicate clauses: . Please, remove one of them." ], "sqlState" : "42614" }, "DUPLICATE_KEY" : { "message" : [ "Found duplicate keys ." ], "sqlState" : "23505" }, "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT" : { "message" : [ "Call to routine is invalid because it includes multiple argument assignments to the same parameter name ." ], "subClass" : { "BOTH_POSITIONAL_AND_NAMED" : { "message" : [ "A positional argument and named argument both referred to the same parameter. Please remove the named argument referring to this parameter." ] }, "DOUBLE_NAMED_ARGUMENT_REFERENCE" : { "message" : [ "More than one named argument referred to the same parameter. Please assign a value only once." ] } }, "sqlState" : "4274K" }, "EMITTING_ROWS_OLDER_THAN_WATERMARK_NOT_ALLOWED" : { "message" : [ "Previous node emitted a row with eventTime= which is older than current_watermark_value=", "This can lead to correctness issues in the stateful operators downstream in the execution pipeline.", "Please correct the operator logic to emit rows after current global watermark value." ], "sqlState" : "42815" }, "EMPTY_JSON_FIELD_VALUE" : { "message" : [ "Failed to parse an empty string for data type ." ], "sqlState" : "42604" }, "ENCODER_NOT_FOUND" : { "message" : [ "Not found an encoder of the type to Spark SQL internal representation.", "Consider to change the input type to one of supported at '/sql-ref-datatypes.html'." ], "sqlState" : "42704" }, "END_LABEL_WITHOUT_BEGIN_LABEL" : { "message" : [ "End label can not exist without begin label." ], "sqlState" : "42K0L" }, "ERROR_READING_AVRO_UNKNOWN_FINGERPRINT" : { "message" : [ "Error reading avro data -- encountered an unknown fingerprint: , not sure what schema to use.", "This could happen if you registered additional schemas after starting your spark context." ], "sqlState" : "KD00B" }, "EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE" : { "message" : [ "The event time has the invalid type , but expected \"TIMESTAMP\"." ], "sqlState" : "42K09" }, "EXCEED_LIMIT_LENGTH" : { "message" : [ "Exceeds char/varchar type length limitation: ." ], "sqlState" : "54006" }, "EXCEPT_NESTED_COLUMN_INVALID_TYPE" : { "message" : [ "EXCEPT column was resolved and expected to be StructType, but found type ." ], "sqlState" : "428H2" }, "EXCEPT_OVERLAPPING_COLUMNS" : { "message" : [ "Columns in an EXCEPT list must be distinct and non-overlapping, but got ()." ], "sqlState" : "42702" }, "EXEC_IMMEDIATE_DUPLICATE_ARGUMENT_ALIASES" : { "message" : [ "The USING clause of this EXECUTE IMMEDIATE command contained multiple arguments with same alias (), which is invalid; please update the command to specify unique aliases and then try it again." ], "sqlState" : "42701" }, "EXPECT_PERMANENT_VIEW_NOT_TEMP" : { "message" : [ "'' expects a permanent view but is a temp view." ], "sqlState" : "42809" }, "EXPECT_TABLE_NOT_VIEW" : { "message" : [ "'' expects a table but is a view." ], "subClass" : { "NO_ALTERNATIVE" : { "message" : [ "" ] }, "USE_ALTER_VIEW" : { "message" : [ "Please use ALTER VIEW instead." 
] } }, "sqlState" : "42809" }, "EXPECT_VIEW_NOT_TABLE" : { "message" : [ "The table does not support ." ], "subClass" : { "NO_ALTERNATIVE" : { "message" : [ "" ] }, "USE_ALTER_TABLE" : { "message" : [ "Please use ALTER TABLE instead." ] } }, "sqlState" : "42809" }, "EXPRESSION_DECODING_FAILED" : { "message" : [ "Failed to decode a row to a value of the expressions: ." ], "sqlState" : "42846" }, "EXPRESSION_ENCODING_FAILED" : { "message" : [ "Failed to encode a value of the expressions: to a row." ], "sqlState" : "42846" }, "EXPRESSION_TYPE_IS_NOT_ORDERABLE" : { "message" : [ "Column expression cannot be sorted because its type is not orderable." ], "sqlState" : "42822" }, "FAILED_EXECUTE_UDF" : { "message" : [ "User defined function (: () => ) failed due to: ." ], "sqlState" : "39000" }, "FAILED_FUNCTION_CALL" : { "message" : [ "Failed preparing of the function for call. Please, double check function's arguments." ], "sqlState" : "38000" }, "FAILED_JDBC" : { "message" : [ "Failed JDBC on the operation:" ], "subClass" : { "ALTER_TABLE" : { "message" : [ "Alter the table ." ] }, "CREATE_INDEX" : { "message" : [ "Create the index in the table." ] }, "CREATE_NAMESPACE" : { "message" : [ "Create the namespace ." ] }, "CREATE_NAMESPACE_COMMENT" : { "message" : [ "Create a comment on the namespace: ." ] }, "CREATE_TABLE" : { "message" : [ "Create the table ." ] }, "DROP_INDEX" : { "message" : [ "Drop the index in the table." ] }, "DROP_NAMESPACE" : { "message" : [ "Drop the namespace ." ] }, "GET_TABLES" : { "message" : [ "Get tables from the namespace: ." ] }, "LIST_NAMESPACES" : { "message" : [ "List namespaces." ] }, "LOAD_TABLE" : { "message" : [ "Load the table ." ] }, "NAMESPACE_EXISTS" : { "message" : [ "Check that the namespace exists." ] }, "REMOVE_NAMESPACE_COMMENT" : { "message" : [ "Remove a comment on the namespace: ." ] }, "RENAME_TABLE" : { "message" : [ "Rename the table to ." ] }, "TABLE_EXISTS" : { "message" : [ "Check that the table exists." ] }, "UNCLASSIFIED" : { "message" : [ "" ] } }, "sqlState" : "HV000" }, "FAILED_PARSE_STRUCT_TYPE" : { "message" : [ "Failed parsing struct: ." ], "sqlState" : "22018" }, "FAILED_READ_FILE" : { "message" : [ "Encountered error while reading file ." ], "subClass" : { "CANNOT_READ_FILE_FOOTER" : { "message" : [ "Could not read footer. Please ensure that the file is in either ORC or Parquet format.", "If not, please convert it to a valid format. If the file is in the valid format, please check if it is corrupt.", "If it is, you can choose to either ignore it or fix the corruption." ] }, "FILE_NOT_EXIST" : { "message" : [ "File does not exist. It is possible the underlying files have been updated.", "You can explicitly invalidate the cache in Spark by running 'REFRESH TABLE tableName' command in SQL or by recreating the Dataset/DataFrame involved." ] }, "NO_HINT" : { "message" : [ "" ] }, "PARQUET_COLUMN_DATA_TYPE_MISMATCH" : { "message" : [ "Data type mismatches when reading Parquet column . Expected Spark type , actual Parquet type ." ] } }, "sqlState" : "KD001" }, "FAILED_REGISTER_CLASS_WITH_KRYO" : { "message" : [ "Failed to register classes with Kryo." ], "sqlState" : "KD000" }, "FAILED_RENAME_PATH" : { "message" : [ "Failed to rename to as destination already exists." ], "sqlState" : "42K04" }, "FAILED_RENAME_TEMP_FILE" : { "message" : [ "Failed to rename temp file to as FileSystem.rename returned false." 
], "sqlState" : "58030" }, "FAILED_ROW_TO_JSON" : { "message" : [ "Failed to convert the row value of the class to the target SQL type in the JSON format." ], "sqlState" : "2203G" }, "FAILED_TO_PARSE_TOO_COMPLEX" : { "message" : [ "The statement, including potential SQL functions and referenced views, was too complex to parse.", "To mitigate this error divide the statement into multiple, less complex chunks." ], "sqlState" : "54001" }, "FEATURE_NOT_ENABLED" : { "message" : [ "The feature is not enabled. Consider setting the config to to enable this capability." ], "sqlState" : "56038" }, "FIELD_ALREADY_EXISTS" : { "message" : [ "Cannot column, because already exists in ." ], "sqlState" : "42710" }, "FIELD_NOT_FOUND" : { "message" : [ "No such struct field in ." ], "sqlState" : "42704" }, "FLATMAPGROUPSWITHSTATE_USER_FUNCTION_ERROR" : { "message" : [ "An error occurred in the user provided function in flatMapGroupsWithState. Reason: " ], "sqlState" : "39000" }, "FORBIDDEN_OPERATION" : { "message" : [ "The operation is not allowed on the : ." ], "sqlState" : "42809" }, "FOREACH_BATCH_USER_FUNCTION_ERROR" : { "message" : [ "An error occurred in the user provided function in foreach batch sink. Reason: " ], "sqlState" : "39000" }, "FOREACH_USER_FUNCTION_ERROR" : { "message" : [ "An error occurred in the user provided function in foreach sink. Reason: " ], "sqlState" : "39000" }, "FOUND_MULTIPLE_DATA_SOURCES" : { "message" : [ "Detected multiple data sources with the name ''. Please check the data source isn't simultaneously registered and located in the classpath." ], "sqlState" : "42710" }, "GENERATED_COLUMN_WITH_DEFAULT_VALUE" : { "message" : [ "A column cannot have both a default value and a generation expression but column has default value: () and generation expression: ()." ], "sqlState" : "42623" }, "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION" : { "message" : [ "Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version." ], "sqlState" : "56038" }, "GRAPHITE_SINK_INVALID_PROTOCOL" : { "message" : [ "Invalid Graphite protocol: ." ], "sqlState" : "KD000" }, "GRAPHITE_SINK_PROPERTY_MISSING" : { "message" : [ "Graphite sink requires '' property." ], "sqlState" : "KD000" }, "GROUPING_COLUMN_MISMATCH" : { "message" : [ "Column of grouping () can't be found in grouping columns ." ], "sqlState" : "42803" }, "GROUPING_ID_COLUMN_MISMATCH" : { "message" : [ "Columns of grouping_id () does not match grouping columns ()." ], "sqlState" : "42803" }, "GROUPING_SIZE_LIMIT_EXCEEDED" : { "message" : [ "Grouping sets size cannot be greater than ." ], "sqlState" : "54000" }, "GROUP_BY_AGGREGATE" : { "message" : [ "Aggregate functions are not allowed in GROUP BY, but found ." ], "sqlState" : "42903" }, "GROUP_BY_POS_AGGREGATE" : { "message" : [ "GROUP BY refers to an expression that contains an aggregate function. Aggregate functions are not allowed in GROUP BY." ], "sqlState" : "42903" }, "GROUP_BY_POS_OUT_OF_RANGE" : { "message" : [ "GROUP BY position is not in select list (valid range is [1, ])." ], "sqlState" : "42805" }, "GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE" : { "message" : [ "The expression cannot be used as a grouping expression because its data type is not an orderable data type." ], "sqlState" : "42822" }, "HLL_INVALID_INPUT_SKETCH_BUFFER" : { "message" : [ "Invalid call to ; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function)." 
], "sqlState" : "22546" }, "HLL_INVALID_LG_K" : { "message" : [ "Invalid call to ; the `lgConfigK` value must be between and , inclusive: ." ], "sqlState" : "22546" }, "HLL_UNION_DIFFERENT_LG_K" : { "message" : [ "Sketches have different `lgConfigK` values: and . Set the `allowDifferentLgConfigK` parameter to true to call with different `lgConfigK` values." ], "sqlState" : "22000" }, "IDENTIFIER_TOO_MANY_NAME_PARTS" : { "message" : [ " is not a valid identifier as it has more than 2 name parts." ], "sqlState" : "42601" }, "IDENTITY_COLUMNS_DUPLICATED_SEQUENCE_GENERATOR_OPTION" : { "message" : [ "Duplicated IDENTITY column sequence generator option: ." ], "sqlState" : "42601" }, "IDENTITY_COLUMNS_ILLEGAL_STEP" : { "message" : [ "IDENTITY column step cannot be 0." ], "sqlState" : "42611" }, "IDENTITY_COLUMNS_UNSUPPORTED_DATA_TYPE" : { "message" : [ "DataType is not supported for IDENTITY columns." ], "sqlState" : "428H2" }, "IDENTITY_COLUMN_WITH_DEFAULT_VALUE" : { "message" : [ "A column cannot have both a default value and an identity column specification but column has default value: () and identity column specification: ()." ], "sqlState" : "42623" }, "ILLEGAL_DAY_OF_WEEK" : { "message" : [ "Illegal input for day of week: ." ], "sqlState" : "22009" }, "ILLEGAL_STATE_STORE_VALUE" : { "message" : [ "Illegal value provided to the State Store" ], "subClass" : { "EMPTY_LIST_VALUE" : { "message" : [ "Cannot write empty list values to State Store for StateName ." ] }, "NULL_VALUE" : { "message" : [ "Cannot write null values to State Store for StateName ." ] } }, "sqlState" : "42601" }, "INCOMPARABLE_PIVOT_COLUMN" : { "message" : [ "Invalid pivot column . Pivot columns must be comparable." ], "sqlState" : "42818" }, "INCOMPATIBLE_COLUMN_TYPE" : { "message" : [ " can only be performed on tables with compatible column types. The column of the table is type which is not compatible with at the same column of the first table.." ], "sqlState" : "42825" }, "INCOMPATIBLE_DATASOURCE_REGISTER" : { "message" : [ "Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: " ], "sqlState" : "56038" }, "INCOMPATIBLE_DATA_FOR_TABLE" : { "message" : [ "Cannot write incompatible data for the table :" ], "subClass" : { "AMBIGUOUS_COLUMN_NAME" : { "message" : [ "Ambiguous column name in the input data ." ] }, "CANNOT_FIND_DATA" : { "message" : [ "Cannot find data for the output column ." ] }, "CANNOT_SAFELY_CAST" : { "message" : [ "Cannot safely cast to ." ] }, "EXTRA_COLUMNS" : { "message" : [ "Cannot write extra columns ." ] }, "EXTRA_STRUCT_FIELDS" : { "message" : [ "Cannot write extra fields to the struct ." ] }, "NULLABLE_ARRAY_ELEMENTS" : { "message" : [ "Cannot write nullable elements to array of non-nulls: ." ] }, "NULLABLE_COLUMN" : { "message" : [ "Cannot write nullable values to non-null column ." ] }, "NULLABLE_MAP_VALUES" : { "message" : [ "Cannot write nullable values to map of non-nulls: ." ] }, "STRUCT_MISSING_FIELDS" : { "message" : [ "Struct missing fields: ." ] }, "UNEXPECTED_COLUMN_NAME" : { "message" : [ "Struct -th field name does not match (may be out of order): expected , found ." ] } }, "sqlState" : "KD000" }, "INCOMPATIBLE_JOIN_TYPES" : { "message" : [ "The join types and are incompatible." ], "sqlState" : "42613" }, "INCOMPATIBLE_VIEW_SCHEMA_CHANGE" : { "message" : [ "The SQL query of view has an incompatible schema change and column cannot be resolved. 
Expected columns named but got .", "Please try to re-create the view by running: ." ], "sqlState" : "51024" }, "INCOMPLETE_TYPE_DEFINITION" : { "message" : [ "Incomplete complex type:" ], "subClass" : { "ARRAY" : { "message" : [ "The definition of \"ARRAY\" type is incomplete. You must provide an element type. For example: \"ARRAY\"." ] }, "MAP" : { "message" : [ "The definition of \"MAP\" type is incomplete. You must provide a key type and a value type. For example: \"MAP\"." ] }, "STRUCT" : { "message" : [ "The definition of \"STRUCT\" type is incomplete. You must provide at least one field type. For example: \"STRUCT\"." ] } }, "sqlState" : "42K01" }, "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : { "message" : [ "You may get a different result due to the upgrading to" ], "subClass" : { "DATETIME_PATTERN_RECOGNITION" : { "message" : [ "Spark >= 3.0:", "Fail to recognize pattern in the DateTimeFormatter.", "1) You can set to \"LEGACY\" to restore the behavior before Spark 3.0.", "2) You can form a valid datetime pattern with the guide from '/sql-ref-datetime-pattern.html'." ] }, "DATETIME_WEEK_BASED_PATTERN" : { "message" : [ "Spark >= 3.0:", "All week-based patterns are unsupported since Spark 3.0, detected week-based character: .", "Please use the SQL function EXTRACT instead." ] }, "PARSE_DATETIME_BY_NEW_PARSER" : { "message" : [ "Spark >= 3.0:", "Fail to parse in the new parser.", "You can set to \"LEGACY\" to restore the behavior before Spark 3.0, or set to \"CORRECTED\" and treat it as an invalid datetime string." ] }, "READ_ANCIENT_DATETIME" : { "message" : [ "Spark >= 3.0: reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z from files can be ambiguous, as the files may be written by", "Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar that is different from Spark 3.0+'s Proleptic Gregorian calendar.", "See more details in SPARK-31404.", "You can set the SQL config or the datasource option
's column with type to with type ." ], "sqlState" : "0A000" }, "NOT_SUPPORTED_COMMAND_FOR_V2_TABLE" : { "message" : [ " is not supported for v2 tables." ], "sqlState" : "0A000" }, "NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT" : { "message" : [ " is not supported, if you want to enable it, please set \"spark.sql.catalogImplementation\" to \"hive\"." ], "sqlState" : "0A000" }, "NOT_SUPPORTED_IN_JDBC_CATALOG" : { "message" : [ "Not supported command in JDBC catalog:" ], "subClass" : { "COMMAND" : { "message" : [ "" ] }, "COMMAND_WITH_PROPERTY" : { "message" : [ " with property ." ] } }, "sqlState" : "0A000" }, "NOT_UNRESOLVED_ENCODER" : { "message" : [ "Unresolved encoder expected, but was found." ], "sqlState" : "42601" }, "NO_DEFAULT_COLUMN_VALUE_AVAILABLE" : { "message" : [ "Can't determine the default value for since it is not nullable and it has no default value." ], "sqlState" : "42608" }, "NO_HANDLER_FOR_UDAF" : { "message" : [ "No handler for UDAF ''. Use sparkSession.udf.register(...) instead." ], "sqlState" : "42000" }, "NO_MERGE_ACTION_SPECIFIED" : { "message" : [ "df.mergeInto needs to be followed by at least one of whenMatched/whenNotMatched/whenNotMatchedBySource." ], "sqlState" : "42K0E" }, "NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : { "message" : [ "Cannot find in Protobuf schema." ], "sqlState" : "42S22" }, "NO_UDF_INTERFACE" : { "message" : [ "UDF class doesn't implement any UDF interface." ], "sqlState" : "38000" }, "NULLABLE_COLUMN_OR_FIELD" : { "message" : [ "Column or field is nullable while it's required to be non-nullable." ], "sqlState" : "42000" }, "NULLABLE_ROW_ID_ATTRIBUTES" : { "message" : [ "Row ID attributes cannot be nullable: ." ], "sqlState" : "42000" }, "NULL_DATA_SOURCE_OPTION" : { "message" : [ "Data source read/write option
." ] } }, "sqlState" : "42902" }, "UNSUPPORTED_SAVE_MODE" : { "message" : [ "The save mode is not supported for:" ], "subClass" : { "EXISTENT_PATH" : { "message" : [ "an existent path." ] }, "NON_EXISTENT_PATH" : { "message" : [ "a non-existent path." ] } }, "sqlState" : "0A000" }, "UNSUPPORTED_SHOW_CREATE_TABLE" : { "message" : [ "Unsupported a SHOW CREATE TABLE command." ], "subClass" : { "ON_DATA_SOURCE_TABLE_WITH_AS_SERDE" : { "message" : [ "The table is a Spark data source table. Please use SHOW CREATE TABLE without AS SERDE instead." ] }, "ON_TEMPORARY_VIEW" : { "message" : [ "The command is not supported on a temporary view ." ] }, "ON_TRANSACTIONAL_HIVE_TABLE" : { "message" : [ "Failed to execute the command against transactional Hive table .", "Please use SHOW CREATE TABLE AS SERDE to show Hive DDL instead." ] }, "WITH_UNSUPPORTED_FEATURE" : { "message" : [ "Failed to execute the command against table/view which is created by Hive and uses the following unsupported features", "" ] }, "WITH_UNSUPPORTED_SERDE_CONFIGURATION" : { "message" : [ "Failed to execute the command against the table which is created by Hive and uses the following unsupported serde configuration", "", "Please use SHOW CREATE TABLE AS SERDE to show Hive DDL instead." ] } }, "sqlState" : "0A000" }, "UNSUPPORTED_STREAMING_OPERATOR_WITHOUT_WATERMARK" : { "message" : [ " output mode not supported for on streaming DataFrames/DataSets without watermark." ], "sqlState" : "0A000" }, "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY" : { "message" : [ "Unsupported subquery expression:" ], "subClass" : { "ACCESSING_OUTER_QUERY_COLUMN_IS_NOT_ALLOWED" : { "message" : [ "Accessing outer query column is not allowed in this location:", "" ] }, "AGGREGATE_FUNCTION_MIXED_OUTER_LOCAL_REFERENCES" : { "message" : [ "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: ." ] }, "CORRELATED_COLUMN_IS_NOT_ALLOWED_IN_PREDICATE" : { "message" : [ "Correlated column is not allowed in predicate:", "" ] }, "CORRELATED_COLUMN_NOT_FOUND" : { "message" : [ "A correlated outer name reference within a subquery expression body was not found in the enclosing query: ." ] }, "CORRELATED_REFERENCE" : { "message" : [ "Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses: ." ] }, "HIGHER_ORDER_FUNCTION" : { "message" : [ "Subquery expressions are not supported within higher-order functions. Please remove all subquery expressions from higher-order functions and then try the query again." ] }, "LATERAL_JOIN_CONDITION_NON_DETERMINISTIC" : { "message" : [ "Lateral join condition cannot be non-deterministic: ." ] }, "MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY" : { "message" : [ "Correlated scalar subqueries must be aggregated to return at most one row." ] }, "NON_CORRELATED_COLUMNS_IN_GROUP_BY" : { "message" : [ "A GROUP BY clause in a scalar correlated subquery cannot contain non-correlated columns: ." ] }, "NON_DETERMINISTIC_LATERAL_SUBQUERIES" : { "message" : [ "Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row:", "" ] }, "UNSUPPORTED_CORRELATED_EXPRESSION_IN_JOIN_CONDITION" : { "message" : [ "Correlated subqueries in the join predicate cannot reference both join inputs:", "" ] }, "UNSUPPORTED_CORRELATED_REFERENCE_DATA_TYPE" : { "message" : [ "Correlated column reference '' cannot be type." 
] }, "UNSUPPORTED_CORRELATED_SCALAR_SUBQUERY" : { "message" : [ "Correlated scalar subqueries can only be used in filters, aggregations, projections, and UPDATE/MERGE/DELETE commands:", "" ] }, "UNSUPPORTED_IN_EXISTS_SUBQUERY" : { "message" : [ "IN/EXISTS predicate subqueries can only be used in filters, joins, aggregations, window functions, projections, and UPDATE/MERGE/DELETE commands:", "" ] }, "UNSUPPORTED_TABLE_ARGUMENT" : { "message" : [ "Table arguments are used in a function where they are not supported:", "" ] } }, "sqlState" : "0A000" }, "UNSUPPORTED_TYPED_LITERAL" : { "message" : [ "Literals of the type are not supported. Supported types are ." ], "sqlState" : "0A000" }, "UNTYPED_SCALA_UDF" : { "message" : [ "You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could:", "1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`.", "2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive.", "3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use this API with caution." ], "sqlState" : "42K0E" }, "USER_RAISED_EXCEPTION" : { "message" : [ "" ], "sqlState" : "P0001" }, "USER_RAISED_EXCEPTION_PARAMETER_MISMATCH" : { "message" : [ "The `raise_error()` function was used to raise error class: which expects parameters: .", "The provided parameters do not match the expected parameters.", "Please make sure to provide all expected parameters." ], "sqlState" : "P0001" }, "USER_RAISED_EXCEPTION_UNKNOWN_ERROR_CLASS" : { "message" : [ "The `raise_error()` function was used to raise an unknown error class: " ], "sqlState" : "P0001" }, "VARIABLE_ALREADY_EXISTS" : { "message" : [ "Cannot create the variable because it already exists.", "Choose a different name, or drop or replace the existing variable." ], "sqlState" : "42723" }, "VARIABLE_NOT_FOUND" : { "message" : [ "The variable cannot be found. Verify the spelling and correctness of the schema and catalog.", "If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.", "To tolerate the error on drop use DROP VARIABLE IF EXISTS." ], "sqlState" : "42883" }, "VARIANT_CONSTRUCTOR_SIZE_LIMIT" : { "message" : [ "Cannot construct a Variant larger than 16 MiB. The maximum allowed size of a Variant value is 16 MiB." ], "sqlState" : "22023" }, "VARIANT_DUPLICATE_KEY" : { "message" : [ "Failed to build variant because of a duplicate object key ``." ], "sqlState" : "22023" }, "VARIANT_SIZE_LIMIT" : { "message" : [ "Cannot build variant bigger than in .", "Please avoid large input strings to this expression (for example, add function calls(s) to check the expression size and convert it to NULL first if it is too big)." ], "sqlState" : "22023" }, "VIEW_ALREADY_EXISTS" : { "message" : [ "Cannot create view because it already exists.", "Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects." 
], "sqlState" : "42P07" }, "VIEW_EXCEED_MAX_NESTED_DEPTH" : { "message" : [ "The depth of view exceeds the maximum view resolution depth ().", "Analysis is aborted to avoid errors. If you want to work around this, please try to increase the value of \"spark.sql.view.maxNestedViewDepth\"." ], "sqlState" : "54K00" }, "VIEW_NOT_FOUND" : { "message" : [ "The view cannot be found. Verify the spelling and correctness of the schema and catalog.", "If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.", "To tolerate the error on drop use DROP VIEW IF EXISTS." ], "sqlState" : "42P01" }, "WINDOW_FUNCTION_AND_FRAME_MISMATCH" : { "message" : [ " function can only be evaluated in an ordered row-based window frame with a single offset: ." ], "sqlState" : "42K0E" }, "WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE" : { "message" : [ "Window function requires an OVER clause." ], "sqlState" : "42601" }, "WRITE_STREAM_NOT_ALLOWED" : { "message" : [ "`writeStream` can be called only on streaming Dataset/DataFrame." ], "sqlState" : "42601" }, "WRONG_COMMAND_FOR_OBJECT_TYPE" : { "message" : [ "The operation requires a . But is a . Use instead." ], "sqlState" : "42809" }, "WRONG_NUM_ARGS" : { "message" : [ "The requires parameters but the actual number is ." ], "subClass" : { "WITHOUT_SUGGESTION" : { "message" : [ "Please, refer to '/sql-ref-functions.html' for a fix." ] }, "WITH_SUGGESTION" : { "message" : [ "If you have to call this function with parameters, set the legacy configuration to ." ] } }, "sqlState" : "42605" }, "XML_ROW_TAG_MISSING" : { "message" : [ " option is required for reading files in XML format." ], "sqlState" : "42KDF" }, "_LEGACY_ERROR_TEMP_0001" : { "message" : [ "Invalid InsertIntoContext." ] }, "_LEGACY_ERROR_TEMP_0004" : { "message" : [ "Empty source for merge: you should specify a source table/subquery in merge." ] }, "_LEGACY_ERROR_TEMP_0006" : { "message" : [ "The number of inserted values cannot match the fields." ] }, "_LEGACY_ERROR_TEMP_0008" : { "message" : [ "There must be at least one WHEN clause in a MERGE statement." ] }, "_LEGACY_ERROR_TEMP_0012" : { "message" : [ "DISTRIBUTE BY is not supported." ] }, "_LEGACY_ERROR_TEMP_0014" : { "message" : [ "TABLESAMPLE does not accept empty inputs." ] }, "_LEGACY_ERROR_TEMP_0015" : { "message" : [ "TABLESAMPLE() is not supported." ] }, "_LEGACY_ERROR_TEMP_0016" : { "message" : [ " is not a valid byte length literal, expected syntax: DIGIT+ ('B' | 'K' | 'M' | 'G')." ] }, "_LEGACY_ERROR_TEMP_0018" : { "message" : [ "Function trim doesn't support with type . Please use BOTH, LEADING or TRAILING as trim type." ] }, "_LEGACY_ERROR_TEMP_0024" : { "message" : [ "Can only have a single from-to unit in the interval literal syntax." ] }, "_LEGACY_ERROR_TEMP_0026" : { "message" : [ "Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: ." ] }, "_LEGACY_ERROR_TEMP_0027" : { "message" : [ "The value of from-to unit must be a string." ] }, "_LEGACY_ERROR_TEMP_0028" : { "message" : [ "Intervals FROM TO are not supported." ] }, "_LEGACY_ERROR_TEMP_0029" : { "message" : [ "Cannot mix year-month and day-time fields: ." ] }, "_LEGACY_ERROR_TEMP_0031" : { "message" : [ "Invalid number of buckets: ." ] }, "_LEGACY_ERROR_TEMP_0032" : { "message" : [ "Duplicated table paths found: '' and ''. 
LOCATION and the case insensitive key 'path' in OPTIONS are all used to indicate the custom table path, you can only specify one of them." ] }, "_LEGACY_ERROR_TEMP_0033" : { "message" : [ "Expected either STORED AS or STORED BY, not both." ] }, "_LEGACY_ERROR_TEMP_0034" : { "message" : [ " is not supported in Hive-style ." ] }, "_LEGACY_ERROR_TEMP_0035" : { "message" : [ "Operation not allowed: ." ] }, "_LEGACY_ERROR_TEMP_0037" : { "message" : [ "It is not allowed to add catalog/namespace prefix to the table name in CACHE TABLE AS SELECT." ] }, "_LEGACY_ERROR_TEMP_0038" : { "message" : [ "CTE definition can't have duplicate names: ." ] }, "_LEGACY_ERROR_TEMP_0039" : { "message" : [ "Unsupported SQL statement." ] }, "_LEGACY_ERROR_TEMP_0043" : { "message" : [ "Expected format is 'RESET' or 'RESET key'. If you want to include special characters in key, please use quotes, e.g., RESET `key`." ] }, "_LEGACY_ERROR_TEMP_0044" : { "message" : [ "The interval value must be in the range of [-18, +18] hours with second precision." ] }, "_LEGACY_ERROR_TEMP_0045" : { "message" : [ "Invalid time zone displacement value." ] }, "_LEGACY_ERROR_TEMP_0046" : { "message" : [ "CREATE TEMPORARY TABLE without a provider is not allowed." ] }, "_LEGACY_ERROR_TEMP_0047" : { "message" : [ "'ROW FORMAT' must be used with 'STORED AS'." ] }, "_LEGACY_ERROR_TEMP_0048" : { "message" : [ "Unsupported operation: Used defined record reader/writer classes." ] }, "_LEGACY_ERROR_TEMP_0049" : { "message" : [ "Directory path and 'path' in OPTIONS should be specified one, but not both." ] }, "_LEGACY_ERROR_TEMP_0051" : { "message" : [ "Empty set in grouping sets is not supported." ] }, "_LEGACY_ERROR_TEMP_0052" : { "message" : [ "CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed." ] }, "_LEGACY_ERROR_TEMP_0053" : { "message" : [ "It is not allowed to define a TEMPORARY view with IF NOT EXISTS." ] }, "_LEGACY_ERROR_TEMP_0056" : { "message" : [ "Invalid time travel spec: ." ] }, "_LEGACY_ERROR_TEMP_0060" : { "message" : [ "." ] }, "_LEGACY_ERROR_TEMP_0062" : { "message" : [ "." ] }, "_LEGACY_ERROR_TEMP_0063" : { "message" : [ "." ] }, "_LEGACY_ERROR_TEMP_0064" : { "message" : [ "." ] }, "_LEGACY_ERROR_TEMP_1000" : { "message" : [ "LEGACY store assignment policy is disallowed in Spark data source V2. Please set the configuration to other values." ] }, "_LEGACY_ERROR_TEMP_1002" : { "message" : [ "Unable to generate an encoder for inner class `` without access to the scope that this class was defined in.", "Try moving this class out of its parent class." ] }, "_LEGACY_ERROR_TEMP_1005" : { "message" : [ " doesn't show up in the GROUP BY list ." ] }, "_LEGACY_ERROR_TEMP_1006" : { "message" : [ "Aggregate expression required for pivot, but '' did not appear in any aggregate function." ] }, "_LEGACY_ERROR_TEMP_1007" : { "message" : [ "Cannot write into temp view as it's not a data source v2 relation." ] }, "_LEGACY_ERROR_TEMP_1008" : { "message" : [ " is not a temp view of streaming logical plan, please use batch API such as `DataFrameReader.table` to read it." ] }, "_LEGACY_ERROR_TEMP_1011" : { "message" : [ "Writing into a view is not allowed. View: ." ] }, "_LEGACY_ERROR_TEMP_1012" : { "message" : [ "Cannot write into v1 table: ." ] }, "_LEGACY_ERROR_TEMP_1017" : { "message" : [ " is a built-in/temporary function. '' expects a persistent function.." ] }, "_LEGACY_ERROR_TEMP_1018" : { "message" : [ " is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet." 
] }, "_LEGACY_ERROR_TEMP_1021" : { "message" : [ "count(.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)." ] }, "_LEGACY_ERROR_TEMP_1030" : { "message" : [ "Window aggregate function with filter predicate is not supported yet." ] }, "_LEGACY_ERROR_TEMP_1031" : { "message" : [ "It is not allowed to use a window function inside an aggregate function. Please use the inner window function in a sub-query." ] }, "_LEGACY_ERROR_TEMP_1032" : { "message" : [ " does not have any WindowExpression." ] }, "_LEGACY_ERROR_TEMP_1033" : { "message" : [ " has multiple Window Specifications ().", "Please file a bug report with this error message, stack trace, and the query." ] }, "_LEGACY_ERROR_TEMP_1034" : { "message" : [ "It is not allowed to use window functions inside clause." ] }, "_LEGACY_ERROR_TEMP_1035" : { "message" : [ "Cannot specify window frame for function." ] }, "_LEGACY_ERROR_TEMP_1036" : { "message" : [ "Window Frame must match the required frame ." ] }, "_LEGACY_ERROR_TEMP_1037" : { "message" : [ "Window function requires window to be ordered, please add ORDER BY clause. For example SELECT (value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table." ] }, "_LEGACY_ERROR_TEMP_1039" : { "message" : [ "Multiple time/session window expressions would result in a cartesian product of rows, therefore they are currently not supported." ] }, "_LEGACY_ERROR_TEMP_1040" : { "message" : [ "Gap duration expression used in session window must be CalendarIntervalType, but got
." ] }, "_LEGACY_ERROR_TEMP_1045" : { "message" : [ "ALTER TABLE SET LOCATION does not support partition for v2 tables." ] }, "_LEGACY_ERROR_TEMP_1046" : { "message" : [ "Join strategy hint parameter should be an identifier or string but was ()." ] }, "_LEGACY_ERROR_TEMP_1047" : { "message" : [ " Hint parameter should include columns, but found." ] }, "_LEGACY_ERROR_TEMP_1048" : { "message" : [ " Hint expects a partition number as a parameter." ] }, "_LEGACY_ERROR_TEMP_1049" : { "message" : [ "Syntax error in attribute name: ." ] }, "_LEGACY_ERROR_TEMP_1050" : { "message" : [ "Can only star expand struct data types. Attribute: ``." ] }, "_LEGACY_ERROR_TEMP_1052" : { "message" : [ "ADD COLUMN with v1 tables cannot specify NOT NULL." ] }, "_LEGACY_ERROR_TEMP_1058" : { "message" : [ "Cannot create table with both USING and ." ] }, "_LEGACY_ERROR_TEMP_1059" : { "message" : [ "STORED AS with file format '' is invalid." ] }, "_LEGACY_ERROR_TEMP_1060" : { "message" : [ " does not support nested column: ." ] }, "_LEGACY_ERROR_TEMP_1066" : { "message" : [ " is a system preserved database, you cannot create a database with this name." ] }, "_LEGACY_ERROR_TEMP_1068" : { "message" : [ " is a system preserved database, you cannot use it as current database. To access global temporary views, you should use qualified name with the GLOBAL_TEMP_DATABASE, e.g. SELECT * FROM .viewName." ] }, "_LEGACY_ERROR_TEMP_1069" : { "message" : [ "CREATE EXTERNAL TABLE must be accompanied by LOCATION." ] }, "_LEGACY_ERROR_TEMP_1071" : { "message" : [ "Some existing schema fields () are not present in the new schema. We don't support dropping columns yet." ] }, "_LEGACY_ERROR_TEMP_1072" : { "message" : [ "Only the tables/views belong to the same database can be retrieved. Querying tables/views are ." ] }, "_LEGACY_ERROR_TEMP_1073" : { "message" : [ "RENAME TABLE source and destination databases do not match: '' != ''." ] }, "_LEGACY_ERROR_TEMP_1074" : { "message" : [ "RENAME TEMPORARY VIEW from '' to '': cannot specify database name '' in the destination table." ] }, "_LEGACY_ERROR_TEMP_1076" : { "message" : [ "Partition spec is invalid.
." ] }, "_LEGACY_ERROR_TEMP_1079" : { "message" : [ "Resource Type '' is not supported." ] }, "_LEGACY_ERROR_TEMP_1080" : { "message" : [ "Table did not specify database." ] }, "_LEGACY_ERROR_TEMP_1081" : { "message" : [ "Table did not specify locationUri." ] }, "_LEGACY_ERROR_TEMP_1082" : { "message" : [ "Partition [] did not specify locationUri." ] }, "_LEGACY_ERROR_TEMP_1083" : { "message" : [ "Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (``). Got ``." ] }, "_LEGACY_ERROR_TEMP_1089" : { "message" : [ "Column statistics deserialization is not supported for column of data type: ." ] }, "_LEGACY_ERROR_TEMP_1090" : { "message" : [ "Column statistics serialization is not supported for column of data type: ." ] }, "_LEGACY_ERROR_TEMP_1097" : { "message" : [ "The field for corrupt records must be string type and nullable." ] }, "_LEGACY_ERROR_TEMP_1098" : { "message" : [ "DataType '' is not supported by ." ] }, "_LEGACY_ERROR_TEMP_1099" : { "message" : [ "() doesn't support the mode. Acceptable modes are and ." ] }, "_LEGACY_ERROR_TEMP_1103" : { "message" : [ "Unsupported component type in arrays." ] }, "_LEGACY_ERROR_TEMP_1104" : { "message" : [ "The second argument should be a double literal." ] }, "_LEGACY_ERROR_TEMP_1107" : { "message" : [ "Table
declares capability but is not an instance of ." ] }, "_LEGACY_ERROR_TEMP_1108" : { "message" : [ "Delete by condition with subquery is not supported: ." ] }, "_LEGACY_ERROR_TEMP_1109" : { "message" : [ "Exec update failed: cannot translate expression to source filter: ." ] }, "_LEGACY_ERROR_TEMP_1110" : { "message" : [ "Cannot delete from table
where ." ] }, "_LEGACY_ERROR_TEMP_1111" : { "message" : [ "DESCRIBE does not support partition for v2 tables." ] }, "_LEGACY_ERROR_TEMP_1114" : { "message" : [ "The streaming sources in a query do not have a common supported execution mode.", "Sources support micro-batch: .", "Sources support continuous: ." ] }, "_LEGACY_ERROR_TEMP_1120" : { "message" : [ "Unsupported NamespaceChange in JDBC catalog." ] }, "_LEGACY_ERROR_TEMP_1121" : { "message" : [ "Table does not support :
." ] }, "_LEGACY_ERROR_TEMP_1122" : { "message" : [ "Table
is not a row-level operation table." ] }, "_LEGACY_ERROR_TEMP_1123" : { "message" : [ "Cannot rename a table with ALTER VIEW. Please use ALTER TABLE instead." ] }, "_LEGACY_ERROR_TEMP_1125" : { "message" : [ "Database from v1 session catalog is not specified." ] }, "_LEGACY_ERROR_TEMP_1126" : { "message" : [ "Nested databases are not supported by v1 session catalog: ." ] }, "_LEGACY_ERROR_TEMP_1127" : { "message" : [ "Invalid partitionExprs specified: For range partitioning use REPARTITION_BY_RANGE instead." ] }, "_LEGACY_ERROR_TEMP_1128" : { "message" : [ "Failed to resolve the schema for for the partition column: . It must be specified manually." ] }, "_LEGACY_ERROR_TEMP_1132" : { "message" : [ "A schema needs to be specified when using ." ] }, "_LEGACY_ERROR_TEMP_1133" : { "message" : [ "The user-specified schema doesn't match the actual schema:", "user-specified: , actual: . If you're using", "DataFrameReader.schema API or creating a table, please do not specify the schema.", "Or if you're scanning an existed table, please drop it and re-create it." ] }, "_LEGACY_ERROR_TEMP_1134" : { "message" : [ "Unable to infer schema for at . It must be specified manually." ] }, "_LEGACY_ERROR_TEMP_1135" : { "message" : [ " is not a valid Spark SQL Data Source." ] }, "_LEGACY_ERROR_TEMP_1136" : { "message" : [ "Cannot save interval data type into external storage." ] }, "_LEGACY_ERROR_TEMP_1137" : { "message" : [ "Unable to resolve given []." ] }, "_LEGACY_ERROR_TEMP_1138" : { "message" : [ "Hive built-in ORC data source must be used with Hive support enabled. Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'." ] }, "_LEGACY_ERROR_TEMP_1139" : { "message" : [ "Failed to find data source: . Avro is built-in but external data source module since Spark 2.4. Please deploy the application as per the deployment section of Apache Avro Data Source Guide." ] }, "_LEGACY_ERROR_TEMP_1140" : { "message" : [ "Failed to find data source: . Please deploy the application as per the deployment section of Structured Streaming + Kafka Integration Guide." ] }, "_LEGACY_ERROR_TEMP_1141" : { "message" : [ "Multiple sources found for (), please specify the fully qualified class name." ] }, "_LEGACY_ERROR_TEMP_1142" : { "message" : [ "Datasource does not support writing empty or nested empty schemas. Please make sure the data schema has at least one or more column(s)." ] }, "_LEGACY_ERROR_TEMP_1143" : { "message" : [ "The data to be inserted needs to have the same number of columns as the target table: target table has column(s) but the inserted data has column(s), which contain partition column(s) having assigned constant values." ] }, "_LEGACY_ERROR_TEMP_1144" : { "message" : [ "The data to be inserted needs to have the same number of partition columns as the target table: target table has partition column(s) but the inserted data has partition columns specified." ] }, "_LEGACY_ERROR_TEMP_1145" : { "message" : [ " is not a partition column. Partition columns are ." ] }, "_LEGACY_ERROR_TEMP_1146" : { "message" : [ "Partition column have multiple values specified, . Please only specify a single value." ] }, "_LEGACY_ERROR_TEMP_1147" : { "message" : [ "The ordering of partition columns is . All partition columns having constant values need to appear before other partition columns that do not have an assigned constant value." ] }, "_LEGACY_ERROR_TEMP_1149" : { "message" : [ "Fail to rebuild expression: missing key in `translatedFilterToExpr`." 
] }, "_LEGACY_ERROR_TEMP_1151" : { "message" : [ "Fail to resolve data source for the table
since the table serde property has the duplicated key with extra options specified for this scan operation. To fix this, you can rollback to the legacy behavior of ignoring the extra options by setting the config to `false`, or address the conflicts of the same config." ] }, "_LEGACY_ERROR_TEMP_1155" : { "message" : [ "Partition column `` not found in schema ." ] }, "_LEGACY_ERROR_TEMP_1156" : { "message" : [ "Column not found in schema ." ] }, "_LEGACY_ERROR_TEMP_1158" : { "message" : [ "Saving data into a view is not allowed." ] }, "_LEGACY_ERROR_TEMP_1159" : { "message" : [ "The format of the existing table is ``. It doesn't match the specified format ``." ] }, "_LEGACY_ERROR_TEMP_1160" : { "message" : [ "The location of the existing table is ``. It doesn't match the specified location ``." ] }, "_LEGACY_ERROR_TEMP_1161" : { "message" : [ "The column number of the existing table () doesn't match the data schema ()." ] }, "_LEGACY_ERROR_TEMP_1162" : { "message" : [ "Cannot resolve '' given input columns: []." ] }, "_LEGACY_ERROR_TEMP_1163" : { "message" : [ "Specified partitioning does not match that of the existing table .", "Specified partition columns: [].", "Existing partition columns: []." ] }, "_LEGACY_ERROR_TEMP_1164" : { "message" : [ "Specified bucketing does not match that of the existing table .", "Specified bucketing: .", "Existing bucketing: ." ] }, "_LEGACY_ERROR_TEMP_1165" : { "message" : [ "It is not allowed to specify partitioning when the table schema is not defined." ] }, "_LEGACY_ERROR_TEMP_1166" : { "message" : [ "Bucketing column '' should not be part of partition columns ''." ] }, "_LEGACY_ERROR_TEMP_1167" : { "message" : [ "Bucket sorting column '' should not be part of partition columns ''." ] }, "_LEGACY_ERROR_TEMP_1169" : { "message" : [ "Requested partitioning does not match the table :", "Requested partitions: .", "Table partitions: ." ] }, "_LEGACY_ERROR_TEMP_1171" : { "message" : [ "createTableColumnTypes option column not found in schema ." ] }, "_LEGACY_ERROR_TEMP_1181" : { "message" : [ "Stream-stream join without equality predicate is not supported." ] }, "_LEGACY_ERROR_TEMP_1182" : { "message" : [ "Column are ambiguous. It's probably because you joined several Datasets together, and some of these Datasets are the same. This column points to one of the Datasets but Spark is unable to figure out which one. Please alias the Datasets with different names via `Dataset.as` before joining them, and specify the column using qualified name, e.g. `df.as(\"a\").join(df.as(\"b\"), $\"a.id\" > $\"b.id\")`. You can also set to false to disable this check." ] }, "_LEGACY_ERROR_TEMP_1183" : { "message" : [ "Cannot use \"INTERVAL\" type in the table schema." ] }, "_LEGACY_ERROR_TEMP_1184" : { "message" : [ "Catalog does not support ." ] }, "_LEGACY_ERROR_TEMP_1186" : { "message" : [ "Multi-part identifier cannot be empty." ] }, "_LEGACY_ERROR_TEMP_1187" : { "message" : [ "Hive data source can only be used with tables, you can not files of Hive data source directly." ] }, "_LEGACY_ERROR_TEMP_1188" : { "message" : [ "There is a 'path' option set and () is called with a path parameter. Either remove the path option, or call () without the parameter. To ignore this check, set '' to 'true'." ] }, "_LEGACY_ERROR_TEMP_1189" : { "message" : [ "User specified schema not supported with ``." ] }, "_LEGACY_ERROR_TEMP_1190" : { "message" : [ "Temporary view doesn't support streaming write." ] }, "_LEGACY_ERROR_TEMP_1191" : { "message" : [ "Streaming into views is not supported." 
] }, "_LEGACY_ERROR_TEMP_1192" : { "message" : [ "The input source() is different from the table 's data source provider()." ] }, "_LEGACY_ERROR_TEMP_1193" : { "message" : [ "Table doesn't support streaming write - ." ] }, "_LEGACY_ERROR_TEMP_1194" : { "message" : [ "queryName must be specified for memory sink." ] }, "_LEGACY_ERROR_TEMP_1195" : { "message" : [ "'' is not supported with continuous trigger." ] }, "_LEGACY_ERROR_TEMP_1196" : { "message" : [ " column not found in existing columns ()." ] }, "_LEGACY_ERROR_TEMP_1197" : { "message" : [ "'' does not support partitioning." ] }, "_LEGACY_ERROR_TEMP_1198" : { "message" : [ "Function '' cannot process input: (): ." ] }, "_LEGACY_ERROR_TEMP_1199" : { "message" : [ "Invalid bound function ': there are arguments but parameters returned from 'inputTypes()'." ] }, "_LEGACY_ERROR_TEMP_1201" : { "message" : [ "Cannot resolve column name \"\" among ()." ] }, "_LEGACY_ERROR_TEMP_1205" : { "message" : [ "Expected only partition pruning predicates: ." ] }, "_LEGACY_ERROR_TEMP_1207" : { "message" : [ "The duration and time inputs to window must be an integer, long or string literal." ] }, "_LEGACY_ERROR_TEMP_1210" : { "message" : [ "The second argument in should be a boolean literal." ] }, "_LEGACY_ERROR_TEMP_1211" : { "message" : [ "Detected implicit cartesian product for join between logical plans", "", "and", "", "Join condition is missing or trivial.", "Either: use the CROSS JOIN syntax to allow cartesian products between these relations, or: enable implicit cartesian products by setting the configuration variable spark.sql.crossJoin.enabled=true." ] }, "_LEGACY_ERROR_TEMP_1212" : { "message" : [ "Found conflicting attributes in the condition joining outer plan:", "", "and subplan:", "." ] }, "_LEGACY_ERROR_TEMP_1213" : { "message" : [ "Window expression is empty in ." ] }, "_LEGACY_ERROR_TEMP_1214" : { "message" : [ "Found different window function type in ." ] }, "_LEGACY_ERROR_TEMP_1218" : { "message" : [ " should be converted to HadoopFsRelation." ] }, "_LEGACY_ERROR_TEMP_1219" : { "message" : [ "Hive metastore does not support altering database location." ] }, "_LEGACY_ERROR_TEMP_1222" : { "message" : [ "Unknown resource type: ." ] }, "_LEGACY_ERROR_TEMP_1223" : { "message" : [ "Invalid field id '' in day-time interval. Supported interval fields: ." ] }, "_LEGACY_ERROR_TEMP_1224" : { "message" : [ "'interval to ' is invalid." ] }, "_LEGACY_ERROR_TEMP_1225" : { "message" : [ "Invalid field id '' in year-month interval. Supported interval fields: ." ] }, "_LEGACY_ERROR_TEMP_1226" : { "message" : [ "The SQL config '' was removed in the version . " ] }, "_LEGACY_ERROR_TEMP_1228" : { "message" : [ "Decimal scale () cannot be greater than precision ()." ] }, "_LEGACY_ERROR_TEMP_1231" : { "message" : [ " is not a valid partition column in table ." ] }, "_LEGACY_ERROR_TEMP_1232" : { "message" : [ "Partition spec is invalid. The spec () must match the partition spec () defined in table ''." ] }, "_LEGACY_ERROR_TEMP_1237" : { "message" : [ "The list of partition columns with values in partition specification for table '
' in database '' is not a prefix of the list of partition columns defined in the table schema. Expected a prefix of [], but got []." ] }, "_LEGACY_ERROR_TEMP_1239" : { "message" : [ "Analyzing column statistics is not supported for column of data type: ." ] }, "_LEGACY_ERROR_TEMP_1241" : { "message" : [ "CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty directory . To allow overwriting the existing non-empty directory, set '' to true." ] }, "_LEGACY_ERROR_TEMP_1246" : { "message" : [ "Can't find column `` given table data columns ." ] }, "_LEGACY_ERROR_TEMP_1247" : { "message" : [ "Operation not allowed: ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition is not supported for tables created with the datasource API." ] }, "_LEGACY_ERROR_TEMP_1248" : { "message" : [ "Operation not allowed: ALTER TABLE SET SERDE is not supported for tables created with the datasource API." ] }, "_LEGACY_ERROR_TEMP_1250" : { "message" : [ " is not allowed on since filesource partition management is disabled (spark.sql.hive.manageFilesourcePartitions = false)." ] }, "_LEGACY_ERROR_TEMP_1251" : { "message" : [ " is not allowed on since its partition metadata is not stored in the Hive metastore. To import this information into the metastore, run `msck repair table `." ] }, "_LEGACY_ERROR_TEMP_1252" : { "message" : [ "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead." ] }, "_LEGACY_ERROR_TEMP_1253" : { "message" : [ "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead." ] }, "_LEGACY_ERROR_TEMP_1255" : { "message" : [ "Cannot drop built-in function ''." ] }, "_LEGACY_ERROR_TEMP_1256" : { "message" : [ "Cannot refresh built-in function ." ] }, "_LEGACY_ERROR_TEMP_1257" : { "message" : [ "Cannot refresh temporary function ." ] }, "_LEGACY_ERROR_TEMP_1259" : { "message" : [ "ALTER ADD COLUMNS does not support views. You must drop and re-create the views for adding the new columns. Views:
." ] }, "_LEGACY_ERROR_TEMP_1260" : { "message" : [ "ALTER ADD COLUMNS does not support datasource table with type . You must drop and re-create the table for adding the new columns. Tables:
." ] }, "_LEGACY_ERROR_TEMP_1261" : { "message" : [ "LOAD DATA is not supported for datasource tables: ." ] }, "_LEGACY_ERROR_TEMP_1262" : { "message" : [ "LOAD DATA target table is partitioned, but no partition spec is provided." ] }, "_LEGACY_ERROR_TEMP_1263" : { "message" : [ "LOAD DATA target table is partitioned, but number of columns in provided partition spec () do not match number of partitioned columns in table ()." ] }, "_LEGACY_ERROR_TEMP_1264" : { "message" : [ "LOAD DATA target table is not partitioned, but a partition spec was provided." ] }, "_LEGACY_ERROR_TEMP_1266" : { "message" : [ "Operation not allowed: TRUNCATE TABLE on external tables: ." ] }, "_LEGACY_ERROR_TEMP_1267" : { "message" : [ "Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported for tables that are not partitioned: ." ] }, "_LEGACY_ERROR_TEMP_1268" : { "message" : [ "Failed to truncate table when removing data of the path: ." ] }, "_LEGACY_ERROR_TEMP_1276" : { "message" : [ "The logical plan that represents the view is not analyzed." ] }, "_LEGACY_ERROR_TEMP_1280" : { "message" : [ "It is not allowed to create a persisted view from the Dataset API." ] }, "_LEGACY_ERROR_TEMP_1286" : { "message" : [ "User-defined partition column not found in the JDBC relation: ." ] }, "_LEGACY_ERROR_TEMP_1287" : { "message" : [ "Partition column type should be , , or , but found." ] }, "_LEGACY_ERROR_TEMP_1288" : { "message" : [ "Table or view '' already exists. SaveMode: ErrorIfExists." ] }, "_LEGACY_ERROR_TEMP_1290" : { "message" : [ "Text data source supports only a single column, and you have columns." ] }, "_LEGACY_ERROR_TEMP_1291" : { "message" : [ "Can't find required partition column in partition schema ." ] }, "_LEGACY_ERROR_TEMP_1292" : { "message" : [ "Temporary view '' should not have specified a database." ] }, "_LEGACY_ERROR_TEMP_1293" : { "message" : [ "Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING." ] }, "_LEGACY_ERROR_TEMP_1294" : { "message" : [ "The timestamp provided for the '' option is invalid. The expected format is 'YYYY-MM-DDTHH:mm:ss', but the provided timestamp: ." ] }, "_LEGACY_ERROR_TEMP_1295" : { "message" : [ "Set a host to read from with option(\"host\", ...)." ] }, "_LEGACY_ERROR_TEMP_1296" : { "message" : [ "Set a port to read from with option(\"port\", ...)." ] }, "_LEGACY_ERROR_TEMP_1297" : { "message" : [ "IncludeTimestamp must be set to either \"true\" or \"false\"." ] }, "_LEGACY_ERROR_TEMP_1298" : { "message" : [ "checkpointLocation must be specified either through option(\"checkpointLocation\", ...) or SparkSession.conf.set(\"\", ...)." ] }, "_LEGACY_ERROR_TEMP_1299" : { "message" : [ "This query does not support recovering from checkpoint location. Delete to start over." ] }, "_LEGACY_ERROR_TEMP_1300" : { "message" : [ "Unable to find the column `` given []." ] }, "_LEGACY_ERROR_TEMP_1305" : { "message" : [ "Unsupported TableChange in JDBC catalog." ] }, "_LEGACY_ERROR_TEMP_1306" : { "message" : [ "There is a 'path' or 'paths' option set and load() is called with path parameters. Either remove the path option if it's the same as the path parameter, or add it to the load() parameter if you do want to read multiple paths. To ignore this check, set '' to 'true'." ] }, "_LEGACY_ERROR_TEMP_1307" : { "message" : [ "There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. To ignore this check, set '' to 'true'." 
] }, "_LEGACY_ERROR_TEMP_1309" : { "message" : [ "insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()." ] }, "_LEGACY_ERROR_TEMP_1310" : { "message" : [ "Couldn't find a catalog to handle the identifier ." ] }, "_LEGACY_ERROR_TEMP_1312" : { "message" : [ "'' does not support bucketBy right now." ] }, "_LEGACY_ERROR_TEMP_1313" : { "message" : [ "'' does not support bucketBy and sortBy right now." ] }, "_LEGACY_ERROR_TEMP_1316" : { "message" : [ "Invalid partition transformation: ." ] }, "_LEGACY_ERROR_TEMP_1320" : { "message" : [ "Typed column that needs input type and schema cannot be passed in untyped `select` API. Use the typed `Dataset.select` API instead." ] }, "_LEGACY_ERROR_TEMP_1321" : { "message" : [ "Invalid view name: ." ] }, "_LEGACY_ERROR_TEMP_1322" : { "message" : [ "Invalid number of buckets: bucket(, )." ] }, "_LEGACY_ERROR_TEMP_1323" : { "message" : [ "\"\" is not a numeric column. Aggregation function can only be applied on a numeric column." ] }, "_LEGACY_ERROR_TEMP_1324" : { "message" : [ "The pivot column has more than distinct values, this could indicate an error. If this was intended, set to at least the number of distinct values of the pivot column." ] }, "_LEGACY_ERROR_TEMP_1325" : { "message" : [ "Cannot modify the value of a static config: ." ] }, "_LEGACY_ERROR_TEMP_1327" : { "message" : [ "Command execution is not supported in runner ." ] }, "_LEGACY_ERROR_TEMP_1328" : { "message" : [ "Can not instantiate class , please make sure it has public non argument constructor." ] }, "_LEGACY_ERROR_TEMP_1329" : { "message" : [ "Can not load class , please make sure it is on the classpath." ] }, "_LEGACY_ERROR_TEMP_1330" : { "message" : [ "Class doesn't implement interface UserDefinedAggregateFunction." ] }, "_LEGACY_ERROR_TEMP_1332" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_1338" : { "message" : [ "Sinks cannot request distribution and ordering in continuous execution mode." ] }, "_LEGACY_ERROR_TEMP_1344" : { "message" : [ "Invalid DEFAULT value for column : fails to parse as a valid literal value." ] }, "_LEGACY_ERROR_TEMP_1345" : { "message" : [ "Failed to execute command because DEFAULT values are not supported for target data source with table provider: \"\"." ] }, "_LEGACY_ERROR_TEMP_1346" : { "message" : [ "Failed to execute command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"\"." ] }, "_LEGACY_ERROR_TEMP_2000" : { "message" : [ ". If necessary set to false to bypass this error." ] }, "_LEGACY_ERROR_TEMP_2003" : { "message" : [ "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit ." ] }, "_LEGACY_ERROR_TEMP_2005" : { "message" : [ "Type does not support ordered operations." ] }, "_LEGACY_ERROR_TEMP_2013" : { "message" : [ "Negative values found in " ] }, "_LEGACY_ERROR_TEMP_2017" : { "message" : [ "not resolved." ] }, "_LEGACY_ERROR_TEMP_2026" : { "message" : [ "Failed to convert value (class of ) with the type of to JSON." ] }, "_LEGACY_ERROR_TEMP_2027" : { "message" : [ "Unexpected operator in correlated subquery." ] }, "_LEGACY_ERROR_TEMP_2028" : { "message" : [ "This line should be unreachable." ] }, "_LEGACY_ERROR_TEMP_2030" : { "message" : [ "Can not handle nested schema yet... plan ." ] }, "_LEGACY_ERROR_TEMP_2031" : { "message" : [ "The input external row cannot be null." 
] }, "_LEGACY_ERROR_TEMP_2032" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_2033" : { "message" : [ "Unable to create database as failed to create its directory ." ] }, "_LEGACY_ERROR_TEMP_2034" : { "message" : [ "Unable to drop database as failed to delete its directory ." ] }, "_LEGACY_ERROR_TEMP_2035" : { "message" : [ "Unable to create table
as failed to create its directory ." ] }, "_LEGACY_ERROR_TEMP_2036" : { "message" : [ "Unable to delete partition path ." ] }, "_LEGACY_ERROR_TEMP_2037" : { "message" : [ "Unable to drop table
as failed to delete its directory ." ] }, "_LEGACY_ERROR_TEMP_2038" : { "message" : [ "Unable to rename table to as failed to rename its directory ." ] }, "_LEGACY_ERROR_TEMP_2039" : { "message" : [ "Unable to create partition path ." ] }, "_LEGACY_ERROR_TEMP_2040" : { "message" : [ "Unable to rename partition path ." ] }, "_LEGACY_ERROR_TEMP_2041" : { "message" : [ " is not implemented." ] }, "_LEGACY_ERROR_TEMP_2042" : { "message" : [ ". If necessary set to false to bypass this error." ] }, "_LEGACY_ERROR_TEMP_2045" : { "message" : [ "Unsupported table change: " ] }, "_LEGACY_ERROR_TEMP_2046" : { "message" : [ "[BUG] Not a DataSourceRDDPartition: ." ] }, "_LEGACY_ERROR_TEMP_2047" : { "message" : [ "'path' is not specified." ] }, "_LEGACY_ERROR_TEMP_2048" : { "message" : [ "Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it." ] }, "_LEGACY_ERROR_TEMP_2049" : { "message" : [ "Data source does not support streamed ." ] }, "_LEGACY_ERROR_TEMP_2050" : { "message" : [ "Expected exactly one path to be specified, but got: ." ] }, "_LEGACY_ERROR_TEMP_2052" : { "message" : [ " was removed in Spark 2.0. Please check if your library is compatible with Spark 2.0." ] }, "_LEGACY_ERROR_TEMP_2053" : { "message" : [ "buildReader is not supported for ." ] }, "_LEGACY_ERROR_TEMP_2056" : { "message" : [ "Unable to clear output directory prior to writing to it." ] }, "_LEGACY_ERROR_TEMP_2057" : { "message" : [ "Unable to clear partition directory prior to writing to it." ] }, "_LEGACY_ERROR_TEMP_2058" : { "message" : [ "Failed to cast value `` to `` for partition column ``." ] }, "_LEGACY_ERROR_TEMP_2059" : { "message" : [ "End of stream." ] }, "_LEGACY_ERROR_TEMP_2060" : { "message" : [ "The fallback v1 relation reports inconsistent schema:", "Schema of v2 scan: .", "Schema of v1 relation: ." ] }, "_LEGACY_ERROR_TEMP_2061" : { "message" : [ "No records should be returned from EmptyDataReader." ] }, "_LEGACY_ERROR_TEMP_2065" : { "message" : [ "Cannot create columnar reader." ] }, "_LEGACY_ERROR_TEMP_2066" : { "message" : [ "Invalid namespace name: ." ] }, "_LEGACY_ERROR_TEMP_2067" : { "message" : [ "Unsupported partition transform: ." ] }, "_LEGACY_ERROR_TEMP_2068" : { "message" : [ "Missing database location." ] }, "_LEGACY_ERROR_TEMP_2069" : { "message" : [ "Cannot remove reserved property: ." ] }, "_LEGACY_ERROR_TEMP_2070" : { "message" : [ "Writing job failed." ] }, "_LEGACY_ERROR_TEMP_2071" : { "message" : [ "Commit denied for partition (task , attempt , stage .)." ] }, "_LEGACY_ERROR_TEMP_2073" : { "message" : [ "Cannot create JDBC table with partition." ] }, "_LEGACY_ERROR_TEMP_2074" : { "message" : [ "user-specified schema." ] }, "_LEGACY_ERROR_TEMP_2075" : { "message" : [ "Write is not supported for binary file data source." ] }, "_LEGACY_ERROR_TEMP_2076" : { "message" : [ "The length of is , which exceeds the max length allowed: ." ] }, "_LEGACY_ERROR_TEMP_2077" : { "message" : [ "Unsupported field name: ." ] }, "_LEGACY_ERROR_TEMP_2078" : { "message" : [ "Both '' and '' can not be specified at the same time." ] }, "_LEGACY_ERROR_TEMP_2079" : { "message" : [ "Option '' or '' is required." ] }, "_LEGACY_ERROR_TEMP_2080" : { "message" : [ "Option `` can not be empty." ] }, "_LEGACY_ERROR_TEMP_2081" : { "message" : [ "Invalid value `` for parameter ``. 
This can be `NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`." ] }, "_LEGACY_ERROR_TEMP_2082" : { "message" : [ "Can't get JDBC type for ." ] }, "_LEGACY_ERROR_TEMP_2083" : { "message" : [ "Unsupported type ." ] }, "_LEGACY_ERROR_TEMP_2084" : { "message" : [ "Unsupported array element type based on binary." ] }, "_LEGACY_ERROR_TEMP_2085" : { "message" : [ "Nested arrays unsupported." ] }, "_LEGACY_ERROR_TEMP_2086" : { "message" : [ "Can't translate non-null value for field ." ] }, "_LEGACY_ERROR_TEMP_2087" : { "message" : [ "Invalid value `` for parameter `` in table writing via JDBC. The minimum value is 1." ] }, "_LEGACY_ERROR_TEMP_2089" : { "message" : [ "DataType: ." ] }, "_LEGACY_ERROR_TEMP_2090" : { "message" : [ "The input filter of should be fully convertible." ] }, "_LEGACY_ERROR_TEMP_2093" : { "message" : [ "Found duplicate field(s) \"\": in case-insensitive mode." ] }, "_LEGACY_ERROR_TEMP_2094" : { "message" : [ "Found duplicate field(s) \"\": in id mapping mode." ] }, "_LEGACY_ERROR_TEMP_2095" : { "message" : [ "Failed to merge incompatible schemas and ." ] }, "_LEGACY_ERROR_TEMP_2096" : { "message" : [ " is not supported temporarily." ] }, "_LEGACY_ERROR_TEMP_2097" : { "message" : [ "Could not execute broadcast in secs. You can increase the timeout for broadcasts via or disable broadcast join by setting to -1." ] }, "_LEGACY_ERROR_TEMP_2098" : { "message" : [ "Could not compare cost with ." ] }, "_LEGACY_ERROR_TEMP_2100" : { "message" : [ "not support type: ." ] }, "_LEGACY_ERROR_TEMP_2101" : { "message" : [ "Not support non-primitive type now." ] }, "_LEGACY_ERROR_TEMP_2103" : { "message" : [ "Dictionary encoding should not be used because of dictionary overflow." ] }, "_LEGACY_ERROR_TEMP_2104" : { "message" : [ "End of the iterator." ] }, "_LEGACY_ERROR_TEMP_2105" : { "message" : [ "Could not allocate memory to grow BytesToBytesMap." ] }, "_LEGACY_ERROR_TEMP_2106" : { "message" : [ "Can't acquire bytes memory to build hash relation, got bytes." ] }, "_LEGACY_ERROR_TEMP_2107" : { "message" : [ "There is not enough memory to build hash map." ] }, "_LEGACY_ERROR_TEMP_2108" : { "message" : [ "Does not support row that is larger than 256M." ] }, "_LEGACY_ERROR_TEMP_2109" : { "message" : [ "Cannot build HashedRelation with more than 1/3 billion unique keys." ] }, "_LEGACY_ERROR_TEMP_2110" : { "message" : [ "Cannot build a HashedRelation that is larger than 8G." ] }, "_LEGACY_ERROR_TEMP_2111" : { "message" : [ "Failed to push a row into ." ] }, "_LEGACY_ERROR_TEMP_2112" : { "message" : [ "Unexpected window function frame ." ] }, "_LEGACY_ERROR_TEMP_2113" : { "message" : [ "Unable to parse as a percentile." ] }, "_LEGACY_ERROR_TEMP_2114" : { "message" : [ " is not a recognised statistic." ] }, "_LEGACY_ERROR_TEMP_2115" : { "message" : [ "Unknown column: ." ] }, "_LEGACY_ERROR_TEMP_2116" : { "message" : [ "Unexpected: ." ] }, "_LEGACY_ERROR_TEMP_2120" : { "message" : [ "Do not support array of type ." ] }, "_LEGACY_ERROR_TEMP_2121" : { "message" : [ "Do not support type ." ] }, "_LEGACY_ERROR_TEMP_2124" : { "message" : [ "Failed to merge decimal types with incompatible scale and ." ] }, "_LEGACY_ERROR_TEMP_2126" : { "message" : [ "Unsuccessful attempt to build maps with elements due to exceeding the map size limit ." ] }, "_LEGACY_ERROR_TEMP_2128" : { "message" : [ "The key array and value array of MapData must have the same length." ] }, "_LEGACY_ERROR_TEMP_2129" : { "message" : [ "Conflict found: Field differs from derived from ." 
] }, "_LEGACY_ERROR_TEMP_2130" : { "message" : [ "Fail to recognize '' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from '/sql-ref-datetime-pattern.html'." ] }, "_LEGACY_ERROR_TEMP_2131" : { "message" : [ "Exception when registering StreamingQueryListener." ] }, "_LEGACY_ERROR_TEMP_2138" : { "message" : [ "Cannot have circular references in bean class, but got the circular reference of class ." ] }, "_LEGACY_ERROR_TEMP_2139" : { "message" : [ "cannot have circular references in class, but got the circular reference of class ." ] }, "_LEGACY_ERROR_TEMP_2140" : { "message" : [ "`` is not a valid identifier of Java and cannot be used as field name", "." ] }, "_LEGACY_ERROR_TEMP_2144" : { "message" : [ "Unable to find constructor for . This could happen if is an interface, or a trait without companion object constructor." ] }, "_LEGACY_ERROR_TEMP_2145" : { "message" : [ " cannot be more than one character." ] }, "_LEGACY_ERROR_TEMP_2146" : { "message" : [ " should be an integer. Found ." ] }, "_LEGACY_ERROR_TEMP_2147" : { "message" : [ " flag can be true or false." ] }, "_LEGACY_ERROR_TEMP_2148" : { "message" : [ "null value found but field is not nullable." ] }, "_LEGACY_ERROR_TEMP_2150" : { "message" : [ "Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported." ] }, "_LEGACY_ERROR_TEMP_2154" : { "message" : [ "Failed to get outer pointer for ." ] }, "_LEGACY_ERROR_TEMP_2155" : { "message" : [ " is not annotated with SQLUserDefinedType nor registered with UDTRegistration.}" ] }, "_LEGACY_ERROR_TEMP_2163" : { "message" : [ "Initial type must be a ." ] }, "_LEGACY_ERROR_TEMP_2164" : { "message" : [ "Initial type must be an , a or a ." ] }, "_LEGACY_ERROR_TEMP_2165" : { "message" : [ "Malformed records are detected in schema inference. Parse Mode: ." ] }, "_LEGACY_ERROR_TEMP_2166" : { "message" : [ "Malformed JSON." ] }, "_LEGACY_ERROR_TEMP_2167" : { "message" : [ "Malformed records are detected in schema inference. Parse Mode: . Reasons: Failed to infer a common schema. Struct types are expected, but `` was found." ] }, "_LEGACY_ERROR_TEMP_2168" : { "message" : [ "Decorrelate inner query through is not supported." ] }, "_LEGACY_ERROR_TEMP_2169" : { "message" : [ "This method should not be called in the analyzer." ] }, "_LEGACY_ERROR_TEMP_2170" : { "message" : [ "Cannot safely merge SERDEPROPERTIES:", "", "", "The conflict keys: ." ] }, "_LEGACY_ERROR_TEMP_2171" : { "message" : [ "Not supported pair: , at ()." ] }, "_LEGACY_ERROR_TEMP_2172" : { "message" : [ "Once strategy's idempotence is broken for batch ", "." ] }, "_LEGACY_ERROR_TEMP_2176" : { "message" : [ "Cannot create array with elements of data due to exceeding the limit elements for ArrayData. " ] }, "_LEGACY_ERROR_TEMP_2179" : { "message" : [ "HiveServer2 Kerberos principal or keytab is not correctly configured." ] }, "_LEGACY_ERROR_TEMP_2180" : { "message" : [ "Parent SparkUI to attach this tab to not found." ] }, "_LEGACY_ERROR_TEMP_2181" : { "message" : [ "inferSchema is not supported for hive data source." ] }, "_LEGACY_ERROR_TEMP_2182" : { "message" : [ "Requested partitioning does not match the table:", "Requested partitions: .", "Table partitions: ." ] }, "_LEGACY_ERROR_TEMP_2183" : { "message" : [ "Dynamic partition key is not among written partition paths." ] }, "_LEGACY_ERROR_TEMP_2184" : { "message" : [ "Cannot remove partition directory ''." 
] }, "_LEGACY_ERROR_TEMP_2185" : { "message" : [ "Cannot create staging directory: " ] }, "_LEGACY_ERROR_TEMP_2186" : { "message" : [ "The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details." ] }, "_LEGACY_ERROR_TEMP_2187" : { "message" : [ ", db: , table: ." ] }, "_LEGACY_ERROR_TEMP_2192" : { "message" : [ "Partition filter cannot have both `\"` and `'` characters." ] }, "_LEGACY_ERROR_TEMP_2193" : { "message" : [ "Caught Hive MetaException attempting to get partition metadata by filter from Hive. You can set the Spark configuration setting to true to work around this problem, however this will result in degraded performance. Please report a bug: https://issues.apache.org/jira/browse/SPARK." ] }, "_LEGACY_ERROR_TEMP_2194" : { "message" : [ "Unsupported Hive Metastore version . Please set with a valid version." ] }, "_LEGACY_ERROR_TEMP_2195" : { "message" : [ " when creating Hive client using classpath: Please make sure that jars for your version of hive and hadoop are included in the paths passed to ." ] }, "_LEGACY_ERROR_TEMP_2198" : { "message" : [ "Failed to rename as already exists." ] }, "_LEGACY_ERROR_TEMP_2200" : { "message" : [ "Error: we detected a possible problem with the location of your \"_spark_metadata\"", "directory and you likely need to move it before restarting this query.", "", "Earlier version of Spark incorrectly escaped paths when writing out the", "\"_spark_metadata\" directory for structured streaming. While this was corrected in", "Spark 3.0, it appears that your query was started using an earlier version that", "", "Correct \"_spark_metadata\" Directory: ", "Incorrect \"_spark_metadata\" Directory: ", "", "Please move the data from the incorrect directory to the correct one, delete the", "incorrect directory, and then restart this query. If you believe you are receiving", "this message in error, you can disable it with the SQL conf", "." ] }, "_LEGACY_ERROR_TEMP_2201" : { "message" : [ "Partition column not found in schema ." ] }, "_LEGACY_ERROR_TEMP_2203" : { "message" : [ "Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState." ] }, "_LEGACY_ERROR_TEMP_2204" : { "message" : [ "Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState." ] }, "_LEGACY_ERROR_TEMP_2205" : { "message" : [ "Cannot set timeout timestamp without enabling event time timeout in [map|flatMapGroupsWithState." ] }, "_LEGACY_ERROR_TEMP_2207" : { "message" : [ "Multiple streaming queries are concurrently using ." ] }, "_LEGACY_ERROR_TEMP_2208" : { "message" : [ " does not support adding files with an absolute path." ] }, "_LEGACY_ERROR_TEMP_2209" : { "message" : [ "Data source does not support microbatch processing.", "", "Either the data source is disabled at", "SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The disabled sources", "are []) or the table
does not have MICRO_BATCH_READ", "capability. Meanwhile, the fallback, data source v1, is not available.\"" ] }, "_LEGACY_ERROR_TEMP_2210" : { "message" : [ "StreamingRelationExec cannot be executed." ] }, "_LEGACY_ERROR_TEMP_2212" : { "message" : [ "Invalid catalog name: ." ] }, "_LEGACY_ERROR_TEMP_2214" : { "message" : [ "Plugin class for catalog '' does not implement CatalogPlugin: ." ] }, "_LEGACY_ERROR_TEMP_2215" : { "message" : [ "Cannot find catalog plugin class for catalog '': ." ] }, "_LEGACY_ERROR_TEMP_2216" : { "message" : [ "Failed to find public no-arg constructor for catalog '': )." ] }, "_LEGACY_ERROR_TEMP_2217" : { "message" : [ "Failed to call public no-arg constructor for catalog '': )." ] }, "_LEGACY_ERROR_TEMP_2218" : { "message" : [ "Cannot instantiate abstract catalog plugin class for catalog '': ." ] }, "_LEGACY_ERROR_TEMP_2219" : { "message" : [ "Failed during instantiating constructor for catalog '': ." ] }, "_LEGACY_ERROR_TEMP_2220" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_2222" : { "message" : [ "Cannot mutate ReadOnlySQLConf." ] }, "_LEGACY_ERROR_TEMP_2223" : { "message" : [ "Cannot clone/copy ReadOnlySQLConf." ] }, "_LEGACY_ERROR_TEMP_2224" : { "message" : [ "Cannot get SQLConf inside scheduler event loop thread." ] }, "_LEGACY_ERROR_TEMP_2225" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_2226" : { "message" : [ "null literals can't be casted to ." ] }, "_LEGACY_ERROR_TEMP_2227" : { "message" : [ " is not an UserDefinedType. Please make sure registering an UserDefinedType for ." ] }, "_LEGACY_ERROR_TEMP_2228" : { "message" : [ "Can not load in UserDefinedType for user class ." ] }, "_LEGACY_ERROR_TEMP_2229" : { "message" : [ " is not a public class. Only public classes are supported." ] }, "_LEGACY_ERROR_TEMP_2230" : { "message" : [ "Primitive types are not supported." ] }, "_LEGACY_ERROR_TEMP_2233" : { "message" : [ "Only Data Sources providing FileFormat are supported: ." ] }, "_LEGACY_ERROR_TEMP_2234" : { "message" : [ "Failed to set original ACL back to the created path: . Exception: " ] }, "_LEGACY_ERROR_TEMP_2235" : { "message" : [ "Multiple failures in stage materialization." ] }, "_LEGACY_ERROR_TEMP_2236" : { "message" : [ "Unrecognized compression scheme type ID: ." ] }, "_LEGACY_ERROR_TEMP_2237" : { "message" : [ ".getParentLogger is not yet implemented." ] }, "_LEGACY_ERROR_TEMP_2241" : { "message" : [ "Nonatomic partition table can not add multiple partitions." ] }, "_LEGACY_ERROR_TEMP_2242" : { "message" : [ " source does not support user-specified schema." ] }, "_LEGACY_ERROR_TEMP_2243" : { "message" : [ "Nonatomic partition table can not drop multiple partitions." ] }, "_LEGACY_ERROR_TEMP_2244" : { "message" : [ "The table does not support truncation of multiple partition." ] }, "_LEGACY_ERROR_TEMP_2245" : { "message" : [ "Table does not support overwrite by expression:
." ] }, "_LEGACY_ERROR_TEMP_2246" : { "message" : [ "Table does not support dynamic partition overwrite:
." ] }, "_LEGACY_ERROR_TEMP_2248" : { "message" : [ "Cannot broadcast the table over rows: rows." ] }, "_LEGACY_ERROR_TEMP_2249" : { "message" : [ "Cannot broadcast the table that is larger than : ." ] }, "_LEGACY_ERROR_TEMP_2250" : { "message" : [ "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value" ] }, "_LEGACY_ERROR_TEMP_2251" : { "message" : [ " does not support the execute() code path." ] }, "_LEGACY_ERROR_TEMP_2252" : { "message" : [ "Cannot merge with ." ] }, "_LEGACY_ERROR_TEMP_2253" : { "message" : [ "Data source does not support continuous processing." ] }, "_LEGACY_ERROR_TEMP_2254" : { "message" : [ "Data read failed." ] }, "_LEGACY_ERROR_TEMP_2255" : { "message" : [ "Epoch marker generation failed." ] }, "_LEGACY_ERROR_TEMP_2256" : { "message" : [ "Foreach writer has been aborted due to a task failure." ] }, "_LEGACY_ERROR_TEMP_2260" : { "message" : [ "Cannot purge as it might break internal state." ] }, "_LEGACY_ERROR_TEMP_2261" : { "message" : [ "Clean up source files is not supported when reading from the output directory of FileStreamSink." ] }, "_LEGACY_ERROR_TEMP_2262" : { "message" : [ "latestOffset(Offset, ReadLimit) should be called instead of this method." ] }, "_LEGACY_ERROR_TEMP_2263" : { "message" : [ "Error: we detected a possible problem with the location of your checkpoint and you", "likely need to move it before restarting this query.", "", "Earlier version of Spark incorrectly escaped paths when writing out checkpoints for", "structured streaming. While this was corrected in Spark 3.0, it appears that your", "query was started using an earlier version that incorrectly handled the checkpoint", "path.", "", "Correct Checkpoint Directory: ", "Incorrect Checkpoint Directory: ", "", "Please move the data from the incorrect directory to the correct one, delete the", "incorrect directory, and then restart this query. If you believe you are receiving", "this message in error, you can disable it with the SQL conf", "." ] }, "_LEGACY_ERROR_TEMP_2264" : { "message" : [ "Subprocess exited with status . Error: ." ] }, "_LEGACY_ERROR_TEMP_2265" : { "message" : [ " without serde does not support
as output data type." ] }, "_LEGACY_ERROR_TEMP_2266" : { "message" : [ "Invalid `startIndex` provided for generating iterator over the array. Total elements: , requested `startIndex`: ." ] }, "_LEGACY_ERROR_TEMP_2267" : { "message" : [ "The backing has been modified since the creation of this Iterator." ] }, "_LEGACY_ERROR_TEMP_2268" : { "message" : [ " does not implement doExecuteBroadcast." ] }, "_LEGACY_ERROR_TEMP_2269" : { "message" : [ " is a system preserved database, please rename your existing database to resolve the name conflict, or set a different value for , and launch your Spark application again." ] }, "_LEGACY_ERROR_TEMP_2270" : { "message" : [ "comment on table is not supported." ] }, "_LEGACY_ERROR_TEMP_2271" : { "message" : [ "UpdateColumnNullability is not supported." ] }, "_LEGACY_ERROR_TEMP_2272" : { "message" : [ "Rename column is only supported for MySQL version 8.0 and above." ] }, "_LEGACY_ERROR_TEMP_2273" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_2277" : { "message" : [ "Number of dynamic partitions created is , which is more than . To solve this try to set to at least ." ] }, "_LEGACY_ERROR_TEMP_2330" : { "message" : [ "Cannot change nullable column to non-nullable: ." ] }, "_LEGACY_ERROR_TEMP_2446" : { "message" : [ "Operation not allowed: only works on table with location provided: " ] }, "_LEGACY_ERROR_TEMP_2450" : { "message" : [ "No handler for UDF/UDAF/UDTF ''" ] }, "_LEGACY_ERROR_TEMP_3000" : { "message" : [ "Unexpected Py4J server ." ] }, "_LEGACY_ERROR_TEMP_3001" : { "message" : [ "EOFException occurred while reading the port number from 's stdout." ] }, "_LEGACY_ERROR_TEMP_3002" : { "message" : [ "Data of type is not supported" ] }, "_LEGACY_ERROR_TEMP_3003" : { "message" : [ "Could not compute split, block of RDD not found" ] }, "_LEGACY_ERROR_TEMP_3004" : { "message" : [ "Attempted to use after its blocks have been removed!" ] }, "_LEGACY_ERROR_TEMP_3005" : { "message" : [ "Histogram on either an empty RDD or RDD containing +/-infinity or NaN" ] }, "_LEGACY_ERROR_TEMP_3006" : { "message" : [ "empty RDD" ] }, "_LEGACY_ERROR_TEMP_3008" : { "message" : [ "Cannot use map-side combining with array keys." ] }, "_LEGACY_ERROR_TEMP_3009" : { "message" : [ "HashPartitioner cannot partition array keys." ] }, "_LEGACY_ERROR_TEMP_3010" : { "message" : [ "reduceByKeyLocally() does not support array keys" ] }, "_LEGACY_ERROR_TEMP_3011" : { "message" : [ "This RDD lacks a SparkContext. It could happen in the following cases:", "(1) RDD transformations and actions are NOT invoked by the driver, but inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.", "(2) When a Spark Streaming job recovers from checkpoint, this exception will be hit if a reference to an RDD not defined by the streaming job is used in DStream operations. For more information, See SPARK-13758." 
] }, "_LEGACY_ERROR_TEMP_3012" : { "message" : [ "Cannot change storage level of an RDD after it was already assigned a level" ] }, "_LEGACY_ERROR_TEMP_3013" : { "message" : [ "Can only zip RDDs with same number of elements in each partition" ] }, "_LEGACY_ERROR_TEMP_3014" : { "message" : [ "empty collection" ] }, "_LEGACY_ERROR_TEMP_3015" : { "message" : [ "countByValueApprox() does not support arrays" ] }, "_LEGACY_ERROR_TEMP_3016" : { "message" : [ "Checkpoint directory has not been set in the SparkContext" ] }, "_LEGACY_ERROR_TEMP_3017" : { "message" : [ "Invalid checkpoint file: " ] }, "_LEGACY_ERROR_TEMP_3018" : { "message" : [ "Failed to create checkpoint path " ] }, "_LEGACY_ERROR_TEMP_3019" : { "message" : [ "Checkpoint RDD has a different number of partitions from original RDD. Original", "RDD [ID: , num of partitions: ];", "Checkpoint RDD [ID: , num of partitions: ]." ] }, "_LEGACY_ERROR_TEMP_3020" : { "message" : [ "Checkpoint dir must be specified." ] }, "_LEGACY_ERROR_TEMP_3021" : { "message" : [ "Error asking standalone scheduler to shut down executors" ] }, "_LEGACY_ERROR_TEMP_3022" : { "message" : [ "Error stopping standalone scheduler's driver endpoint" ] }, "_LEGACY_ERROR_TEMP_3023" : { "message" : [ "Can't run submitMapStage on RDD with 0 partitions" ] }, "_LEGACY_ERROR_TEMP_3024" : { "message" : [ "attempted to access non-existent accumulator " ] }, "_LEGACY_ERROR_TEMP_3025" : { "message" : [ "TaskSetManagers should only send Resubmitted task statuses for tasks in ShuffleMapStages." ] }, "_LEGACY_ERROR_TEMP_3026" : { "message" : [ "duration() called on unfinished task" ] }, "_LEGACY_ERROR_TEMP_3027" : { "message" : [ "Unrecognized : " ] }, "_LEGACY_ERROR_TEMP_3028" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3029" : { "message" : [ "Exiting due to error from cluster scheduler: " ] }, "_LEGACY_ERROR_TEMP_3030" : { "message" : [ "Task has not locked block for writing" ] }, "_LEGACY_ERROR_TEMP_3031" : { "message" : [ "Block does not exist" ] }, "_LEGACY_ERROR_TEMP_3032" : { "message" : [ "Error occurred while waiting for replication to finish" ] }, "_LEGACY_ERROR_TEMP_3033" : { "message" : [ "Unable to register with external shuffle server due to : " ] }, "_LEGACY_ERROR_TEMP_3034" : { "message" : [ "Error occurred while waiting for async. reregistration" ] }, "_LEGACY_ERROR_TEMP_3035" : { "message" : [ "Unexpected shuffle block with unsupported shuffle resolver " ] }, "_LEGACY_ERROR_TEMP_3036" : { "message" : [ "Failure while trying to store block on ." ] }, "_LEGACY_ERROR_TEMP_3037" : { "message" : [ "Block was not found even though it's read-locked" ] }, "_LEGACY_ERROR_TEMP_3038" : { "message" : [ "get() failed for block even though we held a lock" ] }, "_LEGACY_ERROR_TEMP_3039" : { "message" : [ "BlockManager returned null for BlockStatus query: " ] }, "_LEGACY_ERROR_TEMP_3040" : { "message" : [ "BlockManagerMasterEndpoint returned false, expected true." 
] }, "_LEGACY_ERROR_TEMP_3041" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3042" : { "message" : [ "Failed to get block , which is not a shuffle block" ] }, "_LEGACY_ERROR_TEMP_3050" : { "message" : [ "Cannot modify the value of a static config: " ] }, "_LEGACY_ERROR_TEMP_3052" : { "message" : [ "Unexpected resolved action: " ] }, "_LEGACY_ERROR_TEMP_3053" : { "message" : [ "Unexpected WHEN NOT MATCHED action: " ] }, "_LEGACY_ERROR_TEMP_3054" : { "message" : [ " is not currently supported" ] }, "_LEGACY_ERROR_TEMP_3055" : { "message" : [ "ScalarFunction neither implement magic method nor override 'produceResult'" ] }, "_LEGACY_ERROR_TEMP_3056" : { "message" : [ "Unexpected row-level read relations (allow multiple = ): " ] }, "_LEGACY_ERROR_TEMP_3057" : { "message" : [ "Cannot retrieve row-level operation from
" ] }, "_LEGACY_ERROR_TEMP_3058" : { "message" : [ "Found duplicate column(s) : " ] }, "_LEGACY_ERROR_TEMP_3059" : { "message" : [ "The positions provided () cannot be resolved in", "" ] }, "_LEGACY_ERROR_TEMP_3060" : { "message" : [ "Couldn't find column in:", "" ] }, "_LEGACY_ERROR_TEMP_3061" : { "message" : [ "", "" ] }, "_LEGACY_ERROR_TEMP_3062" : { "message" : [ "Expected to be a nested data type, but found . Was looking for the index of in a nested field" ] }, "_LEGACY_ERROR_TEMP_3063" : { "message" : [ "pivot is not supported on a streaming DataFrames/Datasets" ] }, "_LEGACY_ERROR_TEMP_3065" : { "message" : [ ": " ] }, "_LEGACY_ERROR_TEMP_3067" : { "message" : [ "Streaming aggregation doesn't support group aggregate pandas UDF" ] }, "_LEGACY_ERROR_TEMP_3068" : { "message" : [ "Global aggregation with session window in streaming query is not supported." ] }, "_LEGACY_ERROR_TEMP_3069" : { "message" : [ " is a reserved column name that cannot be read in combination with column." ] }, "_LEGACY_ERROR_TEMP_3070" : { "message" : [ " is a reserved column name that cannot be read in combination with column." ] }, "_LEGACY_ERROR_TEMP_3071" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3072" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3073" : { "message" : [ "Unexpected instruction: " ] }, "_LEGACY_ERROR_TEMP_3074" : { "message" : [ "field not found from given schema " ] }, "_LEGACY_ERROR_TEMP_3075" : { "message" : [ "Couldn't find scan attribute for in " ] }, "_LEGACY_ERROR_TEMP_3076" : { "message" : [ "Redefining watermark is disallowed. You can set the config '' to 'false' to restore the previous behavior. Note that multiple stateful operators will be disallowed." ] }, "_LEGACY_ERROR_TEMP_3077" : { "message" : [ "More than one event time columns are available. Please ensure there is at most one event time column per stream. event time columns: " ] }, "_LEGACY_ERROR_TEMP_3079" : { "message" : [ "Dynamic partition cannot be the parent of a static partition." ] }, "_LEGACY_ERROR_TEMP_3080" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3081" : { "message" : [ "Save mode not allowed for Kafka. Allowed save modes are and (default)." ] }, "_LEGACY_ERROR_TEMP_3082" : { "message" : [ "Creating bucketed Hive serde table is not supported yet." ] }, "_LEGACY_ERROR_TEMP_3083" : { "message" : [ "Unable to infer the schema. The schema specification is required to create the table ." ] }, "_LEGACY_ERROR_TEMP_3084" : { "message" : [ "No handler for UDF/UDAF/UDTF '': " ] }, "_LEGACY_ERROR_TEMP_3085" : { "message" : [ "from_avro() doesn't support the mode. Acceptable modes are and ." ] }, "_LEGACY_ERROR_TEMP_3086" : { "message" : [ "Cannot persist into Hive metastore as table property keys may not start with 'spark.sql.': " ] }, "_LEGACY_ERROR_TEMP_3087" : { "message" : [ "Cannot set or change the preserved property key: 'EXTERNAL'" ] }, "_LEGACY_ERROR_TEMP_3088" : { "message" : [ "The metadata is corrupted. Unable to find the partition column names from the schema. schema: . Partition columns: " ] }, "_LEGACY_ERROR_TEMP_3089" : { "message" : [ "Corrupted in catalog: parts expected, but part is missing." ] }, "_LEGACY_ERROR_TEMP_3090" : { "message" : [ "Raw list type in java is unsupported because Spark cannot infer the element type." ] }, "_LEGACY_ERROR_TEMP_3091" : { "message" : [ "Raw map type in java is unsupported because Spark cannot infer key and value types." ] }, "_LEGACY_ERROR_TEMP_3092" : { "message" : [ "Collection types with wildcards (e.g. 
List or Map) are unsupported because Spark cannot infer the data type for these type parameters." ] }, "_LEGACY_ERROR_TEMP_3093" : { "message" : [ "Unsupported java type " ] }, "_LEGACY_ERROR_TEMP_3094" : { "message" : [ "
is not supported." ] }, "_LEGACY_ERROR_TEMP_3095" : { "message" : [ "
cannot be converted to Hive TypeInfo" ] }, "_LEGACY_ERROR_TEMP_3096" : { "message" : [ "Converted table has columns,", "but source Hive table has columns.", "Set to false,", "or recreate table to workaround." ] }, "_LEGACY_ERROR_TEMP_3097" : { "message" : [ "Column in converted table has different data type with source Hive table's.", "Set to false,", "or recreate table to workaround." ] }, "_LEGACY_ERROR_TEMP_3100" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3101" : { "message" : [ "The input is not a correct window column: " ] }, "_LEGACY_ERROR_TEMP_3102" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3103" : { "message" : [ "Namespace '' is non empty.
" ] }, "_LEGACY_ERROR_TEMP_3104" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3105" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3106" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3107" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3108" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3109" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3110" : { "message" : [ "Cannot bind a V1 function." ] }, "_LEGACY_ERROR_TEMP_3113" : { "message" : [ "UnresolvedTableSpec doesn't have a data type" ] }, "_LEGACY_ERROR_TEMP_3114" : { "message" : [ "UnresolvedTableSpec doesn't have a data type" ] }, "_LEGACY_ERROR_TEMP_3121" : { "message" : [ "A HllSketch instance cannot be updates with a Spark type" ] }, "_LEGACY_ERROR_TEMP_3129" : { "message" : [ "Cannot convert this array to unsafe format as it's too big." ] }, "_LEGACY_ERROR_TEMP_3130" : { "message" : [ "Cannot create BufferHolder for input UnsafeRow because there are too many fields (number of fields: )" ] }, "_LEGACY_ERROR_TEMP_3131" : { "message" : [ "Unsupported data type " ] }, "_LEGACY_ERROR_TEMP_3132" : { "message" : [ "CaseInsensitiveStringMap is read-only." ] }, "_LEGACY_ERROR_TEMP_3133" : { "message" : [ " does not implement rowIdSchema" ] }, "_LEGACY_ERROR_TEMP_3134" : { "message" : [ " does not implement metadataSchema" ] }, "_LEGACY_ERROR_TEMP_3135" : { "message" : [ " does not support batch write" ] }, "_LEGACY_ERROR_TEMP_3136" : { "message" : [ " does not support streaming write" ] }, "_LEGACY_ERROR_TEMP_3137" : { "message" : [ ": Batch write is not supported" ] }, "_LEGACY_ERROR_TEMP_3138" : { "message" : [ ": Streaming write is not supported" ] }, "_LEGACY_ERROR_TEMP_3139" : { "message" : [ ": Delta batch write is not supported" ] }, "_LEGACY_ERROR_TEMP_3140" : { "message" : [ " does not implement build" ] }, "_LEGACY_ERROR_TEMP_3141" : { "message" : [ " does not support user defined function: " ] }, "_LEGACY_ERROR_TEMP_3142" : { "message" : [ " does not support user defined aggregate function: " ] }, "_LEGACY_ERROR_TEMP_3143" : { "message" : [ "Partition renaming is not supported" ] }, "_LEGACY_ERROR_TEMP_3144" : { "message" : [ "Partition truncate is not supported" ] }, "_LEGACY_ERROR_TEMP_3145" : { "message" : [ "Partitions truncate is not supported" ] }, "_LEGACY_ERROR_TEMP_3146" : { "message" : [ "Cannot find a compatible ScalarFunction#produceResult" ] }, "_LEGACY_ERROR_TEMP_3147" : { "message" : [ ": Batch scan are not supported" ] }, "_LEGACY_ERROR_TEMP_3148" : { "message" : [ ": Micro-batch scan are not supported" ] }, "_LEGACY_ERROR_TEMP_3149" : { "message" : [ ": Continuous scan are not supported" ] }, "_LEGACY_ERROR_TEMP_3150" : { "message" : [ "Cannot create columnar reader." ] }, "_LEGACY_ERROR_TEMP_3152" : { "message" : [ "Datatype not supported " ] }, "_LEGACY_ERROR_TEMP_3155" : { "message" : [ "Datatype not supported " ] }, "_LEGACY_ERROR_TEMP_3160" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3161" : { "message" : [ "Uploading artifact file to local file system destination path is not supported." ] }, "_LEGACY_ERROR_TEMP_3162" : { "message" : [ "Unsupported physical type ." ] }, "_LEGACY_ERROR_TEMP_3163" : { "message" : [ "Unsupported number of children: ." ] }, "_LEGACY_ERROR_TEMP_3165" : { "message" : [ "Cannot merge with " ] }, "_LEGACY_ERROR_TEMP_3166" : { "message" : [ "latestOffset(Offset, ReadLimit) should be called instead of this method" ] }, "_LEGACY_ERROR_TEMP_3167" : { "message" : [ "continuous mode is not supported!" 
] }, "_LEGACY_ERROR_TEMP_3168" : { "message" : [ "hasTimedOut is true however there's no timeout configured" ] }, "_LEGACY_ERROR_TEMP_3169" : { "message" : [ "AcceptsLatestSeenOffset is not supported with DSv1 streaming source: " ] }, "_LEGACY_ERROR_TEMP_3170" : { "message" : [ "SortAggregate code-gen does not support grouping keys" ] }, "_LEGACY_ERROR_TEMP_3171" : { "message" : [ "Number of nulls not set for Parquet file . Set SQLConf to false and execute again." ] }, "_LEGACY_ERROR_TEMP_3172" : { "message" : [ "No min/max found for Parquet file . Set SQLConf to false and execute again." ] }, "_LEGACY_ERROR_TEMP_3173" : { "message" : [ "Cannot specify 'USING index_type' in 'CREATE INDEX'" ] }, "_LEGACY_ERROR_TEMP_3175" : { "message" : [ "Index Type is not supported. The supported Index Types are: " ] }, "_LEGACY_ERROR_TEMP_3176" : { "message" : [ "applyInPandasWithState is unsupported in batch query. Use applyInPandas instead." ] }, "_LEGACY_ERROR_TEMP_3177" : { "message" : [ " does not support function: " ] }, "_LEGACY_ERROR_TEMP_3178" : { "message" : [ " does not support inverse distribution function: " ] }, "_LEGACY_ERROR_TEMP_3179" : { "message" : [ "createIndex is not supported" ] }, "_LEGACY_ERROR_TEMP_3180" : { "message" : [ "indexExists is not supported" ] }, "_LEGACY_ERROR_TEMP_3181" : { "message" : [ "dropIndex is not supported" ] }, "_LEGACY_ERROR_TEMP_3182" : { "message" : [ "listIndexes is not supported" ] }, "_LEGACY_ERROR_TEMP_3183" : { "message" : [ "TableSample is not supported by this data source" ] }, "_LEGACY_ERROR_TEMP_3184" : { "message" : [ " does not support aggregate function: with DISTINCT" ] }, "_LEGACY_ERROR_TEMP_3185" : { "message" : [ "Schema evolution not supported." ] }, "_LEGACY_ERROR_TEMP_3186" : { "message" : [ "Boolean is not supported" ] }, "_LEGACY_ERROR_TEMP_3187" : { "message" : [ "only readInts is valid." ] }, "_LEGACY_ERROR_TEMP_3188" : { "message" : [ "only skipIntegers is valid" ] }, "_LEGACY_ERROR_TEMP_3189" : { "message" : [ "Unsupported encoding: " ] }, "_LEGACY_ERROR_TEMP_3190" : { "message" : [ "RLE encoding is not supported for values of type: " ] }, "_LEGACY_ERROR_TEMP_3191" : { "message" : [ "Dictionary encoding does not support String" ] }, "_LEGACY_ERROR_TEMP_3192" : { "message" : [ "Datatype not supported
" ] }, "_LEGACY_ERROR_TEMP_3198" : { "message" : [ "Cannot grow BufferHolder by size because the size is negative" ] }, "_LEGACY_ERROR_TEMP_3199" : { "message" : [ "Cannot grow BufferHolder by size because the size after growing exceeds size limitation " ] }, "_LEGACY_ERROR_TEMP_3200" : { "message" : [ "Read-ahead limit < 0" ] }, "_LEGACY_ERROR_TEMP_3201" : { "message" : [ "'note' is malformed in the expression []. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got []." ] }, "_LEGACY_ERROR_TEMP_3202" : { "message" : [ "'group' is malformed in the expression []. It should be a value in ; however, got ." ] }, "_LEGACY_ERROR_TEMP_3203" : { "message" : [ "'source' is malformed in the expression []. It should be a value in ; however, got []." ] }, "_LEGACY_ERROR_TEMP_3204" : { "message" : [ "'since' is malformed in the expression []. It should not start with a negative number; however, got []." ] }, "_LEGACY_ERROR_TEMP_3205" : { "message" : [ "'deprecated' is malformed in the expression []. It should start with a newline and 4 leading spaces; end with a newline and two spaces; however, got []." ] }, "_LEGACY_ERROR_TEMP_3206" : { "message" : [ " is not a boolean string." ] }, "_LEGACY_ERROR_TEMP_3207" : { "message" : [ "Unexpected V2 expression: " ] }, "_LEGACY_ERROR_TEMP_3208" : { "message" : [ "The number of fields () in the partition identifier is not equal to the partition schema length (). The identifier might not refer to one partition." ] }, "_LEGACY_ERROR_TEMP_3215" : { "message" : [ "Expected a Boolean type expression in replaceNullWithFalse, but got the type in ." ] }, "_LEGACY_ERROR_TEMP_3218" : { "message" : [ "Must be 2 children: " ] }, "_LEGACY_ERROR_TEMP_3219" : { "message" : [ "The value () of the type () cannot be converted to the type." ] }, "_LEGACY_ERROR_TEMP_3220" : { "message" : [ "The value () of the type () cannot be converted to an array of " ] }, "_LEGACY_ERROR_TEMP_3221" : { "message" : [ "The value () of the type () cannot be converted to a map type with key type () and value type ()" ] }, "_LEGACY_ERROR_TEMP_3222" : { "message" : [ "Only literals are allowed in the partition spec, but got " ] }, "_LEGACY_ERROR_TEMP_3223" : { "message" : [ "Cannot find field: in " ] }, "_LEGACY_ERROR_TEMP_3224" : { "message" : [ "Cannot delete array element" ] }, "_LEGACY_ERROR_TEMP_3225" : { "message" : [ "Cannot delete map value" ] }, "_LEGACY_ERROR_TEMP_3226" : { "message" : [ "Cannot delete map key" ] }, "_LEGACY_ERROR_TEMP_3227" : { "message" : [ "Cannot find field: " ] }, "_LEGACY_ERROR_TEMP_3228" : { "message" : [ "AFTER column not found: " ] }, "_LEGACY_ERROR_TEMP_3229" : { "message" : [ "Not a struct: " ] }, "_LEGACY_ERROR_TEMP_3230" : { "message" : [ "Field not found: " ] }, "_LEGACY_ERROR_TEMP_3231" : { "message" : [ "Intervals greater than a month is not supported ()." ] }, "_LEGACY_ERROR_TEMP_3232" : { "message" : [ "Unknown EvalMode value: " ] }, "_LEGACY_ERROR_TEMP_3233" : { "message" : [ "cannot generate code for unsupported type: " ] }, "_LEGACY_ERROR_TEMP_3235" : { "message" : [ "The numbers of zipped arrays and field names should be the same" ] }, "_LEGACY_ERROR_TEMP_3238" : { "message" : [ "Failed to convert value (class of ) in type
to XML." ] }, "_LEGACY_ERROR_TEMP_3239" : { "message" : [ "Failed to parse data with unexpected event " ] }, "_LEGACY_ERROR_TEMP_3240" : { "message" : [ "Failed to parse a value for data type
with event " ] }, "_LEGACY_ERROR_TEMP_3241" : { "message" : [ "" ] }, "_LEGACY_ERROR_TEMP_3242" : { "message" : [ "sequence step must be an of day granularity if start and end values are dates" ] }, "_LEGACY_ERROR_TEMP_3243" : { "message" : [ "Illegal sequence boundaries: to by " ] }, "_LEGACY_ERROR_TEMP_3244" : { "message" : [ "Unsupported type: " ] }, "_LEGACY_ERROR_TEMP_3245" : { "message" : [ "For input string: " ] }, "_LEGACY_ERROR_TEMP_3246" : { "message" : [ "Failed to parse a value for data type ." ] }, "_LEGACY_ERROR_TEMP_3260" : { "message" : [ "'' is an invalid timestamp" ] }, "_LEGACY_ERROR_TEMP_3262" : { "message" : [ "Doesn't support month or year interval: " ] }, "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : { "message" : [ "" ], "sqlState" : "P0001" } }