add docs, remove accidental changes
clintropolis committed Dec 8, 2023
1 parent 7306beb commit 7d4a956
Showing 4 changed files with 2 additions and 32 deletions.
1 change: 1 addition & 0 deletions docs/querying/math-expr.md
@@ -237,6 +237,7 @@ JSON functions provide facilities to extract, transform, and create `COMPLEX<jso
|---|---|
| json_value(expr, path[, type]) | Extract a Druid literal (`STRING`, `LONG`, `DOUBLE`, `ARRAY<STRING>`, `ARRAY<LONG>`, or `ARRAY<DOUBLE>`) value from `expr` using JSONPath syntax of `path`. The optional `type` argument can be set to `'LONG'`,`'DOUBLE'`, `'STRING'`, `'ARRAY<LONG>'`, `'ARRAY<DOUBLE>'`, or `'ARRAY<STRING>'` to cast values to that type. |
| json_query(expr, path) | Extract a `COMPLEX<json>` value from `expr` using JSONPath syntax of `path`. |
| json_query_array(expr, path) | Extract an `ARRAY<COMPLEX<json>>` value from `expr` using JSONPath syntax of `path`. If the value is not an `ARRAY`, it will be translated into a single-element `ARRAY` containing the value at `path`. |
| json_object(expr1, expr2[, expr3, expr4 ...]) | Construct a `COMPLEX<json>` with alternating 'key' and 'value' arguments. |
| parse_json(expr) | Deserialize a JSON `STRING` into a `COMPLEX<json>`. If the input is not a `STRING` or it is invalid JSON, this function will result in an error.|
| try_parse_json(expr) | Deserialize a JSON `STRING` into a `COMPLEX<json>`. If the input is not a `STRING` or it is invalid JSON, this function will result in a `NULL` value. |
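To make the wrapping behavior of `json_query_array` concrete, here is a minimal sketch using the SQL counterparts of these expressions (the JSON literal is invented for illustration and is not part of this commit; `JSON_QUERY_ARRAY` and `PARSE_JSON` are the SQL forms of `json_query_array` and `parse_json`):

```sql
-- Illustrative only: '$.y' already refers to an ARRAY, so it is returned as-is;
-- '$.x' refers to a scalar, so it is wrapped in a single-element ARRAY.
SELECT
  JSON_QUERY_ARRAY(PARSE_JSON('{"x": 1, "y": [2, 3]}'), '$.y') AS y_as_array,
  JSON_QUERY_ARRAY(PARSE_JSON('{"x": 1, "y": [2, 3]}'), '$.x') AS x_wrapped
```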
1 change: 1 addition & 0 deletions docs/querying/sql-json-functions.md
@@ -40,6 +40,7 @@ You can use the following JSON functions to extract, transform, and create `COMP
|`JSON_OBJECT(KEY expr1 VALUE expr2[, KEY expr3 VALUE expr4, ...])` | Constructs a new `COMPLEX<json>` object. The `KEY` expressions must evaluate to string types. The `VALUE` expressions can be composed of any input type, including other `COMPLEX<json>` values. `JSON_OBJECT` can accept colon-separated key-value pairs. The following syntax is equivalent: `JSON_OBJECT(expr1:expr2[, expr3:expr4, ...])`.|
|`JSON_PATHS(expr)`| Returns an array of all paths which refer to literal values in `expr` in JSONPath format. |
|`JSON_QUERY(expr, path)`| Extracts a `COMPLEX<json>` value from `expr`, at the specified `path`. |
|`JSON_QUERY_ARRAY(expr, path)`| Extracts an `ARRAY<COMPLEX<json>>` value from `expr`, at the specified `path`. If the value is not an `ARRAY`, it will be translated into a single-element `ARRAY` containing the value at `path`.|
|`JSON_VALUE(expr, path [RETURNING sqlType])`| Extracts a literal value from `expr` at the specified `path`. If you specify `RETURNING` and an SQL type name (such as `VARCHAR`, `BIGINT`, or `DOUBLE`), the function plans the query using the suggested type. Otherwise, it attempts to infer the type based on the context. If it can't infer the type, it defaults to `VARCHAR`.|
|`PARSE_JSON(expr)`|Parses `expr` into a `COMPLEX<json>` object. This operator deserializes JSON values when processing them, translating stringified JSON into a nested structure. If the input is not a `VARCHAR` or it is invalid JSON, this function will result in an error.|
|`TRY_PARSE_JSON(expr)`|Parses `expr` into a `COMPLEX<json>` object. This operator deserializes JSON values when processing them, translating stringified JSON into a nested structure. If the input is not a `VARCHAR` or it is invalid JSON, this function will result in a `NULL` value.|
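As a rough sketch of how these functions differ in what they return (the table `events` and its `COMPLEX<json>` column `attributes` are hypothetical, not part of this change):

```sql
-- Hypothetical query; the comments state the expected result types per the table above.
SELECT
  JSON_QUERY(attributes, '$.tags')                      AS tags_json,   -- COMPLEX<json>
  JSON_QUERY_ARRAY(attributes, '$.tags')                AS tags_array,  -- ARRAY<COMPLEX<json>>
  JSON_VALUE(attributes, '$.tags[0]' RETURNING VARCHAR) AS first_tag,   -- literal VARCHAR
  JSON_OBJECT(KEY 'id' VALUE 1, KEY 'tags' VALUE JSON_QUERY(attributes, '$.tags')) AS rebuilt  -- COMPLEX<json>
FROM events
```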
@@ -19,7 +19,6 @@

package org.apache.druid.sql.calcite.planner;

import com.google.common.collect.ImmutableSet;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.prepare.BaseDruidSqlValidator;
import org.apache.calcite.prepare.CalciteCatalogReader;
@@ -30,14 +29,10 @@
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeMappingRule;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.sql.calcite.run.EngineFeature;

import java.util.Map;

/**
* Druid extended SQL validator. (At present, it doesn't actually
* have any extensions yet, but it will soon.)
@@ -85,26 +80,6 @@ public void validateCall(SqlCall call, SqlValidatorScope scope)
    super.validateCall(call, scope);
  }

  @Override
  public SqlTypeMappingRule getTypeMappingRule()
  {
    SqlTypeMappingRule base = super.getTypeMappingRule();
    return new SqlTypeMappingRule()
    {
      @Override
      public Map<SqlTypeName, ImmutableSet<SqlTypeName>> getTypeMapping()
      {
        return base.getTypeMapping();
      }

      @Override
      public boolean canApplyFrom(SqlTypeName to, SqlTypeName from)
      {
        return SqlTypeMappingRule.super.canApplyFrom(to, from);
      }
    };
  }

  private CalciteContextException buildCalciteContextException(String message, SqlCall call)
  {
    SqlParserPos pos = call.getParserPosition();
@@ -25,10 +25,7 @@
import com.google.inject.Provider;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.sql.calcite.table.RowSignatures;

import java.util.Map;
import java.util.Set;
@@ -69,10 +66,6 @@ public DruidSchemaCatalog get()
    for (NamedSchema schema : namedSchemas) {
      rootSchema.add(schema.getSchemaName(), schema.getSchema());
    }
    rootSchema.add(
        "JSON",
        relDataTypeFactory -> new RowSignatures.ComplexSqlType(SqlTypeName.OTHER, ColumnType.NESTED_DATA, true)
    );
    return new DruidSchemaCatalog(rootSchema, ImmutableMap.copyOf(schemasByName));
  }
}
