Type Checking
$ ./bin/spark-shell
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
16/12/11 13:43:58 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/12/11 13:43:58 WARN Utils: Your hostname, bill-ubuntu resolves to a loopback address: 127.0.1.1; using 192.168.42.75 instead (on interface wlp2s0)
16/12/11 13:43:58 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
Spark context Web UI available at http://192.168.42.75:4040
Spark context available as 'sc' (master = local[*], app id = local-1481492639112).
Spark session available as 'spark'.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 2.1.0-SNAPSHOT
      /_/

Using Scala version 2.11.8 (OpenJDK 64-Bit Server VM, Java 1.8.0_111)
Type in expressions to have them evaluated.
Type :help for more information.

scala> import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.functions.lit

scala> import org.apache.spark.sql.catalyst.expressions.ParseToTimestamp
import org.apache.spark.sql.catalyst.expressions.ParseToTimestamp

scala> val to_timestamp_func = new ParseToTimestamp(lit("hello").expr, lit("world").expr)
to_timestamp_func: org.apache.spark.sql.catalyst.expressions.ParseToTimestamp = to_timestamp(hello, world)

scala> to_timestamp_func.child
res0: org.apache.spark.sql.catalyst.expressions.Expression = cast(unix_timestamp(hello, world) as timestamp)

scala> to_timestamp_func.dataType
res1: org.apache.spark.sql.types.DataType = TimestampType

scala> to_timestamp_func.resolved
res2: Boolean = true

scala> to_timestamp_func.childrenResolved
res3: Boolean = true

scala> to_timestamp_func.left
res4: org.apache.spark.sql.catalyst.expressions.Expression = hello

scala> to_timestamp_func.left.resolved
res5: Boolean = true

scala> to_timestamp_func.left.childrenResolved
res6: Boolean = true

scala> to_timestamp_func.format
res7: org.apache.spark.sql.catalyst.expressions.Expression = world

scala> to_timestamp_func.format.resolved
res8: Boolean = true

scala> to_timestamp_func.format.childrenResolved
res9: Boolean = true

scala> to_timestamp_func.checkInputDataTypes()
res10: org.apache.spark.sql.catalyst.analysis.TypeCheckResult = org.apache.spark.sql.catalyst.analysis.TypeCheckResult$TypeCheckSuccess$@256a5df0
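Everything above resolves immediately because both arguments are literal expressions with known types. For contrast, the same constructor over an unresolved column reference should flip the resolution flags to false until the analyzer binds the attribute. A minimal sketch, not from the session above: the "ts" attribute is hypothetical, and the exact behavior depends on this snapshot of ParseToTimestamp.

import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.functions.lit

// "ts" is a hypothetical column name; no schema is bound to it yet.
val unbound = new ParseToTimestamp(UnresolvedAttribute("ts"), lit("yyyy-MM-dd").expr)

unbound.childrenResolved // expected: false -- the attribute has no known type yet
unbound.resolved         // expected: false -- resolution requires resolved children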
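The child shown above, cast(unix_timestamp(hello, world) as timestamp), is the expression ParseToTimestamp rewrites itself into, so the same conversion can be spelled out at the DataFrame level with the public unix_timestamp function. A minimal sketch, assuming the spark-shell implicits and a throwaway one-column DataFrame:

import org.apache.spark.sql.functions.unix_timestamp
import spark.implicits._

val df = Seq("2016-12-11 13:43:58").toDF("ts")

// Same shape as the rewritten child: unix_timestamp(...) cast to timestamp.
df.select(unix_timestamp($"ts", "yyyy-MM-dd HH:mm:ss").cast("timestamp").as("parsed")).show()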
The same session as a standalone script:

import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.catalyst.expressions.ParseToTimestamp

val to_timestamp_func = new ParseToTimestamp(lit("hello").expr, lit("world").expr)

to_timestamp_func.child
to_timestamp_func.dataType
to_timestamp_func.resolved
to_timestamp_func.childrenResolved

to_timestamp_func.left
to_timestamp_func.left.resolved
to_timestamp_func.left.childrenResolved

to_timestamp_func.format
to_timestamp_func.format.resolved
to_timestamp_func.format.childrenResolved

to_timestamp_func.checkInputDataTypes()
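checkInputDataTypes() succeeds here because both inputs are strings. To see the failing side of the type check, one can drive the underlying UnixTimestamp expression with an input outside its accepted types. A hedged sketch only: the inputs are made up, and the exact failure message is version-dependent.

import org.apache.spark.sql.catalyst.expressions.UnixTimestamp
import org.apache.spark.sql.functions.lit

// A boolean is not among UnixTimestamp's accepted input types (string/date/timestamp).
val badTyped = UnixTimestamp(lit(true).expr, lit("yyyy-MM-dd").expr)

badTyped.checkInputDataTypes() // expected: a TypeCheckFailure describing the mismatch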