public class PythonUDF extends org.apache.spark.sql.catalyst.expressions.Expression implements Logging, scala.Product, scala.Serializable
PythonRDD
Constructor and Description |
---|
PythonUDF(String name,
byte[] command,
java.util.Map<String,String> envVars,
java.util.List<String> pythonIncludes,
String pythonExec,
java.util.List<Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars,
Accumulator<java.util.List<byte[]>> accumulator,
org.apache.spark.sql.catalyst.types.DataType dataType,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children) |
Modifier and Type | Method and Description |
---|---|
Accumulator<java.util.List<byte[]>> | accumulator() |
java.util.List<Broadcast<org.apache.spark.api.python.PythonBroadcast>> | broadcastVars() |
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | children() |
byte[] | command() |
org.apache.spark.sql.catalyst.types.DataType | dataType() |
java.util.Map<String,String> | envVars() |
scala.runtime.Nothing$ | eval(org.apache.spark.sql.catalyst.expressions.Row input) |
String | name() |
boolean | nullable() |
String | pythonExec() |
java.util.List<String> | pythonIncludes() |
String | toString() |
c2, childrenResolved, eval$default$1, f1, f2, foldable, i1, i2, n1, n2, references, resolved
apply, argString, asCode, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, makeCopy, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, simpleString, stringArgs, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public PythonUDF(String name, byte[] command, java.util.Map<String,String> envVars, java.util.List<String> pythonIncludes, String pythonExec, java.util.List<Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars, Accumulator<java.util.List<byte[]>> accumulator, org.apache.spark.sql.catalyst.types.DataType dataType, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children)
public String name()
public byte[] command()
public java.util.Map<String,String> envVars()
public java.util.List<String> pythonIncludes()
public String pythonExec()
public java.util.List<Broadcast<org.apache.spark.api.python.PythonBroadcast>> broadcastVars()
public Accumulator<java.util.List<byte[]>> accumulator()
public org.apache.spark.sql.catalyst.types.DataType dataType()
dataType
in class org.apache.spark.sql.catalyst.expressions.Expression
public scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> children()
children
in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.expressions.Expression>
public String toString()
toString
in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.expressions.Expression>
public boolean nullable()
nullable
in class org.apache.spark.sql.catalyst.expressions.Expression
public scala.runtime.Nothing$ eval(org.apache.spark.sql.catalyst.expressions.Row input)
eval
in class org.apache.spark.sql.catalyst.expressions.Expression