    @since(1.0)
    def sql(self, sqlQuery):
        """Returns a :class:`DataFrame` representing the result of the given query.

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.sql("SELECT field1 AS f1, field2 as f2 from table1")
        >>> df2.collect()
        [Row(f1=1, f2=u'row1'), Row(f1=2, f2=u'row2'), Row(f1=3, f2=u'row3')]
        """
        return self.sparkSession.sql(sqlQuery)
    @since(1.0)
    def table(self, tableName):
        """Returns the specified table or view as a :class:`DataFrame`.

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.table("table1")
        >>> sorted(df.collect()) == sorted(df2.collect())
        True
        """
        return self.sparkSession.table(tableName)
    @since(1.3)
    def tables(self, dbName=None):
        """Returns a :class:`DataFrame` containing names of tables in the given database.

        The result has two columns, ``tableName`` and ``isTemporary``. If ``dbName`` is not
        specified, the current database is used.

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> df2 = sqlContext.tables()
        >>> df2.filter("tableName = 'table1'").first()
        Row(tableName=u'table1', isTemporary=True)
        """
        if dbName is None:
            return DataFrame(self._ssql_ctx.tables(), self)
        else:
            return DataFrame(self._ssql_ctx.tables(dbName), self)
    @since(1.3)
    def tableNames(self, dbName=None):
        """Returns a list of names of tables in the database ``dbName``
        (the current database if ``dbName`` is not given).

        >>> sqlContext.registerDataFrameAsTable(df, "table1")
        >>> "table1" in sqlContext.tableNames()
        True
        >>> "table1" in sqlContext.tableNames("default")
        True
        """
        if dbName is None:
            return [name for name in self._ssql_ctx.tableNames()]
        else:
            return [name for name in self._ssql_ctx.tableNames(dbName)]
    @since(1.0)
    def cacheTable(self, tableName):
        """Caches the specified table in-memory."""
        self._ssql_ctx.cacheTable(tableName)
    @since(1.0)
    def uncacheTable(self, tableName):
        """Removes the specified table from the in-memory cache."""
        self._ssql_ctx.uncacheTable(tableName)
    @since(1.3)
    def clearCache(self):
        """Removes all cached tables from the in-memory cache."""
        self._ssql_ctx.clearCache()
    @property
    @since(1.4)
    def read(self):
        """Returns a :class:`DataFrameReader` that can be used to read data in as a :class:`DataFrame`."""
        return DataFrameReader(self)
    @property
    @since(2.0)
    def readStream(self):
        """Returns a :class:`DataStreamReader` that can be used to read data streams
        as a streaming :class:`DataFrame`.

        >>> text_sdf = sqlContext.readStream.text(tempfile.mkdtemp())
        """
        return DataStreamReader(self)
    @property
    @since(2.0)
    def streams(self):
        """Returns a :class:`StreamingQueryManager` that allows managing all the
        active streaming queries on this context."""
        from pyspark.sql.streaming import StreamingQueryManager
        return StreamingQueryManager(self._ssql_ctx.streams())
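
# Usage sketches (illustrative only, not part of the original module). The first walks the
# batch API above: register a DataFrame as a temporary table, cache it, query it with SQL,
# and load external data through `read`; the parquet path is a hypothetical placeholder.
# The second exercises `readStream`/`streams`; both directory arguments are placeholders.
def _example_batch_usage(sqlContext, df):
    sqlContext.registerDataFrameAsTable(df, "people")
    sqlContext.cacheTable("people")                # later queries read from the in-memory cache
    counts = sqlContext.sql("SELECT COUNT(*) AS n FROM people").collect()
    other = sqlContext.read.parquet("/tmp/example.parquet")   # hypothetical path
    sqlContext.uncacheTable("people")
    return counts, other


def _example_streaming_usage(sqlContext, input_dir, checkpoint_dir):
    sdf = sqlContext.readStream.text(input_dir)    # streaming DataFrame over a directory of text files
    query = (sdf.writeStream.format("console")
             .option("checkpointLocation", checkpoint_dir)
             .start())
    active = sqlContext.streams.active             # StreamingQuery objects active on this context
    query.stop()
    return active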

class HiveContext(SQLContext):
    """A variant of Spark SQL that integrates with data stored in Hive.

    Configuration for Hive is read from ``hive-site.xml`` on the classpath.

    .. note:: Deprecated in 2.0.0.
        Use SparkSession.builder.enableHiveSupport().getOrCreate() instead.
    """

    def __init__(self, sparkContext, jhiveContext=None):
        warnings.warn(
            "HiveContext is deprecated in Spark 2.0.0. Please use " +
            "SparkSession.builder.enableHiveSupport().getOrCreate() instead.",
            DeprecationWarning)
        if jhiveContext is None:
            sparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
        else:
            sparkSession = SparkSession(sparkContext, jhiveContext.sparkSession())
        SQLContext.__init__(self, sparkContext, sparkSession, jhiveContext)

    @classmethod
    def _createForTesting(cls, sparkContext):
        """(Internal use only) Create a new HiveContext for testing."""
        jsc = sparkContext._jsc.sc()
        jtestHive = sparkContext._jvm.org.apache.spark.sql.hive.test.TestHiveContext(jsc, False)
        return cls(sparkContext, jtestHive)

    def refreshTable(self, tableName):
        """Invalidate and refresh all the cached metadata of the given table."""
        self._ssql_ctx.refreshTable(tableName)
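
# Sketch of the replacement recommended by the deprecation note above: build a
# Hive-enabled SparkSession rather than constructing HiveContext directly.
# Illustrative only; the application name is an arbitrary placeholder.
def _example_hive_enabled_session():
    from pyspark.sql import SparkSession
    spark = (SparkSession.builder
             .appName("hive-example")
             .enableHiveSupport()
             .getOrCreate())
    return spark.sql("SHOW TABLES")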

class UDFRegistration(object):
    """Wrapper for user-defined function registration."""

    def __init__(self, sqlContext):
        self.sqlContext = sqlContext

    def register(self, name, f, returnType=StringType()):
        return self.sqlContext.registerFunction(name, f, returnType)
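
# Usage sketch (illustrative): registering a Python UDF and calling it from SQL.
# Assumes a live `sqlContext`; the name "strlen" is arbitrary.
def _example_register_udf(sqlContext):
    from pyspark.sql.types import IntegerType
    sqlContext.registerFunction("strlen", lambda s: len(s), IntegerType())
    return sqlContext.sql("SELECT strlen('spark')").collect()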

def _test():
    import os, doctest, tempfile
    from pyspark.context import SparkContext
    from pyspark.sql import Row, SQLContext
    import pyspark.sql.context

    os.chdir(os.environ["SPARK_HOME"])
    globs = pyspark.sql.context.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['tempfile'] = tempfile
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    globs['rdd'] = rdd = sc.parallelize(
        [Row(field1=1, field2="row1"),
         Row(field1=2, field2="row2"),
         Row(field1=3, field2="row3")])
    globs['df'] = rdd.toDF()
    jsonStrings = []  # sample JSON documents for the json doctests (not shown in this excerpt)
    globs['json'] = sc.parallelize(jsonStrings)
    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.context, globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)


if __name__ == "__main__":
    _test()

import unittest


class TestSolution(unittest.TestCase):
    # The `vertices` inputs and expected results are not included in this excerpt;
    # `...` marks the missing expected values.

    def testcase_001(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_002(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_003(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_004(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_005(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_006(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_007(self):
        self.assertEqual(answer(vertices), ...)

    def testcase_008(self):
        ...
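
# Running the suite (sketch): the standard unittest entry point, assuming `answer`
# and the `vertices` fixtures are defined alongside these tests.
if __name__ == "__main__":
    unittest.main()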