Home
last modified time | relevance | path

Searched refs:parsePlan (Results 1 – 12 of 12) sorted by relevance

/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/command/
DDLCommandSuite.scala:39  parser.parsePlan(sql)
46 parser.parsePlan(query) match {
61 val parsed = parser.parsePlan(sql)
88 val parsed1 = parser.parsePlan(sql1)
89 val parsed2 = parser.parsePlan(sql2)
90 val parsed3 = parser.parsePlan(sql3)
351 parser.parsePlan(query) match {
373 parser.parsePlan(query) match {
760 parser.parsePlan(
777 ).map(parser.parsePlan)
[all …]
/dports/devel/spark/spark-2.1.1/sql/hive/src/test/scala/org/apache/spark/sql/hive/
HiveDDLCommandSuite.scala:42  parser.parsePlan(sql).collect {
49 parser.parsePlan(sql)
179 parser.parsePlan(
190 parser.parsePlan(
198 parser.parsePlan(
206 parser.parsePlan(
219 parser.parsePlan(sql)
233 val plan = analyzer.execute(parser.parsePlan(
501 parser.parsePlan(v1)
507 val parsed = parser.parsePlan(sql)
[all …]
ErrorPositionSuite.scala:133  def ast = spark.sessionState.sqlParser.parsePlan(query)
/dports/devel/kdevelop/kdevelop-21.12.3/kdevplatform/language/backgroundparser/
backgroundparser.cpp:240  const auto& parsePlan = m_documents[url]; in nextDocumentToParse() local
243 if (parsePlan.sequentialProcessingFlags() & ParseJob::RequiresSequentialProcessing in nextDocumentToParse()
244 && parsePlan.priority() > bestRunningPriority) { in nextDocumentToParse()
284 const DocumentParsePlan parsePlan = *parsePlanConstIt; in parseDocumentsInternal() local
293 decorator = createParseJob(url, parsePlan); in parseDocumentsInternal()
341 ThreadWeaver::QObjectDecorator* createParseJob(const IndexedString& url, const DocumentParsePlan& parsePlan) in createParseJob() argument
346 const auto& notifyWhenReady = parsePlan.notifyWhenReady(); in createParseJob()
358 job->setParsePriority(parsePlan.priority()); in createParseJob()
359 job->setMinimumFeatures(parsePlan.features()); in createParseJob()
361 job->setSequentialProcessingFlags(parsePlan.sequentialProcessingFlags()); in createParseJob()
/dports/devel/spark/spark-2.1.1/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/
ParserInterface.scala:29  def parsePlan(sqlText: String): LogicalPlan — method
ParseDriver.scala:53  override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
/dports/devel/spark/spark-2.1.1/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/
ErrorParserSuite.scala:26  val e = intercept[ParseException](CatalystSqlParser.parsePlan(sql))
PlanParserSuite.scala:38  comparePlans(parsePlan(sqlCommand), plan)
42 val e = intercept[ParseException](parsePlan(sqlCommand))
/dports/devel/spark/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/
SparkSqlParserSuite.scala:55  val normalized1 = normalizePlan(parser.parsePlan(sqlCommand))
61 val e = intercept[ParseException](parser.parsePlan(sqlCommand))
/dports/devel/spark/spark-2.1.1/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/
TestHive.scala:470  this(sparkSession, sparkSession.sessionState.sqlParser.parsePlan(sql))
/dports/devel/spark/spark-2.1.1/sql/hive/src/main/scala/org/apache/spark/sql/hive/
HiveMetastoreCatalog.scala:137  sparkSession.sessionState.sqlParser.parsePlan(viewText),
/dports/devel/spark/spark-2.1.1/sql/core/src/main/scala/org/apache/spark/sql/
SparkSession.scala:592  Dataset.ofRows(self, sessionState.sqlParser.parsePlan(sqlText))