Searched refs: get_tokens_unprocessed (Results 101 – 125 of 336), sorted by relevance


/dports/textproc/py-pygments-25/Pygments-2.5.2/pygments/lexers/
python.py
543 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
573 insertions, pylexer.get_tokens_unprocessed(curcode)):
588 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
595 pylexer.get_tokens_unprocessed(curcode)):
598 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1051 def get_tokens_unprocessed(self, text): member in NumPyLexer
1053 PythonLexer.get_tokens_unprocessed(self, text):
matlab.py
184 def get_tokens_unprocessed(self, text): member in MatlabSessionLexer
215 insertions, mlexer.get_tokens_unprocessed(curcode)):
224 insertions, mlexer.get_tokens_unprocessed(curcode)):
haskell.py
489 def get_tokens_unprocessed(self, text): member in CryptolLexer
492 RegexLexer.get_tokens_unprocessed(self, text, stack):
518 def get_tokens_unprocessed(self, text): member in LiterateLexer
554 list(lxlexer.get_tokens_unprocessed(latex))))
559 list(lxlexer.get_tokens_unprocessed(latex))))
560 for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
make.py
46 def get_tokens_unprocessed(self, text): member in MakefileLexer
60 for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
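
Most of the hits in this block follow the same console-session pattern: the lexer buffers the code that follows each prompt, re-lexes it with an inner lexer, and uses do_insertions() to interleave the prompt tokens back into that stream. A minimal sketch of the pattern, assuming a '>>> ' prompt (the class name and prompt string are illustrative, not taken from Pygments itself):

    import re

    from pygments.lexer import Lexer, do_insertions
    from pygments.lexers.python import PythonLexer
    from pygments.token import Generic

    class ToyConsoleLexer(Lexer):
        """Illustrative console lexer mirroring the python.py/matlab.py hits above."""
        name = 'ToyConsole'

        def get_tokens_unprocessed(self, text):
            pylexer = PythonLexer(**self.options)
            curcode = ''
            insertions = []
            for match in re.finditer(r'.*\n', text):
                line = match.group()
                if line.startswith('>>> '):
                    # Remember where the prompt belongs; keep only the code for re-lexing.
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt, line[:4])]))
                    curcode += line[4:]
                else:
                    if curcode:
                        # Merge the buffered prompt tokens back into the inner lexer's stream.
                        yield from do_insertions(
                            insertions, pylexer.get_tokens_unprocessed(curcode))
                        curcode, insertions = '', []
                    yield match.start(), Generic.Output, line
            if curcode:
                yield from do_insertions(
                    insertions, pylexer.get_tokens_unprocessed(curcode))
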
/dports/www/moinmoin/moin-1.9.11/MoinMoin/support/pygments/lexers/
python.py
543 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
573 insertions, pylexer.get_tokens_unprocessed(curcode)):
588 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
595 pylexer.get_tokens_unprocessed(curcode)):
598 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1051 def get_tokens_unprocessed(self, text): member in NumPyLexer
1053 PythonLexer.get_tokens_unprocessed(self, text):
matlab.py
184 def get_tokens_unprocessed(self, text): member in MatlabSessionLexer
215 insertions, mlexer.get_tokens_unprocessed(curcode)):
224 insertions, mlexer.get_tokens_unprocessed(curcode)):
haskell.py
489 def get_tokens_unprocessed(self, text): member in CryptolLexer
492 RegexLexer.get_tokens_unprocessed(self, text, stack):
518 def get_tokens_unprocessed(self, text): member in LiterateLexer
554 list(lxlexer.get_tokens_unprocessed(latex))))
559 list(lxlexer.get_tokens_unprocessed(latex))))
560 for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
/dports/devel/py-pyface/pyface-7.2.0/pyface/ui/qt4/code_editor/
pygments_highlighter.py
20 def get_tokens_unprocessed(self, text, stack=("root",)): function
84 RegexLexer.get_tokens_unprocessed = get_tokens_unprocessed
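
The pyface hit works differently: rather than subclassing, pygments_highlighter.py installs a module-level replacement for RegexLexer.get_tokens_unprocessed (line 84 above) so a Qt syntax highlighter can re-lex a single line and resume from the state the previous line ended in. A minimal sketch of the monkey-patch mechanism; the wrapper body here only delegates, whereas pyface's real replacement also carries the lexer's state stack across calls:

    from pygments.lexer import RegexLexer

    _original_get_tokens_unprocessed = RegexLexer.get_tokens_unprocessed

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # Delegate to the stock implementation. A real highlighter would also
        # record the final state stack on `self` so the next call can resume
        # from it instead of restarting at 'root'.
        yield from _original_get_tokens_unprocessed(self, text, stack)

    RegexLexer.get_tokens_unprocessed = get_tokens_unprocessed
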
/dports/net-p2p/bazarr/bazarr-1.0.2/libs/pygments/lexers/
data.py
432 def get_tokens_unprocessed(self, text=None, context=None): member in YamlLexer
435 return super().get_tokens_unprocessed(text, context)
465 def get_tokens_unprocessed(self, text): member in JsonLexer
692 def get_tokens_unprocessed(self, text): member in JsonLdLexer
693 for start, token, value in super().get_tokens_unprocessed(text):
python.py
640 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
670 insertions, pylexer.get_tokens_unprocessed(curcode))
684 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
691 pylexer.get_tokens_unprocessed(curcode))
693 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1146 def get_tokens_unprocessed(self, text): member in NumPyLexer
1148 PythonLexer.get_tokens_unprocessed(self, text):
haskell.py
490 def get_tokens_unprocessed(self, text): member in CryptolLexer
493 RegexLexer.get_tokens_unprocessed(self, text, stack):
519 def get_tokens_unprocessed(self, text): member in LiterateLexer
555 list(lxlexer.get_tokens_unprocessed(latex))))
560 list(lxlexer.get_tokens_unprocessed(latex))))
561 yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
make.py
45 def get_tokens_unprocessed(self, text):
59 yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
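
Two other hits in this block use a lighter pattern: NumPyLexer (python.py, lines 1146–1148) and JsonLdLexer (data.py, lines 692–693) subclass an existing lexer, call the parent's get_tokens_unprocessed, and retag selected tokens as they stream past. A sketch of that pattern; the class name and keyword set are illustrative, not the real NumPy builtin list:

    from pygments.lexers.python import PythonLexer
    from pygments.token import Keyword, Name

    class ToyNumericLexer(PythonLexer):
        """Illustrative subclass mirroring the NumPyLexer retagging pattern."""
        name = 'ToyNumeric'
        EXTRA_KEYWORDS = {'array', 'zeros', 'ones'}  # illustrative set

        def get_tokens_unprocessed(self, text):
            # Let the parent lexer tokenize, then promote selected names.
            for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.EXTRA_KEYWORDS:
                    yield index, Keyword.Pseudo, value
                else:
                    yield index, token, value
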
/dports/textproc/py-pygments/Pygments-2.7.2/pygments/lexers/
python.py
634 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
664 insertions, pylexer.get_tokens_unprocessed(curcode))
678 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
685 pylexer.get_tokens_unprocessed(curcode))
687 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1140 def get_tokens_unprocessed(self, text): member in NumPyLexer
1142 PythonLexer.get_tokens_unprocessed(self, text):
matlab.py
193 def get_tokens_unprocessed(self, text): member in MatlabSessionLexer
235 insertions, mlexer.get_tokens_unprocessed(curcode))
250 insertions, mlexer.get_tokens_unprocessed(curcode))
haskell.py
491 def get_tokens_unprocessed(self, text): member in CryptolLexer
494 RegexLexer.get_tokens_unprocessed(self, text, stack):
520 def get_tokens_unprocessed(self, text): member in LiterateLexer
556 list(lxlexer.get_tokens_unprocessed(latex))))
561 list(lxlexer.get_tokens_unprocessed(latex))))
562 yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
make.py
46 def get_tokens_unprocessed(self, text): member in MakefileLexer
60 yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
/dports/textproc/py-pygments/stage/usr/local/lib/python3.8/site-packages/pygments/lexers/
python.py
634 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
664 insertions, pylexer.get_tokens_unprocessed(curcode))
678 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
685 pylexer.get_tokens_unprocessed(curcode))
687 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1140 def get_tokens_unprocessed(self, text): member in NumPyLexer
1142 PythonLexer.get_tokens_unprocessed(self, text):
matlab.py
193 def get_tokens_unprocessed(self, text): member in MatlabSessionLexer
235 insertions, mlexer.get_tokens_unprocessed(curcode))
250 insertions, mlexer.get_tokens_unprocessed(curcode))
haskell.py
491 def get_tokens_unprocessed(self, text): member in CryptolLexer
494 RegexLexer.get_tokens_unprocessed(self, text, stack):
520 def get_tokens_unprocessed(self, text): member in LiterateLexer
556 list(lxlexer.get_tokens_unprocessed(latex))))
561 list(lxlexer.get_tokens_unprocessed(latex))))
562 yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
/dports/textproc/py-pygments/Pygments-2.7.2/build/lib/pygments/lexers/
python.py
634 def get_tokens_unprocessed(self, text): member in PythonConsoleLexer
664 insertions, pylexer.get_tokens_unprocessed(curcode))
678 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
685 pylexer.get_tokens_unprocessed(curcode))
687 for i, t, v in tblexer.get_tokens_unprocessed(curtb):
1140 def get_tokens_unprocessed(self, text): member in NumPyLexer
1142 PythonLexer.get_tokens_unprocessed(self, text):
matlab.py
193 def get_tokens_unprocessed(self, text): member in MatlabSessionLexer
235 insertions, mlexer.get_tokens_unprocessed(curcode))
250 insertions, mlexer.get_tokens_unprocessed(curcode))
haskell.py
491 def get_tokens_unprocessed(self, text): member in CryptolLexer
494 RegexLexer.get_tokens_unprocessed(self, text, stack):
520 def get_tokens_unprocessed(self, text): member in LiterateLexer
556 list(lxlexer.get_tokens_unprocessed(latex))))
561 list(lxlexer.get_tokens_unprocessed(latex))))
562 yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
make.py
46 def get_tokens_unprocessed(self, text): member in MakefileLexer
60 yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
/dports/net/google-cloud-sdk/google-cloud-sdk/lib/third_party/pygments/lexers/
haskell.py
463 def get_tokens_unprocessed(self, text): member in CryptolLexer
466 RegexLexer.get_tokens_unprocessed(self, text, stack):
492 def get_tokens_unprocessed(self, text): member in LiterateLexer
528 list(lxlexer.get_tokens_unprocessed(latex))))
533 list(lxlexer.get_tokens_unprocessed(latex))))
534 for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
make.py
46 def get_tokens_unprocessed(self, text): member in MakefileLexer
60 for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
