From 292e0266f08d624a91f9cf541c1b665e91295677 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Wed, 19 Mar 2025 19:23:39 +0000 Subject: [PATCH 001/435] Bump Ruff to 0.9.10 --- pyproject.toml | 2 +- sphinx/directives/other.py | 2 +- sphinx/domains/c/_parser.py | 2 +- sphinx/domains/citation.py | 2 +- sphinx/domains/cpp/__init__.py | 2 +- sphinx/domains/cpp/_parser.py | 2 +- sphinx/domains/python/__init__.py | 12 ++--- sphinx/domains/rst.py | 2 +- sphinx/environment/adapters/indexentries.py | 2 +- sphinx/environment/collectors/asset.py | 2 +- sphinx/ext/autodoc/__init__.py | 10 ++--- sphinx/ext/autosummary/__init__.py | 2 +- sphinx/ext/autosummary/generate.py | 2 +- sphinx/ext/graphviz.py | 4 +- sphinx/ext/imgmath.py | 6 +-- sphinx/ext/inheritance_diagram.py | 2 +- sphinx/ext/napoleon/docstring.py | 2 +- sphinx/ext/viewcode.py | 6 +-- sphinx/roles.py | 2 +- sphinx/util/docfields.py | 2 +- sphinx/util/docutils.py | 2 +- sphinx/writers/texinfo.py | 2 +- tests/test_builders/test_build_linkcheck.py | 4 +- tests/test_config/test_config.py | 2 +- tests/test_directives/test_directive_code.py | 44 +++++++++---------- tests/test_domains/test_domain_py.py | 2 +- tests/test_extensions/test_ext_coverage.py | 2 +- .../test_ext_intersphinx_cache.py | 6 +-- 28 files changed, 66 insertions(+), 66 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9645f148dd3..59312974f8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.9.9", + "ruff==0.9.10", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py index d9c2b98fd84..8c66ed383b5 100644 --- a/sphinx/directives/other.py +++ b/sphinx/directives/other.py @@ -411,7 +411,7 @@ def _insert_input(include_lines: list[str], source: str) -> None: if self.arguments[0].startswith('<') and self.arguments[0].endswith('>'): # docutils 
"standard" includes, do not do path processing return super().run() - rel_filename, filename = self.env.relfn2path(self.arguments[0]) + _rel_filename, filename = self.env.relfn2path(self.arguments[0]) self.arguments[0] = str(filename) self.env.note_included(filename) return super().run() diff --git a/sphinx/domains/c/_parser.py b/sphinx/domains/c/_parser.py index 7eb09f6f7b8..bd7ddbe2326 100644 --- a/sphinx/domains/c/_parser.py +++ b/sphinx/domains/c/_parser.py @@ -230,7 +230,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList | None: # # expression-list # -> initializer-list - exprs, trailing_comma = self._parse_initializer_list( + exprs, _trailing_comma = self._parse_initializer_list( 'parenthesized expression-list', '(', ')' ) if exprs is None: diff --git a/sphinx/domains/citation.py b/sphinx/domains/citation.py index 49b74cca269..348888c2d50 100644 --- a/sphinx/domains/citation.py +++ b/sphinx/domains/citation.py @@ -106,7 +106,7 @@ def resolve_xref( node: pending_xref, contnode: Element, ) -> nodes.reference | None: - docname, labelid, lineno = self.citations.get(target, ('', '', 0)) + docname, labelid, _lineno = self.citations.get(target, ('', '', 0)) if not docname: return None diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py index 75d7732a405..fc72e208791 100644 --- a/sphinx/domains/cpp/__init__.py +++ b/sphinx/domains/cpp/__init__.py @@ -744,7 +744,7 @@ def apply(self, **kwargs: Any) -> None: template_decls = ns.templatePrefix.templates else: template_decls = [] - symbols, fail_reason = parent_symbol.find_name( + symbols, _fail_reason = parent_symbol.find_name( nestedName=name, templateDecls=template_decls, typ='any', diff --git a/sphinx/domains/cpp/_parser.py b/sphinx/domains/cpp/_parser.py index d28c474795d..aa941260da9 100644 --- a/sphinx/domains/cpp/_parser.py +++ b/sphinx/domains/cpp/_parser.py @@ -365,7 +365,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList: # # expression-list # -> 
initializer-list - exprs, trailing_comma = self._parse_initializer_list( + exprs, _trailing_comma = self._parse_initializer_list( 'parenthesized expression-list', '(', ')' ) if exprs is None: diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index 97519ee028e..af923cae70e 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -108,7 +108,7 @@ def add_target_and_index( modname = self.options.get('module', self.env.ref_context.get('py:module')) node_id = signode['ids'][0] - name, cls = name_cls + name, _cls = name_cls if modname: text = _('%s() (in module %s)') % (name, modname) self.indexnode['entries'].append(('single', text, node_id, '', None)) @@ -175,7 +175,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] return fullname, prefix def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str: - name, cls = name_cls + name, _cls = name_cls if modname: return _('%s (in module %s)') % (name, modname) else: @@ -268,7 +268,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]: return prefix def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str: - name, cls = name_cls + name, _cls = name_cls try: clsname, methname = name.rsplit('.', 1) if modname and self.config.add_module_names: @@ -364,7 +364,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] return fullname, prefix def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str: - name, cls = name_cls + name, _cls = name_cls try: clsname, attrname = name.rsplit('.', 1) if modname and self.config.add_module_names: @@ -424,7 +424,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]: return prefix def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str: - name, cls = name_cls + name, _cls = name_cls try: clsname, attrname = name.rsplit('.', 1) if modname and self.config.add_module_names: @@ 
-464,7 +464,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] return fullname, prefix def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str: - name, cls = name_cls + name, _cls = name_cls try: clsname, attrname = name.rsplit('.', 1) if modname and self.config.add_module_names: diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py index cd5d8312d4a..55aa3103d8a 100644 --- a/sphinx/domains/rst.py +++ b/sphinx/domains/rst.py @@ -83,7 +83,7 @@ def _toc_entry_name(self, sig_node: desc_signature) -> str: return '' objtype = sig_node.parent.get('objtype') - *parents, name = sig_node['_toc_parts'] + *_parents, name = sig_node['_toc_parts'] if objtype == 'directive:option': return f':{name}:' if self.config.toc_object_entries_show_parents in {'domain', 'all'}: diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py index c19628515b6..e9e6e408b6c 100644 --- a/sphinx/environment/adapters/indexentries.py +++ b/sphinx/environment/adapters/indexentries.py @@ -253,7 +253,7 @@ def _key_func_2(entry: tuple[str, _IndexEntryTargets]) -> str: def _group_by_func(entry: tuple[str, _IndexEntry]) -> str: """Group the entries by letter or category key.""" - key, (targets, sub_items, category_key) = entry + key, (_targets, _sub_items, category_key) = entry if category_key is not None: return category_key diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py index 13105587673..e199fc90124 100644 --- a/sphinx/environment/collectors/asset.py +++ b/sphinx/environment/collectors/asset.py @@ -117,7 +117,7 @@ def collect_candidates( try: mimetype = guess_mimetype(filename) if mimetype is None: - basename, suffix = os.path.splitext(filename) + _basename, suffix = os.path.splitext(filename) mimetype = 'image/x-' + suffix[1:] if mimetype not in candidates: globbed.setdefault(mimetype, []).append(new_imgpath.as_posix()) diff --git 
a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py index 8cdb039df3e..560b6905208 100644 --- a/sphinx/ext/autodoc/__init__.py +++ b/sphinx/ext/autodoc/__init__.py @@ -463,7 +463,7 @@ def parse_name(self) -> bool: type='autodoc', ) return False - explicit_modname, path, base, tp_list, args, retann = matched.groups() + explicit_modname, path, base, _tp_list, args, retann = matched.groups() # support explicit module and class name separation via :: if explicit_modname is not None: @@ -1359,7 +1359,7 @@ def resolve_name( # ... if still falsy, there's no way to know if not mod_cls: return None, [] - modname, sep, cls = mod_cls.rpartition('.') + modname, _sep, cls = mod_cls.rpartition('.') parents = [cls] # if the module name is still missing, get it like above if not modname: @@ -1405,7 +1405,7 @@ def _find_signature(self) -> tuple[str | None, str | None] | None: match = py_ext_sig_re.match(line) if not match: break - exmod, path, base, tp_list, args, retann = match.groups() + _exmod, _path, base, _tp_list, args, retann = match.groups() # the base name must match ours if base not in valid_names: @@ -2295,7 +2295,7 @@ def should_suppress_value_header(self) -> bool: return True else: doc = self.get_doc() or [] - docstring, metadata = separate_metadata( + _docstring, metadata = separate_metadata( '\n'.join(functools.reduce(operator.iadd, doc, [])) ) if 'hide-value' in metadata: @@ -2947,7 +2947,7 @@ def should_suppress_value_header(self) -> bool: else: doc = self.get_doc() if doc: - docstring, metadata = separate_metadata( + _docstring, metadata = separate_metadata( '\n'.join(functools.reduce(operator.iadd, doc, [])) ) if 'hide-value' in metadata: diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py index 97c64a37cd1..733c936d8f0 100644 --- a/sphinx/ext/autosummary/__init__.py +++ b/sphinx/ext/autosummary/__init__.py @@ -814,7 +814,7 @@ def import_ivar_by_name( """ try: name, attr = name.rsplit('.', 1) - real_name, obj, 
parent, modname = import_by_name(name, prefixes) + real_name, obj, _parent, modname = import_by_name(name, prefixes) # Get ancestors of the object (class.__mro__ includes the class itself as # the first entry) diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py index d865c0de2af..62a106479ea 100644 --- a/sphinx/ext/autosummary/generate.py +++ b/sphinx/ext/autosummary/generate.py @@ -719,7 +719,7 @@ def find_autosummary_in_docstring( See `find_autosummary_in_lines`. """ try: - real_name, obj, parent, modname = import_by_name(name) + _real_name, obj, _parent, _modname = import_by_name(name) lines = pydoc.getdoc(obj).splitlines() return find_autosummary_in_lines(lines, module=name, filename=filename) except AttributeError: diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py index 9cd4d163e36..b973c1f5870 100644 --- a/sphinx/ext/graphviz.py +++ b/sphinx/ext/graphviz.py @@ -431,7 +431,7 @@ def render_dot_latex( filename: str | None = None, ) -> None: try: - fname, outfn = render_dot(self, code, options, 'pdf', prefix, filename) + fname, _outfn = render_dot(self, code, options, 'pdf', prefix, filename) except GraphvizError as exc: logger.warning(__('dot code %r: %s'), code, exc) raise nodes.SkipNode from exc @@ -475,7 +475,7 @@ def render_dot_texinfo( prefix: str = 'graphviz', ) -> None: try: - fname, outfn = render_dot(self, code, options, 'png', prefix) + fname, _outfn = render_dot(self, code, options, 'png', prefix) except GraphvizError as exc: logger.warning(__('dot code %r: %s'), code, exc) raise nodes.SkipNode from exc diff --git a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py index b7bcf4a7a67..5b58db7b084 100644 --- a/sphinx/ext/imgmath.py +++ b/sphinx/ext/imgmath.py @@ -200,7 +200,7 @@ def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int | command.append('--depth') command.append(dvipath) - stdout, stderr = convert_dvi_to_image(command, name) + stdout, _stderr = convert_dvi_to_image(command, 
name) depth = None if builder.config.imgmath_use_preview: @@ -221,7 +221,7 @@ def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int | command.extend(builder.config.imgmath_dvisvgm_args) command.append(dvipath) - stdout, stderr = convert_dvi_to_image(command, name) + _stdout, stderr = convert_dvi_to_image(command, name) depth = None if builder.config.imgmath_use_preview: @@ -370,7 +370,7 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non else: latex = wrap_displaymath(node.astext(), None, False) try: - rendered_path, depth = render_math(self, latex) + rendered_path, _depth = render_math(self, latex) except MathExtError as exc: msg = str(exc) sm = nodes.system_message( diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py index ce05626abe5..83a6d4b7b01 100644 --- a/sphinx/ext/inheritance_diagram.py +++ b/sphinx/ext/inheritance_diagram.py @@ -436,7 +436,7 @@ def run(self) -> list[Node]: # references to real URLs later. These nodes will eventually be # removed from the doctree after we're done with them. 
for name in graph.get_all_class_names(): - refnodes, x = class_role( # type: ignore[misc] + refnodes, _x = class_role( # type: ignore[misc] 'class', f':class:`{name}`', name, 0, self.state.inliner ) node.extend(refnodes) diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py index d1317e9d841..38325df1d94 100644 --- a/sphinx/ext/napoleon/docstring.py +++ b/sphinx/ext/napoleon/docstring.py @@ -476,7 +476,7 @@ def _consume_field( ) -> tuple[str, str, list[str]]: line = self._lines.next() - before, colon, after = self._partition_field_on_colon(line) + before, _colon, after = self._partition_field_on_colon(line) _name, _type, _desc = before, '', after if parse_type: diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py index 4b1c62ad0d1..39e4cf420b7 100644 --- a/sphinx/ext/viewcode.py +++ b/sphinx/ext/viewcode.py @@ -205,7 +205,7 @@ def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None: if entry is False: continue - code, tags, used, refname = entry + _code, _tags, used, _refname = entry for fullname in list(used): if used[fullname] == docname: used.pop(fullname) @@ -250,7 +250,7 @@ def get_module_filename(app: Sphinx, modname: str) -> _StrPath | None: return None else: try: - filename, source = ModuleAnalyzer.get_module_source(modname) + filename, _source = ModuleAnalyzer.get_module_source(modname) return filename except Exception: return None @@ -323,7 +323,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: max_index = len(lines) - 1 link_text = _('[docs]') for name, docname in used.items(): - type, start, end = tags[name] + _type, start, end = tags[name] backlink = urito(pagename, docname) + '#' + refname + '.' + name lines[start] = ( f'
\n' diff --git a/sphinx/roles.py b/sphinx/roles.py index 98843de5a95..04469f45488 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -115,7 +115,7 @@ def create_non_xref_node(self) -> tuple[list[Node], list[system_message]]: text = utils.unescape(self.text[1:]) if self.fix_parens: self.has_explicit_title = False # treat as implicit - text, target = self.update_title_and_target(text, '') + text, _target = self.update_title_and_target(text, '') node = self.innernodeclass(self.rawtext, text, classes=self.classes) return self.result_nodes(self.inliner.document, self.env, node, is_ref=False) diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py index 1c24a73bf2e..dcc24753862 100644 --- a/sphinx/util/docfields.py +++ b/sphinx/util/docfields.py @@ -118,7 +118,7 @@ def make_xref( if location is not None: with contextlib.suppress(ValueError): lineno = get_node_line(location) - ns, messages = role(rolename, target, target, lineno, inliner, {}, []) + ns, _messages = role(rolename, target, target, lineno, inliner, {}, []) return nodes.inline(target, '', *ns) def make_xrefs( diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py index 70d8e69be6f..b53774aa26f 100644 --- a/sphinx/util/docutils.py +++ b/sphinx/util/docutils.py @@ -370,7 +370,7 @@ def write(self, text: str) -> None: if not matched: logger.warning(text.rstrip('\r\n'), type='docutils') else: - location, type, level = matched.groups() + location, type, _level = matched.groups() message = report_re.sub('', text).rstrip() logger.log(type, message, location=location, type='docutils') diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py index b4f1272d49d..eaa7dbdb6e7 100644 --- a/sphinx/writers/texinfo.py +++ b/sphinx/writers/texinfo.py @@ -886,7 +886,7 @@ def depart_collected_footnote(self, node: Element) -> None: def visit_footnote_reference(self, node: Element) -> None: num = node.astext().strip() try: - footnode, used = self.footnotestack[-1][num] + footnode, _used = 
self.footnotestack[-1][num] except (KeyError, IndexError) as exc: raise nodes.SkipNode from exc # footnotes are repeated for each reference diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py index 32b7ae79ff7..82baa62f3ef 100644 --- a/tests/test_builders/test_build_linkcheck.py +++ b/tests/test_builders/test_build_linkcheck.py @@ -713,7 +713,7 @@ def log_date_time_string(self): def test_follows_redirects_on_HEAD(app, capsys): with serve_application(app, make_redirect_handler(support_head=True)) as address: app.build() - stdout, stderr = capsys.readouterr() + _stdout, stderr = capsys.readouterr() content = (app.outdir / 'output.txt').read_text(encoding='utf8') assert content == ( 'index.rst:1: [redirected with Found] ' @@ -736,7 +736,7 @@ def test_follows_redirects_on_HEAD(app, capsys): def test_follows_redirects_on_GET(app, capsys): with serve_application(app, make_redirect_handler(support_head=False)) as address: app.build() - stdout, stderr = capsys.readouterr() + _stdout, stderr = capsys.readouterr() content = (app.outdir / 'output.txt').read_text(encoding='utf8') assert content == ( 'index.rst:1: [redirected with Found] ' diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index d297af6f2ee..5e68b4a9657 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -68,7 +68,7 @@ def test_config_opt_deprecated(recwarn): opt = _Opt('default', '', ()) with pytest.warns(RemovedInSphinx90Warning): - default, rebuild, valid_types = opt + _default, _rebuild, _valid_types = opt with pytest.warns(RemovedInSphinx90Warning): _ = opt[0] diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py index 625d15c1f31..16a25dda687 100644 --- a/tests/test_directives/test_directive_code.py +++ b/tests/test_directives/test_directive_code.py @@ -51,7 +51,7 @@ def test_LiteralIncludeReader_lineno_start(literal_inc_path): def 
test_LiteralIncludeReader_pyobject1(literal_inc_path): options = {'lineno-match': True, 'pyobject': 'Foo'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Foo:\n pass\n' assert reader.lineno_start == 5 @@ -59,7 +59,7 @@ def test_LiteralIncludeReader_pyobject1(literal_inc_path): def test_LiteralIncludeReader_pyobject2(literal_inc_path): options = {'pyobject': 'Bar'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Bar:\n def baz():\n pass\n' assert reader.lineno_start == 1 # no lineno-match @@ -67,21 +67,21 @@ def test_LiteralIncludeReader_pyobject2(literal_inc_path): def test_LiteralIncludeReader_pyobject3(literal_inc_path): options = {'pyobject': 'Bar.baz'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ' def baz():\n pass\n' def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path): options = {'pyobject': 'Bar', 'lines': '2-'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ' def baz():\n pass\n' def test_LiteralIncludeReader_lines1(literal_inc_path): options = {'lines': '1-3'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ( '# Literally included file using Python highlighting\n' '\n' @@ -92,7 +92,7 @@ def test_LiteralIncludeReader_lines1(literal_inc_path): def test_LiteralIncludeReader_lines2(literal_inc_path): options = {'lines': '1,3,5'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ( '# Literally 
included file using Python highlighting\n' 'foo = "Including Unicode characters: üöä"\n' @@ -103,7 +103,7 @@ def test_LiteralIncludeReader_lines2(literal_inc_path): def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path): options = {'lines': '3-5', 'lineno-match': True} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'foo = "Including Unicode characters: üöä"\n\nclass Foo:\n' assert reader.lineno_start == 3 @@ -133,7 +133,7 @@ def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app): def test_LiteralIncludeReader_start_at(literal_inc_path): options = {'lineno-match': True, 'start-at': 'Foo', 'end-at': 'Bar'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Foo:\n pass\n\nclass Bar:\n' assert reader.lineno_start == 5 @@ -141,7 +141,7 @@ def test_LiteralIncludeReader_start_at(literal_inc_path): def test_LiteralIncludeReader_start_after(literal_inc_path): options = {'lineno-match': True, 'start-after': 'Foo', 'end-before': 'Bar'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ' pass\n\n' assert reader.lineno_start == 6 @@ -154,7 +154,7 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path): 'end-before': 'comment', } reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == '\nclass Bar:\n def baz():\n pass\n\n' assert reader.lineno_start == 7 @@ -162,7 +162,7 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path): def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path): options = {'lines': '2, 3, 5', 'start-at': 'foo', 'end-before': '#'} reader = 
LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == '\nclass Foo:\n\n' assert reader.lineno_start == 1 @@ -192,14 +192,14 @@ def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path): def test_LiteralIncludeReader_end_before(literal_inc_path): options = {'end-before': 'nclud'} # *nclud* matches first and third lines. reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == '# Literally included file using Python highlighting\n\n' def test_LiteralIncludeReader_prepend(literal_inc_path): options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ( 'Hello\n# Literally included file using Python highlighting\nSphinx\n' ) @@ -209,25 +209,25 @@ def test_LiteralIncludeReader_dedent(literal_inc_path): # dedent: 2 options = {'lines': '9-11', 'dedent': 2} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ' def baz():\n pass\n\n' # dedent: 4 options = {'lines': '9-11', 'dedent': 4} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'def baz():\n pass\n\n' # dedent: 6 options = {'lines': '9-11', 'dedent': 6} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'f baz():\n pass\n\n' # dedent: None options = {'lines': '9-11', 'dedent': None} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'def baz():\n 
pass\n\n' @@ -240,7 +240,7 @@ def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path): 'append': '# comment', } reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Foo:\n def baz():\n pass\n\n# comment\n' @@ -248,20 +248,20 @@ def test_LiteralIncludeReader_tabwidth(testroot): # tab-width: 4 options = {'tab-width': 4, 'pyobject': 'Qux'} reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Qux:\n def quux(self):\n pass\n' # tab-width: 8 options = {'tab-width': 8, 'pyobject': 'Qux'} reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'class Qux:\n def quux(self):\n pass\n' def test_LiteralIncludeReader_tabwidth_dedent(testroot): options = {'tab-width': 4, 'dedent': 4, 'pyobject': 'Qux.quux'} reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == 'def quux(self):\n pass\n' @@ -269,7 +269,7 @@ def test_LiteralIncludeReader_diff(testroot, literal_inc_path): literal_diff_path = testroot / 'literal-diff.inc' options = {'diff': literal_diff_path} reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG) - content, lines = reader.read() + content, _lines = reader.read() assert content == ( f'--- {literal_diff_path}\n' f'+++ {literal_inc_path}\n' diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py index 151fb4494f7..b3c246297ad 100644 --- a/tests/test_domains/test_domain_py.py +++ b/tests/test_domains/test_domain_py.py @@ -43,7 +43,7 @@ def parse(sig): m = py_sig_re.match(sig) if m is None: raise ValueError - name_prefix, tp_list, name, arglist, retann = m.groups() + 
_name_prefix, _tp_list, _name, arglist, _retann = m.groups() signode = addnodes.desc_signature(sig, '') _pseudo_parse_arglist(signode, arglist) return signode.astext() diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py index 3c40d3dfaea..7422cd3560f 100644 --- a/tests/test_extensions/test_ext_coverage.py +++ b/tests/test_extensions/test_ext_coverage.py @@ -38,7 +38,7 @@ def test_build(app: SphinxTestApp) -> None: assert 'api.h' in c_undoc assert ' * Py_SphinxTest' in c_undoc - undoc_py, undoc_c, py_undocumented, py_documented = pickle.loads( + undoc_py, undoc_c, _py_undocumented, _py_documented = pickle.loads( (app.outdir / 'undoc.pickle').read_bytes() ) assert len(undoc_c) == 1 diff --git a/tests/test_extensions/test_ext_intersphinx_cache.py b/tests/test_extensions/test_ext_intersphinx_cache.py index b2e52a5b180..3431f136874 100644 --- a/tests/test_extensions/test_ext_intersphinx_cache.py +++ b/tests/test_extensions/test_ext_intersphinx_cache.py @@ -242,7 +242,7 @@ def test_load_mappings_cache(tmp_path): item = dict((project.normalise(entry),)) inventories = InventoryAdapter(app.env) assert list(inventories.cache) == ['http://localhost:9341/a'] - e_name, e_time, e_inv = inventories.cache['http://localhost:9341/a'] + e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/a'] assert e_name == 'spam' assert e_inv == {'py:module': item} assert inventories.named_inventory == {'spam': {'py:module': item}} @@ -273,7 +273,7 @@ def test_load_mappings_cache_update(tmp_path): inventories = InventoryAdapter(app2.env) # check that the URLs were changed accordingly assert list(inventories.cache) == ['http://localhost:9341/new'] - e_name, e_time, e_inv = inventories.cache['http://localhost:9341/new'] + e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/new'] assert e_name == 'spam' assert e_inv == {'py:module': item} assert inventories.named_inventory == {'spam': {'py:module': item}} @@ -310,7 +310,7 @@ 
def test_load_mappings_cache_revert_update(tmp_path): inventories = InventoryAdapter(app3.env) # check that the URLs were changed accordingly assert list(inventories.cache) == ['http://localhost:9341/old'] - e_name, e_time, e_inv = inventories.cache['http://localhost:9341/old'] + e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/old'] assert e_name == 'spam' assert e_inv == {'py:module': item} assert inventories.named_inventory == {'spam': {'py:module': item}} From 5c4b29ee324115f2963245c55466a658cfafeefe Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Wed, 19 Mar 2025 19:24:50 +0000 Subject: [PATCH 002/435] Bump Ruff to 0.10.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 59312974f8b..fbee41063fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.9.10", + "ruff==0.10.0", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", From 97d8d6bd24b0824393abaae2a8420e6b28f69aa9 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Wed, 19 Mar 2025 19:27:40 +0000 Subject: [PATCH 003/435] Bump Ruff to 0.11.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fbee41063fe..5d480cf3f76 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.10.0", + "ruff==0.11.0", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", From e01e42f5fc738815b8499c4ede30c6caf130f0a4 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Wed, 19 Mar 2025 20:11:35 +0000 Subject: [PATCH 004/435] Fix ``INVALID_BUILTIN_CLASSES`` test for Python 3.14.0a6+ --- tests/test_util/test_util_typing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 
deletions(-) diff --git a/tests/test_util/test_util_typing.py b/tests/test_util/test_util_typing.py index 35ee240f7b8..8a561c378ed 100644 --- a/tests/test_util/test_util_typing.py +++ b/tests/test_util/test_util_typing.py @@ -205,7 +205,7 @@ def test_is_invalid_builtin_class() -> None: zipfile.Path, zipfile.CompleteDirs, ) - if sys.version_info[:2] >= (3, 13): + if sys.version_info[:2] == (3, 13): invalid_types += ( # pathlib Path, @@ -217,7 +217,7 @@ def test_is_invalid_builtin_class() -> None: ) invalid_names = {(cls.__module__, cls.__qualname__) for cls in invalid_types} - if sys.version_info[:2] < (3, 13): + if sys.version_info[:2] != (3, 13): invalid_names |= { ('pathlib._local', 'Path'), ('pathlib._local', 'PosixPath'), @@ -231,7 +231,7 @@ def test_is_invalid_builtin_class() -> None: ('zipfile._path', 'Path'), ('zipfile._path', 'CompleteDirs'), } - assert _INVALID_BUILTIN_CLASSES.keys() == invalid_names + assert set(_INVALID_BUILTIN_CLASSES) == invalid_names def test_restify_type_hints_containers(): From 05838f0e29eb71ec18dfb9c220a21d15e89e4071 Mon Sep 17 00:00:00 2001 From: Victor Wheeler Date: Wed, 19 Mar 2025 19:05:14 -0600 Subject: [PATCH 005/435] Add an introduction to HTML templating (#13430) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- AUTHORS.rst | 1 + doc/development/html_themes/templating.rst | 26 ++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/AUTHORS.rst b/AUTHORS.rst index ff92ab7eab7..eed59b68cbd 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -107,6 +107,7 @@ Contributors * Thomas Lamb -- linkcheck builder * Thomas Waldmann -- apidoc module fixes * Tim Hoffmann -- theme improvements +* Victor Wheeler -- documentation improvements * Vince Salvino -- JavaScript search improvements * Will Maier -- directory HTML builder * Zac Hatfield-Dodds -- doctest reporting improvements, intersphinx performance diff --git a/doc/development/html_themes/templating.rst 
b/doc/development/html_themes/templating.rst index e7c1d11f453..77b43882f86 100644 --- a/doc/development/html_themes/templating.rst +++ b/doc/development/html_themes/templating.rst @@ -6,6 +6,32 @@ Templating ========== +What Is Templating? +------------------- + +Templating is a method of generating HTML pages by combining static templates +with variable data. +The template files contain the static parts of the desired HTML output +and include special syntax describing how variable content will be inserted. +For example, this can be used to insert the current date in the footer of each page, +or to surround the main content of the document with a scaffold of HTML for layout +and formatting purposes. +Doing so only requires an understanding of HTML and the templating syntax. +Knowledge of Python can be helpful, but is not required. + +Templating uses an inheritance mechanism which allows child template files +(e.g. in a theme) to override as much (or as little) of their 'parents' as desired. +Likewise, content authors can use their own local templates to override as much (or +as little) of the theme templates as desired. + +The result is that the Sphinx core, without needing to be changed, provides basic +HTML generation, independent of the structure and appearance of the final output, +while granting a great deal of flexibility to theme and content authors. + + +Sphinx Templating +----------------- + Sphinx uses the `Jinja `_ templating engine for its HTML templates. Jinja is a text-based engine, inspired by Django templates, so anyone having used Django will already be familiar with it.
It From 9d5c22e59365f5e354d57a6f8bfc9edf2dcc872a Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:19:35 +0000 Subject: [PATCH 006/435] Improve the error message for themes using the removed 'style' field (#13443) --- sphinx/builders/html/__init__.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index 5e6acdeaf9d..1ba026a61d0 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -17,6 +17,7 @@ from urllib.parse import quote import docutils.readers.doctree +import jinja2.exceptions from docutils import nodes from docutils.core import Publisher from docutils.frontend import OptionParser @@ -1221,6 +1222,19 @@ def js_tag(js: _JavaScript | str) -> str: ) return except Exception as exc: + if ( + isinstance(exc, jinja2.exceptions.UndefinedError) + and exc.message == "'style' is undefined" + ): + msg = __( + "The '%s' theme does not support this version of Sphinx, " + "because it uses the 'style' field in HTML templates, " + 'which was deprecated in Sphinx 5.1 and removed in Sphinx 7.0. ' + "The theme must be updated to use the 'styles' field instead. 
" + 'See https://www.sphinx-doc.org/en/master/development/html_themes/templating.html#styles' + ) + raise ThemeError(msg % self.config.html_theme) from None + msg = __('An error happened in rendering the page %s.\nReason: %r') % ( pagename, exc, From e6e7f99ca05efce9de421d6b259548b586275493 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:47:49 +0000 Subject: [PATCH 007/435] Bump pyright to 1.1.397 (#13440) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5d480cf3f76..6a6e2027887 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ lint = [ "types-Pygments==2.19.0.20250219", "types-requests==2.32.0.20250301", # align with requests "types-urllib3==1.26.25.14", - "pyright==1.1.396", + "pyright==1.1.397", "pytest>=8.0", "pypi-attestations==0.0.21", "betterproto==2.0.0b6", From 35b8bf0cab1eb4bc830a149e3e893262766ae7a0 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 20 Mar 2025 15:48:38 +0000 Subject: [PATCH 008/435] Remove mypy overrides for ``tests.test_addnodes`` (#13420) --- pyproject.toml | 1 - tests/test_addnodes.py | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6a6e2027887..4dc02fa7811 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -203,7 +203,6 @@ ignore_missing_imports = true [[tool.mypy.overrides]] module = [ # tests/ - "tests.test_addnodes", "tests.test_application", "tests.test_events", "tests.test_highlighting", diff --git a/tests/test_addnodes.py b/tests/test_addnodes.py index b3f77ad2bb9..8cac53b2828 100644 --- a/tests/test_addnodes.py +++ b/tests/test_addnodes.py @@ -20,7 +20,9 @@ def sig_elements() -> Iterator[set[type[addnodes.desc_sig_element]]]: addnodes.SIG_ELEMENTS = original # restore the previous value -def test_desc_sig_element_nodes(sig_elements): +def test_desc_sig_element_nodes( + sig_elements: 
set[type[addnodes.desc_sig_element]], +) -> None: """Test the registration of ``desc_sig_element`` subclasses.""" # expected desc_sig_* node classes (must be declared *after* reloading # the module since otherwise the objects are not the correct ones) From 62594f0fb2297cd60cc1feb6bdc9659ad0d60d73 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 20 Mar 2025 15:49:51 +0000 Subject: [PATCH 009/435] Remove mypy overrides for ``tests.test_events`` (#13422) --- pyproject.toml | 1 - tests/test_events.py | 10 ++++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4dc02fa7811..a698e02bdf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -204,7 +204,6 @@ ignore_missing_imports = true module = [ # tests/ "tests.test_application", - "tests.test_events", "tests.test_highlighting", "tests.test_project", "tests.test_versioning", diff --git a/tests/test_events.py b/tests/test_events.py index 56f76511dcb..412116c9f4b 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -3,12 +3,18 @@ from __future__ import annotations from types import SimpleNamespace +from typing import TYPE_CHECKING import pytest from sphinx.errors import ExtensionError from sphinx.events import EventManager +if TYPE_CHECKING: + from typing import NoReturn + + from sphinx.application import Sphinx + def test_event_priority() -> None: result = [] @@ -27,7 +33,7 @@ def test_event_priority() -> None: def test_event_allowed_exceptions() -> None: - def raise_error(app): + def raise_error(app: Sphinx) -> NoReturn: raise RuntimeError app = SimpleNamespace(pdb=False) # pass a dummy object as an app @@ -44,7 +50,7 @@ def raise_error(app): def test_event_pdb() -> None: - def raise_error(app): + def raise_error(app: Sphinx) -> NoReturn: raise RuntimeError app = SimpleNamespace(pdb=True) # pass a dummy object as an app From 21d93b585ac854458c82afcff84e3dd0805e7b68 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:53:45 +0000 Subject: [PATCH 010/435] Bump types-requests to 2.32.0.20250306 (#13416) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a698e02bdf5..442f29b2f4d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ lint = [ "types-docutils==0.21.0.20241128", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250219", - "types-requests==2.32.0.20250301", # align with requests + "types-requests==2.32.0.20250306", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.397", "pytest>=8.0", From 5bd70fc5d2b84f49c21569967f65eb1ff4225ab1 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:58:13 +0000 Subject: [PATCH 011/435] Bump pypi-attestations to 0.0.22 (#13444) --- .github/workflows/create-release.yml | 2 +- pyproject.toml | 2 +- utils/convert_attestations.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 6f3ebf264a8..7d854e764b8 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -44,7 +44,7 @@ jobs: run: | uv pip install build "twine>=5.1" # resolution fails without betterproto - uv pip install pypi-attestations==0.0.21 betterproto==2.0.0b6 + uv pip install pypi-attestations==0.0.22 betterproto==2.0.0b6 - name: Build distribution run: python -m build diff --git a/pyproject.toml b/pyproject.toml index 442f29b2f4d..13e3e186ef9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,7 +104,7 @@ lint = [ "types-urllib3==1.26.25.14", "pyright==1.1.397", "pytest>=8.0", - "pypi-attestations==0.0.21", + "pypi-attestations==0.0.22", "betterproto==2.0.0b6", ] test = [ diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py index 7e227c21358..0d013bf97ce 100644 --- 
a/utils/convert_attestations.py +++ b/utils/convert_attestations.py @@ -7,7 +7,7 @@ # /// script # requires-python = ">=3.11" # dependencies = [ -# "pypi-attestations==0.0.21", +# "pypi-attestations==0.0.22", # "betterproto==2.0.0b6", # ] # /// From 9191c34ebf67f44245788a068079ed734c4ae8b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 15:59:49 +0000 Subject: [PATCH 012/435] Bump types-pygments to 2.19.0.20250305 (#13412) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 13e3e186ef9..d63af5241b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ lint = [ "types-defusedxml==0.7.0.20240218", "types-docutils==0.21.0.20241128", "types-Pillow==10.2.0.20240822", - "types-Pygments==2.19.0.20250219", + "types-Pygments==2.19.0.20250305", "types-requests==2.32.0.20250306", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.397", From e979b1faaf9b878eba29d9f2e1bef8c57aa97034 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Thu, 20 Mar 2025 18:28:11 +0000 Subject: [PATCH 013/435] Use the PEP 735 ``[dependency-groups]`` table (#13073) --- .github/workflows/builddoc.yml | 2 +- .github/workflows/create-release.yml | 5 +--- .github/workflows/lint.yml | 8 ++--- .github/workflows/main.yml | 14 ++++----- .github/workflows/transifex.yml | 4 +-- pyproject.toml | 45 ++++++++++++++++++++++++++++ tox.ini | 16 +++++----- 7 files changed, 68 insertions(+), 26 deletions(-) diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml index 7f8471deecb..8955cf2988a 100644 --- a/.github/workflows/builddoc.yml +++ b/.github/workflows/builddoc.yml @@ -36,7 +36,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[docs] + run: uv pip install . 
--group docs - name: Render the documentation run: > sphinx-build diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 7d854e764b8..5d07fcbd18d 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -41,10 +41,7 @@ jobs: enable-cache: false - name: Install build dependencies (pypa/build, twine) - run: | - uv pip install build "twine>=5.1" - # resolution fails without betterproto - uv pip install pypi-attestations==0.0.22 betterproto==2.0.0b6 + run: uv pip install --group package - name: Build distribution run: python -m build diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index a3b5cf7ae52..7e72b6f6fd0 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -55,7 +55,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install ".[lint,test]" + run: uv pip install -r pyproject.toml --group package --group test --group types - name: Type check with mypy run: mypy @@ -76,7 +76,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install ".[lint,test]" + run: uv pip install -r pyproject.toml --group package --group test --group types - name: Type check with pyright run: pyright @@ -97,7 +97,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install --upgrade sphinx-lint + run: uv pip install --group lint - name: Lint documentation with sphinx-lint run: make doclinter @@ -118,7 +118,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install --upgrade twine build + run: uv pip install --group package - name: Lint with twine run: | python -m build . 
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1758254c633..7e7bdb6dab2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -67,7 +67,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] + run: uv pip install . --group test - name: Install Docutils ${{ matrix.docutils }} run: uv pip install --upgrade "docutils~=${{ matrix.docutils }}.0" - name: Test with pytest @@ -198,7 +198,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] + run: uv pip install . --group test - name: Test with pytest run: python -m pytest -vv --durations 25 env: @@ -227,7 +227,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] + run: uv pip install . --group test - name: Test with pytest run: python -m pytest -vv --durations 25 env: @@ -262,7 +262,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] + run: uv pip install . --group test - name: Install Docutils' HEAD run: uv pip install "docutils @ git+https://repo.or.cz/docutils.git#subdirectory=docutils" - name: Test with pytest @@ -296,7 +296,7 @@ jobs: enable-cache: false - name: Install dependencies run: | - uv pip install .[test] --resolution lowest-direct + uv pip install . --group test --resolution lowest-direct uv pip install alabaster==1.0.0 - name: Test with pytest run: python -m pytest -n logical --dist=worksteal -vv --durations 25 @@ -326,7 +326,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] + run: uv pip install . --group test - name: Test with pytest run: python -m pytest -vv --durations 25 env: @@ -357,7 +357,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install .[test] pytest-cov + run: uv pip install . 
--group test pytest-cov - name: Test with pytest run: python -m pytest -vv --cov . --cov-append --cov-config pyproject.toml env: diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml index 09437cb7ece..56246266515 100644 --- a/.github/workflows/transifex.yml +++ b/.github/workflows/transifex.yml @@ -41,7 +41,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install --upgrade babel jinja2 + run: uv pip install --group translations - name: Extract translations from source code run: python utils/babel_runner.py extract - name: Push translations to transifex.com @@ -77,7 +77,7 @@ jobs: version: latest enable-cache: false - name: Install dependencies - run: uv pip install --upgrade babel jinja2 + run: uv pip install --group translations - name: Extract translations from source code run: python utils/babel_runner.py extract - name: Pull translations from transifex.com diff --git a/pyproject.toml b/pyproject.toml index d63af5241b4..dd50b0aa6ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -130,6 +130,48 @@ sphinx-quickstart = "sphinx.cmd.quickstart:main" sphinx-apidoc = "sphinx.ext.apidoc:main" sphinx-autogen = "sphinx.ext.autosummary.generate:main" +[dependency-groups] +docs = [ + "sphinxcontrib-websupport", +] +lint = [ + "ruff==0.11.0", + "sphinx-lint>=0.9", +] +package = [ + "betterproto==2.0.0b6", # resolution fails without betterproto + "build", + "pypi-attestations==0.0.22", + "twine>=5.1", +] +test = [ + "pytest>=8.0", + "pytest-xdist[psutil]>=3.4", + "cython>=3.0", # for Cython compilation + "defusedxml>=0.7.1", # for secure XML/HTML parsing + "setuptools>=70.0", # for Cython compilation + "typing_extensions>=4.9", # for typing_extensions.Unpack +] +translations = [ + "babel>=2.13", + "Jinja2>=3.1", +] +types = [ + "mypy==1.15.0", + "pyright==1.1.397", + { include-group = "type-stubs" }, +] +type-stubs = [ + # align with versions used elsewhere + "types-colorama==0.4.15.20240311", + 
"types-defusedxml==0.7.0.20240218", + "types-docutils==0.21.0.20241128", + "types-Pillow==10.2.0.20240822", + "types-Pygments==2.19.0.20250305", + "types-requests==2.32.0.20250306", + "types-urllib3==1.26.25.14", +] + [tool.flit.module] name = "sphinx" @@ -420,3 +462,6 @@ reportUnusedFunction = "none" reportUnusedImport = "none" reportUnusedVariable = "none" reportWildcardImportFromLibrary = "none" + +[tool.uv] +default-groups = "all" diff --git a/tox.ini b/tox.ini index 674013fdc08..23b239c7ffc 100644 --- a/tox.ini +++ b/tox.ini @@ -20,7 +20,7 @@ passenv = READTHEDOCS description = py{311,312,313,314}: Run unit tests against {envname}. -extras = +dependency_groups = test setenv = PYTHONWARNINGS = error @@ -31,8 +31,9 @@ commands= [testenv:lint] description = Run linters. -extras = +dependency_groups = lint + types # If you update any of these commands, don't forget to update the equivalent # GitHub Workflow step commands = @@ -43,7 +44,7 @@ commands = [testenv:docs] description = Build documentation. -extras = +dependency_groups = docs commands = python -c "import shutil; shutil.rmtree('./build/sphinx', ignore_errors=True) if '{env:CLEAN:}' else None" @@ -52,7 +53,7 @@ commands = [testenv:docs-live] description = Build documentation. -extras = +dependency_groups = docs deps = sphinx-autobuild @@ -70,7 +71,7 @@ commands = [testenv:ruff] description = Run ruff formatting and linting. -extras = +dependency_groups = lint commands = ruff format . @@ -79,8 +80,7 @@ commands = [testenv:mypy] description = Run mypy type checking. 
-extras = - lint - test +dependency_groups = + types commands = mypy {posargs} From fc8054cc30042eff2461a26f5566a23e7cd41221 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Thu, 20 Mar 2025 20:49:07 +0200 Subject: [PATCH 014/435] Fall back to 'en' if `format_date` is called with a falsy value (#13408) --- sphinx/util/i18n.py | 8 ++++++++ tests/test_util/test_util_i18n.py | 7 ------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py index 6cc4b31766e..05542876fc3 100644 --- a/sphinx/util/i18n.py +++ b/sphinx/util/i18n.py @@ -228,6 +228,14 @@ def babel_format_date( if not hasattr(date, 'tzinfo'): formatter = babel.dates.format_date + if not locale: + # Babel would not accept a falsy locale + # (or would try to fall back to the LC_TIME + # locale, which would be not what was requested), + # so we can just short-cut to English, as we + # would for the `"fallback to English"` case. + locale = 'en' + try: return formatter(date, format, locale=locale) except (ValueError, babel.core.UnknownLocaleError): diff --git a/tests/test_util/test_util_i18n.py b/tests/test_util/test_util_i18n.py index 4326b4382dd..31e774b7932 100644 --- a/tests/test_util/test_util_i18n.py +++ b/tests/test_util/test_util_i18n.py @@ -4,12 +4,10 @@ import datetime import os -import sys import time from pathlib import Path from typing import TYPE_CHECKING -import babel import pytest from babel.messages.mofile import read_mo @@ -60,11 +58,6 @@ def test_catalog_write_mo(tmp_path): assert read_mo(f) is not None -# https://github.com/python-babel/babel/issues/1183 -@pytest.mark.xfail( - sys.platform == 'win32' and babel.__version__ == '2.17.0', - reason='Windows tests fail with Babel 2.17', -) def test_format_date(): date = datetime.date(2016, 2, 7) From 7d4528b17a60f12dd13857913541b9386a4066ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 16:18:35 +0000 Subject: [PATCH 
015/435] Bump Ruff to 0.11.1 (#13447) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dd50b0aa6ef..9829fb8bc8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.0", + "ruff==0.11.1", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.0", + "ruff==0.11.1", "sphinx-lint>=0.9", ] package = [ From d066c2be731df5f1ffed5d657c696b57f39a4f39 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Fri, 21 Mar 2025 16:18:52 +0000 Subject: [PATCH 016/435] Remove mypy overrides for ``tests.test_application`` (#13421) --- pyproject.toml | 1 - tests/test_application.py | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9829fb8bc8c..6151d3b45c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -245,7 +245,6 @@ ignore_missing_imports = true [[tool.mypy.overrides]] module = [ # tests/ - "tests.test_application", "tests.test_highlighting", "tests.test_project", "tests.test_versioning", diff --git a/tests/test_application.py b/tests/test_application.py index b2bd7bbc66c..73c3f3556ca 100644 --- a/tests/test_application.py +++ b/tests/test_application.py @@ -20,6 +20,7 @@ if TYPE_CHECKING: import os + from typing import Any def test_instantiation( @@ -50,7 +51,7 @@ def test_instantiation( @pytest.mark.sphinx('html', testroot='root') def test_events(app: SphinxTestApp) -> None: - def empty(): + def empty() -> None: pass with pytest.raises(ExtensionError) as excinfo: @@ -62,7 +63,7 @@ def empty(): app.add_event('my_event') assert "Event 'my_event' already present" in str(excinfo.value) - def mock_callback(a_app, *args): + def mock_callback(a_app: SphinxTestApp, *args: Any) -> str: assert a_app is app assert emit_args == args return 'ret' From 
10beeeb10a809a52885fd016f47aa6f3b51ad2f8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Mar 2025 19:24:31 +0000 Subject: [PATCH 017/435] Bump Ruff to 0.11.2 (#13450) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6151d3b45c6..c9d902d6ed6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.1", + "ruff==0.11.2", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.1", + "ruff==0.11.2", "sphinx-lint>=0.9", ] package = [ From 8049b4bd2d98d288750bb1b552496b14b7434980 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 24 Mar 2025 19:24:58 +0000 Subject: [PATCH 018/435] Allow ignoring type stub files for native modules (#13446) --- sphinx/ext/autodoc/importer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py index ea5b47e41e6..ca9c7ca7778 100644 --- a/sphinx/ext/autodoc/importer.py +++ b/sphinx/ext/autodoc/importer.py @@ -164,6 +164,7 @@ def import_module(modname: str, try_reload: bool = False) -> Any: if modname in sys.modules: return sys.modules[modname] + skip_pyi = bool(os.getenv('SPHINX_AUTODOC_IGNORE_NATIVE_MODULE_TYPE_STUBS', '')) original_module_names = frozenset(sys.modules) try: spec = find_spec(modname) @@ -171,7 +172,7 @@ def import_module(modname: str, try_reload: bool = False) -> Any: msg = f'No module named {modname!r}' raise ModuleNotFoundError(msg, name=modname) # NoQA: TRY301 spec, pyi_path = _find_type_stub_spec(spec, modname) - if pyi_path is None: + if skip_pyi or pyi_path is None: module = importlib.import_module(modname) else: if spec.loader is None: From c0c041349302a783d160361d5bc0449be70f406a Mon 
Sep 17 00:00:00 2001 From: Adam Dangoor Date: Mon, 24 Mar 2025 19:28:03 +0000 Subject: [PATCH 019/435] Remove mypy overrides for ``tests.test_util.test_util_matching`` (#13449) --- pyproject.toml | 1 - tests/test_util/test_util_matching.py | 23 ++++++++++++++--------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c9d902d6ed6..d841d10747d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -292,7 +292,6 @@ module = [ "tests.test_util.test_util_docutils", "tests.test_util.test_util_images", "tests.test_util.test_util_inventory", - "tests.test_util.test_util_matching", # tests/test_writers "tests.test_writers.test_docutilsconf", ] diff --git a/tests/test_util/test_util_matching.py b/tests/test_util/test_util_matching.py index 0c17280aa6e..a2ad1ff5ba6 100644 --- a/tests/test_util/test_util_matching.py +++ b/tests/test_util/test_util_matching.py @@ -2,8 +2,13 @@ from __future__ import annotations +from typing import TYPE_CHECKING + from sphinx.util.matching import Matcher, compile_matchers, get_matching_files +if TYPE_CHECKING: + from pathlib import Path + def test_compile_matchers() -> None: # exact matching @@ -85,7 +90,7 @@ def test_Matcher() -> None: assert matcher('subdir/world.py') -def test_get_matching_files_all(rootdir): +def test_get_matching_files_all(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root') assert sorted(files) == [ 'Makefile', @@ -133,7 +138,7 @@ def test_get_matching_files_all(rootdir): ] -def test_get_matching_files_all_exclude_single(rootdir): +def test_get_matching_files_all_exclude_single(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', exclude_patterns=['**.html']) assert sorted(files) == [ 'Makefile', @@ -178,7 +183,7 @@ def test_get_matching_files_all_exclude_single(rootdir): ] -def test_get_matching_files_all_exclude_multiple(rootdir): +def test_get_matching_files_all_exclude_multiple(rootdir: Path) -> None: files = get_matching_files( 
rootdir / 'test-root', exclude_patterns=['**.html', '**.inc'] ) @@ -218,7 +223,7 @@ def test_get_matching_files_all_exclude_multiple(rootdir): ] -def test_get_matching_files_all_exclude_nonexistent(rootdir): +def test_get_matching_files_all_exclude_nonexistent(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', exclude_patterns=['halibut/**']) assert sorted(files) == [ 'Makefile', @@ -266,7 +271,7 @@ def test_get_matching_files_all_exclude_nonexistent(rootdir): ] -def test_get_matching_files_all_include_single(rootdir): +def test_get_matching_files_all_include_single(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', include_patterns=['subdir/**']) assert sorted(files) == [ 'subdir/excluded.txt', @@ -278,7 +283,7 @@ def test_get_matching_files_all_include_single(rootdir): ] -def test_get_matching_files_all_include_multiple(rootdir): +def test_get_matching_files_all_include_multiple(rootdir: Path) -> None: files = get_matching_files( rootdir / 'test-root', include_patterns=['special/**', 'subdir/**'] ) @@ -294,12 +299,12 @@ def test_get_matching_files_all_include_multiple(rootdir): ] -def test_get_matching_files_all_include_nonexistent(rootdir): +def test_get_matching_files_all_include_nonexistent(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', include_patterns=['halibut/**']) assert sorted(files) == [] -def test_get_matching_files_all_include_prefix(rootdir): +def test_get_matching_files_all_include_prefix(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', include_patterns=['autodoc*']) assert sorted(files) == [ 'autodoc.txt', @@ -307,7 +312,7 @@ def test_get_matching_files_all_include_prefix(rootdir): ] -def test_get_matching_files_all_include_question_mark(rootdir): +def test_get_matching_files_all_include_question_mark(rootdir: Path) -> None: files = get_matching_files(rootdir / 'test-root', include_patterns=['img.???']) assert sorted(files) == [ 'img.gif', From 
d9b20d055308f12c9c3db2df2272000d1ac0e795 Mon Sep 17 00:00:00 2001 From: Alicia Garcia-Raboso Date: Mon, 24 Mar 2025 20:32:35 +0100 Subject: [PATCH 020/435] Correctly parse and cross-reference unpacked type annotations (#13369) --- CHANGES.rst | 3 +++ sphinx/domains/python/_annotations.py | 4 ++++ sphinx/pycode/ast.py | 3 +++ tests/test_domains/test_domain_py.py | 22 ++++++++++++++++++++++ tests/test_pycode/test_pycode_ast.py | 1 + 5 files changed, 33 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index c257b3b11b1..b37ce847c49 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -16,5 +16,8 @@ Features added Bugs fixed ---------- +* #13369: Correctly parse and cross-reference unpacked type annotations. + Patch by Alicia Garcia-Raboso. + Testing ------- diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py index 823aac01316..29e47fa7151 100644 --- a/sphinx/domains/python/_annotations.py +++ b/sphinx/domains/python/_annotations.py @@ -124,6 +124,10 @@ def unparse(node: ast.AST) -> list[Node]: return [nodes.Text(repr(node.value))] if isinstance(node, ast.Expr): return unparse(node.value) + if isinstance(node, ast.Starred): + result = [addnodes.desc_sig_operator('', '*')] + result.extend(unparse(node.value)) + return result if isinstance(node, ast.Invert): return [addnodes.desc_sig_punctuation('', '~')] if isinstance(node, ast.USub): diff --git a/sphinx/pycode/ast.py b/sphinx/pycode/ast.py index b1521595b49..640864e467a 100644 --- a/sphinx/pycode/ast.py +++ b/sphinx/pycode/ast.py @@ -202,5 +202,8 @@ def visit_Tuple(self, node: ast.Tuple) -> str: else: return '(' + ', '.join(self.visit(e) for e in node.elts) + ')' + def visit_Starred(self, node: ast.Starred) -> str: + return f'*{self.visit(node.value)}' + def generic_visit(self, node: ast.AST) -> NoReturn: raise NotImplementedError('Unable to parse %s object' % type(node).__name__) diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py index 
b3c246297ad..26c79ffd8fb 100644 --- a/tests/test_domains/test_domain_py.py +++ b/tests/test_domains/test_domain_py.py @@ -508,6 +508,28 @@ def test_parse_annotation(app): ), ) + doctree = _parse_annotation('*tuple[str, int]', app.env) + assert_node( + doctree, + ( + [desc_sig_operator, '*'], + [pending_xref, 'tuple'], + [desc_sig_punctuation, '['], + [pending_xref, 'str'], + [desc_sig_punctuation, ','], + desc_sig_space, + [pending_xref, 'int'], + [desc_sig_punctuation, ']'], + ), + ) + assert_node( + doctree[1], + pending_xref, + refdomain='py', + reftype='class', + reftarget='tuple', + ) + @pytest.mark.sphinx('html', testroot='_blank') def test_parse_annotation_suppress(app): diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py index 6ebc1a91099..409e5806d1b 100644 --- a/tests/test_pycode/test_pycode_ast.py +++ b/tests/test_pycode/test_pycode_ast.py @@ -62,6 +62,7 @@ 'x[:, np.newaxis, :, :]'), # Index, Subscript, numpy extended syntax ('y[:, 1:3][np.array([0, 2, 4]), :]', 'y[:, 1:3][np.array([0, 2, 4]), :]'), # Index, 2x Subscript, numpy extended syntax + ('*tuple[str, int]', '*tuple[str, int]'), # Starred ], ) # fmt: skip def test_unparse(source, expected): From 5831b3eeafe5e78281d394359545da378859561d Mon Sep 17 00:00:00 2001 From: Till Hoffmann Date: Mon, 24 Mar 2025 15:44:16 -0400 Subject: [PATCH 021/435] Add ``doctest_fail_fast`` option to exit after the first failed test (#13332) Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> --- AUTHORS.rst | 1 + CHANGES.rst | 4 ++ doc/usage/extensions/doctest.rst | 8 ++++ sphinx/ext/doctest.py | 45 ++++++++++++++----- .../roots/test-ext-doctest-fail-fast/conf.py | 11 +++++ .../test-ext-doctest-fail-fast/fail-fast.txt | 11 +++++ tests/test_extensions/test_ext_doctest.py | 20 +++++++++ 7 files changed, 88 insertions(+), 12 deletions(-) create mode 100644 tests/roots/test-ext-doctest-fail-fast/conf.py create mode 100644 
tests/roots/test-ext-doctest-fail-fast/fail-fast.txt diff --git a/AUTHORS.rst b/AUTHORS.rst index eed59b68cbd..5ff09219c02 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -106,6 +106,7 @@ Contributors * Taku Shimizu -- epub3 builder * Thomas Lamb -- linkcheck builder * Thomas Waldmann -- apidoc module fixes +* Till Hoffmann -- doctest option to exit after first failed test * Tim Hoffmann -- theme improvements * Victor Wheeler -- documentation improvements * Vince Salvino -- JavaScript search improvements diff --git a/CHANGES.rst b/CHANGES.rst index b37ce847c49..82396f2a33e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -13,6 +13,10 @@ Deprecated Features added -------------- +* #13332: Add :confval:`doctest_fail_fast` option to exit after the first failed + test. + Patch by Till Hoffmann. + Bugs fixed ---------- diff --git a/doc/usage/extensions/doctest.rst b/doc/usage/extensions/doctest.rst index 60c67827967..10e8f67dfe2 100644 --- a/doc/usage/extensions/doctest.rst +++ b/doc/usage/extensions/doctest.rst @@ -452,3 +452,11 @@ The doctest extension uses the following configuration values: Also, removal of ```` and ``# doctest:`` options only works in :rst:dir:`doctest` blocks, though you may set :confval:`trim_doctest_flags` to achieve that in all code blocks with Python console content. + +.. confval:: doctest_fail_fast + :type: :code-py:`bool` + :default: :code-py:`False` + + Exit when the first failure is encountered. + + .. 
versionadded:: 8.3 diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py index 105c50a6923..343534f10ce 100644 --- a/sphinx/ext/doctest.py +++ b/sphinx/ext/doctest.py @@ -358,10 +358,17 @@ def finish(self) -> None: def s(v: int) -> str: return 's' if v != 1 else '' + header = 'Doctest summary' + if self.total_failures or self.setup_failures or self.cleanup_failures: + self.app.statuscode = 1 + if self.config.doctest_fail_fast: + header = f'{header} (exiting after first failed test)' + underline = '=' * len(header) + self._out( f""" -Doctest summary -=============== +{header} +{underline} {self.total_tries:5} test{s(self.total_tries)} {self.total_failures:5} failure{s(self.total_failures)} in tests {self.setup_failures:5} failure{s(self.setup_failures)} in setup code @@ -370,15 +377,14 @@ def s(v: int) -> str: ) self.outfile.close() - if self.total_failures or self.setup_failures or self.cleanup_failures: - self.app.statuscode = 1 - def write_documents(self, docnames: Set[str]) -> None: logger.info(bold('running tests...')) for docname in sorted(docnames): # no need to resolve the doctree doctree = self.env.get_doctree(docname) - self.test_doc(docname, doctree) + success = self.test_doc(docname, doctree) + if not success and self.config.doctest_fail_fast: + break def get_filename_for_node(self, node: Node, docname: str) -> str: """Try to get the file which actually contains the doctest, not the @@ -419,7 +425,7 @@ def skipped(self, node: Element) -> bool: exec(self.config.doctest_global_cleanup, context) # NoQA: S102 return should_skip - def test_doc(self, docname: str, doctree: Node) -> None: + def test_doc(self, docname: str, doctree: Node) -> bool: groups: dict[str, TestGroup] = {} add_to_all_groups = [] self.setup_runner = SphinxDocTestRunner(verbose=False, optionflags=self.opt) @@ -496,13 +502,17 @@ def condition(node: Node) -> bool: for group in groups.values(): group.add_code(code) if not groups: - return + return True show_successes = 
self.config.doctest_show_successes if show_successes: self._out(f'\nDocument: {docname}\n----------{"-" * len(docname)}\n') + success = True for group in groups.values(): - self.test_group(group) + if not self.test_group(group): + success = False + if self.config.doctest_fail_fast: + break # Separately count results from setup code res_f, res_t = self.setup_runner.summarize(self._out, verbose=False) self.setup_failures += res_f @@ -517,13 +527,14 @@ def condition(node: Node) -> bool: ) self.cleanup_failures += res_f self.cleanup_tries += res_t + return success def compile( self, code: str, name: str, type: str, flags: Any, dont_inherit: bool ) -> Any: return compile(code, name, self.type, flags, dont_inherit) - def test_group(self, group: TestGroup) -> None: + def test_group(self, group: TestGroup) -> bool: ns: dict[str, Any] = {} def run_setup_cleanup( @@ -553,9 +564,10 @@ def run_setup_cleanup( # run the setup code if not run_setup_cleanup(self.setup_runner, group.setup, 'setup'): # if setup failed, don't run the group - return + return False # run the tests + success = True for code in group.tests: if len(code) == 1: # ordinary doctests (code/output interleaved) @@ -608,11 +620,19 @@ def run_setup_cleanup( self.type = 'exec' # multiple statements again # DocTest.__init__ copies the globs namespace, which we don't want test.globs = ns + old_f = self.test_runner.failures # also don't clear the globs namespace after running the doctest self.test_runner.run(test, out=self._warn_out, clear_globs=False) + if self.test_runner.failures > old_f: + success = False + if self.config.doctest_fail_fast: + break # run the cleanup - run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup') + if not run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup'): + return False + + return success def setup(app: Sphinx) -> ExtensionMetadata: @@ -638,6 +658,7 @@ def setup(app: Sphinx) -> ExtensionMetadata: '', types=frozenset({int}), ) + 
app.add_config_value('doctest_fail_fast', False, '', types=frozenset({bool})) return { 'version': sphinx.__display_version__, 'parallel_read_safe': True, diff --git a/tests/roots/test-ext-doctest-fail-fast/conf.py b/tests/roots/test-ext-doctest-fail-fast/conf.py new file mode 100644 index 00000000000..227afbb2c95 --- /dev/null +++ b/tests/roots/test-ext-doctest-fail-fast/conf.py @@ -0,0 +1,11 @@ +extensions = ['sphinx.ext.doctest'] + +project = 'test project for doctest' +root_doc = 'fail-fast' +source_suffix = { + '.txt': 'restructuredtext', +} +exclude_patterns = ['_build'] + +# Set in tests. +# doctest_fail_fast = ... diff --git a/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt new file mode 100644 index 00000000000..70a05af487b --- /dev/null +++ b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt @@ -0,0 +1,11 @@ +Testing fast failure in the doctest extension +============================================= + +>>> 1 + 1 +2 + +>>> 1 + 1 +3 + +>>> 1 + 1 +3 diff --git a/tests/test_extensions/test_ext_doctest.py b/tests/test_extensions/test_ext_doctest.py index cb540fda7ec..810f8244ba8 100644 --- a/tests/test_extensions/test_ext_doctest.py +++ b/tests/test_extensions/test_ext_doctest.py @@ -147,3 +147,23 @@ def test_reporting_with_autodoc(app, capfd): assert 'File "dir/bar.py", line ?, in default' in failures assert 'File "foo.py", line ?, in default' in failures assert 'File "index.rst", line 4, in default' in failures + + +@pytest.mark.sphinx('doctest', testroot='ext-doctest-fail-fast') +@pytest.mark.parametrize('fail_fast', [False, True, None]) +def test_fail_fast(app, fail_fast, capsys): + if fail_fast is not None: + app.config.doctest_fail_fast = fail_fast + # Patch builder to get a copy of the output + written = [] + app.builder._out = written.append + app.build(force_all=True) + assert app.statuscode + + written = ''.join(written) + if fail_fast: + assert 'Doctest summary (exiting after first 
failed test)' in written + assert '1 failure in tests' in written + else: + assert 'Doctest summary\n' in written + assert '2 failures in tests' in written From 41e0cf0aeb45f07ee1d7abacf299d4bb94f5f226 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 25 Mar 2025 21:15:00 +0000 Subject: [PATCH 022/435] Upgrade to flit-core 3.12 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d841d10747d..364a51dc186 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["flit_core>=3.11"] +requires = ["flit_core>=3.12"] build-backend = "flit_core.buildapi" # project metadata From 466ccd076d5174988510053a95ebc48479820449 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 25 Mar 2025 21:17:04 +0000 Subject: [PATCH 023/435] Revert flit-core 3.11 temporary fix --- sphinx/__init__.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/sphinx/__init__.py b/sphinx/__init__.py index b70b6db47a6..ab02ddc547b 100644 --- a/sphinx/__init__.py +++ b/sphinx/__init__.py @@ -7,11 +7,7 @@ import warnings -# work around flit error in parsing annotated assignments -try: - from sphinx.util._pathlib import _StrPath -except ImportError: - from pathlib import Path as _StrPath # type: ignore[assignment] +from sphinx.util._pathlib import _StrPath TYPE_CHECKING = False if TYPE_CHECKING: From 628442e88248d5cec1998128b3cfc25186681854 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 25 Mar 2025 21:26:28 +0000 Subject: [PATCH 024/435] Group the version variables together --- sphinx/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sphinx/__init__.py b/sphinx/__init__.py index ab02ddc547b..6211f1abfb3 100644 --- a/sphinx/__init__.py +++ b/sphinx/__init__.py @@ -13,9 +13,6 @@ if TYPE_CHECKING: from typing 
import Final -__version__: Final = '8.3.0' -__display_version__: Final = __version__ # used for command line version - warnings.filterwarnings( 'ignore', 'The frontend.Option class .*', @@ -23,6 +20,9 @@ module='docutils.frontend', ) +__version__: Final = '8.3.0' +__display_version__: Final = __version__ # used for command line version + #: Version info for better programmatic use. #: #: A tuple of five elements; for Sphinx version 1.2.1 beta 3 this would be From c40ef2b985fcaccc5fbb23a851e4b5d7f555f68b Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 25 Mar 2025 21:29:11 +0000 Subject: [PATCH 025/435] Delete non-exported names from ``sphinx/__init__.py`` --- sphinx/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sphinx/__init__.py b/sphinx/__init__.py index 6211f1abfb3..6ddfdba271f 100644 --- a/sphinx/__init__.py +++ b/sphinx/__init__.py @@ -19,6 +19,7 @@ DeprecationWarning, module='docutils.frontend', ) +del warnings __version__: Final = '8.3.0' __display_version__: Final = __version__ # used for command line version @@ -34,6 +35,7 @@ version_info: Final = (8, 3, 0, 'beta', 0) package_dir: Final = _StrPath(__file__).resolve().parent +del _StrPath _in_development = True if _in_development: From 3c4b4e31128b5f2398fae39ab0f42af8cec16714 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Thu, 27 Mar 2025 19:03:43 +0000 Subject: [PATCH 026/435] Warn on all redirects if ``linkcheck_allowed_redirects`` is an empty dictionary (#13452) --- CHANGES.rst | 3 ++ doc/usage/configuration.rst | 5 ++++ sphinx/builders/linkcheck.py | 32 ++++++++++++++------- tests/test_builders/test_build_linkcheck.py | 31 ++++++++++++++++++++ 4 files changed, 60 insertions(+), 11 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 82396f2a33e..fede8b5177b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -16,6 +16,9 @@ Features added * #13332: Add :confval:`doctest_fail_fast` option to 
exit after the first failed test. Patch by Till Hoffmann. +* #13439: linkcheck: Permit warning on every redirect with + ``linkcheck_allowed_redirects = {}``. + Patch by Adam Turner. Bugs fixed ---------- diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst index 75e08d7654b..20912d1dc19 100644 --- a/doc/usage/configuration.rst +++ b/doc/usage/configuration.rst @@ -3668,6 +3668,11 @@ and which failures and redirects it ignores. .. versionadded:: 4.1 + .. versionchanged:: 8.3 + Setting :confval:`!linkcheck_allowed_redirects` to the empty dictionary + may now be used to warn on all redirects encountered + by the *linkcheck* builder. + .. confval:: linkcheck_anchors :type: :code-py:`bool` :default: :code-py:`True` diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py index 93ab2e78b00..e1a80a47c0f 100644 --- a/sphinx/builders/linkcheck.py +++ b/sphinx/builders/linkcheck.py @@ -25,6 +25,7 @@ from sphinx._cli.util.colour import darkgray, darkgreen, purple, red, turquoise from sphinx.builders.dummy import DummyBuilder +from sphinx.errors import ConfigError from sphinx.locale import __ from sphinx.transforms.post_transforms import SphinxPostTransform from sphinx.util import logging, requests @@ -178,7 +179,7 @@ def process_result(self, result: CheckResult) -> None: text = 'with unknown code' linkstat['text'] = text redirection = f'{text} to {result.message}' - if self.config.linkcheck_allowed_redirects: + if self.config.linkcheck_allowed_redirects is not None: msg = f'redirect {res_uri} - {redirection}' logger.warning(msg, location=(result.docname, result.lineno)) else: @@ -386,7 +387,7 @@ def __init__( ) self.check_anchors: bool = config.linkcheck_anchors self.allowed_redirects: dict[re.Pattern[str], re.Pattern[str]] - self.allowed_redirects = config.linkcheck_allowed_redirects + self.allowed_redirects = config.linkcheck_allowed_redirects or {} self.retries: int = config.linkcheck_retries self.rate_limit_timeout =
config.linkcheck_rate_limit_timeout self._allow_unauthorized = config.linkcheck_allow_unauthorized @@ -748,20 +749,26 @@ def rewrite_github_anchor(app: Sphinx, uri: str) -> str | None: def compile_linkcheck_allowed_redirects(app: Sphinx, config: Config) -> None: - """Compile patterns in linkcheck_allowed_redirects to the regexp objects.""" - linkcheck_allowed_redirects = app.config.linkcheck_allowed_redirects - for url, pattern in list(linkcheck_allowed_redirects.items()): + """Compile patterns to the regexp objects.""" + if config.linkcheck_allowed_redirects is _sentinel_lar: + config.linkcheck_allowed_redirects = None + return + if not isinstance(config.linkcheck_allowed_redirects, dict): + raise ConfigError + allowed_redirects = {} + for url, pattern in config.linkcheck_allowed_redirects.items(): try: - linkcheck_allowed_redirects[re.compile(url)] = re.compile(pattern) + allowed_redirects[re.compile(url)] = re.compile(pattern) except re.error as exc: logger.warning( __('Failed to compile regex in linkcheck_allowed_redirects: %r %s'), exc.pattern, exc.msg, ) - finally: - # Remove the original regexp-string - linkcheck_allowed_redirects.pop(url) + config.linkcheck_allowed_redirects = allowed_redirects + + +_sentinel_lar = object() def setup(app: Sphinx) -> ExtensionMetadata: @@ -772,7 +779,9 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_config_value( 'linkcheck_exclude_documents', [], '', types=frozenset({list, tuple}) ) - app.add_config_value('linkcheck_allowed_redirects', {}, '', types=frozenset({dict})) + app.add_config_value( + 'linkcheck_allowed_redirects', _sentinel_lar, '', types=frozenset({dict}) + ) app.add_config_value('linkcheck_auth', [], '', types=frozenset({list, tuple})) app.add_config_value('linkcheck_request_headers', {}, '', types=frozenset({dict})) app.add_config_value('linkcheck_retries', 1, '', types=frozenset({int})) @@ -799,7 +808,8 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_event('linkcheck-process-uri') - 
app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=800) + # priority 900 to happen after ``check_confval_types()`` + app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=900) # FIXME: Disable URL rewrite handler for github.com temporarily. # See: https://github.com/sphinx-doc/sphinx/issues/9435 diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py index 82baa62f3ef..bdd8dea54c1 100644 --- a/tests/test_builders/test_build_linkcheck.py +++ b/tests/test_builders/test_build_linkcheck.py @@ -10,6 +10,7 @@ import wsgiref.handlers from base64 import b64encode from http.server import BaseHTTPRequestHandler +from io import StringIO from queue import Queue from typing import TYPE_CHECKING from unittest import mock @@ -27,6 +28,7 @@ RateLimit, compile_linkcheck_allowed_redirects, ) +from sphinx.errors import ConfigError from sphinx.testing.util import SphinxTestApp from sphinx.util import requests from sphinx.util._pathlib import _StrPath @@ -37,6 +39,7 @@ if TYPE_CHECKING: from collections.abc import Callable, Iterable + from pathlib import Path from typing import Any from urllib3 import HTTPConnectionPool @@ -752,6 +755,34 @@ def test_follows_redirects_on_GET(app, capsys): assert app.warning.getvalue() == '' +def test_linkcheck_allowed_redirects_config( + make_app: Callable[..., SphinxTestApp], tmp_path: Path +) -> None: + tmp_path.joinpath('conf.py').touch() + tmp_path.joinpath('index.rst').touch() + + # ``linkcheck_allowed_redirects = None`` is rejected + warning_stream = StringIO() + with pytest.raises(ConfigError): + make_app( + 'linkcheck', + srcdir=tmp_path, + confoverrides={'linkcheck_allowed_redirects': None}, + warning=warning_stream, + ) + assert strip_escape_sequences(warning_stream.getvalue()).splitlines() == [ + "WARNING: The config value `linkcheck_allowed_redirects' has type `NoneType'; expected `dict'." 
+ ] + + # ``linkcheck_allowed_redirects = {}`` is permitted + app = make_app( + 'linkcheck', + srcdir=tmp_path, + confoverrides={'linkcheck_allowed_redirects': {}}, + ) + assert strip_escape_sequences(app.warning.getvalue()) == '' + + @pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects') def test_linkcheck_allowed_redirects(app: SphinxTestApp) -> None: with serve_application(app, make_redirect_handler(support_head=False)) as address: From a6d7ae16739bf92a032a7c4df0297db7cf120ec9 Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Tue, 1 Apr 2025 14:11:31 +0000 Subject: [PATCH 027/435] linkcheck: documentation and validation suggestions for linkcheck_allowed_redirects (#13458) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- doc/usage/configuration.rst | 1 - sphinx/builders/linkcheck.py | 6 +++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst index 20912d1dc19..d14b5d4ec6b 100644 --- a/doc/usage/configuration.rst +++ b/doc/usage/configuration.rst @@ -3642,7 +3642,6 @@ and which failures and redirects it ignores. .. confval:: linkcheck_allowed_redirects :type: :code-py:`dict[str, str]` - :default: :code-py:`{}` A dictionary that maps a pattern of the source URI to a pattern of the canonical URI. diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py index e1a80a47c0f..ff6878f2acb 100644 --- a/sphinx/builders/linkcheck.py +++ b/sphinx/builders/linkcheck.py @@ -754,7 +754,11 @@ def compile_linkcheck_allowed_redirects(app: Sphinx, config: Config) -> None: config.linkcheck_allowed_redirects = None return if not isinstance(config.linkcheck_allowed_redirects, dict): - raise ConfigError + msg = __( + f'Invalid value `{config.linkcheck_allowed_redirects!r}` in ' + 'linkcheck_allowed_redirects. Expected a dictionary.' 
+ ) + raise ConfigError(msg) allowed_redirects = {} for url, pattern in config.linkcheck_allowed_redirects.items(): try: From 021d6a889aa7cbfede33a1c2b75eb68cd40b1554 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 20:54:02 +0100 Subject: [PATCH 028/435] Bump types-requests to 2.32.0.20250328 (#13456) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 364a51dc186..5189219e456 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ lint = [ "types-docutils==0.21.0.20241128", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250305", - "types-requests==2.32.0.20250306", # align with requests + "types-requests==2.32.0.20250328", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.397", "pytest>=8.0", @@ -168,7 +168,7 @@ type-stubs = [ "types-docutils==0.21.0.20241128", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250305", - "types-requests==2.32.0.20250306", + "types-requests==2.32.0.20250328", "types-urllib3==1.26.25.14", ] From 14818ffc01530a6fe0c5c73cb75e8c25255467bb Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:16:53 +0100 Subject: [PATCH 029/435] Bump Ruff to 0.11.3 --- pyproject.toml | 4 ++-- sphinx/ext/autosummary/generate.py | 2 +- sphinx/ext/napoleon/docstring.py | 4 ++-- sphinx/pycode/parser.py | 9 +++++---- sphinx/transforms/__init__.py | 2 +- tests/test_builders/test_build_latex.py | 2 +- 6 files changed, 12 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5189219e456..882a54f8e41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.2", + "ruff==0.11.3", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint 
= [ - "ruff==0.11.2", + "ruff==0.11.3", "sphinx-lint>=0.9", ] package = [ diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py index 62a106479ea..5ff9caf05e4 100644 --- a/sphinx/ext/autosummary/generate.py +++ b/sphinx/ext/autosummary/generate.py @@ -583,7 +583,7 @@ def generate_autosummary_docs( showed_sources = sorted(sources) if len(showed_sources) > 20: - showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:] + showed_sources = [*showed_sources[:10], '...', *showed_sources[-10:]] logger.info( __('[autosummary] generating autosummary for: %s'), ', '.join(showed_sources) ) diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py index 38325df1d94..ea991f72301 100644 --- a/sphinx/ext/napoleon/docstring.py +++ b/sphinx/ext/napoleon/docstring.py @@ -535,7 +535,7 @@ def _consume_returns_section( if colon: if after: - _desc = [after] + lines[1:] + _desc = [after, *lines[1:]] else: _desc = lines[1:] @@ -684,7 +684,7 @@ def _format_field(self, _name: str, _type: str, _desc: list[str]) -> list[str]: if has_desc: _desc = self._fix_field_desc(_desc) if _desc[0]: - return [field + _desc[0]] + _desc[1:] + return [field + _desc[0], *_desc[1:]] else: return [field, *_desc] else: diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py index 34d30200f75..2390b19d4d3 100644 --- a/sphinx/pycode/parser.py +++ b/sphinx/pycode/parser.py @@ -257,7 +257,7 @@ def get_qualname_for(self, name: str) -> list[str] | None: if self.current_function: if self.current_classes and self.context[-1] == '__init__': # store variable comments inside __init__ method of classes - return self.context[:-1] + [name] + return [*self.context[:-1], name] else: return None else: @@ -387,9 +387,10 @@ def visit_Assign(self, node: ast.Assign) -> None: self.add_variable_annotation(varname, node.type_comment) # type: ignore[arg-type] # check comments after assignment - parser = AfterCommentParser( - [current_line[node.col_offset :]] + 
self.buffers[node.lineno :] - ) + parser = AfterCommentParser([ + current_line[node.col_offset :], + *self.buffers[node.lineno :], + ]) parser.parse() if parser.comment and comment_re.match(parser.comment): for varname in varnames: diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py index e1f905d2d0f..c6620078e36 100644 --- a/sphinx/transforms/__init__.py +++ b/sphinx/transforms/__init__.py @@ -217,7 +217,7 @@ class SortIds(SphinxTransform): def apply(self, **kwargs: Any) -> None: for node in self.document.findall(nodes.section): if len(node['ids']) > 1 and node['ids'][0].startswith('id'): - node['ids'] = node['ids'][1:] + [node['ids'][0]] + node['ids'] = [*node['ids'][1:], node['ids'][0]] TRANSLATABLE_NODES = { diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py index ea585cd6f21..f1c19a5ab7f 100644 --- a/tests/test_builders/test_build_latex.py +++ b/tests/test_builders/test_build_latex.py @@ -143,7 +143,7 @@ def test_build_latex_doc(app, engine, docclass, python_maximum_signature_line_le } intersphinx_setup(app) app.config.latex_engine = engine - app.config.latex_documents = [app.config.latex_documents[0][:4] + (docclass,)] + app.config.latex_documents = [(*app.config.latex_documents[0][:4], docclass)] if engine == 'xelatex': app.config.latex_table_style = ['booktabs'] elif engine == 'lualatex': From eb5ca6eb70c48815d6b414a49fc21a8725647ffa Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:18:56 +0100 Subject: [PATCH 030/435] Bump Ruff to 0.11.4 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 882a54f8e41..f8ddce88ce2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.3", + "ruff==0.11.4", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ 
docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.3", + "ruff==0.11.4", "sphinx-lint>=0.9", ] package = [ From 69286861f3f117d3cb753d03c42fcdeb2faf33d7 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:21:22 +0100 Subject: [PATCH 031/435] Bump Ruff to 0.11.5 --- pyproject.toml | 4 ++-- tests/roots/test-ext-autodoc/target/enums.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f8ddce88ce2..6399096879c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.4", + "ruff==0.11.5", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.4", + "ruff==0.11.5", "sphinx-lint>=0.9", ] package = [ diff --git a/tests/roots/test-ext-autodoc/target/enums.py b/tests/roots/test-ext-autodoc/target/enums.py index 6b2731672d2..9d6bcdbc97f 100644 --- a/tests/roots/test-ext-autodoc/target/enums.py +++ b/tests/roots/test-ext-autodoc/target/enums.py @@ -1,4 +1,4 @@ -# ruff: NoQA: D403, PIE796 +# ruff: NoQA: PIE796 import enum from typing import final From 92263a02158c848dcf73aa1f6ad1bbcda9e9c6c5 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:21:49 +0100 Subject: [PATCH 032/435] Bump Ruff to 0.11.6 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6399096879c..f5b38c56d7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.5", + "ruff==0.11.6", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.5", + "ruff==0.11.6", "sphinx-lint>=0.9", ] package = [ From 
052e1beb3c6fbe8f7c0af44e976c9a5bd92cc5b9 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:22:14 +0100 Subject: [PATCH 033/435] Bump Ruff to 0.11.7 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f5b38c56d7a..cb22ecf9c6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.6", + "ruff==0.11.7", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.6", + "ruff==0.11.7", "sphinx-lint>=0.9", ] package = [ From 8fef43c1977212fd498818195eada89f17f54279 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:26:06 +0100 Subject: [PATCH 034/435] Bump pyright to 1.1.400 (#13503) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cb22ecf9c6a..c19476cf0d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,7 @@ lint = [ "types-Pygments==2.19.0.20250305", "types-requests==2.32.0.20250328", # align with requests "types-urllib3==1.26.25.14", - "pyright==1.1.397", + "pyright==1.1.400", "pytest>=8.0", "pypi-attestations==0.0.22", "betterproto==2.0.0b6", @@ -158,7 +158,7 @@ translations = [ ] types = [ "mypy==1.15.0", - "pyright==1.1.397", + "pyright==1.1.400", { include-group = "type-stubs" }, ] type-stubs = [ From 239a709662ae18ba79ddd6b51b9514e29985fdfe Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:34:46 +0100 Subject: [PATCH 035/435] Bump Twine to 6.1 (#13510) --- .github/workflows/create-release.yml | 34 ++-------------------------- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 33 deletions(-) diff --git 
a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 5d07fcbd18d..b3820cc164c 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -47,8 +47,7 @@ jobs: run: python -m build - name: Check distribution - run: | - twine check dist/* + run: twine check dist/* - name: Create Sigstore attestations for built distributions uses: actions/attest@v1 @@ -87,39 +86,10 @@ jobs: name: attestation-bundles path: /tmp/attestation-bundles/ - - name: Mint PyPI API token - id: mint-token - uses: actions/github-script@v7 - with: - # language=JavaScript - script: | - // retrieve the ambient OIDC token - const oidc_request_token = process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN; - const oidc_request_url = process.env.ACTIONS_ID_TOKEN_REQUEST_URL; - const oidc_resp = await fetch(`${oidc_request_url}&audience=pypi`, { - headers: {Authorization: `bearer ${oidc_request_token}`}, - }); - const oidc_token = (await oidc_resp.json()).value; - - // exchange the OIDC token for an API token - const mint_resp = await fetch('https://pypi.org/_/oidc/github/mint-token', { - method: 'post', - body: `{"token": "${oidc_token}"}` , - headers: {'Content-Type': 'application/json'}, - }); - const api_token = (await mint_resp.json()).token; - - // mask the newly minted API token, so that we don't accidentally leak it - core.setSecret(api_token) - core.setOutput('api-token', api_token) - - name: Upload to PyPI env: TWINE_NON_INTERACTIVE: "true" - TWINE_USERNAME: "__token__" - TWINE_PASSWORD: "${{ steps.mint-token.outputs.api-token }}" - run: | - twine upload dist/* --attestations + run: twine upload dist/* --attestations github-release: runs-on: ubuntu-latest diff --git a/pyproject.toml b/pyproject.toml index c19476cf0d2..65dce1780d8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -142,7 +142,7 @@ package = [ "betterproto==2.0.0b6", # resolution fails without betterproto "build", "pypi-attestations==0.0.22", - "twine>=5.1", + "twine>=6.1", 
] test = [ "pytest>=8.0", From 586a6dcba92be4566b90941f534f805974ebd712 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 19:57:18 +0100 Subject: [PATCH 036/435] Bump ``astral-sh/setup-uv`` to v6 --- .github/workflows/builddoc.yml | 2 +- .github/workflows/create-release.yml | 2 +- .github/workflows/lint.yml | 8 ++++---- .github/workflows/main.yml | 14 +++++++------- .github/workflows/transifex.yml | 4 ++-- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml index 8955cf2988a..e049f34e23c 100644 --- a/.github/workflows/builddoc.yml +++ b/.github/workflows/builddoc.yml @@ -31,7 +31,7 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index b3820cc164c..d449e1446ae 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -35,7 +35,7 @@ jobs: with: python-version: "3" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 7e72b6f6fd0..d051e626886 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -50,7 +50,7 @@ jobs: with: python-version: "3" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -71,7 +71,7 @@ jobs: with: python-version: "3" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -92,7 +92,7 @@ jobs: with: python-version: "3" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest 
enable-cache: false @@ -113,7 +113,7 @@ jobs: with: python-version: "3" - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7e7bdb6dab2..11b7ee07a0c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -62,7 +62,7 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -193,7 +193,7 @@ jobs: - name: Install graphviz run: choco install --no-progress graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -222,7 +222,7 @@ jobs: - name: Install graphviz run: brew install graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -257,7 +257,7 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -290,7 +290,7 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -321,7 +321,7 @@ jobs: - name: Check Python version run: python --version --version - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -352,7 +352,7 @@ jobs: - name: Install graphviz run: sudo apt-get install --no-install-recommends --yes graphviz - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false diff --git a/.github/workflows/transifex.yml 
b/.github/workflows/transifex.yml index 56246266515..8f16784fac9 100644 --- a/.github/workflows/transifex.yml +++ b/.github/workflows/transifex.yml @@ -36,7 +36,7 @@ jobs: curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash shell: bash - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false @@ -72,7 +72,7 @@ jobs: curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash shell: bash - name: Install uv - uses: astral-sh/setup-uv@v5 + uses: astral-sh/setup-uv@v6 with: version: latest enable-cache: false From 3f617a3de3d6f7cd45a4458ee230e7353922e513 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 20:29:43 +0100 Subject: [PATCH 037/435] Use dependency groups with pip 25.1 (#13512) --- .github/workflows/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 11b7ee07a0c..597e605b581 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -103,7 +103,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install .[test] + python -m pip install . --group test - name: Install Docutils ${{ matrix.docutils }} run: python -m pip install --upgrade "docutils~=${{ matrix.docutils }}.0" - name: Test with pytest @@ -137,7 +137,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install .[test] + python -m pip install . --group test - name: Test with pytest run: python -m pytest -n logical --dist=worksteal -vv --durations 25 env: @@ -169,7 +169,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - python -m pip install .[test] + python -m pip install . 
--group test - name: Test with pytest run: python -m pytest -n logical --dist=worksteal -vv --durations 25 env: From 873b732de49fd71c6c62fc5546d2fb50309987e3 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 20:40:20 +0100 Subject: [PATCH 038/435] Use ``actions/setup-python`` for free-threaded testing (#13511) --- .github/workflows/main.yml | 37 +++++-------------------------------- 1 file changed, 5 insertions(+), 32 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 597e605b581..c4e96881d81 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -37,6 +37,7 @@ jobs: - "3.11" - "3.12" - "3.13" + - "3.13t" docutils: - "0.20" - "0.21" @@ -68,8 +69,12 @@ jobs: enable-cache: false - name: Install dependencies run: uv pip install . --group test + env: + UV_PYTHON: "python${{ matrix.python }}" - name: Install Docutils ${{ matrix.docutils }} run: uv pip install --upgrade "docutils~=${{ matrix.docutils }}.0" + env: + UV_PYTHON: "python${{ matrix.python }}" - name: Test with pytest run: python -m pytest -n logical --dist=worksteal -vv --durations 25 env: @@ -111,38 +116,6 @@ jobs: env: PYTHONWARNINGS: "error" # treat all warnings as errors - free-threaded: - runs-on: ubuntu-latest - name: Python ${{ matrix.python }} (free-threaded) - timeout-minutes: 15 - strategy: - fail-fast: false - matrix: - python: - - "3.13" - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Set up Python ${{ matrix.python }} (deadsnakes) - uses: deadsnakes/action@v3.2.0 - with: - python-version: ${{ matrix.python }} - nogil: true - - name: Check Python version - run: python --version --version - - name: Install graphviz - run: sudo apt-get install --no-install-recommends --yes graphviz - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install . 
--group test - - name: Test with pytest - run: python -m pytest -n logical --dist=worksteal -vv --durations 25 - env: - PYTHONWARNINGS: "error" # treat all warnings as errors - deadsnakes-free-threaded: runs-on: ubuntu-latest name: Python ${{ matrix.python }} (free-threaded) From b6aefedfa74a14c76c0d0f588033cd2365ad0fc5 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 20:51:07 +0100 Subject: [PATCH 039/435] Use a faster temporary directory for CI on Windows (#13513) --- .github/workflows/main.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c4e96881d81..cf40554e6f4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -154,6 +154,14 @@ jobs: timeout-minutes: 15 steps: + # https://github.com/actions/runner-images/issues/8755 + # On standard runners, the D: drive is much faster. + - name: Set %TMP% and %TEMP% to D:\\Temp + run: | + mkdir "D:\\Tmp" + echo "TMP=D:\\Tmp" >> $env:GITHUB_ENV + echo "TEMP=D:\\Tmp" >> $env:GITHUB_ENV + - uses: actions/checkout@v4 with: persist-credentials: false From 5e07baf2a80f23fbe1f4e1e396534ab920866e33 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 27 Apr 2025 20:55:39 +0100 Subject: [PATCH 040/435] Bump pypi-attestations to 0.0.25 --- pyproject.toml | 4 ++-- utils/convert_attestations.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 65dce1780d8..0d86b2adff7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,7 +104,7 @@ lint = [ "types-urllib3==1.26.25.14", "pyright==1.1.400", "pytest>=8.0", - "pypi-attestations==0.0.22", + "pypi-attestations==0.0.25", "betterproto==2.0.0b6", ] test = [ @@ -141,7 +141,7 @@ lint = [ package = [ "betterproto==2.0.0b6", # resolution fails without betterproto "build", - "pypi-attestations==0.0.22", + "pypi-attestations==0.0.25", 
"twine>=6.1", ] test = [ diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py index 0d013bf97ce..d4516c3c3f4 100644 --- a/utils/convert_attestations.py +++ b/utils/convert_attestations.py @@ -7,7 +7,7 @@ # /// script # requires-python = ">=3.11" # dependencies = [ -# "pypi-attestations==0.0.22", +# "pypi-attestations==0.0.25", # "betterproto==2.0.0b6", # ] # /// From c4929d026c8d22ba229b39cfc2250a9eb1476282 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Tue, 29 Apr 2025 19:30:38 +0300 Subject: [PATCH 041/435] Fix typos (#13520) --- doc/man/sphinx-build.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/man/sphinx-build.rst b/doc/man/sphinx-build.rst index 63af7e49b4c..055e3f366cc 100644 --- a/doc/man/sphinx-build.rst +++ b/doc/man/sphinx-build.rst @@ -272,13 +272,13 @@ Options From Sphinx 8.1, :option:`!--keep-going` is always enabled. Previously, it was only applicable whilst using :option:`--fail-on-warning`, which by default exited :program:`sphinx-build` on the first warning. - Using :option:`!--keep-going` runs :program:`!sphinx-build` to completion + Using :option:`!--keep-going` runs :program:`sphinx-build` to completion and exits with exit status 1 if errors are encountered. .. versionadded:: 1.8 .. versionchanged:: 8.1 :program:`sphinx-build` no longer exits on the first warning, - meaning that in effect :option:`!--fail-on-warning` is always enabled. + meaning that in effect :option:`!--keep-going` is always enabled. The option is retained for compatibility, but may be removed at some later date. 
From 97affba56c4de0c1d1e109da136ea7b2e06eb75b Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 12 May 2025 17:22:18 +0100 Subject: [PATCH 042/435] Bump Ruff to 0.11.8 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0d86b2adff7..fb01508bae7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.7", + "ruff==0.11.8", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.7", + "ruff==0.11.8", "sphinx-lint>=0.9", ] package = [ From ad85bf8a4dd4edbb994d897a83aba8508d47e378 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 12 May 2025 17:24:18 +0100 Subject: [PATCH 043/435] Bump Ruff to 0.11.9 --- pyproject.toml | 4 ++-- tests/roots/test-ext-autodoc/target/need_mocks.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fb01508bae7..0488b3b9d14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.8", + "ruff==0.11.9", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.8", + "ruff==0.11.9", "sphinx-lint>=0.9", ] package = [ diff --git a/tests/roots/test-ext-autodoc/target/need_mocks.py b/tests/roots/test-ext-autodoc/target/need_mocks.py index 1b8af7055d6..73782a2fde8 100644 --- a/tests/roots/test-ext-autodoc/target/need_mocks.py +++ b/tests/roots/test-ext-autodoc/target/need_mocks.py @@ -1,10 +1,9 @@ import missing_module import missing_package1.missing_module1 +import sphinx.missing_module4 from missing_module import missing_name from missing_package2 import missing_module2 from 
missing_package3.missing_module3 import missing_name # NoQA: F811 - -import sphinx.missing_module4 from sphinx.missing_module4 import missing_name2 From 5a73bf6af0faa10f9e25e02a2d62c7e174492ecd Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Mon, 12 May 2025 17:31:12 +0100 Subject: [PATCH 044/435] Disallow untyped defs in ``tests/test_util/test_util_images.py`` (#13543) Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> --- pyproject.toml | 1 - tests/test_util/test_util_images.py | 6 +++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0488b3b9d14..fc429193a38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -290,7 +290,6 @@ module = [ "tests.test_util.test_util", "tests.test_util.test_util_display", "tests.test_util.test_util_docutils", - "tests.test_util.test_util_images", "tests.test_util.test_util_inventory", # tests/test_writers "tests.test_writers.test_docutilsconf", diff --git a/tests/test_util/test_util_images.py b/tests/test_util/test_util_images.py index b56d68c1083..875fc0d98f4 100644 --- a/tests/test_util/test_util_images.py +++ b/tests/test_util/test_util_images.py @@ -11,13 +11,17 @@ parse_data_uri, ) +TYPE_CHECKING = False +if TYPE_CHECKING: + from pathlib import Path + GIF_FILENAME = 'img.gif' PNG_FILENAME = 'img.png' PDF_FILENAME = 'img.pdf' TXT_FILENAME = 'index.txt' -def test_get_image_size(rootdir): +def test_get_image_size(rootdir: Path) -> None: assert get_image_size(rootdir / 'test-root' / GIF_FILENAME) == (200, 181) assert get_image_size(rootdir / 'test-root' / PNG_FILENAME) == (200, 181) assert get_image_size(rootdir / 'test-root' / PDF_FILENAME) is None From 0227606e71dc765ed60cd0ad2c580a43b5ffca4f Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Mon, 12 May 2025 16:52:52 +0000 Subject: [PATCH 045/435] Fix tests for Python 3.14.0a7 (#13527) Authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> 
Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> Co-authored-by: James Addison <55152140+jayaddison@users.noreply.github.com> --- tests/test_extensions/test_ext_autodoc.py | 6 +++++- tests/test_extensions/test_ext_autodoc_configs.py | 12 ++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py index a06c1bbe30d..7aa12db3c32 100644 --- a/tests/test_extensions/test_ext_autodoc.py +++ b/tests/test_extensions/test_ext_autodoc.py @@ -938,10 +938,14 @@ def test_autodoc_special_members(app): } if sys.version_info >= (3, 13, 0, 'alpha', 5): options['exclude-members'] = '__static_attributes__,__firstlineno__' + if sys.version_info >= (3, 14, 0, 'alpha', 7): + ann_attr_name = '__annotations_cache__' + else: + ann_attr_name = '__annotations__' actual = do_autodoc(app, 'class', 'target.Class', options) assert list(filter(lambda l: '::' in l, actual)) == [ '.. py:class:: Class(arg)', - ' .. py:attribute:: Class.__annotations__', + f' .. py:attribute:: Class.{ann_attr_name}', ' .. py:attribute:: Class.__dict__', ' .. py:method:: Class.__init__(arg)', ' .. py:attribute:: Class.__module__', diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py index ab7539190e0..c88496ee506 100644 --- a/tests/test_extensions/test_ext_autodoc_configs.py +++ b/tests/test_extensions/test_ext_autodoc_configs.py @@ -1348,6 +1348,10 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: # default options = {'members': None} actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) + if sys.version_info >= (3, 14, 0, 'alpha', 7): + attr2_typeinfo = () + else: + attr2_typeinfo = (' :type: int',) assert list(actual) == [ '', '.. py:module:: target.autodoc_type_aliases', @@ -1368,7 +1372,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: '', ' .. 
py:attribute:: Foo.attr2', ' :module: target.autodoc_type_aliases', - ' :type: int', + *attr2_typeinfo, '', ' docstring', '', @@ -1421,6 +1425,10 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: 'io.StringIO': 'my.module.StringIO', } actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) + if sys.version_info >= (3, 14, 0, 'alpha', 7): + attr2_typeinfo = () + else: + attr2_typeinfo = (' :type: myint',) assert list(actual) == [ '', '.. py:module:: target.autodoc_type_aliases', @@ -1441,7 +1449,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: '', ' .. py:attribute:: Foo.attr2', ' :module: target.autodoc_type_aliases', - ' :type: myint', + *attr2_typeinfo, '', ' docstring', '', From 059ee9c6269dec3f12eb73c85dd9c6ebb5667427 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 12 May 2025 18:03:51 +0100 Subject: [PATCH 046/435] Fix mypy failures --- tests/test_extensions/test_ext_autodoc_configs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py index c88496ee506..eb351442673 100644 --- a/tests/test_extensions/test_ext_autodoc_configs.py +++ b/tests/test_extensions/test_ext_autodoc_configs.py @@ -1348,6 +1348,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: # default options = {'members': None} actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) + attr2_typeinfo: tuple[str, ...] 
if sys.version_info >= (3, 14, 0, 'alpha', 7): attr2_typeinfo = () else: From e6d67ca53c11718023cd3964daa66e8eb15c4f40 Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Mon, 12 May 2025 17:13:19 +0000 Subject: [PATCH 047/435] Ensure Python clock timezone is reset during test teardown (#13537) --- tests/test_builders/test_build_linkcheck.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py index bdd8dea54c1..b0c74856b4e 100644 --- a/tests/test_builders/test_build_linkcheck.py +++ b/tests/test_builders/test_build_linkcheck.py @@ -1127,6 +1127,12 @@ def test_too_many_requests_retry_after_HTTP_date(tz, app, monkeypatch, capsys): ) as address: app.build() + # Undo side-effects: the monkeypatch context manager clears the TZ environment + # variable, but we also need to reset Python's internal notion of the current + # timezone. + if sys.platform != 'win32': + time.tzset() + content = (app.outdir / 'output.json').read_text(encoding='utf8') assert json.loads(content) == { 'filename': 'index.rst', From c6e39d858467427ba0255824932fbc8b41694a71 Mon Sep 17 00:00:00 2001 From: Shengyu Zhang Date: Tue, 13 May 2025 02:29:18 +0800 Subject: [PATCH 048/435] Add missing backslashes in LaTeX documentation (#13525) --- doc/latex.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/latex.rst b/doc/latex.rst index fce61480941..80762b1c2c1 100644 --- a/doc/latex.rst +++ b/doc/latex.rst @@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are: .. hint:: If the key value is set to - :code-tex:`r'\\newcommand\sphinxbackoftitlepage{}\\sphinxmaketitle'`, then ```` will be typeset on back of title page (``'manual'`` docclass only). @@ -1694,7 +1694,7 @@ Macros .. 
hint:: If adding to preamble the loading of ``tocloft`` package, also add to - preamble :code-tex:`\\renewcommand\sphinxtableofcontentshook{}` else it + preamble :code-tex:`\\renewcommand\\sphinxtableofcontentshook{}` else it will reset :code-tex:`\\l@section` and :code-tex:`\\l@subsection` cancelling ``tocloft`` customization. From 7838043f2c1951122592ac84e74f83db5a48ca3d Mon Sep 17 00:00:00 2001 From: Yuki Kobayashi Date: Tue, 13 May 2025 04:42:42 +0900 Subject: [PATCH 049/435] Support annotations and default values in ``_pseudo_parse_arglist`` (#13536) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- sphinx/domains/javascript.py | 5 +- sphinx/domains/python/_annotations.py | 30 +++++++++- sphinx/domains/python/_object.py | 10 ++-- tests/test_domains/test_domain_py.py | 21 ++++--- .../test_domains/test_domain_py_pyfunction.py | 56 +++++++++++++++++++ 5 files changed, 105 insertions(+), 17 deletions(-) diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py index 968f73aa3d3..51a93bcf802 100644 --- a/sphinx/domains/javascript.py +++ b/sphinx/domains/javascript.py @@ -137,8 +137,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] _pseudo_parse_arglist( signode, arglist, - multi_line_parameter_list, - trailing_comma, + multi_line_parameter_list=multi_line_parameter_list, + trailing_comma=trailing_comma, + env=self.env, ) return fullname, prefix diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py index 29e47fa7151..60def00a533 100644 --- a/sphinx/domains/python/_annotations.py +++ b/sphinx/domains/python/_annotations.py @@ -552,8 +552,10 @@ def _keyword_only_separator() -> addnodes.desc_parameter: def _pseudo_parse_arglist( signode: desc_signature, arglist: str, + *, multi_line_parameter_list: bool = False, trailing_comma: bool = True, + env: BuildEnvironment, ) -> None: """'Parse' a list of arguments separated by commas. 
@@ -561,6 +563,7 @@ def _pseudo_parse_arglist( brackets. Currently, this will split at any comma, even if it's inside a string literal (e.g. default argument value). """ + # TODO: decompose 'env' parameter into only the required bits paramlist = addnodes.desc_parameterlist() paramlist['multi_line_parameter_list'] = multi_line_parameter_list paramlist['multi_line_trailing_comma'] = trailing_comma @@ -583,9 +586,30 @@ def _pseudo_parse_arglist( ends_open += 1 argument = argument[:-1].strip() if argument: - stack[-1] += addnodes.desc_parameter( - '', '', addnodes.desc_sig_name(argument, argument) - ) + param_with_annotation, _, default_value = argument.partition('=') + param_name, _, annotation = param_with_annotation.partition(':') + del param_with_annotation + + node = addnodes.desc_parameter() + node += addnodes.desc_sig_name('', param_name.strip()) + if annotation: + children = _parse_annotation(annotation.strip(), env=env) + node += addnodes.desc_sig_punctuation('', ':') + node += addnodes.desc_sig_space() + node += addnodes.desc_sig_name('', '', *children) # type: ignore[arg-type] + if default_value: + if annotation: + node += addnodes.desc_sig_space() + node += addnodes.desc_sig_operator('', '=') + if annotation: + node += addnodes.desc_sig_space() + node += nodes.inline( + '', + default_value.strip(), + classes=['default_value'], + support_smartquotes=False, + ) + stack[-1] += node while ends_open: stack.append(addnodes.desc_optional()) stack[-2] += stack[-1] diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py index a858afe8a3e..fd4e62bbbe0 100644 --- a/sphinx/domains/python/_object.py +++ b/sphinx/domains/python/_object.py @@ -363,8 +363,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] _pseudo_parse_arglist( signode, arglist, - multi_line_parameter_list, - trailing_comma, + multi_line_parameter_list=multi_line_parameter_list, + trailing_comma=trailing_comma, + env=self.env, ) except 
(NotImplementedError, ValueError) as exc: # duplicated parameter names raise ValueError and not a SyntaxError @@ -374,8 +375,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] _pseudo_parse_arglist( signode, arglist, - multi_line_parameter_list, - trailing_comma, + multi_line_parameter_list=multi_line_parameter_list, + trailing_comma=trailing_comma, + env=self.env, ) else: if self.needs_arglist(): diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py index 26c79ffd8fb..262773af35a 100644 --- a/tests/test_domains/test_domain_py.py +++ b/tests/test_domains/test_domain_py.py @@ -38,20 +38,25 @@ from sphinx.testing.util import assert_node from sphinx.writers.text import STDINDENT +TYPE_CHECKING = False +if TYPE_CHECKING: + from sphinx.application import Sphinx + from sphinx.environment import BuildEnvironment -def parse(sig): + +def parse(sig: str, *, env: BuildEnvironment) -> str: m = py_sig_re.match(sig) if m is None: raise ValueError _name_prefix, _tp_list, _name, arglist, _retann = m.groups() signode = addnodes.desc_signature(sig, '') - _pseudo_parse_arglist(signode, arglist) + _pseudo_parse_arglist(signode, arglist, env=env) return signode.astext() -def test_function_signatures() -> None: - rv = parse("compile(source : string, filename, symbol='file')") - assert rv == "(source : string, filename, symbol='file')" +def test_function_signatures(app: Sphinx) -> None: + rv = parse("compile(source : string, filename, symbol='file')", env=app.env) + assert rv == "(source: string, filename, symbol='file')" for params, expect in [ ('(a=1)', '(a=1)'), @@ -60,9 +65,9 @@ def test_function_signatures() -> None: ('(a=1[, b=None])', '(a=1, [b=None])'), ('(a=[], [b=None])', '(a=[], [b=None])'), ('(a=[][, b=None])', '(a=[], [b=None])'), - ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar]=[], [b=None])'), + ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar] = [], [b=None])'), ]: - rv = parse(f'func{params}') + rv = 
parse(f'func{params}', env=app.env) assert rv == expect # Note: 'def f[Foo[Bar]]()' is not valid Python but people might write @@ -70,7 +75,7 @@ def test_function_signatures() -> None: # variable. for tparams in ['', '[Foo]', '[Foo[Bar]]']: for retann in ['', '-> Foo', '-> Foo[Bar]', '-> anything else']: - rv = parse(f'func{tparams}{params} {retann}'.rstrip()) + rv = parse(f'func{tparams}{params} {retann}'.rstrip(), env=app.env) assert rv == expect diff --git a/tests/test_domains/test_domain_py_pyfunction.py b/tests/test_domains/test_domain_py_pyfunction.py index 32f4e669291..a240d52ec5d 100644 --- a/tests/test_domains/test_domain_py_pyfunction.py +++ b/tests/test_domains/test_domain_py_pyfunction.py @@ -27,6 +27,10 @@ from sphinx.testing import restructuredtext from sphinx.testing.util import assert_node +TYPE_CHECKING = False +if TYPE_CHECKING: + from sphinx.application import Sphinx + @pytest.mark.sphinx('html', testroot='_blank') def test_pyfunction(app): @@ -487,6 +491,58 @@ def test_optional_pyfunction_signature(app): ) +@pytest.mark.sphinx('html', testroot='_blank') +def test_pyfunction_signature_with_bracket(app: Sphinx) -> None: + text = '.. 
py:function:: hello(a : ~typing.Any = ) -> None' + doctree = restructuredtext.parse(app, text) + assert_node( + doctree, + ( + addnodes.index, + [ + desc, + ( + [ + desc_signature, + ( + [desc_name, 'hello'], + desc_parameterlist, + [desc_returns, pending_xref, 'None'], + ), + ], + desc_content, + ), + ], + ), + ) + assert_node( + doctree[1], + addnodes.desc, + desctype='function', + domain='py', + objtype='function', + no_index=False, + ) + assert_node( + doctree[1][0][1], # type: ignore[index] + ( + [ + desc_parameter, + ( + [desc_sig_name, 'a'], + [desc_sig_punctuation, ':'], + desc_sig_space, + [desc_sig_name, pending_xref, 'Any'], + desc_sig_space, + [desc_sig_operator, '='], + desc_sig_space, + [nodes.inline, ''], + ), + ], + ), + ) + + @pytest.mark.sphinx( 'html', testroot='root', From 4051354182034a2367051e5c71072c88771de5d8 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 12 May 2025 21:23:22 +0100 Subject: [PATCH 050/435] Mark tests as expected failures on Docutils 0.22.0rc2 (#13547) --- tests/test_directives/test_directive_only.py | 7 +++++++ tests/test_environment/test_environment_toctree.py | 9 +++++++++ tests/test_util/test_util_docutils_sphinx_directive.py | 9 +++++++++ 3 files changed, 25 insertions(+) diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py index 9e62f4cb3eb..700d11588de 100644 --- a/tests/test_directives/test_directive_only.py +++ b/tests/test_directives/test_directive_only.py @@ -5,13 +5,20 @@ import re from typing import TYPE_CHECKING +import docutils import pytest from docutils import nodes if TYPE_CHECKING: from sphinx.testing.util import SphinxTestApp +xfail_du_22 = pytest.mark.xfail( + docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), + reason='expected failure on Docutils 0.22+', +) + +@xfail_du_22 @pytest.mark.sphinx('text', testroot='directive-only') def test_sectioning(app: SphinxTestApp) -> None: def getsects(section): 
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py index f6b849c5bec..72558aaa6da 100644 --- a/tests/test_environment/test_environment_toctree.py +++ b/tests/test_environment/test_environment_toctree.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING +import docutils import pytest from docutils import nodes from docutils.nodes import bullet_list, list_item, literal, reference, title @@ -17,7 +18,13 @@ if TYPE_CHECKING: from sphinx.testing.util import SphinxTestApp +xfail_du_22 = pytest.mark.xfail( + docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), + reason='expected failure on Docutils 0.22+', +) + +@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_process_doc(app): @@ -464,6 +471,7 @@ def test_domain_objects_document_scoping(app: SphinxTestApp) -> None: ) +@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc(app): @@ -521,6 +529,7 @@ def test_document_toc(app): assert_node(toctree[2][0], [compact_paragraph, reference, 'Indices and tables']) +@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc_only(app): diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py index ecfcab0b489..5770b1860f7 100644 --- a/tests/test_util/test_util_docutils_sphinx_directive.py +++ b/tests/test_util/test_util_docutils_sphinx_directive.py @@ -2,6 +2,8 @@ from types import SimpleNamespace +import docutils +import pytest from docutils import nodes from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined] from docutils.parsers.rst.states import ( @@ -14,6 +16,11 @@ from sphinx.util.docutils import SphinxDirective, new_document 
+xfail_du_22 = pytest.mark.xfail( + docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), + reason='expected failure on Docutils 0.22+', +) + def make_directive( *, env: SimpleNamespace, input_lines: StringList | None = None @@ -104,6 +111,7 @@ def test_sphinx_directive_get_location() -> None: assert directive.get_location() == ':1' +@xfail_du_22 def test_sphinx_directive_parse_content_to_nodes() -> None: directive = make_directive(env=SimpleNamespace()) content = 'spam\n====\n\nEggs! *Lobster thermidor.*' @@ -120,6 +128,7 @@ def test_sphinx_directive_parse_content_to_nodes() -> None: assert node.children[1].astext() == 'Eggs! Lobster thermidor.' +@xfail_du_22 def test_sphinx_directive_parse_text_to_nodes() -> None: directive = make_directive(env=SimpleNamespace()) content = 'spam\n====\n\nEggs! *Lobster thermidor.*' From 5355a78e790cef630d19a2ab47107b85c5d626e1 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 12 May 2025 21:44:41 +0100 Subject: [PATCH 051/435] Fix ``test_util`` for Docutils 0.22+ (#13548) --- .../test_util_docutils_sphinx_directive.py | 35 +++++++++---------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py index 5770b1860f7..00ea5bc3fb5 100644 --- a/tests/test_util/test_util_docutils_sphinx_directive.py +++ b/tests/test_util/test_util_docutils_sphinx_directive.py @@ -3,7 +3,6 @@ from types import SimpleNamespace import docutils -import pytest from docutils import nodes from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined] from docutils.parsers.rst.states import ( @@ -16,11 +15,6 @@ from sphinx.util.docutils import SphinxDirective, new_document -xfail_du_22 = pytest.mark.xfail( - docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), - reason='expected failure on Docutils 0.22+', -) - def make_directive( *, env: SimpleNamespace, input_lines: 
StringList | None = None @@ -37,23 +31,30 @@ def make_directive_and_state( if input_lines is not None: sm.input_lines = input_lines state = RSTState(sm) - state.document = new_document('') - state.document.settings.env = env - state.document.settings.tab_width = 4 - state.document.settings.pep_references = None - state.document.settings.rfc_references = None + document = state.document = new_document('') + document.settings.env = env + document.settings.tab_width = 4 + document.settings.pep_references = None + document.settings.rfc_references = None inliner = Inliner() - inliner.init_customizations(state.document.settings) + inliner.init_customizations(document.settings) state.inliner = inliner state.parent = None state.memo = SimpleNamespace( - document=state.document, + document=document, + reporter=document.reporter, language=english, - inliner=state.inliner, - reporter=state.document.reporter, - section_level=0, title_styles=[], + # section_parents=[], # Docutils 0.22+ + section_level=0, + section_bubble_up_kludge=False, + inliner=inliner, ) + if docutils.__version_info__ >= (0, 22, 0, 'alpha', 0): + # https://github.com/sphinx-doc/sphinx/issues/13539 + # https://sourceforge.net/p/docutils/code/10093/ + # https://sourceforge.net/p/docutils/patches/213/ + state.memo.section_parents = [] directive = SphinxDirective( name='test_directive', arguments=[], @@ -111,7 +112,6 @@ def test_sphinx_directive_get_location() -> None: assert directive.get_location() == ':1' -@xfail_du_22 def test_sphinx_directive_parse_content_to_nodes() -> None: directive = make_directive(env=SimpleNamespace()) content = 'spam\n====\n\nEggs! *Lobster thermidor.*' @@ -128,7 +128,6 @@ def test_sphinx_directive_parse_content_to_nodes() -> None: assert node.children[1].astext() == 'Eggs! Lobster thermidor.' -@xfail_du_22 def test_sphinx_directive_parse_text_to_nodes() -> None: directive = make_directive(env=SimpleNamespace()) content = 'spam\n====\n\nEggs! 
*Lobster thermidor.*' From f928da16337699ece42f5d7fc377870422eee3e3 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 12 May 2025 23:16:24 +0100 Subject: [PATCH 052/435] Fix tests for Docutils 0.22+ (#13549) --- sphinx/directives/other.py | 14 +++++++++++++- tests/test_directives/test_directive_only.py | 7 ------- tests/test_environment/test_environment_toctree.py | 9 --------- 3 files changed, 13 insertions(+), 17 deletions(-) diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py index 8c66ed383b5..5d6f5b778a6 100644 --- a/sphinx/directives/other.py +++ b/sphinx/directives/other.py @@ -5,6 +5,7 @@ from pathlib import Path from typing import TYPE_CHECKING, cast +import docutils from docutils import nodes from docutils.parsers.rst import directives from docutils.parsers.rst.directives.misc import Class @@ -21,7 +22,7 @@ if TYPE_CHECKING: from collections.abc import Sequence - from typing import Any, ClassVar + from typing import Any, ClassVar, Final from docutils.nodes import Element, Node @@ -29,6 +30,7 @@ from sphinx.util.typing import ExtensionMetadata, OptionSpec +DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0) glob_re = re.compile(r'.*[*?\[].*') logger = logging.getLogger(__name__) @@ -330,6 +332,14 @@ def run(self) -> list[Node]: surrounding_section_level = memo.section_level memo.title_styles = [] memo.section_level = 0 + if DU_22_PLUS: + # https://github.com/sphinx-doc/sphinx/issues/13539 + # https://sourceforge.net/p/docutils/code/10093/ + # https://sourceforge.net/p/docutils/patches/213/ + surrounding_section_parents = memo.section_parents + memo.section_parents = [] + else: + surrounding_section_parents = [] try: self.state.nested_parse( self.content, self.content_offset, node, match_titles=True @@ -365,6 +375,8 @@ def run(self) -> list[Node]: return [] finally: memo.title_styles = surrounding_title_styles + if DU_22_PLUS: + memo.section_parents = 
surrounding_section_parents memo.section_level = surrounding_section_level diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py index 700d11588de..9e62f4cb3eb 100644 --- a/tests/test_directives/test_directive_only.py +++ b/tests/test_directives/test_directive_only.py @@ -5,20 +5,13 @@ import re from typing import TYPE_CHECKING -import docutils import pytest from docutils import nodes if TYPE_CHECKING: from sphinx.testing.util import SphinxTestApp -xfail_du_22 = pytest.mark.xfail( - docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), - reason='expected failure on Docutils 0.22+', -) - -@xfail_du_22 @pytest.mark.sphinx('text', testroot='directive-only') def test_sectioning(app: SphinxTestApp) -> None: def getsects(section): diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py index 72558aaa6da..f6b849c5bec 100644 --- a/tests/test_environment/test_environment_toctree.py +++ b/tests/test_environment/test_environment_toctree.py @@ -4,7 +4,6 @@ from typing import TYPE_CHECKING -import docutils import pytest from docutils import nodes from docutils.nodes import bullet_list, list_item, literal, reference, title @@ -18,13 +17,7 @@ if TYPE_CHECKING: from sphinx.testing.util import SphinxTestApp -xfail_du_22 = pytest.mark.xfail( - docutils.__version_info__ >= (0, 22, 0, 'alpha', 0), - reason='expected failure on Docutils 0.22+', -) - -@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_process_doc(app): @@ -471,7 +464,6 @@ def test_domain_objects_document_scoping(app: SphinxTestApp) -> None: ) -@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc(app): @@ -529,7 +521,6 @@ def test_document_toc(app): assert_node(toctree[2][0], [compact_paragraph, reference, 'Indices and 
tables']) -@xfail_du_22 @pytest.mark.sphinx('xml', testroot='toctree') @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_document_toc_only(app): From fefa2f26be4369a2cf81685fa20958c206a7f2af Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 12 May 2025 23:35:06 +0100 Subject: [PATCH 053/435] Add ``section_parents`` to ``_fresh_title_style_context()`` (#13551) --- sphinx/util/parsing.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/sphinx/util/parsing.py b/sphinx/util/parsing.py index 4c4a6477683..ec6649fc247 100644 --- a/sphinx/util/parsing.py +++ b/sphinx/util/parsing.py @@ -5,15 +5,19 @@ import contextlib from typing import TYPE_CHECKING +import docutils from docutils.nodes import Element from docutils.statemachine import StringList, string2lines if TYPE_CHECKING: from collections.abc import Iterator + from typing import Final from docutils.nodes import Node from docutils.parsers.rst.states import RSTState +DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0) + def nested_parse_to_nodes( state: RSTState, @@ -75,15 +79,23 @@ def _fresh_title_style_context(state: RSTState) -> Iterator[None]: memo = state.memo surrounding_title_styles: list[str | tuple[str, str]] = memo.title_styles surrounding_section_level: int = memo.section_level + if DU_22_PLUS: + surrounding_section_parents = memo.section_parents + else: + surrounding_section_parents = [] # clear current title styles memo.title_styles = [] memo.section_level = 0 + if DU_22_PLUS: + memo.section_parents = [] try: yield finally: # reset title styles memo.title_styles = surrounding_title_styles memo.section_level = surrounding_section_level + if DU_22_PLUS: + memo.section_parents = surrounding_section_parents def _text_to_string_list( From a27f37597a116bb376027fd6d044ac7fd1f0047e Mon Sep 17 00:00:00 2001 From: Steve Piercy Date: Mon, 12 May 2025 15:43:12 -0700 Subject: [PATCH 054/435] Emend the 
version changed note for ``linkcheck_allowed_redirects`` (#13550) --- AUTHORS.rst | 1 + doc/usage/configuration.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 5ff09219c02..43a8da3469d 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -102,6 +102,7 @@ Contributors * Slawek Figiel -- additional warning suppression * Stefan Seefeld -- toctree improvements * Stefan van der Walt -- autosummary extension +* Steve Piercy -- documentation improvements * \T. Powers -- HTML output improvements * Taku Shimizu -- epub3 builder * Thomas Lamb -- linkcheck builder diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst index d14b5d4ec6b..7cdf462c4ba 100644 --- a/doc/usage/configuration.rst +++ b/doc/usage/configuration.rst @@ -3668,7 +3668,7 @@ and which failures and redirects it ignores. .. versionadded:: 4.1 .. versionchanged:: 8.3 - Setting :confval:`!linkcheck_allowed_redirects` to the empty directory + Setting :confval:`!linkcheck_allowed_redirects` to an empty dictionary may now be used to warn on all redirects encountered by the *linkcheck* builder. 
From 491999f5699c12128c7e37b4e89f11ec9a9800db Mon Sep 17 00:00:00 2001 From: Yuki Kobayashi Date: Tue, 13 May 2025 18:52:55 +0900 Subject: [PATCH 055/435] Docs: Fix ``nested_parse`` sample code (#13455) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- doc/extdev/markupapi.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/extdev/markupapi.rst b/doc/extdev/markupapi.rst index 7aa632446da..184bd2bd8e4 100644 --- a/doc/extdev/markupapi.rst +++ b/doc/extdev/markupapi.rst @@ -173,9 +173,9 @@ The methods are used as follows: def run(self) -> list[Node]: container = docutils.nodes.Element() # either - nested_parse_with_titles(self.state, self.result, container) + nested_parse_with_titles(self.state, self.result, container, self.content_offset) # or - self.state.nested_parse(self.result, 0, container) + self.state.nested_parse(self.result, self.content_offset, container) parsed = container.children return parsed From 05137c25acf99c07badd6de25379fcfd8a6a120c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Wed, 14 May 2025 09:48:56 +0200 Subject: [PATCH 056/435] LaTeX: fix the #13525 fix of code-tex markup in docs --- doc/latex.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/latex.rst b/doc/latex.rst index 80762b1c2c1..e8f8285f763 100644 --- a/doc/latex.rst +++ b/doc/latex.rst @@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are: .. hint:: If the key value is set to - :code-tex:`r'\\newcommand\\sphinxbackoftitlepage{}\\sphinxmaketitle'`, then ```` will be typeset on back of title page (``'manual'`` docclass only). 
From df171a93678d62fec2b95a878566adf4ef9ae406 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 May 2025 11:21:36 +0100 Subject: [PATCH 057/435] Bump types-docutils to 0.21.0.20250514 (#13555) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fc429193a38..59b2dc30c1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ lint = [ "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", "types-defusedxml==0.7.0.20240218", - "types-docutils==0.21.0.20241128", + "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250305", "types-requests==2.32.0.20250328", # align with requests @@ -165,7 +165,7 @@ type-stubs = [ # align with versions used elsewhere "types-colorama==0.4.15.20240311", "types-defusedxml==0.7.0.20240218", - "types-docutils==0.21.0.20241128", + "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250305", "types-requests==2.32.0.20250328", From 0e3d50cb6d224c7fec30e666fe102b1be7416265 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 May 2025 11:26:28 +0100 Subject: [PATCH 058/435] Bump types-pygments to 2.19.0.20250514 (#13556) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 59b2dc30c1c..92f00d3a759 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ lint = [ "types-defusedxml==0.7.0.20240218", "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", - "types-Pygments==2.19.0.20250305", + "types-Pygments==2.19.0.20250514", "types-requests==2.32.0.20250328", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.400", @@ -167,7 +167,7 @@ type-stubs = [ "types-defusedxml==0.7.0.20240218", "types-docutils==0.21.0.20250514", 
"types-Pillow==10.2.0.20240822", - "types-Pygments==2.19.0.20250305", + "types-Pygments==2.19.0.20250514", "types-requests==2.32.0.20250328", "types-urllib3==1.26.25.14", ] From 6210799bf5bb9fb5045aaa14465ebfc9fb1c8102 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Wed, 14 May 2025 13:40:37 +0200 Subject: [PATCH 059/435] Revert "LaTeX: fix the #13525 fix of code-tex markup in docs" This reverts commit 05137c25acf99c07badd6de25379fcfd8a6a120c. Sorry about that. --- doc/latex.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/latex.rst b/doc/latex.rst index e8f8285f763..80762b1c2c1 100644 --- a/doc/latex.rst +++ b/doc/latex.rst @@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are: .. hint:: If the key value is set to - :code-tex:`'\\newcommand\\sphinxbackoftitlepage{}\\sphinxmaketitle'`, then ```` will be typeset on back of title page (``'manual'`` docclass only). From 3b46823873977c2ed87879509b99714ca8bffa1d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 May 2025 17:35:18 +0100 Subject: [PATCH 060/435] Bump types-requests to 2.32.0.20250515 (#13559) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 92f00d3a759..15520dc1841 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ lint = [ "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250514", - "types-requests==2.32.0.20250328", # align with requests + "types-requests==2.32.0.20250515", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.400", "pytest>=8.0", @@ -168,7 +168,7 @@ type-stubs = [ "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250514", - "types-requests==2.32.0.20250328", + "types-requests==2.32.0.20250515", 
"types-urllib3==1.26.25.14", ] From c4d37057f100862ea58ffcec95de9553263e0acd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Karel=20Ko=C4=8D=C3=AD?= Date: Fri, 16 May 2025 05:00:23 +0200 Subject: [PATCH 061/435] Support C domain objects in the table of contents (#13497) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- CHANGES.rst | 1 + sphinx/domains/c/__init__.py | 28 +++++++++++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index fede8b5177b..d26a93871a5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -19,6 +19,7 @@ Features added * #13439: linkcheck: Permit warning on every redirect with ``linkcheck_allowed_redirects = {}``. Patch by Adam Turner. +* #13497: Support C domain objects in the table of contents. Bugs fixed ---------- diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py index 6dbbf70ac92..7fa1822e4ac 100644 --- a/sphinx/domains/c/__init__.py +++ b/sphinx/domains/c/__init__.py @@ -39,7 +39,7 @@ from docutils.nodes import Element, Node, TextElement, system_message - from sphinx.addnodes import pending_xref + from sphinx.addnodes import desc_signature, pending_xref from sphinx.application import Sphinx from sphinx.builders import Builder from sphinx.domains.c._symbol import LookupKey @@ -309,6 +309,32 @@ def after_content(self) -> None: self.env.current_document.c_parent_symbol = self.oldParentSymbol self.env.ref_context['c:parent_key'] = self.oldParentKey + def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]: + last_symbol: Symbol = self.env.current_document.c_last_symbol + return tuple(map(str, last_symbol.get_full_nested_name().names)) + + def _toc_entry_name(self, sig_node: desc_signature) -> str: + if not sig_node.get('_toc_parts'): + return '' + + config = self.config + objtype = sig_node.parent.get('objtype') + if config.add_function_parentheses and ( + objtype in {'function', 'method'} + or (objtype == 'macro' and '(' in 
sig_node.rawsource) + ): + parens = '()' + else: + parens = '' + *parents, name = sig_node['_toc_parts'] + if config.toc_object_entries_show_parents == 'domain': + return '::'.join((name + parens,)) + if config.toc_object_entries_show_parents == 'hide': + return name + parens + if config.toc_object_entries_show_parents == 'all': + return '::'.join([*parents, name + parens]) + return '' + class CMemberObject(CObject): object_type = 'member' From c76d1bd1372a652eef236bf00f0a0f1ebc9bca7d Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Fri, 16 May 2025 04:00:41 +0100 Subject: [PATCH 062/435] Merge ``SearchLanguage.init()`` into ``__init__()`` (#13562) --- sphinx/search/__init__.py | 5 +---- sphinx/search/da.py | 3 ++- sphinx/search/de.py | 3 ++- sphinx/search/en.py | 3 ++- sphinx/search/es.py | 3 ++- sphinx/search/fi.py | 3 ++- sphinx/search/fr.py | 3 ++- sphinx/search/hu.py | 3 ++- sphinx/search/it.py | 3 ++- sphinx/search/ja.py | 3 ++- sphinx/search/nl.py | 3 ++- sphinx/search/no.py | 3 ++- sphinx/search/pt.py | 3 ++- sphinx/search/ro.py | 3 ++- sphinx/search/ru.py | 3 ++- sphinx/search/sv.py | 3 ++- sphinx/search/tr.py | 3 ++- sphinx/search/zh.py | 2 -- 18 files changed, 33 insertions(+), 22 deletions(-) diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index cd0aa0bbd8f..5563efceb4a 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -91,11 +91,8 @@ class SearchLanguage: _word_re = re.compile(r'\w+') def __init__(self, options: dict[str, str]) -> None: - self.options = options - self.init(options) - - def init(self, options: dict[str, str]) -> None: """Initialize the class with the options the user has given.""" + self.options = options def split(self, input: str) -> list[str]: """This method splits a sentence into words. 
Default splitter splits input diff --git a/sphinx/search/da.py b/sphinx/search/da.py index a56114bb6ba..b2cb66938fa 100644 --- a/sphinx/search/da.py +++ b/sphinx/search/da.py @@ -111,7 +111,8 @@ class SearchDanish(SearchLanguage): js_stemmer_rawcode = 'danish-stemmer.js' stopwords = danish_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('danish') def stem(self, word: str) -> str: diff --git a/sphinx/search/de.py b/sphinx/search/de.py index 37aa9ec8890..5ed8062d688 100644 --- a/sphinx/search/de.py +++ b/sphinx/search/de.py @@ -294,7 +294,8 @@ class SearchGerman(SearchLanguage): js_stemmer_rawcode = 'german-stemmer.js' stopwords = german_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('german') def stem(self, word: str) -> str: diff --git a/sphinx/search/en.py b/sphinx/search/en.py index 5173dc03fc0..51494a04a4d 100644 --- a/sphinx/search/en.py +++ b/sphinx/search/en.py @@ -211,7 +211,8 @@ class SearchEnglish(SearchLanguage): js_stemmer_code = js_porter_stemmer stopwords = english_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('porter') def stem(self, word: str) -> str: diff --git a/sphinx/search/es.py b/sphinx/search/es.py index 5739c88172a..f4079adfdfa 100644 --- a/sphinx/search/es.py +++ b/sphinx/search/es.py @@ -354,7 +354,8 @@ class SearchSpanish(SearchLanguage): js_stemmer_rawcode = 'spanish-stemmer.js' stopwords = spanish_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('spanish') def stem(self, word: str) -> str: diff --git 
a/sphinx/search/fi.py b/sphinx/search/fi.py index 24ef7502300..55a01586924 100644 --- a/sphinx/search/fi.py +++ b/sphinx/search/fi.py @@ -104,7 +104,8 @@ class SearchFinnish(SearchLanguage): js_stemmer_rawcode = 'finnish-stemmer.js' stopwords = finnish_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('finnish') def stem(self, word: str) -> str: diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py index 7662737d6e3..d78745c7991 100644 --- a/sphinx/search/fr.py +++ b/sphinx/search/fr.py @@ -190,7 +190,8 @@ class SearchFrench(SearchLanguage): js_stemmer_rawcode = 'french-stemmer.js' stopwords = french_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('french') def stem(self, word: str) -> str: diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py index 5c35b16fc65..7a6464c8e8b 100644 --- a/sphinx/search/hu.py +++ b/sphinx/search/hu.py @@ -217,7 +217,8 @@ class SearchHungarian(SearchLanguage): js_stemmer_rawcode = 'hungarian-stemmer.js' stopwords = hungarian_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('hungarian') def stem(self, word: str) -> str: diff --git a/sphinx/search/it.py b/sphinx/search/it.py index 60a5cf57720..1158e388ed6 100644 --- a/sphinx/search/it.py +++ b/sphinx/search/it.py @@ -307,7 +307,8 @@ class SearchItalian(SearchLanguage): js_stemmer_rawcode = 'italian-stemmer.js' stopwords = italian_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('italian') def stem(self, word: str) -> str: diff --git a/sphinx/search/ja.py 
b/sphinx/search/ja.py index f855fe4a67d..7045a314459 100644 --- a/sphinx/search/ja.py +++ b/sphinx/search/ja.py @@ -523,7 +523,8 @@ class SearchJapanese(SearchLanguage): lang = 'ja' language_name = 'Japanese' - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) dotted_path = options.get('type') if dotted_path is None: self.splitter = DefaultSplitter(options) diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py index 2d2f2b8a8b6..d46b5ee3383 100644 --- a/sphinx/search/nl.py +++ b/sphinx/search/nl.py @@ -118,7 +118,8 @@ class SearchDutch(SearchLanguage): js_stemmer_rawcode = 'dutch-stemmer.js' stopwords = dutch_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('dutch') def stem(self, word: str) -> str: diff --git a/sphinx/search/no.py b/sphinx/search/no.py index dfc7786d46a..93118f83307 100644 --- a/sphinx/search/no.py +++ b/sphinx/search/no.py @@ -193,7 +193,8 @@ class SearchNorwegian(SearchLanguage): js_stemmer_rawcode = 'norwegian-stemmer.js' stopwords = norwegian_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('norwegian') def stem(self, word: str) -> str: diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py index bf9b7a3a2f8..ff45b27bd95 100644 --- a/sphinx/search/pt.py +++ b/sphinx/search/pt.py @@ -252,7 +252,8 @@ class SearchPortuguese(SearchLanguage): js_stemmer_rawcode = 'portuguese-stemmer.js' stopwords = portuguese_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('portuguese') def stem(self, word: str) -> str: diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py index 
0c00486319a..e08ce5a09e3 100644 --- a/sphinx/search/ro.py +++ b/sphinx/search/ro.py @@ -13,7 +13,8 @@ class SearchRomanian(SearchLanguage): js_stemmer_rawcode = 'romanian-stemmer.js' stopwords: set[str] = set() - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('romanian') def stem(self, word: str) -> str: diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py index e93046cba94..bdeff001797 100644 --- a/sphinx/search/ru.py +++ b/sphinx/search/ru.py @@ -242,7 +242,8 @@ class SearchRussian(SearchLanguage): js_stemmer_rawcode = 'russian-stemmer.js' stopwords = russian_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('russian') def stem(self, word: str) -> str: diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py index b4fa1bd06a2..5a796165805 100644 --- a/sphinx/search/sv.py +++ b/sphinx/search/sv.py @@ -131,7 +131,8 @@ class SearchSwedish(SearchLanguage): js_stemmer_rawcode = 'swedish-stemmer.js' stopwords = swedish_stopwords - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('swedish') def stem(self, word: str) -> str: diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py index b999e1d96d8..82080bf5c61 100644 --- a/sphinx/search/tr.py +++ b/sphinx/search/tr.py @@ -13,7 +13,8 @@ class SearchTurkish(SearchLanguage): js_stemmer_rawcode = 'turkish-stemmer.js' stopwords: set[str] = set() - def init(self, options: dict[str, str]) -> None: + def __init__(self, options: dict[str, str]) -> None: + super().__init__(options) self.stemmer = snowballstemmer.stemmer('turkish') def stem(self, word: str) -> str: diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py index 0f7e4dfd5f9..c063631f865 100644 --- 
a/sphinx/search/zh.py +++ b/sphinx/search/zh.py @@ -243,8 +243,6 @@ class SearchChinese(SearchLanguage): def __init__(self, options: dict[str, str]) -> None: super().__init__(options) self.latin_terms: set[str] = set() - - def init(self, options: dict[str, str]) -> None: dict_path = options.get('dict', JIEBA_DEFAULT_DICT) if dict_path and Path(dict_path).is_file(): jieba_load_userdict(str(dict_path)) From 9ab73b2494a41e008448df267d9c66b280678fba Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 18 May 2025 04:18:37 +0100 Subject: [PATCH 063/435] Update URLs for the Snowball project (#13571) --- sphinx/search/__init__.py | 2 +- sphinx/search/da.py | 2 +- sphinx/search/de.py | 2 +- sphinx/search/es.py | 2 +- sphinx/search/fi.py | 2 +- sphinx/search/fr.py | 2 +- sphinx/search/hu.py | 2 +- sphinx/search/it.py | 2 +- sphinx/search/nl.py | 2 +- sphinx/search/no.py | 2 +- sphinx/search/pt.py | 2 +- sphinx/search/ru.py | 2 +- sphinx/search/sv.py | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 5563efceb4a..66875adf5ec 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -130,7 +130,7 @@ def word_filter(self, word: str) -> bool: def parse_stop_word(source: str) -> set[str]: """Parse snowball style word list like this: - * https://snowball.tartarus.org/algorithms/finnish/stop.txt + * https://snowballstem.org/algorithms/finnish/stop.txt """ result: set[str] = set() for line in source.splitlines(): diff --git a/sphinx/search/da.py b/sphinx/search/da.py index b2cb66938fa..8be1c6a215e 100644 --- a/sphinx/search/da.py +++ b/sphinx/search/da.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word danish_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/danish/stop.txt +| source: https://snowballstem.org/algorithms/danish/stop.txt og | and i | in jeg | I diff --git 
a/sphinx/search/de.py b/sphinx/search/de.py index 5ed8062d688..ac5ac7ee131 100644 --- a/sphinx/search/de.py +++ b/sphinx/search/de.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word german_stopwords = parse_stop_word(""" -|source: https://snowball.tartarus.org/algorithms/german/stop.txt +|source: https://snowballstem.org/algorithms/german/stop.txt aber | but alle | all diff --git a/sphinx/search/es.py b/sphinx/search/es.py index f4079adfdfa..3cc41f600ac 100644 --- a/sphinx/search/es.py +++ b/sphinx/search/es.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word spanish_stopwords = parse_stop_word(""" -|source: https://snowball.tartarus.org/algorithms/spanish/stop.txt +|source: https://snowballstem.org/algorithms/spanish/stop.txt de | from, of la | the, her que | who, that diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py index 55a01586924..c8b048d4fc9 100644 --- a/sphinx/search/fi.py +++ b/sphinx/search/fi.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word finnish_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/finnish/stop.txt +| source: https://snowballstem.org/algorithms/finnish/stop.txt | forms of BE olla diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py index d78745c7991..bbdc56032ff 100644 --- a/sphinx/search/fr.py +++ b/sphinx/search/fr.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word french_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/french/stop.txt +| source: https://snowballstem.org/algorithms/french/stop.txt au | a + le aux | a + les avec | with diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py index 7a6464c8e8b..4e30ca407ee 100644 --- a/sphinx/search/hu.py +++ b/sphinx/search/hu.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word hungarian_stopwords = parse_stop_word(""" -| source: 
https://snowball.tartarus.org/algorithms/hungarian/stop.txt +| source: https://snowballstem.org/algorithms/hungarian/stop.txt | prepared by Anna Tordai a ahogy diff --git a/sphinx/search/it.py b/sphinx/search/it.py index 1158e388ed6..b42e9699b33 100644 --- a/sphinx/search/it.py +++ b/sphinx/search/it.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word italian_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/italian/stop.txt +| source: https://snowballstem.org/algorithms/italian/stop.txt ad | a (to) before vowel al | a + il allo | a + lo diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py index d46b5ee3383..39c14c76664 100644 --- a/sphinx/search/nl.py +++ b/sphinx/search/nl.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word dutch_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/dutch/stop.txt +| source: https://snowballstem.org/algorithms/dutch/stop.txt de | the en | and van | of, from diff --git a/sphinx/search/no.py b/sphinx/search/no.py index 93118f83307..7a21e6728cb 100644 --- a/sphinx/search/no.py +++ b/sphinx/search/no.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word norwegian_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/norwegian/stop.txt +| source: https://snowballstem.org/algorithms/norwegian/stop.txt og | and i | in jeg | I diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py index ff45b27bd95..82f1858f0de 100644 --- a/sphinx/search/pt.py +++ b/sphinx/search/pt.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word portuguese_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/portuguese/stop.txt +| source: https://snowballstem.org/algorithms/portuguese/stop.txt de | of, from a | the; to, at; her o | the; him diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py index bdeff001797..aeab09fa624 100644 --- 
a/sphinx/search/ru.py +++ b/sphinx/search/ru.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word russian_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/russian/stop.txt +| source: https://snowballstem.org/algorithms/russian/stop.txt и | and в | in/into во | alternative form diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py index 5a796165805..9a8232ef2bf 100644 --- a/sphinx/search/sv.py +++ b/sphinx/search/sv.py @@ -7,7 +7,7 @@ from sphinx.search import SearchLanguage, parse_stop_word swedish_stopwords = parse_stop_word(""" -| source: https://snowball.tartarus.org/algorithms/swedish/stop.txt +| source: https://snowballstem.org/algorithms/swedish/stop.txt och | and det | it, this/that att | to (with infinitive) From c30effe714ac79f1556b41fea4a1be80269a3141 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 18 May 2025 04:42:23 +0100 Subject: [PATCH 064/435] Pre-parse stopword lists for HTML search (#13572) --- .gitattributes | 1 + sphinx/search/__init__.py | 10 +- sphinx/search/_stopwords/__init__.py | 0 sphinx/search/_stopwords/da.py | 98 ++++++++ sphinx/search/_stopwords/da.txt | 95 ++++++++ sphinx/search/_stopwords/de.py | 235 ++++++++++++++++++ sphinx/search/_stopwords/de.txt | 278 +++++++++++++++++++++ sphinx/search/_stopwords/en.py | 37 +++ sphinx/search/_stopwords/es.py | 312 ++++++++++++++++++++++++ sphinx/search/_stopwords/es.txt | 338 ++++++++++++++++++++++++++ sphinx/search/_stopwords/fi.py | 233 ++++++++++++++++++ sphinx/search/_stopwords/fi.txt | 88 +++++++ sphinx/search/_stopwords/fr.py | 168 +++++++++++++ sphinx/search/_stopwords/fr.txt | 174 ++++++++++++++ sphinx/search/_stopwords/hu.py | 202 ++++++++++++++++ sphinx/search/_stopwords/hu.txt | 201 ++++++++++++++++ sphinx/search/_stopwords/it.py | 282 ++++++++++++++++++++++ sphinx/search/_stopwords/it.txt | 291 ++++++++++++++++++++++ sphinx/search/_stopwords/nl.py | 105 ++++++++ 
sphinx/search/_stopwords/nl.txt | 102 ++++++++ sphinx/search/_stopwords/no.py | 176 ++++++++++++++ sphinx/search/_stopwords/no.txt | 177 ++++++++++++++ sphinx/search/_stopwords/pt.py | 207 ++++++++++++++++ sphinx/search/_stopwords/pt.txt | 236 ++++++++++++++++++ sphinx/search/_stopwords/ru.py | 163 +++++++++++++ sphinx/search/_stopwords/ru.txt | 226 +++++++++++++++++ sphinx/search/_stopwords/sv.py | 118 +++++++++ sphinx/search/_stopwords/sv.txt | 115 +++++++++ sphinx/search/da.py | 103 +------- sphinx/search/de.py | 286 +--------------------- sphinx/search/en.py | 15 +- sphinx/search/es.py | 346 +-------------------------- sphinx/search/fi.py | 96 +------- sphinx/search/fr.py | 182 +------------- sphinx/search/hu.py | 209 +--------------- sphinx/search/it.py | 299 +---------------------- sphinx/search/nl.py | 110 +-------- sphinx/search/no.py | 185 +------------- sphinx/search/pt.py | 244 +------------------ sphinx/search/ro.py | 2 +- sphinx/search/ru.py | 234 +----------------- sphinx/search/sv.py | 123 +--------- sphinx/search/tr.py | 2 +- sphinx/search/zh.py | 15 +- 44 files changed, 4706 insertions(+), 2413 deletions(-) create mode 100644 sphinx/search/_stopwords/__init__.py create mode 100644 sphinx/search/_stopwords/da.py create mode 100644 sphinx/search/_stopwords/da.txt create mode 100644 sphinx/search/_stopwords/de.py create mode 100644 sphinx/search/_stopwords/de.txt create mode 100644 sphinx/search/_stopwords/en.py create mode 100644 sphinx/search/_stopwords/es.py create mode 100644 sphinx/search/_stopwords/es.txt create mode 100644 sphinx/search/_stopwords/fi.py create mode 100644 sphinx/search/_stopwords/fi.txt create mode 100644 sphinx/search/_stopwords/fr.py create mode 100644 sphinx/search/_stopwords/fr.txt create mode 100644 sphinx/search/_stopwords/hu.py create mode 100644 sphinx/search/_stopwords/hu.txt create mode 100644 sphinx/search/_stopwords/it.py create mode 100644 sphinx/search/_stopwords/it.txt create mode 100644 
sphinx/search/_stopwords/nl.py create mode 100644 sphinx/search/_stopwords/nl.txt create mode 100644 sphinx/search/_stopwords/no.py create mode 100644 sphinx/search/_stopwords/no.txt create mode 100644 sphinx/search/_stopwords/pt.py create mode 100644 sphinx/search/_stopwords/pt.txt create mode 100644 sphinx/search/_stopwords/ru.py create mode 100644 sphinx/search/_stopwords/ru.txt create mode 100644 sphinx/search/_stopwords/sv.py create mode 100644 sphinx/search/_stopwords/sv.txt diff --git a/.gitattributes b/.gitattributes index d0f6ad06464..c10128857f4 100644 --- a/.gitattributes +++ b/.gitattributes @@ -62,4 +62,5 @@ tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251 tests/js/fixtures/**/*.js generated sphinx/search/minified-js/*.js generated +sphinx/search/_stopwords/ generated sphinx/themes/bizstyle/static/css3-mediaqueries.js generated diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 66875adf5ec..1cb05bea0e2 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -20,7 +20,7 @@ from sphinx.util.index_entries import split_index_msg if TYPE_CHECKING: - from collections.abc import Callable, Iterable + from collections.abc import Callable, Iterable, Set from typing import Any, Protocol, TypeVar from docutils.nodes import Node @@ -74,7 +74,7 @@ class SearchLanguage: lang: str = '' language_name: str = '' - stopwords: set[str] = set() + stopwords: Set[str] = frozenset() js_splitter_code: str = '' js_stemmer_rawcode: str = '' js_stemmer_code = """ @@ -128,9 +128,11 @@ def word_filter(self, word: str) -> bool: def parse_stop_word(source: str) -> set[str]: - """Parse snowball style word list like this: + """Collect the stopwords from a snowball style word list: - * https://snowballstem.org/algorithms/finnish/stop.txt + .. 
code:: text + + list of space separated stop words | optional comment """ result: set[str] = set() for line in source.splitlines(): diff --git a/sphinx/search/_stopwords/__init__.py b/sphinx/search/_stopwords/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sphinx/search/_stopwords/da.py b/sphinx/search/_stopwords/da.py new file mode 100644 index 00000000000..c31a51c6df2 --- /dev/null +++ b/sphinx/search/_stopwords/da.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +DANISH_STOPWORDS = frozenset({ + 'ad', + 'af', + 'alle', + 'alt', + 'anden', + 'at', + 'blev', + 'blive', + 'bliver', + 'da', + 'de', + 'dem', + 'den', + 'denne', + 'der', + 'deres', + 'det', + 'dette', + 'dig', + 'din', + 'disse', + 'dog', + 'du', + 'efter', + 'eller', + 'en', + 'end', + 'er', + 'et', + 'for', + 'fra', + 'ham', + 'han', + 'hans', + 'har', + 'havde', + 'have', + 'hende', + 'hendes', + 'her', + 'hos', + 'hun', + 'hvad', + 'hvis', + 'hvor', + 'i', + 'ikke', + 'ind', + 'jeg', + 'jer', + 'jo', + 'kunne', + 'man', + 'mange', + 'med', + 'meget', + 'men', + 'mig', + 'min', + 'mine', + 'mit', + 'mod', + 'ned', + 'noget', + 'nogle', + 'nu', + 'når', + 'og', + 'også', + 'om', + 'op', + 'os', + 'over', + 'på', + 'selv', + 'sig', + 'sin', + 'sine', + 'sit', + 'skal', + 'skulle', + 'som', + 'sådan', + 'thi', + 'til', + 'ud', + 'under', + 'var', + 'vi', + 'vil', + 'ville', + 'vor', + 'være', + 'været', +}) diff --git a/sphinx/search/_stopwords/da.txt b/sphinx/search/_stopwords/da.txt new file mode 100644 index 00000000000..6f2bd01afc2 --- /dev/null +++ b/sphinx/search/_stopwords/da.txt @@ -0,0 +1,95 @@ +| source: https://snowballstem.org/algorithms/danish/stop.txt +og | and +i | in +jeg | I +det | that (dem. pronoun)/it (pers. pronoun) +at | that (in front of a sentence)/to (with infinitive) +en | a/an +den | it (pers. pronoun)/that (dem. 
pronoun) +til | to/at/for/until/against/by/of/into, more +er | present tense of "to be" +som | who, as +på | on/upon/in/on/at/to/after/of/with/for, on +de | they +med | with/by/in, along +han | he +af | of/by/from/off/for/in/with/on, off +for | at/for/to/from/by/of/ago, in front/before, because +ikke | not +der | who/which, there/those +var | past tense of "to be" +mig | me/myself +sig | oneself/himself/herself/itself/themselves +men | but +et | a/an/one, one (number), someone/somebody/one +har | present tense of "to have" +om | round/about/for/in/a, about/around/down, if +vi | we +min | my +havde | past tense of "to have" +ham | him +hun | she +nu | now +over | over/above/across/by/beyond/past/on/about, over/past +da | then, when/as/since +fra | from/off/since, off, since +du | you +ud | out +sin | his/her/its/one's +dem | them +os | us/ourselves +op | up +man | you/one +hans | his +hvor | where +eller | or +hvad | what +skal | must/shall etc. +selv | myself/yourself/herself/ourselves etc., even +her | here +alle | all/everyone/everybody etc. 
+vil | will (verb) +blev | past tense of "to stay/to remain/to get/to become" +kunne | could +ind | in +når | when +være | present tense of "to be" +dog | however/yet/after all +noget | something +ville | would +jo | you know/you see (adv), yes +deres | their/theirs +efter | after/behind/according to/for/by/from, later/afterwards +ned | down +skulle | should +denne | this +end | than +dette | this +mit | my/mine +også | also +under | under/beneath/below/during, below/underneath +have | have +dig | you +anden | other +hende | her +mine | my +alt | everything +meget | much/very, plenty of +sit | his, her, its, one's +sine | his, her, its, one's +vor | our +mod | against +disse | these +hvis | if +din | your/yours +nogle | some +hos | by/at +blive | be/become +mange | many +ad | by/through +bliver | present tense of "to be/to become" +hendes | her/hers +været | be +thi | for (conj) +jer | you +sådan | such, like this/like that diff --git a/sphinx/search/_stopwords/de.py b/sphinx/search/_stopwords/de.py new file mode 100644 index 00000000000..26ee3322ff3 --- /dev/null +++ b/sphinx/search/_stopwords/de.py @@ -0,0 +1,235 @@ +from __future__ import annotations + +GERMAN_STOPWORDS = frozenset({ + 'aber', + 'alle', + 'allem', + 'allen', + 'aller', + 'alles', + 'als', + 'also', + 'am', + 'an', + 'ander', + 'andere', + 'anderem', + 'anderen', + 'anderer', + 'anderes', + 'anderm', + 'andern', + 'anderr', + 'anders', + 'auch', + 'auf', + 'aus', + 'bei', + 'bin', + 'bis', + 'bist', + 'da', + 'damit', + 'dann', + 'das', + 'daß', + 'dasselbe', + 'dazu', + 'dein', + 'deine', + 'deinem', + 'deinen', + 'deiner', + 'deines', + 'dem', + 'demselben', + 'den', + 'denn', + 'denselben', + 'der', + 'derer', + 'derselbe', + 'derselben', + 'des', + 'desselben', + 'dessen', + 'dich', + 'die', + 'dies', + 'diese', + 'dieselbe', + 'dieselben', + 'diesem', + 'diesen', + 'dieser', + 'dieses', + 'dir', + 'doch', + 'dort', + 'du', + 'durch', + 'ein', + 'eine', + 'einem', + 'einen', + 'einer', + 
'eines', + 'einig', + 'einige', + 'einigem', + 'einigen', + 'einiger', + 'einiges', + 'einmal', + 'er', + 'es', + 'etwas', + 'euch', + 'euer', + 'eure', + 'eurem', + 'euren', + 'eurer', + 'eures', + 'für', + 'gegen', + 'gewesen', + 'hab', + 'habe', + 'haben', + 'hat', + 'hatte', + 'hatten', + 'hier', + 'hin', + 'hinter', + 'ich', + 'ihm', + 'ihn', + 'ihnen', + 'ihr', + 'ihre', + 'ihrem', + 'ihren', + 'ihrer', + 'ihres', + 'im', + 'in', + 'indem', + 'ins', + 'ist', + 'jede', + 'jedem', + 'jeden', + 'jeder', + 'jedes', + 'jene', + 'jenem', + 'jenen', + 'jener', + 'jenes', + 'jetzt', + 'kann', + 'kein', + 'keine', + 'keinem', + 'keinen', + 'keiner', + 'keines', + 'können', + 'könnte', + 'machen', + 'man', + 'manche', + 'manchem', + 'manchen', + 'mancher', + 'manches', + 'mein', + 'meine', + 'meinem', + 'meinen', + 'meiner', + 'meines', + 'mich', + 'mir', + 'mit', + 'muss', + 'musste', + 'nach', + 'nicht', + 'nichts', + 'noch', + 'nun', + 'nur', + 'ob', + 'oder', + 'ohne', + 'sehr', + 'sein', + 'seine', + 'seinem', + 'seinen', + 'seiner', + 'seines', + 'selbst', + 'sich', + 'sie', + 'sind', + 'so', + 'solche', + 'solchem', + 'solchen', + 'solcher', + 'solches', + 'soll', + 'sollte', + 'sondern', + 'sonst', + 'um', + 'und', + 'uns', + 'unse', + 'unsem', + 'unsen', + 'unser', + 'unses', + 'unter', + 'viel', + 'vom', + 'von', + 'vor', + 'war', + 'waren', + 'warst', + 'was', + 'weg', + 'weil', + 'weiter', + 'welche', + 'welchem', + 'welchen', + 'welcher', + 'welches', + 'wenn', + 'werde', + 'werden', + 'wie', + 'wieder', + 'will', + 'wir', + 'wird', + 'wirst', + 'wo', + 'wollen', + 'wollte', + 'während', + 'würde', + 'würden', + 'zu', + 'zum', + 'zur', + 'zwar', + 'zwischen', + 'über', +}) diff --git a/sphinx/search/_stopwords/de.txt b/sphinx/search/_stopwords/de.txt new file mode 100644 index 00000000000..94c4777bd05 --- /dev/null +++ b/sphinx/search/_stopwords/de.txt @@ -0,0 +1,278 @@ +|source: https://snowballstem.org/algorithms/german/stop.txt +aber | but + +alle | all 
+allem +allen +aller +alles + +als | than, as +also | so +am | an + dem +an | at + +ander | other +andere +anderem +anderen +anderer +anderes +anderm +andern +anderr +anders + +auch | also +auf | on +aus | out of +bei | by +bin | am +bis | until +bist | art +da | there +damit | with it +dann | then + +der | the +den +des +dem +die +das + +daß | that + +derselbe | the same +derselben +denselben +desselben +demselben +dieselbe +dieselben +dasselbe + +dazu | to that + +dein | thy +deine +deinem +deinen +deiner +deines + +denn | because + +derer | of those +dessen | of him + +dich | thee +dir | to thee +du | thou + +dies | this +diese +diesem +diesen +dieser +dieses + + +doch | (several meanings) +dort | (over) there + + +durch | through + +ein | a +eine +einem +einen +einer +eines + +einig | some +einige +einigem +einigen +einiger +einiges + +einmal | once + +er | he +ihn | him +ihm | to him + +es | it +etwas | something + +euer | your +eure +eurem +euren +eurer +eures + +für | for +gegen | towards +gewesen | p.p. 
of sein +hab | have +habe | have +haben | have +hat | has +hatte | had +hatten | had +hier | here +hin | there +hinter | behind + +ich | I +mich | me +mir | to me + + +ihr | you, to her +ihre +ihrem +ihren +ihrer +ihres +euch | to you + +im | in + dem +in | in +indem | while +ins | in + das +ist | is + +jede | each, every +jedem +jeden +jeder +jedes + +jene | that +jenem +jenen +jener +jenes + +jetzt | now +kann | can + +kein | no +keine +keinem +keinen +keiner +keines + +können | can +könnte | could +machen | do +man | one + +manche | some, many a +manchem +manchen +mancher +manches + +mein | my +meine +meinem +meinen +meiner +meines + +mit | with +muss | must +musste | had to +nach | to(wards) +nicht | not +nichts | nothing +noch | still, yet +nun | now +nur | only +ob | whether +oder | or +ohne | without +sehr | very + +sein | his +seine +seinem +seinen +seiner +seines + +selbst | self +sich | herself + +sie | they, she +ihnen | to them + +sind | are +so | so + +solche | such +solchem +solchen +solcher +solches + +soll | shall +sollte | should +sondern | but +sonst | else +über | over +um | about, around +und | and + +uns | us +unse +unsem +unsen +unser +unses + +unter | under +viel | much +vom | von + dem +von | from +vor | before +während | while +war | was +waren | were +warst | wast +was | what +weg | away, off +weil | because +weiter | further + +welche | which +welchem +welchen +welcher +welches + +wenn | when +werde | will +werden | will +wie | how +wieder | again +will | want +wir | we +wird | will +wirst | willst +wo | where +wollen | want +wollte | wanted +würde | would +würden | would +zu | to +zum | zu + dem +zur | zu + der +zwar | indeed +zwischen | between diff --git a/sphinx/search/_stopwords/en.py b/sphinx/search/_stopwords/en.py new file mode 100644 index 00000000000..01bac4cf14e --- /dev/null +++ b/sphinx/search/_stopwords/en.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +ENGLISH_STOPWORDS = frozenset({ + 'a', + 'and', + 'are', + 
'as', + 'at', + 'be', + 'but', + 'by', + 'for', + 'if', + 'in', + 'into', + 'is', + 'it', + 'near', + 'no', + 'not', + 'of', + 'on', + 'or', + 'such', + 'that', + 'the', + 'their', + 'then', + 'there', + 'these', + 'they', + 'this', + 'to', + 'was', + 'will', + 'with', +}) diff --git a/sphinx/search/_stopwords/es.py b/sphinx/search/_stopwords/es.py new file mode 100644 index 00000000000..d70b317d032 --- /dev/null +++ b/sphinx/search/_stopwords/es.py @@ -0,0 +1,312 @@ +from __future__ import annotations + +SPANISH_STOPWORDS = frozenset({ + 'a', + 'al', + 'algo', + 'algunas', + 'algunos', + 'ante', + 'antes', + 'como', + 'con', + 'contra', + 'cual', + 'cuando', + 'de', + 'del', + 'desde', + 'donde', + 'durante', + 'e', + 'el', + 'ella', + 'ellas', + 'ellos', + 'en', + 'entre', + 'era', + 'erais', + 'eran', + 'eras', + 'eres', + 'es', + 'esa', + 'esas', + 'ese', + 'eso', + 'esos', + 'esta', + 'estaba', + 'estabais', + 'estaban', + 'estabas', + 'estad', + 'estada', + 'estadas', + 'estado', + 'estados', + 'estamos', + 'estando', + 'estar', + 'estaremos', + 'estará', + 'estarán', + 'estarás', + 'estaré', + 'estaréis', + 'estaría', + 'estaríais', + 'estaríamos', + 'estarían', + 'estarías', + 'estas', + 'este', + 'estemos', + 'esto', + 'estos', + 'estoy', + 'estuve', + 'estuviera', + 'estuvierais', + 'estuvieran', + 'estuvieras', + 'estuvieron', + 'estuviese', + 'estuvieseis', + 'estuviesen', + 'estuvieses', + 'estuvimos', + 'estuviste', + 'estuvisteis', + 'estuviéramos', + 'estuviésemos', + 'estuvo', + 'está', + 'estábamos', + 'estáis', + 'están', + 'estás', + 'esté', + 'estéis', + 'estén', + 'estés', + 'fue', + 'fuera', + 'fuerais', + 'fueran', + 'fueras', + 'fueron', + 'fuese', + 'fueseis', + 'fuesen', + 'fueses', + 'fui', + 'fuimos', + 'fuiste', + 'fuisteis', + 'fuéramos', + 'fuésemos', + 'ha', + 'habida', + 'habidas', + 'habido', + 'habidos', + 'habiendo', + 'habremos', + 'habrá', + 'habrán', + 'habrás', + 'habré', + 'habréis', + 'habría', + 'habríais', + 'habríamos', 
+ 'habrían', + 'habrías', + 'habéis', + 'había', + 'habíais', + 'habíamos', + 'habían', + 'habías', + 'han', + 'has', + 'hasta', + 'hay', + 'haya', + 'hayamos', + 'hayan', + 'hayas', + 'hayáis', + 'he', + 'hemos', + 'hube', + 'hubiera', + 'hubierais', + 'hubieran', + 'hubieras', + 'hubieron', + 'hubiese', + 'hubieseis', + 'hubiesen', + 'hubieses', + 'hubimos', + 'hubiste', + 'hubisteis', + 'hubiéramos', + 'hubiésemos', + 'hubo', + 'la', + 'las', + 'le', + 'les', + 'lo', + 'los', + 'me', + 'mi', + 'mis', + 'mucho', + 'muchos', + 'muy', + 'más', + 'mí', + 'mía', + 'mías', + 'mío', + 'míos', + 'nada', + 'ni', + 'no', + 'nos', + 'nosotras', + 'nosotros', + 'nuestra', + 'nuestras', + 'nuestro', + 'nuestros', + 'o', + 'os', + 'otra', + 'otras', + 'otro', + 'otros', + 'para', + 'pero', + 'poco', + 'por', + 'porque', + 'que', + 'quien', + 'quienes', + 'qué', + 'se', + 'sea', + 'seamos', + 'sean', + 'seas', + 'seremos', + 'será', + 'serán', + 'serás', + 'seré', + 'seréis', + 'sería', + 'seríais', + 'seríamos', + 'serían', + 'serías', + 'seáis', + 'sido', + 'siendo', + 'sin', + 'sobre', + 'sois', + 'somos', + 'son', + 'soy', + 'su', + 'sus', + 'suya', + 'suyas', + 'suyo', + 'suyos', + 'sí', + 'también', + 'tanto', + 'te', + 'tendremos', + 'tendrá', + 'tendrán', + 'tendrás', + 'tendré', + 'tendréis', + 'tendría', + 'tendríais', + 'tendríamos', + 'tendrían', + 'tendrías', + 'tened', + 'tenemos', + 'tenga', + 'tengamos', + 'tengan', + 'tengas', + 'tengo', + 'tengáis', + 'tenida', + 'tenidas', + 'tenido', + 'tenidos', + 'teniendo', + 'tenéis', + 'tenía', + 'teníais', + 'teníamos', + 'tenían', + 'tenías', + 'ti', + 'tiene', + 'tienen', + 'tienes', + 'todo', + 'todos', + 'tu', + 'tus', + 'tuve', + 'tuviera', + 'tuvierais', + 'tuvieran', + 'tuvieras', + 'tuvieron', + 'tuviese', + 'tuvieseis', + 'tuviesen', + 'tuvieses', + 'tuvimos', + 'tuviste', + 'tuvisteis', + 'tuviéramos', + 'tuviésemos', + 'tuvo', + 'tuya', + 'tuyas', + 'tuyo', + 'tuyos', + 'tú', + 'un', + 'una', + 'uno', + 
'unos', + 'vosotras', + 'vosotros', + 'vuestra', + 'vuestras', + 'vuestro', + 'vuestros', + 'y', + 'ya', + 'yo', + 'él', + 'éramos', +}) diff --git a/sphinx/search/_stopwords/es.txt b/sphinx/search/_stopwords/es.txt new file mode 100644 index 00000000000..d7047b93164 --- /dev/null +++ b/sphinx/search/_stopwords/es.txt @@ -0,0 +1,338 @@ +|source: https://snowballstem.org/algorithms/spanish/stop.txt +de | from, of +la | the, her +que | who, that +el | the +en | in +y | and +a | to +los | the, them +del | de + el +se | himself, from him etc +las | the, them +por | for, by, etc +un | a +para | for +con | with +no | no +una | a +su | his, her +al | a + el + | es from SER +lo | him +como | how +más | more +pero | pero +sus | su plural +le | to him, her +ya | already +o | or + | fue from SER +este | this + | ha from HABER +sí | himself etc +porque | because +esta | this + | son from SER +entre | between + | está from ESTAR +cuando | when +muy | very +sin | without +sobre | on + | ser from SER + | tiene from TENER +también | also +me | me +hasta | until +hay | there is/are +donde | where + | han from HABER +quien | whom, that + | están from ESTAR + | estado from ESTAR +desde | from +todo | all +nos | us +durante | during + | estados from ESTAR +todos | all +uno | a +les | to them +ni | nor +contra | against +otros | other + | fueron from SER +ese | that +eso | that + | había from HABER +ante | before +ellos | they +e | and (variant of y) +esto | this +mí | me +antes | before +algunos | some +qué | what? 
+unos | a +yo | I +otro | other +otras | other +otra | other +él | he +tanto | so much, many +esa | that +estos | these +mucho | much, many +quienes | who +nada | nothing +muchos | many +cual | who + | sea from SER +poco | few +ella | she +estar | to be + | haber from HABER +estas | these + | estaba from ESTAR + | estamos from ESTAR +algunas | some +algo | something +nosotros | we + + | other forms + +mi | me +mis | mi plural +tú | thou +te | thee +ti | thee +tu | thy +tus | tu plural +ellas | they +nosotras | we +vosotros | you +vosotras | you +os | you +mío | mine +mía | +míos | +mías | +tuyo | thine +tuya | +tuyos | +tuyas | +suyo | his, hers, theirs +suya | +suyos | +suyas | +nuestro | ours +nuestra | +nuestros | +nuestras | +vuestro | yours +vuestra | +vuestros | +vuestras | +esos | those +esas | those + + | forms of estar, to be (not including the infinitive): +estoy +estás +está +estamos +estáis +están +esté +estés +estemos +estéis +estén +estaré +estarás +estará +estaremos +estaréis +estarán +estaría +estarías +estaríamos +estaríais +estarían +estaba +estabas +estábamos +estabais +estaban +estuve +estuviste +estuvo +estuvimos +estuvisteis +estuvieron +estuviera +estuvieras +estuviéramos +estuvierais +estuvieran +estuviese +estuvieses +estuviésemos +estuvieseis +estuviesen +estando +estado +estada +estados +estadas +estad + + | forms of haber, to have (not including the infinitive): +he +has +ha +hemos +habéis +han +haya +hayas +hayamos +hayáis +hayan +habré +habrás +habrá +habremos +habréis +habrán +habría +habrías +habríamos +habríais +habrían +había +habías +habíamos +habíais +habían +hube +hubiste +hubo +hubimos +hubisteis +hubieron +hubiera +hubieras +hubiéramos +hubierais +hubieran +hubiese +hubieses +hubiésemos +hubieseis +hubiesen +habiendo +habido +habida +habidos +habidas + + | forms of ser, to be (not including the infinitive): +soy +eres +es +somos +sois +son +sea +seas +seamos +seáis +sean +seré +serás +será +seremos +seréis +serán +sería 
+serías +seríamos +seríais +serían +era +eras +éramos +erais +eran +fui +fuiste +fue +fuimos +fuisteis +fueron +fuera +fueras +fuéramos +fuerais +fueran +fuese +fueses +fuésemos +fueseis +fuesen +siendo +sido + | sed also means 'thirst' + + | forms of tener, to have (not including the infinitive): +tengo +tienes +tiene +tenemos +tenéis +tienen +tenga +tengas +tengamos +tengáis +tengan +tendré +tendrás +tendrá +tendremos +tendréis +tendrán +tendría +tendrías +tendríamos +tendríais +tendrían +tenía +tenías +teníamos +teníais +tenían +tuve +tuviste +tuvo +tuvimos +tuvisteis +tuvieron +tuviera +tuvieras +tuviéramos +tuvierais +tuvieran +tuviese +tuvieses +tuviésemos +tuvieseis +tuviesen +teniendo +tenido +tenida +tenidos +tenidas +tened diff --git a/sphinx/search/_stopwords/fi.py b/sphinx/search/_stopwords/fi.py new file mode 100644 index 00000000000..d7586cba227 --- /dev/null +++ b/sphinx/search/_stopwords/fi.py @@ -0,0 +1,233 @@ +from __future__ import annotations + +FINNISH_STOPWORDS = frozenset({ + 'ei', + 'eivät', + 'emme', + 'en', + 'et', + 'ette', + 'että', + 'he', + 'heidän', + 'heidät', + 'heihin', + 'heille', + 'heillä', + 'heiltä', + 'heissä', + 'heistä', + 'heitä', + 'hän', + 'häneen', + 'hänelle', + 'hänellä', + 'häneltä', + 'hänen', + 'hänessä', + 'hänestä', + 'hänet', + 'häntä', + 'itse', + 'ja', + 'johon', + 'joiden', + 'joihin', + 'joiksi', + 'joilla', + 'joille', + 'joilta', + 'joina', + 'joissa', + 'joista', + 'joita', + 'joka', + 'joksi', + 'jolla', + 'jolle', + 'jolta', + 'jona', + 'jonka', + 'jos', + 'jossa', + 'josta', + 'jota', + 'jotka', + 'kanssa', + 'keiden', + 'keihin', + 'keiksi', + 'keille', + 'keillä', + 'keiltä', + 'keinä', + 'keissä', + 'keistä', + 'keitä', + 'keneen', + 'keneksi', + 'kenelle', + 'kenellä', + 'keneltä', + 'kenen', + 'kenenä', + 'kenessä', + 'kenestä', + 'kenet', + 'ketkä', + 'ketä', + 'koska', + 'kuin', + 'kuka', + 'kun', + 'me', + 'meidän', + 'meidät', + 'meihin', + 'meille', + 'meillä', + 'meiltä', + 'meissä', + 
'meistä', + 'meitä', + 'mihin', + 'miksi', + 'mikä', + 'mille', + 'millä', + 'miltä', + 'minkä', + 'minua', + 'minulla', + 'minulle', + 'minulta', + 'minun', + 'minussa', + 'minusta', + 'minut', + 'minuun', + 'minä', + 'missä', + 'mistä', + 'mitkä', + 'mitä', + 'mukaan', + 'mutta', + 'ne', + 'niiden', + 'niihin', + 'niiksi', + 'niille', + 'niillä', + 'niiltä', + 'niin', + 'niinä', + 'niissä', + 'niistä', + 'niitä', + 'noiden', + 'noihin', + 'noiksi', + 'noilla', + 'noille', + 'noilta', + 'noin', + 'noina', + 'noissa', + 'noista', + 'noita', + 'nuo', + 'nyt', + 'näiden', + 'näihin', + 'näiksi', + 'näille', + 'näillä', + 'näiltä', + 'näinä', + 'näissä', + 'näistä', + 'näitä', + 'nämä', + 'ole', + 'olemme', + 'olen', + 'olet', + 'olette', + 'oli', + 'olimme', + 'olin', + 'olisi', + 'olisimme', + 'olisin', + 'olisit', + 'olisitte', + 'olisivat', + 'olit', + 'olitte', + 'olivat', + 'olla', + 'olleet', + 'ollut', + 'on', + 'ovat', + 'poikki', + 'se', + 'sekä', + 'sen', + 'siihen', + 'siinä', + 'siitä', + 'siksi', + 'sille', + 'sillä', + 'siltä', + 'sinua', + 'sinulla', + 'sinulle', + 'sinulta', + 'sinun', + 'sinussa', + 'sinusta', + 'sinut', + 'sinuun', + 'sinä', + 'sitä', + 'tai', + 'te', + 'teidän', + 'teidät', + 'teihin', + 'teille', + 'teillä', + 'teiltä', + 'teissä', + 'teistä', + 'teitä', + 'tuo', + 'tuohon', + 'tuoksi', + 'tuolla', + 'tuolle', + 'tuolta', + 'tuon', + 'tuona', + 'tuossa', + 'tuosta', + 'tuota', + 'tähän', + 'täksi', + 'tälle', + 'tällä', + 'tältä', + 'tämä', + 'tämän', + 'tänä', + 'tässä', + 'tästä', + 'tätä', + 'vaan', + 'vai', + 'vaikka', + 'yli', +}) diff --git a/sphinx/search/_stopwords/fi.txt b/sphinx/search/_stopwords/fi.txt new file mode 100644 index 00000000000..9aff8a79929 --- /dev/null +++ b/sphinx/search/_stopwords/fi.txt @@ -0,0 +1,88 @@ +| source: https://snowballstem.org/algorithms/finnish/stop.txt +| forms of BE + +olla +olen +olet +on +olemme +olette +ovat +ole | negative form + +oli +olisi +olisit +olisin +olisimme +olisitte 
+olisivat +olit +olin +olimme +olitte +olivat +ollut +olleet + +en | negation +et +ei +emme +ette +eivät + +|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans +minä minun minut minua minussa minusta minuun minulla minulta minulle | I +sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you +hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she +me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we +te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you +he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they + +tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this +tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that +se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it +nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these +nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those +ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they + +kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who +ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl) +mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what +mitkä | (pl) + +joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which +jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl) + +| conjunctions + +että | that +ja | and +jos | if +koska | because +kuin | than +mutta | but +niin | so +sekä | and +sillä | for +tai | or +vaan | but +vai | or +vaikka | although + + +| prepositions + +kanssa | with +mukaan | according to +noin | about +poikki | across +yli | over, across + +| other + +kun | when +niin | so +nyt | now +itse | self diff --git a/sphinx/search/_stopwords/fr.py b/sphinx/search/_stopwords/fr.py new file mode 100644 index 00000000000..7dfd86d7445 
--- /dev/null +++ b/sphinx/search/_stopwords/fr.py @@ -0,0 +1,168 @@ +from __future__ import annotations + +FRENCH_STOPWORDS = frozenset({ + 'ai', + 'aie', + 'aient', + 'aies', + 'ait', + 'as', + 'au', + 'aura', + 'aurai', + 'auraient', + 'aurais', + 'aurait', + 'auras', + 'aurez', + 'auriez', + 'aurions', + 'aurons', + 'auront', + 'aux', + 'avaient', + 'avais', + 'avait', + 'avec', + 'avez', + 'aviez', + 'avions', + 'avons', + 'ayant', + 'ayez', + 'ayons', + 'c', + 'ce', + 'ceci', + 'cela', + 'celà', + 'ces', + 'cet', + 'cette', + 'd', + 'dans', + 'de', + 'des', + 'du', + 'elle', + 'en', + 'es', + 'est', + 'et', + 'eu', + 'eue', + 'eues', + 'eurent', + 'eus', + 'eusse', + 'eussent', + 'eusses', + 'eussiez', + 'eussions', + 'eut', + 'eux', + 'eûmes', + 'eût', + 'eûtes', + 'furent', + 'fus', + 'fusse', + 'fussent', + 'fusses', + 'fussiez', + 'fussions', + 'fut', + 'fûmes', + 'fût', + 'fûtes', + 'ici', + 'il', + 'ils', + 'j', + 'je', + 'l', + 'la', + 'le', + 'les', + 'leur', + 'leurs', + 'lui', + 'm', + 'ma', + 'mais', + 'me', + 'mes', + 'moi', + 'mon', + 'même', + 'n', + 'ne', + 'nos', + 'notre', + 'nous', + 'on', + 'ont', + 'ou', + 'par', + 'pas', + 'pour', + 'qu', + 'que', + 'quel', + 'quelle', + 'quelles', + 'quels', + 'qui', + 's', + 'sa', + 'sans', + 'se', + 'sera', + 'serai', + 'seraient', + 'serais', + 'serait', + 'seras', + 'serez', + 'seriez', + 'serions', + 'serons', + 'seront', + 'ses', + 'soi', + 'soient', + 'sois', + 'soit', + 'sommes', + 'son', + 'sont', + 'soyez', + 'soyons', + 'suis', + 'sur', + 't', + 'ta', + 'te', + 'tes', + 'toi', + 'ton', + 'tu', + 'un', + 'une', + 'vos', + 'votre', + 'vous', + 'y', + 'à', + 'étaient', + 'étais', + 'était', + 'étant', + 'étiez', + 'étions', + 'été', + 'étée', + 'étées', + 'étés', + 'êtes', +}) diff --git a/sphinx/search/_stopwords/fr.txt b/sphinx/search/_stopwords/fr.txt new file mode 100644 index 00000000000..7839ab57c86 --- /dev/null +++ b/sphinx/search/_stopwords/fr.txt @@ -0,0 +1,174 @@ +| source: 
https://snowballstem.org/algorithms/french/stop.txt +au | a + le +aux | a + les +avec | with +ce | this +ces | these +dans | with +de | of +des | de + les +du | de + le +elle | she +en | `of them' etc +et | and +eux | them +il | he +je | I +la | the +le | the +leur | their +lui | him +ma | my (fem) +mais | but +me | me +même | same; as in moi-même (myself) etc +mes | me (pl) +moi | me +mon | my (masc) +ne | not +nos | our (pl) +notre | our +nous | we +on | one +ou | where +par | by +pas | not +pour | for +qu | que before vowel +que | that +qui | who +sa | his, her (fem) +se | oneself +ses | his (pl) +son | his, her (masc) +sur | on +ta | thy (fem) +te | thee +tes | thy (pl) +toi | thee +ton | thy (masc) +tu | thou +un | a +une | a +vos | your (pl) +votre | your +vous | you + + | single letter forms + +c | c' +d | d' +j | j' +l | l' +à | to, at +m | m' +n | n' +s | s' +t | t' +y | there + + | forms of être (not including the infinitive): +été +étée +étées +étés +étant +suis +es +est +sommes +êtes +sont +serai +seras +sera +serons +serez +seront +serais +serait +serions +seriez +seraient +étais +était +étions +étiez +étaient +fus +fut +fûmes +fûtes +furent +sois +soit +soyons +soyez +soient +fusse +fusses +fût +fussions +fussiez +fussent + + | forms of avoir (not including the infinitive): +ayant +eu +eue +eues +eus +ai +as +avons +avez +ont +aurai +auras +aura +aurons +aurez +auront +aurais +aurait +aurions +auriez +auraient +avais +avait +avions +aviez +avaient +eut +eûmes +eûtes +eurent +aie +aies +ait +ayons +ayez +aient +eusse +eusses +eût +eussions +eussiez +eussent + + | Later additions (from Jean-Christophe Deschamps) +ceci | this +cela | that (added 11 Apr 2012. 
Omission reported by Adrien Grand) +celà | that (incorrect, though common) +cet | this +cette | this +ici | here +ils | they +les | the (pl) +leurs | their (pl) +quel | which +quels | which +quelle | which +quelles | which +sans | without +soi | oneself diff --git a/sphinx/search/_stopwords/hu.py b/sphinx/search/_stopwords/hu.py new file mode 100644 index 00000000000..83bee011b0f --- /dev/null +++ b/sphinx/search/_stopwords/hu.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +HUNGARIAN_STOPWORDS = frozenset({ + 'a', + 'abban', + 'ahhoz', + 'ahogy', + 'ahol', + 'aki', + 'akik', + 'akkor', + 'alatt', + 'amely', + 'amelyek', + 'amelyekben', + 'amelyeket', + 'amelyet', + 'amelynek', + 'ami', + 'amikor', + 'amit', + 'amolyan', + 'amíg', + 'annak', + 'arra', + 'arról', + 'az', + 'azok', + 'azon', + 'azonban', + 'azt', + 'aztán', + 'azután', + 'azzal', + 'azért', + 'be', + 'belül', + 'benne', + 'bár', + 'cikk', + 'cikkek', + 'cikkeket', + 'csak', + 'de', + 'e', + 'ebben', + 'eddig', + 'egy', + 'egyes', + 'egyetlen', + 'egyik', + 'egyre', + 'egyéb', + 'egész', + 'ehhez', + 'ekkor', + 'el', + 'ellen', + 'első', + 'elég', + 'elő', + 'először', + 'előtt', + 'emilyen', + 'ennek', + 'erre', + 'ez', + 'ezek', + 'ezen', + 'ezt', + 'ezzel', + 'ezért', + 'fel', + 'felé', + 'hanem', + 'hiszen', + 'hogy', + 'hogyan', + 'igen', + 'ill', + 'ill.', + 'illetve', + 'ilyen', + 'ilyenkor', + 'ismét', + 'ison', + 'itt', + 'jobban', + 'jó', + 'jól', + 'kell', + 'kellett', + 'keressünk', + 'keresztül', + 'ki', + 'kívül', + 'között', + 'közül', + 'legalább', + 'legyen', + 'lehet', + 'lehetett', + 'lenne', + 'lenni', + 'lesz', + 'lett', + 'maga', + 'magát', + 'majd', + 'meg', + 'mellett', + 'mely', + 'melyek', + 'mert', + 'mi', + 'mikor', + 'milyen', + 'minden', + 'mindenki', + 'mindent', + 'mindig', + 'mint', + 'mintha', + 'mit', + 'mivel', + 'miért', + 'most', + 'már', + 'más', + 'másik', + 'még', + 'míg', + 'nagy', + 'nagyobb', + 'nagyon', + 'ne', + 'nekem', + 'neki', + 'nem', + 
'nincs', + 'néha', + 'néhány', + 'nélkül', + 'olyan', + 'ott', + 'pedig', + 'persze', + 'rá', + 's', + 'saját', + 'sem', + 'semmi', + 'sok', + 'sokat', + 'sokkal', + 'szemben', + 'szerint', + 'szinte', + 'számára', + 'talán', + 'tehát', + 'teljes', + 'tovább', + 'továbbá', + 'több', + 'ugyanis', + 'utolsó', + 'után', + 'utána', + 'vagy', + 'vagyis', + 'vagyok', + 'valaki', + 'valami', + 'valamint', + 'való', + 'van', + 'vannak', + 'vele', + 'vissza', + 'viszont', + 'volna', + 'volt', + 'voltak', + 'voltam', + 'voltunk', + 'által', + 'általában', + 'át', + 'én', + 'éppen', + 'és', + 'így', + 'össze', + 'úgy', + 'új', + 'újabb', + 'újra', + 'ő', + 'ők', + 'őket', +}) diff --git a/sphinx/search/_stopwords/hu.txt b/sphinx/search/_stopwords/hu.txt new file mode 100644 index 00000000000..658c6194f27 --- /dev/null +++ b/sphinx/search/_stopwords/hu.txt @@ -0,0 +1,201 @@ +| source: https://snowballstem.org/algorithms/hungarian/stop.txt +| prepared by Anna Tordai +a +ahogy +ahol +aki +akik +akkor +alatt +által +általában +amely +amelyek +amelyekben +amelyeket +amelyet +amelynek +ami +amit +amolyan +amíg +amikor +át +abban +ahhoz +annak +arra +arról +az +azok +azon +azt +azzal +azért +aztán +azután +azonban +bár +be +belül +benne +cikk +cikkek +cikkeket +csak +de +e +eddig +egész +egy +egyes +egyetlen +egyéb +egyik +egyre +ekkor +el +elég +ellen +elő +először +előtt +első +én +éppen +ebben +ehhez +emilyen +ennek +erre +ez +ezt +ezek +ezen +ezzel +ezért +és +fel +felé +hanem +hiszen +hogy +hogyan +igen +így +illetve +ill. 
+ill +ilyen +ilyenkor +ison +ismét +itt +jó +jól +jobban +kell +kellett +keresztül +keressünk +ki +kívül +között +közül +legalább +lehet +lehetett +legyen +lenne +lenni +lesz +lett +maga +magát +majd +majd +már +más +másik +meg +még +mellett +mert +mely +melyek +mi +mit +míg +miért +milyen +mikor +minden +mindent +mindenki +mindig +mint +mintha +mivel +most +nagy +nagyobb +nagyon +ne +néha +nekem +neki +nem +néhány +nélkül +nincs +olyan +ott +össze +ő +ők +őket +pedig +persze +rá +s +saját +sem +semmi +sok +sokat +sokkal +számára +szemben +szerint +szinte +talán +tehát +teljes +tovább +továbbá +több +úgy +ugyanis +új +újabb +újra +után +utána +utolsó +vagy +vagyis +valaki +valami +valamint +való +vagyok +van +vannak +volt +voltam +voltak +voltunk +vissza +vele +viszont +volna diff --git a/sphinx/search/_stopwords/it.py b/sphinx/search/_stopwords/it.py new file mode 100644 index 00000000000..4b0f522ac94 --- /dev/null +++ b/sphinx/search/_stopwords/it.py @@ -0,0 +1,282 @@ +from __future__ import annotations + +ITALIAN_STOPWORDS = frozenset({ + 'a', + 'abbia', + 'abbiamo', + 'abbiano', + 'abbiate', + 'ad', + 'agl', + 'agli', + 'ai', + 'al', + 'all', + 'alla', + 'alle', + 'allo', + 'anche', + 'avemmo', + 'avendo', + 'avesse', + 'avessero', + 'avessi', + 'avessimo', + 'aveste', + 'avesti', + 'avete', + 'aveva', + 'avevamo', + 'avevano', + 'avevate', + 'avevi', + 'avevo', + 'avrai', + 'avranno', + 'avrebbe', + 'avrebbero', + 'avrei', + 'avremmo', + 'avremo', + 'avreste', + 'avresti', + 'avrete', + 'avrà', + 'avrò', + 'avuta', + 'avute', + 'avuti', + 'avuto', + 'c', + 'che', + 'chi', + 'ci', + 'coi', + 'col', + 'come', + 'con', + 'contro', + 'cui', + 'da', + 'dagl', + 'dagli', + 'dai', + 'dal', + 'dall', + 'dalla', + 'dalle', + 'dallo', + 'degl', + 'degli', + 'dei', + 'del', + 'dell', + 'della', + 'delle', + 'dello', + 'di', + 'dov', + 'dove', + 'e', + 'ebbe', + 'ebbero', + 'ebbi', + 'ed', + 'era', + 'erano', + 'eravamo', + 'eravate', + 'eri', + 'ero', + 'essendo', + 
'faccia', + 'facciamo', + 'facciano', + 'facciate', + 'faccio', + 'facemmo', + 'facendo', + 'facesse', + 'facessero', + 'facessi', + 'facessimo', + 'faceste', + 'facesti', + 'faceva', + 'facevamo', + 'facevano', + 'facevate', + 'facevi', + 'facevo', + 'fai', + 'fanno', + 'farai', + 'faranno', + 'farebbe', + 'farebbero', + 'farei', + 'faremmo', + 'faremo', + 'fareste', + 'faresti', + 'farete', + 'farà', + 'farò', + 'fece', + 'fecero', + 'feci', + 'fosse', + 'fossero', + 'fossi', + 'fossimo', + 'foste', + 'fosti', + 'fu', + 'fui', + 'fummo', + 'furono', + 'gli', + 'ha', + 'hai', + 'hanno', + 'ho', + 'i', + 'il', + 'in', + 'io', + 'l', + 'la', + 'le', + 'lei', + 'li', + 'lo', + 'loro', + 'lui', + 'ma', + 'mi', + 'mia', + 'mie', + 'miei', + 'mio', + 'ne', + 'negl', + 'negli', + 'nei', + 'nel', + 'nell', + 'nella', + 'nelle', + 'nello', + 'noi', + 'non', + 'nostra', + 'nostre', + 'nostri', + 'nostro', + 'o', + 'per', + 'perché', + 'più', + 'quale', + 'quanta', + 'quante', + 'quanti', + 'quanto', + 'quella', + 'quelle', + 'quelli', + 'quello', + 'questa', + 'queste', + 'questi', + 'questo', + 'sarai', + 'saranno', + 'sarebbe', + 'sarebbero', + 'sarei', + 'saremmo', + 'saremo', + 'sareste', + 'saresti', + 'sarete', + 'sarà', + 'sarò', + 'se', + 'sei', + 'si', + 'sia', + 'siamo', + 'siano', + 'siate', + 'siete', + 'sono', + 'sta', + 'stai', + 'stanno', + 'starai', + 'staranno', + 'starebbe', + 'starebbero', + 'starei', + 'staremmo', + 'staremo', + 'stareste', + 'staresti', + 'starete', + 'starà', + 'starò', + 'stava', + 'stavamo', + 'stavano', + 'stavate', + 'stavi', + 'stavo', + 'stemmo', + 'stesse', + 'stessero', + 'stessi', + 'stessimo', + 'steste', + 'stesti', + 'stette', + 'stettero', + 'stetti', + 'stia', + 'stiamo', + 'stiano', + 'stiate', + 'sto', + 'su', + 'sua', + 'sue', + 'sugl', + 'sugli', + 'sui', + 'sul', + 'sull', + 'sulla', + 'sulle', + 'sullo', + 'suo', + 'suoi', + 'ti', + 'tra', + 'tu', + 'tua', + 'tue', + 'tuo', + 'tuoi', + 'tutti', + 'tutto', + 'un', + 
'una', + 'uno', + 'vi', + 'voi', + 'vostra', + 'vostre', + 'vostri', + 'vostro', + 'è', +}) diff --git a/sphinx/search/_stopwords/it.txt b/sphinx/search/_stopwords/it.txt new file mode 100644 index 00000000000..c8776836110 --- /dev/null +++ b/sphinx/search/_stopwords/it.txt @@ -0,0 +1,291 @@ +| source: https://snowballstem.org/algorithms/italian/stop.txt +ad | a (to) before vowel +al | a + il +allo | a + lo +ai | a + i +agli | a + gli +all | a + l' +agl | a + gl' +alla | a + la +alle | a + le +con | with +col | con + il +coi | con + i (forms collo, cogli etc are now very rare) +da | from +dal | da + il +dallo | da + lo +dai | da + i +dagli | da + gli +dall | da + l' +dagl | da + gll' +dalla | da + la +dalle | da + le +di | of +del | di + il +dello | di + lo +dei | di + i +degli | di + gli +dell | di + l' +degl | di + gl' +della | di + la +delle | di + le +in | in +nel | in + el +nello | in + lo +nei | in + i +negli | in + gli +nell | in + l' +negl | in + gl' +nella | in + la +nelle | in + le +su | on +sul | su + il +sullo | su + lo +sui | su + i +sugli | su + gli +sull | su + l' +sugl | su + gl' +sulla | su + la +sulle | su + le +per | through, by +tra | among +contro | against +io | I +tu | thou +lui | he +lei | she +noi | we +voi | you +loro | they +mio | my +mia | +miei | +mie | +tuo | +tua | +tuoi | thy +tue | +suo | +sua | +suoi | his, her +sue | +nostro | our +nostra | +nostri | +nostre | +vostro | your +vostra | +vostri | +vostre | +mi | me +ti | thee +ci | us, there +vi | you, there +lo | him, the +la | her, the +li | them +le | them, the +gli | to him, the +ne | from there etc +il | the +un | a +uno | a +una | a +ma | but +ed | and +se | if +perché | why, because +anche | also +come | how +dov | where (as dov') +dove | where +che | who, that +chi | who +cui | whom +non | not +più | more +quale | who, that +quanto | how much +quanti | +quanta | +quante | +quello | that +quelli | +quella | +quelle | +questo | this +questi | +questa | +queste | +si | yes 
+tutto | all +tutti | all + + | single letter forms: + +a | at +c | as c' for ce or ci +e | and +i | the +l | as l' +o | or + + | forms of avere, to have (not including the infinitive): + +ho +hai +ha +abbiamo +avete +hanno +abbia +abbiate +abbiano +avrò +avrai +avrà +avremo +avrete +avranno +avrei +avresti +avrebbe +avremmo +avreste +avrebbero +avevo +avevi +aveva +avevamo +avevate +avevano +ebbi +avesti +ebbe +avemmo +aveste +ebbero +avessi +avesse +avessimo +avessero +avendo +avuto +avuta +avuti +avute + + | forms of essere, to be (not including the infinitive): +sono +sei +è +siamo +siete +sia +siate +siano +sarò +sarai +sarà +saremo +sarete +saranno +sarei +saresti +sarebbe +saremmo +sareste +sarebbero +ero +eri +era +eravamo +eravate +erano +fui +fosti +fu +fummo +foste +furono +fossi +fosse +fossimo +fossero +essendo + + | forms of fare, to do (not including the infinitive, fa, fat-): +faccio +fai +facciamo +fanno +faccia +facciate +facciano +farò +farai +farà +faremo +farete +faranno +farei +faresti +farebbe +faremmo +fareste +farebbero +facevo +facevi +faceva +facevamo +facevate +facevano +feci +facesti +fece +facemmo +faceste +fecero +facessi +facesse +facessimo +facessero +facendo + + | forms of stare, to be (not including the infinitive): +sto +stai +sta +stiamo +stanno +stia +stiate +stiano +starò +starai +starà +staremo +starete +staranno +starei +staresti +starebbe +staremmo +stareste +starebbero +stavo +stavi +stava +stavamo +stavate +stavano +stetti +stesti +stette +stemmo +steste +stettero +stessi +stesse +stessimo +stessero diff --git a/sphinx/search/_stopwords/nl.py b/sphinx/search/_stopwords/nl.py new file mode 100644 index 00000000000..1742ec8dad2 --- /dev/null +++ b/sphinx/search/_stopwords/nl.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +DUTCH_STOPWORDS = frozenset({ + 'aan', + 'al', + 'alles', + 'als', + 'altijd', + 'andere', + 'ben', + 'bij', + 'daar', + 'dan', + 'dat', + 'de', + 'der', + 'deze', + 'die', + 'dit', + 'doch', + 
'doen', + 'door', + 'dus', + 'een', + 'eens', + 'en', + 'er', + 'ge', + 'geen', + 'geweest', + 'haar', + 'had', + 'heb', + 'hebben', + 'heeft', + 'hem', + 'het', + 'hier', + 'hij', + 'hoe', + 'hun', + 'iemand', + 'iets', + 'ik', + 'in', + 'is', + 'ja', + 'je', + 'kan', + 'kon', + 'kunnen', + 'maar', + 'me', + 'meer', + 'men', + 'met', + 'mij', + 'mijn', + 'moet', + 'na', + 'naar', + 'niet', + 'niets', + 'nog', + 'nu', + 'of', + 'om', + 'omdat', + 'onder', + 'ons', + 'ook', + 'op', + 'over', + 'reeds', + 'te', + 'tegen', + 'toch', + 'toen', + 'tot', + 'u', + 'uit', + 'uw', + 'van', + 'veel', + 'voor', + 'want', + 'waren', + 'was', + 'wat', + 'werd', + 'wezen', + 'wie', + 'wil', + 'worden', + 'wordt', + 'zal', + 'ze', + 'zelf', + 'zich', + 'zij', + 'zijn', + 'zo', + 'zonder', + 'zou', +}) diff --git a/sphinx/search/_stopwords/nl.txt b/sphinx/search/_stopwords/nl.txt new file mode 100644 index 00000000000..64336d0623b --- /dev/null +++ b/sphinx/search/_stopwords/nl.txt @@ -0,0 +1,102 @@ +| source: https://snowballstem.org/algorithms/dutch/stop.txt +de | the +en | and +van | of, from +ik | I, the ego +te | (1) chez, at etc, (2) to, (3) too +dat | that, which +die | that, those, who, which +in | in, inside +een | a, an, one +hij | he +het | the, it +niet | not, nothing, naught +zijn | (1) to be, being, (2) his, one's, its +is | is +was | (1) was, past tense of all persons sing. of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river +op | on, upon, at, in, up, used up +aan | on, upon, to (as dative) +met | with, by +als | like, such as, when +voor | (1) before, in front of, (2) furrow +had | had, past tense all persons sing. of 'hebben' (have) +er | there +maar | but, only +om | round, about, for etc +hem | him +dan | then +zou | should/would, past tense all persons sing. 
of 'zullen' +of | or, whether, if +wat | what, something, anything +mijn | possessive and noun 'mine' +men | people, 'one' +dit | this +zo | so, thus, in this way +door | through by +over | over, across +ze | she, her, they, them +zich | oneself +bij | (1) a bee, (2) by, near, at +ook | also, too +tot | till, until +je | you +mij | me +uit | out of, from +der | Old Dutch form of 'van der' still found in surnames +daar | (1) there, (2) because +haar | (1) her, their, them, (2) hair +naar | (1) unpleasant, unwell etc, (2) towards, (3) as +heb | present first person sing. of 'to have' +hoe | how, why +heeft | present third person sing. of 'to have' +hebben | 'to have' and various parts thereof +deze | this +u | you +want | (1) for, (2) mitten, (3) rigging +nog | yet, still +zal | 'shall', first and third person sing. of verb 'zullen' (will) +me | me +zij | she, they +nu | now +ge | 'thou', still used in Belgium and south Netherlands +geen | none +omdat | because +iets | something, somewhat +worden | to become, grow, get +toch | yet, still +al | all, every, each +waren | (1) 'were' (2) to wander, (3) wares, (3) +veel | much, many +meer | (1) more, (2) lake +doen | to do, to make +toen | then, when +moet | noun 'spot/mote' and present form of 'to must' +ben | (1) am, (2) 'are' in interrogative second person singular of 'to be' +zonder | without +kan | noun 'can' and present form of 'to be able' +hun | their, them +dus | so, consequently +alles | all, everything, anything +onder | under, beneath +ja | yes, of course +eens | once, one day +hier | here +wie | who +werd | imperfect third person sing. of 'become' +altijd | always +doch | yet, but etc +wordt | present third person sing. 
of 'become' +wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans +kunnen | to be able +ons | us/our +zelf | self +tegen | against, towards, at +na | after, near +reeds | already +wil | (1) present tense of 'want', (2) 'will', noun, (3) fender +kon | could; past tense of 'to be able' +niets | nothing +uw | your +iemand | somebody +geweest | been; past participle of 'be' +andere | other diff --git a/sphinx/search/_stopwords/no.py b/sphinx/search/_stopwords/no.py new file mode 100644 index 00000000000..9b9bfbea4c9 --- /dev/null +++ b/sphinx/search/_stopwords/no.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +NORWEGIAN_STOPWORDS = frozenset({ + 'alle', + 'at', + 'av', + 'bare', + 'begge', + 'ble', + 'blei', + 'bli', + 'blir', + 'blitt', + 'både', + 'båe', + 'da', + 'de', + 'deg', + 'dei', + 'deim', + 'deira', + 'deires', + 'dem', + 'den', + 'denne', + 'der', + 'dere', + 'deres', + 'det', + 'dette', + 'di', + 'din', + 'disse', + 'ditt', + 'du', + 'dykk', + 'dykkar', + 'då', + 'eg', + 'ein', + 'eit', + 'eitt', + 'eller', + 'elles', + 'en', + 'enn', + 'er', + 'et', + 'ett', + 'etter', + 'for', + 'fordi', + 'fra', + 'før', + 'ha', + 'hadde', + 'han', + 'hans', + 'har', + 'hennar', + 'henne', + 'hennes', + 'her', + 'hjå', + 'ho', + 'hoe', + 'honom', + 'hoss', + 'hossen', + 'hun', + 'hva', + 'hvem', + 'hver', + 'hvilke', + 'hvilken', + 'hvis', + 'hvor', + 'hvordan', + 'hvorfor', + 'i', + 'ikke', + 'ikkje', + 'ingen', + 'ingi', + 'inkje', + 'inn', + 'inni', + 'ja', + 'jeg', + 'kan', + 'kom', + 'korleis', + 'korso', + 'kun', + 'kunne', + 'kva', + 'kvar', + 'kvarhelst', + 'kven', + 'kvi', + 'kvifor', + 'man', + 'mange', + 'me', + 'med', + 'medan', + 'meg', + 'meget', + 'mellom', + 'men', + 'mi', + 'min', + 'mine', + 'mitt', + 'mot', + 'mykje', + 'ned', + 'no', + 'noe', + 'noen', + 'noka', + 'noko', + 'nokon', + 'nokor', + 'nokre', + 'nå', + 'når', + 'og', + 'også', + 'om', + 'opp', + 'oss', + 'over', + 'på', + 'samme', + 'seg', + 'selv', + 'si', + 'sia', 
+ 'sidan', + 'siden', + 'sin', + 'sine', + 'sitt', + 'sjøl', + 'skal', + 'skulle', + 'slik', + 'so', + 'som', + 'somme', + 'somt', + 'så', + 'sånn', + 'til', + 'um', + 'upp', + 'ut', + 'uten', + 'var', + 'vart', + 'varte', + 'ved', + 'vere', + 'verte', + 'vi', + 'vil', + 'ville', + 'vore', + 'vors', + 'vort', + 'vår', + 'være', + 'vært', + 'å', +}) diff --git a/sphinx/search/_stopwords/no.txt b/sphinx/search/_stopwords/no.txt new file mode 100644 index 00000000000..552ad326a55 --- /dev/null +++ b/sphinx/search/_stopwords/no.txt @@ -0,0 +1,177 @@ +| source: https://snowballstem.org/algorithms/norwegian/stop.txt +og | and +i | in +jeg | I +det | it/this/that +at | to (w. inf.) +en | a/an +et | a/an +den | it/this/that +til | to +er | is/am/are +som | who/that +på | on +de | they / you(formal) +med | with +han | he +av | of +ikke | not +ikkje | not * +der | there +så | so +var | was/were +meg | me +seg | you +men | but +ett | one +har | have +om | about +vi | we +min | my +mitt | my +ha | have +hadde | had +hun | she +nå | now +over | over +da | when/as +ved | by/know +fra | from +du | you +ut | out +sin | your +dem | them +oss | us +opp | up +man | you/one +kan | can +hans | his +hvor | where +eller | or +hva | what +skal | shall/must +selv | self (reflective) +sjøl | self (reflective) +her | here +alle | all +vil | will +bli | become +ble | became +blei | became * +blitt | have become +kunne | could +inn | in +når | when +være | be +kom | come +noen | some +noe | some +ville | would +dere | you +som | who/which/that +deres | their/theirs +kun | only/just +ja | yes +etter | after +ned | down +skulle | should +denne | this +for | for/because +deg | you +si | hers/his +sine | hers/his +sitt | hers/his +mot | against +å | to +meget | much +hvorfor | why +dette | this +disse | these/those +uten | without +hvordan | how +ingen | none +din | your +ditt | your +blir | become +samme | same +hvilken | which +hvilke | which (plural) +sånn | such a +inni | inside/within +mellom 
| between +vår | our +hver | each +hvem | who +vors | us/ours +hvis | whose +både | both +bare | only/just +enn | than +fordi | as/because +før | before +mange | many +også | also +slik | just +vært | been +være | to be +båe | both * +begge | both +siden | since +dykk | your * +dykkar | yours * +dei | they * +deira | them * +deires | theirs * +deim | them * +di | your (fem.) * +då | as/when * +eg | I * +ein | a/an * +eit | a/an * +eitt | a/an * +elles | or * +honom | he * +hjå | at * +ho | she * +hoe | she * +henne | her +hennar | her/hers +hennes | hers +hoss | how * +hossen | how * +ikkje | not * +ingi | noone * +inkje | noone * +korleis | how * +korso | how * +kva | what/which * +kvar | where * +kvarhelst | where * +kven | who/whom * +kvi | why * +kvifor | why * +me | we * +medan | while * +mi | my * +mine | my * +mykje | much * +no | now * +nokon | some (masc./neut.) * +noka | some (fem.) * +nokor | some * +noko | some * +nokre | some * +si | his/hers * +sia | since * +sidan | since * +so | so * +somt | some * +somme | some * +um | about* +upp | up * +vere | be * +vore | was * +verte | become * +vort | become * +varte | became * +vart | became * diff --git a/sphinx/search/_stopwords/pt.py b/sphinx/search/_stopwords/pt.py new file mode 100644 index 00000000000..b79799d42a6 --- /dev/null +++ b/sphinx/search/_stopwords/pt.py @@ -0,0 +1,207 @@ +from __future__ import annotations + +PORTUGUESE_STOPWORDS = frozenset({ + 'a', + 'ao', + 'aos', + 'aquela', + 'aquelas', + 'aquele', + 'aqueles', + 'aquilo', + 'as', + 'até', + 'com', + 'como', + 'da', + 'das', + 'de', + 'dela', + 'delas', + 'dele', + 'deles', + 'depois', + 'do', + 'dos', + 'e', + 'ela', + 'elas', + 'ele', + 'eles', + 'em', + 'entre', + 'era', + 'eram', + 'essa', + 'essas', + 'esse', + 'esses', + 'esta', + 'estamos', + 'estas', + 'estava', + 'estavam', + 'este', + 'esteja', + 'estejam', + 'estejamos', + 'estes', + 'esteve', + 'estive', + 'estivemos', + 'estiver', + 'estivera', + 'estiveram', + 'estiverem', 
+ 'estivermos', + 'estivesse', + 'estivessem', + 'estivéramos', + 'estivéssemos', + 'estou', + 'está', + 'estávamos', + 'estão', + 'eu', + 'foi', + 'fomos', + 'for', + 'fora', + 'foram', + 'forem', + 'formos', + 'fosse', + 'fossem', + 'fui', + 'fôramos', + 'fôssemos', + 'haja', + 'hajam', + 'hajamos', + 'havemos', + 'hei', + 'houve', + 'houvemos', + 'houver', + 'houvera', + 'houveram', + 'houverei', + 'houverem', + 'houveremos', + 'houveria', + 'houveriam', + 'houvermos', + 'houverá', + 'houverão', + 'houveríamos', + 'houvesse', + 'houvessem', + 'houvéramos', + 'houvéssemos', + 'há', + 'hão', + 'isso', + 'isto', + 'já', + 'lhe', + 'lhes', + 'mais', + 'mas', + 'me', + 'mesmo', + 'meu', + 'meus', + 'minha', + 'minhas', + 'muito', + 'na', + 'nas', + 'nem', + 'no', + 'nos', + 'nossa', + 'nossas', + 'nosso', + 'nossos', + 'num', + 'numa', + 'não', + 'nós', + 'o', + 'os', + 'ou', + 'para', + 'pela', + 'pelas', + 'pelo', + 'pelos', + 'por', + 'qual', + 'quando', + 'que', + 'quem', + 'se', + 'seja', + 'sejam', + 'sejamos', + 'sem', + 'serei', + 'seremos', + 'seria', + 'seriam', + 'será', + 'serão', + 'seríamos', + 'seu', + 'seus', + 'somos', + 'sou', + 'sua', + 'suas', + 'são', + 'só', + 'também', + 'te', + 'tem', + 'temos', + 'tenha', + 'tenham', + 'tenhamos', + 'tenho', + 'terei', + 'teremos', + 'teria', + 'teriam', + 'terá', + 'terão', + 'teríamos', + 'teu', + 'teus', + 'teve', + 'tinha', + 'tinham', + 'tive', + 'tivemos', + 'tiver', + 'tivera', + 'tiveram', + 'tiverem', + 'tivermos', + 'tivesse', + 'tivessem', + 'tivéramos', + 'tivéssemos', + 'tu', + 'tua', + 'tuas', + 'tém', + 'tínhamos', + 'um', + 'uma', + 'você', + 'vocês', + 'vos', + 'à', + 'às', + 'éramos', +}) diff --git a/sphinx/search/_stopwords/pt.txt b/sphinx/search/_stopwords/pt.txt new file mode 100644 index 00000000000..5ef15633d81 --- /dev/null +++ b/sphinx/search/_stopwords/pt.txt @@ -0,0 +1,236 @@ +| source: https://snowballstem.org/algorithms/portuguese/stop.txt +de | of, from +a | the; to, at; her +o 
| the; him +que | who, that +e | and +do | de + o +da | de + a +em | in +um | a +para | for + | é from SER +com | with +não | not, no +uma | a +os | the; them +no | em + o +se | himself etc +na | em + a +por | for +mais | more +as | the; them +dos | de + os +como | as, like +mas | but + | foi from SER +ao | a + o +ele | he +das | de + as + | tem from TER +à | a + a +seu | his +sua | her +ou | or + | ser from SER +quando | when +muito | much + | há from HAV +nos | em + os; us +já | already, now + | está from EST +eu | I +também | also +só | only, just +pelo | per + o +pela | per + a +até | up to +isso | that +ela | he +entre | between + | era from SER +depois | after +sem | without +mesmo | same +aos | a + os + | ter from TER +seus | his +quem | whom +nas | em + as +me | me +esse | that +eles | they + | estão from EST +você | you + | tinha from TER + | foram from SER +essa | that +num | em + um +nem | nor +suas | her +meu | my +às | a + as +minha | my + | têm from TER +numa | em + uma +pelos | per + os +elas | they + | havia from HAV + | seja from SER +qual | which + | será from SER +nós | we + | tenho from TER +lhe | to him, her +deles | of them +essas | those +esses | those +pelas | per + as +este | this + | fosse from SER +dele | of him + + | other words. There are many contractions such as naquele = em+aquele, + | mo = me+o, but they are rare. + | Indefinite article plural forms are also rare. 
+ +tu | thou +te | thee +vocês | you (plural) +vos | you +lhes | to them +meus | my +minhas +teu | thy +tua +teus +tuas +nosso | our +nossa +nossos +nossas + +dela | of her +delas | of them + +esta | this +estes | these +estas | these +aquele | that +aquela | that +aqueles | those +aquelas | those +isto | this +aquilo | that + + | forms of estar, to be (not including the infinitive): +estou +está +estamos +estão +estive +esteve +estivemos +estiveram +estava +estávamos +estavam +estivera +estivéramos +esteja +estejamos +estejam +estivesse +estivéssemos +estivessem +estiver +estivermos +estiverem + + | forms of haver, to have (not including the infinitive): +hei +há +havemos +hão +houve +houvemos +houveram +houvera +houvéramos +haja +hajamos +hajam +houvesse +houvéssemos +houvessem +houver +houvermos +houverem +houverei +houverá +houveremos +houverão +houveria +houveríamos +houveriam + + | forms of ser, to be (not including the infinitive): +sou +somos +são +era +éramos +eram +fui +foi +fomos +foram +fora +fôramos +seja +sejamos +sejam +fosse +fôssemos +fossem +for +formos +forem +serei +será +seremos +serão +seria +seríamos +seriam + + | forms of ter, to have (not including the infinitive): +tenho +tem +temos +tém +tinha +tínhamos +tinham +tive +teve +tivemos +tiveram +tivera +tivéramos +tenha +tenhamos +tenham +tivesse +tivéssemos +tivessem +tiver +tivermos +tiverem +terei +terá +teremos +terão +teria +teríamos +teriam diff --git a/sphinx/search/_stopwords/ru.py b/sphinx/search/_stopwords/ru.py new file mode 100644 index 00000000000..cc275d5184a --- /dev/null +++ b/sphinx/search/_stopwords/ru.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +RUSSIAN_STOPWORDS = frozenset({ + 'а', + 'без', + 'более', + 'больше', + 'будет', + 'будто', + 'бы', + 'был', + 'была', + 'были', + 'было', + 'быть', + 'в', + 'вам', + 'вас', + 'вдруг', + 'ведь', + 'во', + 'вот', + 'впрочем', + 'все', + 'всегда', + 'всего', + 'всех', + 'всю', + 'вы', + 'где', + 'говорил', + 'да', + 
'даже', + 'два', + 'для', + 'до', + 'другой', + 'его', + 'ее', + 'ей', + 'ему', + 'если', + 'есть', + 'еще', + 'ж', + 'же', + 'жизнь', + 'за', + 'зачем', + 'здесь', + 'и', + 'из', + 'или', + 'им', + 'иногда', + 'их', + 'к', + 'кажется', + 'как', + 'какая', + 'какой', + 'когда', + 'конечно', + 'кто', + 'куда', + 'ли', + 'лучше', + 'между', + 'меня', + 'мне', + 'много', + 'может', + 'можно', + 'мой', + 'моя', + 'мы', + 'на', + 'над', + 'надо', + 'наконец', + 'нас', + 'не', + 'него', + 'нее', + 'ней', + 'нельзя', + 'нет', + 'ни', + 'нибудь', + 'никогда', + 'ним', + 'них', + 'ничего', + 'но', + 'ну', + 'о', + 'об', + 'один', + 'он', + 'она', + 'они', + 'опять', + 'от', + 'перед', + 'по', + 'под', + 'после', + 'потом', + 'потому', + 'почти', + 'при', + 'про', + 'раз', + 'разве', + 'с', + 'сам', + 'свою', + 'себе', + 'себя', + 'сегодня', + 'сейчас', + 'сказал', + 'сказала', + 'сказать', + 'со', + 'совсем', + 'так', + 'такой', + 'там', + 'тебя', + 'тем', + 'теперь', + 'то', + 'тогда', + 'того', + 'тоже', + 'только', + 'том', + 'тот', + 'три', + 'тут', + 'ты', + 'у', + 'уж', + 'уже', + 'хорошо', + 'хоть', + 'чего', + 'человек', + 'чем', + 'через', + 'что', + 'чтоб', + 'чтобы', + 'чуть', + 'эти', + 'этого', + 'этой', + 'этом', + 'этот', + 'эту', + 'я', +}) diff --git a/sphinx/search/_stopwords/ru.txt b/sphinx/search/_stopwords/ru.txt new file mode 100644 index 00000000000..43a73af0b55 --- /dev/null +++ b/sphinx/search/_stopwords/ru.txt @@ -0,0 +1,226 @@ +| source: https://snowballstem.org/algorithms/russian/stop.txt +и | and +в | in/into +во | alternative form +не | not +что | what/that +он | he +на | on/onto +я | i +с | from +со | alternative form +как | how +а | milder form of `no' (but) +то | conjunction and form of `that' +все | all +она | she +так | so, thus +его | him +но | but +да | yes/and +ты | thou +к | towards, by +у | around, chez +же | intensifier particle +вы | you +за | beyond, behind +бы | conditional/subj. 
particle +по | up to, along +только | only +ее | her +мне | to me +было | it was +вот | here is/are, particle +от | away from +меня | me +еще | still, yet, more +нет | no, there isnt/arent +о | about +из | out of +ему | to him +теперь | now +когда | when +даже | even +ну | so, well +вдруг | suddenly +ли | interrogative particle +если | if +уже | already, but homonym of `narrower' +или | or +ни | neither +быть | to be +был | he was +него | prepositional form of его +до | up to +вас | you accusative +нибудь | indef. suffix preceded by hyphen +опять | again +уж | already, but homonym of `adder' +вам | to you +сказал | he said +ведь | particle `after all' +там | there +потом | then +себя | oneself +ничего | nothing +ей | to her +может | usually with `быть' as `maybe' +они | they +тут | here +где | where +есть | there is/are +надо | got to, must +ней | prepositional form of ей +для | for +мы | we +тебя | thee +их | them, their +чем | than +была | she was +сам | self +чтоб | in order to +без | without +будто | as if +человек | man, person, one +чего | genitive form of `what' +раз | once +тоже | also +себе | to oneself +под | beneath +жизнь | life +будет | will be +ж | short form of intensifer particle `же' +тогда | then +кто | who +этот | this +говорил | was saying +того | genitive form of `that' +потому | for that reason +этого | genitive form of `this' +какой | which +совсем | altogether +ним | prepositional form of `его', `они' +здесь | here +этом | prepositional form of `этот' +один | one +почти | almost +мой | my +тем | instrumental/dative plural of `тот', `то' +чтобы | full form of `in order that' +нее | her (acc.) +кажется | it seems +сейчас | now +были | they were +куда | where to +зачем | why +сказать | to say +всех | all (acc., gen. preposn. 
plural) +никогда | never +сегодня | today +можно | possible, one can +при | by +наконец | finally +два | two +об | alternative form of `о', about +другой | another +хоть | even +после | after +над | above +больше | more +тот | that one (masc.) +через | across, in +эти | these +нас | us +про | about +всего | in all, only, of all +них | prepositional form of `они' (they) +какая | which, feminine +много | lots +разве | interrogative particle +сказала | she said +три | three +эту | this, acc. fem. sing. +моя | my, feminine +впрочем | moreover, besides +хорошо | good +свою | ones own, acc. fem. sing. +этой | oblique form of `эта', fem. `this' +перед | in front of +иногда | sometimes +лучше | better +чуть | a little +том | preposn. form of `that one' +нельзя | one must not +такой | such a one +им | to them +более | more +всегда | always +конечно | of course +всю | acc. fem. sing of `all' +между | between + + + | b: some paradigms + | + | personal pronouns + | + | я меня мне мной [мною] + | ты тебя тебе тобой [тобою] + | он его ему им [него, нему, ним] + | она ее эи ею [нее, нэи, нею] + | оно его ему им [него, нему, ним] + | + | мы нас нам нами + | вы вас вам вами + | они их им ими [них, ним, ними] + | + | себя себе собой [собою] + | + | demonstrative pronouns: этот (this), тот (that) + | + | этот эта это эти + | этого эты это эти + | этого этой этого этих + | этому этой этому этим + | этим этой этим [этою] этими + | этом этой этом этих + | + | тот та то те + | того ту то те + | того той того тех + | тому той тому тем + | тем той тем [тою] теми + | том той том тех + | + | determinative pronouns + | + | (a) весь (all) + | + | весь вся все все + | всего всю все все + | всего всей всего всех + | всему всей всему всем + | всем всей всем [всею] всеми + | всем всей всем всех + | + | (b) сам (himself etc) + | + | сам сама само сами + | самого саму само самих + | самого самой самого самих + | самому самой самому самим + | самим самой самим [самою] самими + | самом самой самом 
самих + | + | stems of verbs `to be', `to have', `to do' and modal + | + | быть бы буд быв есть суть + | име + | дел + | мог мож мочь + | уме + | хоч хот + | долж + | можн + | нужн + | нельзя diff --git a/sphinx/search/_stopwords/sv.py b/sphinx/search/_stopwords/sv.py new file mode 100644 index 00000000000..c1f10635e0b --- /dev/null +++ b/sphinx/search/_stopwords/sv.py @@ -0,0 +1,118 @@ +from __future__ import annotations + +SWEDISH_STOPWORDS = frozenset({ + 'alla', + 'allt', + 'att', + 'av', + 'blev', + 'bli', + 'blir', + 'blivit', + 'de', + 'dem', + 'den', + 'denna', + 'deras', + 'dess', + 'dessa', + 'det', + 'detta', + 'dig', + 'din', + 'dina', + 'ditt', + 'du', + 'där', + 'då', + 'efter', + 'ej', + 'eller', + 'en', + 'er', + 'era', + 'ert', + 'ett', + 'från', + 'för', + 'ha', + 'hade', + 'han', + 'hans', + 'har', + 'henne', + 'hennes', + 'hon', + 'honom', + 'hur', + 'här', + 'i', + 'icke', + 'ingen', + 'inom', + 'inte', + 'jag', + 'ju', + 'kan', + 'kunde', + 'man', + 'med', + 'mellan', + 'men', + 'mig', + 'min', + 'mina', + 'mitt', + 'mot', + 'mycket', + 'ni', + 'nu', + 'när', + 'någon', + 'något', + 'några', + 'och', + 'om', + 'oss', + 'på', + 'samma', + 'sedan', + 'sig', + 'sin', + 'sina', + 'sitta', + 'själv', + 'skulle', + 'som', + 'så', + 'sådan', + 'sådana', + 'sådant', + 'till', + 'under', + 'upp', + 'ut', + 'utan', + 'vad', + 'var', + 'vara', + 'varför', + 'varit', + 'varje', + 'vars', + 'vart', + 'vem', + 'vi', + 'vid', + 'vilka', + 'vilkas', + 'vilken', + 'vilket', + 'vår', + 'våra', + 'vårt', + 'än', + 'är', + 'åt', + 'över', +}) diff --git a/sphinx/search/_stopwords/sv.txt b/sphinx/search/_stopwords/sv.txt new file mode 100644 index 00000000000..850ae7474d6 --- /dev/null +++ b/sphinx/search/_stopwords/sv.txt @@ -0,0 +1,115 @@ +| source: https://snowballstem.org/algorithms/swedish/stop.txt +och | and +det | it, this/that +att | to (with infinitive) +i | in, at +en | a +jag | I +hon | she +som | who, that +han | he +på | on +den | it, this/that +med | 
with +var | where, each +sig | him(self) etc +för | for +så | so (also: seed) +till | to +är | is +men | but +ett | a +om | if; around, about +hade | had +de | they, these/those +av | of +icke | not, no +mig | me +du | you +henne | her +då | then, when +sin | his +nu | now +har | have +inte | inte någon = no one +hans | his +honom | him +skulle | 'sake' +hennes | her +där | there +min | my +man | one (pronoun) +ej | nor +vid | at, by, on (also: vast) +kunde | could +något | some etc +från | from, off +ut | out +när | when +efter | after, behind +upp | up +vi | we +dem | them +vara | be +vad | what +över | over +än | than +dig | you +kan | can +sina | his +här | here +ha | have +mot | towards +alla | all +under | under (also: wonder) +någon | some etc +eller | or (else) +allt | all +mycket | much +sedan | since +ju | why +denna | this/that +själv | myself, yourself etc +detta | this/that +åt | to +utan | without +varit | was +hur | how +ingen | no +mitt | my +ni | you +bli | to be, become +blev | from bli +oss | us +din | thy +dessa | these/those +några | some etc +deras | their +blir | from bli +mina | my +samma | (the) same +vilken | who, that +er | you, your +sådan | such a +vår | our +blivit | from bli +dess | its +inom | within +mellan | between +sådant | such a +varför | why +varje | each +vilka | who, that +ditt | thy +vem | who +vilket | who, that +sitta | his +sådana | such a +vart | each +dina | thy +vars | whose +vårt | our +våra | our +ert | your +era | your +vilkas | whose diff --git a/sphinx/search/da.py b/sphinx/search/da.py index 8be1c6a215e..3eb997af1c3 100644 --- a/sphinx/search/da.py +++ b/sphinx/search/da.py @@ -4,112 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -danish_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/danish/stop.txt -og | and -i | in -jeg | I -det | that (dem. pronoun)/it (pers. 
pronoun) -at | that (in front of a sentence)/to (with infinitive) -en | a/an -den | it (pers. pronoun)/that (dem. pronoun) -til | to/at/for/until/against/by/of/into, more -er | present tense of "to be" -som | who, as -på | on/upon/in/on/at/to/after/of/with/for, on -de | they -med | with/by/in, along -han | he -af | of/by/from/off/for/in/with/on, off -for | at/for/to/from/by/of/ago, in front/before, because -ikke | not -der | who/which, there/those -var | past tense of "to be" -mig | me/myself -sig | oneself/himself/herself/itself/themselves -men | but -et | a/an/one, one (number), someone/somebody/one -har | present tense of "to have" -om | round/about/for/in/a, about/around/down, if -vi | we -min | my -havde | past tense of "to have" -ham | him -hun | she -nu | now -over | over/above/across/by/beyond/past/on/about, over/past -da | then, when/as/since -fra | from/off/since, off, since -du | you -ud | out -sin | his/her/its/one's -dem | them -os | us/ourselves -op | up -man | you/one -hans | his -hvor | where -eller | or -hvad | what -skal | must/shall etc. -selv | myself/yourself/herself/ourselves etc., even -her | here -alle | all/everyone/everybody etc. 
-vil | will (verb) -blev | past tense of "to stay/to remain/to get/to become" -kunne | could -ind | in -når | when -være | present tense of "to be" -dog | however/yet/after all -noget | something -ville | would -jo | you know/you see (adv), yes -deres | their/theirs -efter | after/behind/according to/for/by/from, later/afterwards -ned | down -skulle | should -denne | this -end | than -dette | this -mit | my/mine -også | also -under | under/beneath/below/during, below/underneath -have | have -dig | you -anden | other -hende | her -mine | my -alt | everything -meget | much/very, plenty of -sit | his, her, its, one's -sine | his, her, its, one's -vor | our -mod | against -disse | these -hvis | if -din | your/yours -nogle | some -hos | by/at -blive | be/become -mange | many -ad | by/through -bliver | present tense of "to be/to become" -hendes | her/hers -været | be -thi | for (conj) -jer | you -sådan | such, like this/like that -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.da import DANISH_STOPWORDS class SearchDanish(SearchLanguage): lang = 'da' language_name = 'Danish' js_stemmer_rawcode = 'danish-stemmer.js' - stopwords = danish_stopwords + stopwords = DANISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/de.py b/sphinx/search/de.py index ac5ac7ee131..6875b9c7535 100644 --- a/sphinx/search/de.py +++ b/sphinx/search/de.py @@ -4,295 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -german_stopwords = parse_stop_word(""" -|source: https://snowballstem.org/algorithms/german/stop.txt -aber | but - -alle | all -allem -allen -aller -alles - -als | than, as -also | so -am | an + dem -an | at - -ander | other -andere -anderem -anderen -anderer -anderes -anderm -andern -anderr -anders - -auch | also -auf | on -aus | out of -bei | by -bin | am -bis | until -bist | art -da | there -damit | with it -dann | then - -der | the -den -des 
-dem -die -das - -daß | that - -derselbe | the same -derselben -denselben -desselben -demselben -dieselbe -dieselben -dasselbe - -dazu | to that - -dein | thy -deine -deinem -deinen -deiner -deines - -denn | because - -derer | of those -dessen | of him - -dich | thee -dir | to thee -du | thou - -dies | this -diese -diesem -diesen -dieser -dieses - - -doch | (several meanings) -dort | (over) there - - -durch | through - -ein | a -eine -einem -einen -einer -eines - -einig | some -einige -einigem -einigen -einiger -einiges - -einmal | once - -er | he -ihn | him -ihm | to him - -es | it -etwas | something - -euer | your -eure -eurem -euren -eurer -eures - -für | for -gegen | towards -gewesen | p.p. of sein -hab | have -habe | have -haben | have -hat | has -hatte | had -hatten | had -hier | here -hin | there -hinter | behind - -ich | I -mich | me -mir | to me - - -ihr | you, to her -ihre -ihrem -ihren -ihrer -ihres -euch | to you - -im | in + dem -in | in -indem | while -ins | in + das -ist | is - -jede | each, every -jedem -jeden -jeder -jedes - -jene | that -jenem -jenen -jener -jenes - -jetzt | now -kann | can - -kein | no -keine -keinem -keinen -keiner -keines - -können | can -könnte | could -machen | do -man | one - -manche | some, many a -manchem -manchen -mancher -manches - -mein | my -meine -meinem -meinen -meiner -meines - -mit | with -muss | must -musste | had to -nach | to(wards) -nicht | not -nichts | nothing -noch | still, yet -nun | now -nur | only -ob | whether -oder | or -ohne | without -sehr | very - -sein | his -seine -seinem -seinen -seiner -seines - -selbst | self -sich | herself - -sie | they, she -ihnen | to them - -sind | are -so | so - -solche | such -solchem -solchen -solcher -solches - -soll | shall -sollte | should -sondern | but -sonst | else -über | over -um | about, around -und | and - -uns | us -unse -unsem -unsen -unser -unses - -unter | under -viel | much -vom | von + dem -von | from -vor | before -während | while -war | was -waren | 
were -warst | wast -was | what -weg | away, off -weil | because -weiter | further - -welche | which -welchem -welchen -welcher -welches - -wenn | when -werde | will -werden | will -wie | how -wieder | again -will | want -wir | we -wird | will -wirst | willst -wo | where -wollen | want -wollte | wanted -würde | would -würden | would -zu | to -zum | zu + dem -zur | zu + der -zwar | indeed -zwischen | between -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.de import GERMAN_STOPWORDS class SearchGerman(SearchLanguage): lang = 'de' language_name = 'German' js_stemmer_rawcode = 'german-stemmer.js' - stopwords = german_stopwords + stopwords = GERMAN_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/en.py b/sphinx/search/en.py index 51494a04a4d..30324c8832a 100644 --- a/sphinx/search/en.py +++ b/sphinx/search/en.py @@ -5,18 +5,7 @@ import snowballstemmer from sphinx.search import SearchLanguage - -english_stopwords = { - 'a', 'and', 'are', 'as', 'at', - 'be', 'but', 'by', - 'for', - 'if', 'in', 'into', 'is', 'it', - 'near', 'no', 'not', - 'of', 'on', 'or', - 'such', - 'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to', - 'was', 'will', 'with', -} # fmt: skip +from sphinx.search._stopwords.en import ENGLISH_STOPWORDS js_porter_stemmer = """ /** @@ -209,7 +198,7 @@ class SearchEnglish(SearchLanguage): lang = 'en' language_name = 'English' js_stemmer_code = js_porter_stemmer - stopwords = english_stopwords + stopwords = ENGLISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/es.py b/sphinx/search/es.py index 3cc41f600ac..d11937ad0c6 100644 --- a/sphinx/search/es.py +++ b/sphinx/search/es.py @@ -4,355 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -spanish_stopwords = parse_stop_word(""" -|source: https://snowballstem.org/algorithms/spanish/stop.txt 
-de | from, of -la | the, her -que | who, that -el | the -en | in -y | and -a | to -los | the, them -del | de + el -se | himself, from him etc -las | the, them -por | for, by, etc -un | a -para | for -con | with -no | no -una | a -su | his, her -al | a + el - | es from SER -lo | him -como | how -más | more -pero | pero -sus | su plural -le | to him, her -ya | already -o | or - | fue from SER -este | this - | ha from HABER -sí | himself etc -porque | because -esta | this - | son from SER -entre | between - | está from ESTAR -cuando | when -muy | very -sin | without -sobre | on - | ser from SER - | tiene from TENER -también | also -me | me -hasta | until -hay | there is/are -donde | where - | han from HABER -quien | whom, that - | están from ESTAR - | estado from ESTAR -desde | from -todo | all -nos | us -durante | during - | estados from ESTAR -todos | all -uno | a -les | to them -ni | nor -contra | against -otros | other - | fueron from SER -ese | that -eso | that - | había from HABER -ante | before -ellos | they -e | and (variant of y) -esto | this -mí | me -antes | before -algunos | some -qué | what? 
-unos | a -yo | I -otro | other -otras | other -otra | other -él | he -tanto | so much, many -esa | that -estos | these -mucho | much, many -quienes | who -nada | nothing -muchos | many -cual | who - | sea from SER -poco | few -ella | she -estar | to be - | haber from HABER -estas | these - | estaba from ESTAR - | estamos from ESTAR -algunas | some -algo | something -nosotros | we - - | other forms - -mi | me -mis | mi plural -tú | thou -te | thee -ti | thee -tu | thy -tus | tu plural -ellas | they -nosotras | we -vosotros | you -vosotras | you -os | you -mío | mine -mía | -míos | -mías | -tuyo | thine -tuya | -tuyos | -tuyas | -suyo | his, hers, theirs -suya | -suyos | -suyas | -nuestro | ours -nuestra | -nuestros | -nuestras | -vuestro | yours -vuestra | -vuestros | -vuestras | -esos | those -esas | those - - | forms of estar, to be (not including the infinitive): -estoy -estás -está -estamos -estáis -están -esté -estés -estemos -estéis -estén -estaré -estarás -estará -estaremos -estaréis -estarán -estaría -estarías -estaríamos -estaríais -estarían -estaba -estabas -estábamos -estabais -estaban -estuve -estuviste -estuvo -estuvimos -estuvisteis -estuvieron -estuviera -estuvieras -estuviéramos -estuvierais -estuvieran -estuviese -estuvieses -estuviésemos -estuvieseis -estuviesen -estando -estado -estada -estados -estadas -estad - - | forms of haber, to have (not including the infinitive): -he -has -ha -hemos -habéis -han -haya -hayas -hayamos -hayáis -hayan -habré -habrás -habrá -habremos -habréis -habrán -habría -habrías -habríamos -habríais -habrían -había -habías -habíamos -habíais -habían -hube -hubiste -hubo -hubimos -hubisteis -hubieron -hubiera -hubieras -hubiéramos -hubierais -hubieran -hubiese -hubieses -hubiésemos -hubieseis -hubiesen -habiendo -habido -habida -habidos -habidas - - | forms of ser, to be (not including the infinitive): -soy -eres -es -somos -sois -son -sea -seas -seamos -seáis -sean -seré -serás -será -seremos -seréis -serán -sería 
-serías -seríamos -seríais -serían -era -eras -éramos -erais -eran -fui -fuiste -fue -fuimos -fuisteis -fueron -fuera -fueras -fuéramos -fuerais -fueran -fuese -fueses -fuésemos -fueseis -fuesen -siendo -sido - | sed also means 'thirst' - - | forms of tener, to have (not including the infinitive): -tengo -tienes -tiene -tenemos -tenéis -tienen -tenga -tengas -tengamos -tengáis -tengan -tendré -tendrás -tendrá -tendremos -tendréis -tendrán -tendría -tendrías -tendríamos -tendríais -tendrían -tenía -tenías -teníamos -teníais -tenían -tuve -tuviste -tuvo -tuvimos -tuvisteis -tuvieron -tuviera -tuvieras -tuviéramos -tuvierais -tuvieran -tuviese -tuvieses -tuviésemos -tuvieseis -tuviesen -teniendo -tenido -tenida -tenidos -tenidas -tened -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.es import SPANISH_STOPWORDS class SearchSpanish(SearchLanguage): lang = 'es' language_name = 'Spanish' js_stemmer_rawcode = 'spanish-stemmer.js' - stopwords = spanish_stopwords + stopwords = SPANISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py index c8b048d4fc9..cd044b71a80 100644 --- a/sphinx/search/fi.py +++ b/sphinx/search/fi.py @@ -4,105 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -finnish_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/finnish/stop.txt -| forms of BE - -olla -olen -olet -on -olemme -olette -ovat -ole | negative form - -oli -olisi -olisit -olisin -olisimme -olisitte -olisivat -olit -olin -olimme -olitte -olivat -ollut -olleet - -en | negation -et -ei -emme -ette -eivät - -|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans -minä minun minut minua minussa minusta minuun minulla minulta minulle | I -sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you -hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she 
-me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we -te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you -he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they - -tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this -tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that -se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it -nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these -nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those -ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they - -kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who -ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl) -mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what -mitkä | (pl) - -joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which -jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl) - -| conjunctions - -että | that -ja | and -jos | if -koska | because -kuin | than -mutta | but -niin | so -sekä | and -sillä | for -tai | or -vaan | but -vai | or -vaikka | although - - -| prepositions - -kanssa | with -mukaan | according to -noin | about -poikki | across -yli | over, across - -| other - -kun | when -niin | so -nyt | now -itse | self -""") # NoQA: E501 +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.fi import FINNISH_STOPWORDS class SearchFinnish(SearchLanguage): lang = 'fi' language_name = 'Finnish' js_stemmer_rawcode = 'finnish-stemmer.js' - stopwords = finnish_stopwords + stopwords = FINNISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py index bbdc56032ff..11a2c70f5dc 100644 --- a/sphinx/search/fr.py +++ b/sphinx/search/fr.py @@ 
-4,191 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -french_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/french/stop.txt -au | a + le -aux | a + les -avec | with -ce | this -ces | these -dans | with -de | of -des | de + les -du | de + le -elle | she -en | `of them' etc -et | and -eux | them -il | he -je | I -la | the -le | the -leur | their -lui | him -ma | my (fem) -mais | but -me | me -même | same; as in moi-même (myself) etc -mes | me (pl) -moi | me -mon | my (masc) -ne | not -nos | our (pl) -notre | our -nous | we -on | one -ou | where -par | by -pas | not -pour | for -qu | que before vowel -que | that -qui | who -sa | his, her (fem) -se | oneself -ses | his (pl) -son | his, her (masc) -sur | on -ta | thy (fem) -te | thee -tes | thy (pl) -toi | thee -ton | thy (masc) -tu | thou -un | a -une | a -vos | your (pl) -votre | your -vous | you - - | single letter forms - -c | c' -d | d' -j | j' -l | l' -à | to, at -m | m' -n | n' -s | s' -t | t' -y | there - - | forms of être (not including the infinitive): -été -étée -étées -étés -étant -suis -es -est -sommes -êtes -sont -serai -seras -sera -serons -serez -seront -serais -serait -serions -seriez -seraient -étais -était -étions -étiez -étaient -fus -fut -fûmes -fûtes -furent -sois -soit -soyons -soyez -soient -fusse -fusses -fût -fussions -fussiez -fussent - - | forms of avoir (not including the infinitive): -ayant -eu -eue -eues -eus -ai -as -avons -avez -ont -aurai -auras -aura -aurons -aurez -auront -aurais -aurait -aurions -auriez -auraient -avais -avait -avions -aviez -avaient -eut -eûmes -eûtes -eurent -aie -aies -ait -ayons -ayez -aient -eusse -eusses -eût -eussions -eussiez -eussent - - | Later additions (from Jean-Christophe Deschamps) -ceci | this -cela | that (added 11 Apr 2012. 
Omission reported by Adrien Grand) -celà | that (incorrect, though common) -cet | this -cette | this -ici | here -ils | they -les | the (pl) -leurs | their (pl) -quel | which -quels | which -quelle | which -quelles | which -sans | without -soi | oneself -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.fr import FRENCH_STOPWORDS class SearchFrench(SearchLanguage): lang = 'fr' language_name = 'French' js_stemmer_rawcode = 'french-stemmer.js' - stopwords = french_stopwords + stopwords = FRENCH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py index 4e30ca407ee..e86159cb604 100644 --- a/sphinx/search/hu.py +++ b/sphinx/search/hu.py @@ -4,218 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -hungarian_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/hungarian/stop.txt -| prepared by Anna Tordai -a -ahogy -ahol -aki -akik -akkor -alatt -által -általában -amely -amelyek -amelyekben -amelyeket -amelyet -amelynek -ami -amit -amolyan -amíg -amikor -át -abban -ahhoz -annak -arra -arról -az -azok -azon -azt -azzal -azért -aztán -azután -azonban -bár -be -belül -benne -cikk -cikkek -cikkeket -csak -de -e -eddig -egész -egy -egyes -egyetlen -egyéb -egyik -egyre -ekkor -el -elég -ellen -elő -először -előtt -első -én -éppen -ebben -ehhez -emilyen -ennek -erre -ez -ezt -ezek -ezen -ezzel -ezért -és -fel -felé -hanem -hiszen -hogy -hogyan -igen -így -illetve -ill. 
-ill -ilyen -ilyenkor -ison -ismét -itt -jó -jól -jobban -kell -kellett -keresztül -keressünk -ki -kívül -között -közül -legalább -lehet -lehetett -legyen -lenne -lenni -lesz -lett -maga -magát -majd -majd -már -más -másik -meg -még -mellett -mert -mely -melyek -mi -mit -míg -miért -milyen -mikor -minden -mindent -mindenki -mindig -mint -mintha -mivel -most -nagy -nagyobb -nagyon -ne -néha -nekem -neki -nem -néhány -nélkül -nincs -olyan -ott -össze -ő -ők -őket -pedig -persze -rá -s -saját -sem -semmi -sok -sokat -sokkal -számára -szemben -szerint -szinte -talán -tehát -teljes -tovább -továbbá -több -úgy -ugyanis -új -újabb -újra -után -utána -utolsó -vagy -vagyis -valaki -valami -valamint -való -vagyok -van -vannak -volt -voltam -voltak -voltunk -vissza -vele -viszont -volna -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.hu import HUNGARIAN_STOPWORDS class SearchHungarian(SearchLanguage): lang = 'hu' language_name = 'Hungarian' js_stemmer_rawcode = 'hungarian-stemmer.js' - stopwords = hungarian_stopwords + stopwords = HUNGARIAN_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/it.py b/sphinx/search/it.py index b42e9699b33..a7052c9ae82 100644 --- a/sphinx/search/it.py +++ b/sphinx/search/it.py @@ -4,308 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -italian_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/italian/stop.txt -ad | a (to) before vowel -al | a + il -allo | a + lo -ai | a + i -agli | a + gli -all | a + l' -agl | a + gl' -alla | a + la -alle | a + le -con | with -col | con + il -coi | con + i (forms collo, cogli etc are now very rare) -da | from -dal | da + il -dallo | da + lo -dai | da + i -dagli | da + gli -dall | da + l' -dagl | da + gll' -dalla | da + la -dalle | da + le -di | of -del | di + il -dello | di + lo -dei | di + i -degli | di + gli -dell | di + l' -degl | di + gl' 
-della | di + la -delle | di + le -in | in -nel | in + el -nello | in + lo -nei | in + i -negli | in + gli -nell | in + l' -negl | in + gl' -nella | in + la -nelle | in + le -su | on -sul | su + il -sullo | su + lo -sui | su + i -sugli | su + gli -sull | su + l' -sugl | su + gl' -sulla | su + la -sulle | su + le -per | through, by -tra | among -contro | against -io | I -tu | thou -lui | he -lei | she -noi | we -voi | you -loro | they -mio | my -mia | -miei | -mie | -tuo | -tua | -tuoi | thy -tue | -suo | -sua | -suoi | his, her -sue | -nostro | our -nostra | -nostri | -nostre | -vostro | your -vostra | -vostri | -vostre | -mi | me -ti | thee -ci | us, there -vi | you, there -lo | him, the -la | her, the -li | them -le | them, the -gli | to him, the -ne | from there etc -il | the -un | a -uno | a -una | a -ma | but -ed | and -se | if -perché | why, because -anche | also -come | how -dov | where (as dov') -dove | where -che | who, that -chi | who -cui | whom -non | not -più | more -quale | who, that -quanto | how much -quanti | -quanta | -quante | -quello | that -quelli | -quella | -quelle | -questo | this -questi | -questa | -queste | -si | yes -tutto | all -tutti | all - - | single letter forms: - -a | at -c | as c' for ce or ci -e | and -i | the -l | as l' -o | or - - | forms of avere, to have (not including the infinitive): - -ho -hai -ha -abbiamo -avete -hanno -abbia -abbiate -abbiano -avrò -avrai -avrà -avremo -avrete -avranno -avrei -avresti -avrebbe -avremmo -avreste -avrebbero -avevo -avevi -aveva -avevamo -avevate -avevano -ebbi -avesti -ebbe -avemmo -aveste -ebbero -avessi -avesse -avessimo -avessero -avendo -avuto -avuta -avuti -avute - - | forms of essere, to be (not including the infinitive): -sono -sei -è -siamo -siete -sia -siate -siano -sarò -sarai -sarà -saremo -sarete -saranno -sarei -saresti -sarebbe -saremmo -sareste -sarebbero -ero -eri -era -eravamo -eravate -erano -fui -fosti -fu -fummo -foste -furono -fossi -fosse -fossimo -fossero -essendo - 
- | forms of fare, to do (not including the infinitive, fa, fat-): -faccio -fai -facciamo -fanno -faccia -facciate -facciano -farò -farai -farà -faremo -farete -faranno -farei -faresti -farebbe -faremmo -fareste -farebbero -facevo -facevi -faceva -facevamo -facevate -facevano -feci -facesti -fece -facemmo -faceste -fecero -facessi -facesse -facessimo -facessero -facendo - - | forms of stare, to be (not including the infinitive): -sto -stai -sta -stiamo -stanno -stia -stiate -stiano -starò -starai -starà -staremo -starete -staranno -starei -staresti -starebbe -staremmo -stareste -starebbero -stavo -stavi -stava -stavamo -stavate -stavano -stetti -stesti -stette -stemmo -steste -stettero -stessi -stesse -stessimo -stessero -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.it import ITALIAN_STOPWORDS class SearchItalian(SearchLanguage): lang = 'it' language_name = 'Italian' js_stemmer_rawcode = 'italian-stemmer.js' - stopwords = italian_stopwords + stopwords = ITALIAN_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py index 39c14c76664..0692920efc4 100644 --- a/sphinx/search/nl.py +++ b/sphinx/search/nl.py @@ -4,119 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -dutch_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/dutch/stop.txt -de | the -en | and -van | of, from -ik | I, the ego -te | (1) chez, at etc, (2) to, (3) too -dat | that, which -die | that, those, who, which -in | in, inside -een | a, an, one -hij | he -het | the, it -niet | not, nothing, naught -zijn | (1) to be, being, (2) his, one's, its -is | is -was | (1) was, past tense of all persons sing. 
of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river -op | on, upon, at, in, up, used up -aan | on, upon, to (as dative) -met | with, by -als | like, such as, when -voor | (1) before, in front of, (2) furrow -had | had, past tense all persons sing. of 'hebben' (have) -er | there -maar | but, only -om | round, about, for etc -hem | him -dan | then -zou | should/would, past tense all persons sing. of 'zullen' -of | or, whether, if -wat | what, something, anything -mijn | possessive and noun 'mine' -men | people, 'one' -dit | this -zo | so, thus, in this way -door | through by -over | over, across -ze | she, her, they, them -zich | oneself -bij | (1) a bee, (2) by, near, at -ook | also, too -tot | till, until -je | you -mij | me -uit | out of, from -der | Old Dutch form of 'van der' still found in surnames -daar | (1) there, (2) because -haar | (1) her, their, them, (2) hair -naar | (1) unpleasant, unwell etc, (2) towards, (3) as -heb | present first person sing. of 'to have' -hoe | how, why -heeft | present third person sing. of 'to have' -hebben | 'to have' and various parts thereof -deze | this -u | you -want | (1) for, (2) mitten, (3) rigging -nog | yet, still -zal | 'shall', first and third person sing. 
of verb 'zullen' (will) -me | me -zij | she, they -nu | now -ge | 'thou', still used in Belgium and south Netherlands -geen | none -omdat | because -iets | something, somewhat -worden | to become, grow, get -toch | yet, still -al | all, every, each -waren | (1) 'were' (2) to wander, (3) wares, (3) -veel | much, many -meer | (1) more, (2) lake -doen | to do, to make -toen | then, when -moet | noun 'spot/mote' and present form of 'to must' -ben | (1) am, (2) 'are' in interrogative second person singular of 'to be' -zonder | without -kan | noun 'can' and present form of 'to be able' -hun | their, them -dus | so, consequently -alles | all, everything, anything -onder | under, beneath -ja | yes, of course -eens | once, one day -hier | here -wie | who -werd | imperfect third person sing. of 'become' -altijd | always -doch | yet, but etc -wordt | present third person sing. of 'become' -wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans -kunnen | to be able -ons | us/our -zelf | self -tegen | against, towards, at -na | after, near -reeds | already -wil | (1) present tense of 'want', (2) 'will', noun, (3) fender -kon | could; past tense of 'to be able' -niets | nothing -uw | your -iemand | somebody -geweest | been; past participle of 'be' -andere | other -""") # NoQA: E501 +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.nl import DUTCH_STOPWORDS class SearchDutch(SearchLanguage): lang = 'nl' language_name = 'Dutch' js_stemmer_rawcode = 'dutch-stemmer.js' - stopwords = dutch_stopwords + stopwords = DUTCH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/no.py b/sphinx/search/no.py index 7a21e6728cb..a2bb88ee9a4 100644 --- a/sphinx/search/no.py +++ b/sphinx/search/no.py @@ -4,194 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -norwegian_stopwords = parse_stop_word(""" -| source: 
https://snowballstem.org/algorithms/norwegian/stop.txt -og | and -i | in -jeg | I -det | it/this/that -at | to (w. inf.) -en | a/an -et | a/an -den | it/this/that -til | to -er | is/am/are -som | who/that -på | on -de | they / you(formal) -med | with -han | he -av | of -ikke | not -ikkje | not * -der | there -så | so -var | was/were -meg | me -seg | you -men | but -ett | one -har | have -om | about -vi | we -min | my -mitt | my -ha | have -hadde | had -hun | she -nå | now -over | over -da | when/as -ved | by/know -fra | from -du | you -ut | out -sin | your -dem | them -oss | us -opp | up -man | you/one -kan | can -hans | his -hvor | where -eller | or -hva | what -skal | shall/must -selv | self (reflective) -sjøl | self (reflective) -her | here -alle | all -vil | will -bli | become -ble | became -blei | became * -blitt | have become -kunne | could -inn | in -når | when -være | be -kom | come -noen | some -noe | some -ville | would -dere | you -som | who/which/that -deres | their/theirs -kun | only/just -ja | yes -etter | after -ned | down -skulle | should -denne | this -for | for/because -deg | you -si | hers/his -sine | hers/his -sitt | hers/his -mot | against -å | to -meget | much -hvorfor | why -dette | this -disse | these/those -uten | without -hvordan | how -ingen | none -din | your -ditt | your -blir | become -samme | same -hvilken | which -hvilke | which (plural) -sånn | such a -inni | inside/within -mellom | between -vår | our -hver | each -hvem | who -vors | us/ours -hvis | whose -både | both -bare | only/just -enn | than -fordi | as/because -før | before -mange | many -også | also -slik | just -vært | been -være | to be -båe | both * -begge | both -siden | since -dykk | your * -dykkar | yours * -dei | they * -deira | them * -deires | theirs * -deim | them * -di | your (fem.) 
* -då | as/when * -eg | I * -ein | a/an * -eit | a/an * -eitt | a/an * -elles | or * -honom | he * -hjå | at * -ho | she * -hoe | she * -henne | her -hennar | her/hers -hennes | hers -hoss | how * -hossen | how * -ikkje | not * -ingi | noone * -inkje | noone * -korleis | how * -korso | how * -kva | what/which * -kvar | where * -kvarhelst | where * -kven | who/whom * -kvi | why * -kvifor | why * -me | we * -medan | while * -mi | my * -mine | my * -mykje | much * -no | now * -nokon | some (masc./neut.) * -noka | some (fem.) * -nokor | some * -noko | some * -nokre | some * -si | his/hers * -sia | since * -sidan | since * -so | so * -somt | some * -somme | some * -um | about* -upp | up * -vere | be * -vore | was * -verte | become * -vort | become * -varte | became * -vart | became * -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.no import NORWEGIAN_STOPWORDS class SearchNorwegian(SearchLanguage): lang = 'no' language_name = 'Norwegian' js_stemmer_rawcode = 'norwegian-stemmer.js' - stopwords = norwegian_stopwords + stopwords = NORWEGIAN_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py index 82f1858f0de..9c5dfa05774 100644 --- a/sphinx/search/pt.py +++ b/sphinx/search/pt.py @@ -4,253 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -portuguese_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/portuguese/stop.txt -de | of, from -a | the; to, at; her -o | the; him -que | who, that -e | and -do | de + o -da | de + a -em | in -um | a -para | for - | é from SER -com | with -não | not, no -uma | a -os | the; them -no | em + o -se | himself etc -na | em + a -por | for -mais | more -as | the; them -dos | de + os -como | as, like -mas | but - | foi from SER -ao | a + o -ele | he -das | de + as - | tem from TER -à | a + a -seu | his -sua | her -ou | or - | ser from SER -quando | when 
-muito | much - | há from HAV -nos | em + os; us -já | already, now - | está from EST -eu | I -também | also -só | only, just -pelo | per + o -pela | per + a -até | up to -isso | that -ela | he -entre | between - | era from SER -depois | after -sem | without -mesmo | same -aos | a + os - | ter from TER -seus | his -quem | whom -nas | em + as -me | me -esse | that -eles | they - | estão from EST -você | you - | tinha from TER - | foram from SER -essa | that -num | em + um -nem | nor -suas | her -meu | my -às | a + as -minha | my - | têm from TER -numa | em + uma -pelos | per + os -elas | they - | havia from HAV - | seja from SER -qual | which - | será from SER -nós | we - | tenho from TER -lhe | to him, her -deles | of them -essas | those -esses | those -pelas | per + as -este | this - | fosse from SER -dele | of him - - | other words. There are many contractions such as naquele = em+aquele, - | mo = me+o, but they are rare. - | Indefinite article plural forms are also rare. - -tu | thou -te | thee -vocês | you (plural) -vos | you -lhes | to them -meus | my -minhas -teu | thy -tua -teus -tuas -nosso | our -nossa -nossos -nossas - -dela | of her -delas | of them - -esta | this -estes | these -estas | these -aquele | that -aquela | that -aqueles | those -aquelas | those -isto | this -aquilo | that - - | forms of estar, to be (not including the infinitive): -estou -está -estamos -estão -estive -esteve -estivemos -estiveram -estava -estávamos -estavam -estivera -estivéramos -esteja -estejamos -estejam -estivesse -estivéssemos -estivessem -estiver -estivermos -estiverem - - | forms of haver, to have (not including the infinitive): -hei -há -havemos -hão -houve -houvemos -houveram -houvera -houvéramos -haja -hajamos -hajam -houvesse -houvéssemos -houvessem -houver -houvermos -houverem -houverei -houverá -houveremos -houverão -houveria -houveríamos -houveriam - - | forms of ser, to be (not including the infinitive): -sou -somos -são -era -éramos -eram -fui -foi -fomos 
-foram -fora -fôramos -seja -sejamos -sejam -fosse -fôssemos -fossem -for -formos -forem -serei -será -seremos -serão -seria -seríamos -seriam - - | forms of ter, to have (not including the infinitive): -tenho -tem -temos -tém -tinha -tínhamos -tinham -tive -teve -tivemos -tiveram -tivera -tivéramos -tenha -tenhamos -tenham -tivesse -tivéssemos -tivessem -tiver -tivermos -tiverem -terei -terá -teremos -terão -teria -teríamos -teriam -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.pt import PORTUGUESE_STOPWORDS class SearchPortuguese(SearchLanguage): lang = 'pt' language_name = 'Portuguese' js_stemmer_rawcode = 'portuguese-stemmer.js' - stopwords = portuguese_stopwords + stopwords = PORTUGUESE_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py index e08ce5a09e3..6aebdc13249 100644 --- a/sphinx/search/ro.py +++ b/sphinx/search/ro.py @@ -11,7 +11,7 @@ class SearchRomanian(SearchLanguage): lang = 'ro' language_name = 'Romanian' js_stemmer_rawcode = 'romanian-stemmer.js' - stopwords: set[str] = set() + stopwords = frozenset() def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py index aeab09fa624..52ff533832e 100644 --- a/sphinx/search/ru.py +++ b/sphinx/search/ru.py @@ -4,243 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -russian_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/russian/stop.txt -и | and -в | in/into -во | alternative form -не | not -что | what/that -он | he -на | on/onto -я | i -с | from -со | alternative form -как | how -а | milder form of `no' (but) -то | conjunction and form of `that' -все | all -она | she -так | so, thus -его | him -но | but -да | yes/and -ты | thou -к | towards, by -у | around, chez -же | intensifier particle -вы | you -за | beyond, behind -бы | 
conditional/subj. particle -по | up to, along -только | only -ее | her -мне | to me -было | it was -вот | here is/are, particle -от | away from -меня | me -еще | still, yet, more -нет | no, there isnt/arent -о | about -из | out of -ему | to him -теперь | now -когда | when -даже | even -ну | so, well -вдруг | suddenly -ли | interrogative particle -если | if -уже | already, but homonym of `narrower' -или | or -ни | neither -быть | to be -был | he was -него | prepositional form of его -до | up to -вас | you accusative -нибудь | indef. suffix preceded by hyphen -опять | again -уж | already, but homonym of `adder' -вам | to you -сказал | he said -ведь | particle `after all' -там | there -потом | then -себя | oneself -ничего | nothing -ей | to her -может | usually with `быть' as `maybe' -они | they -тут | here -где | where -есть | there is/are -надо | got to, must -ней | prepositional form of ей -для | for -мы | we -тебя | thee -их | them, their -чем | than -была | she was -сам | self -чтоб | in order to -без | without -будто | as if -человек | man, person, one -чего | genitive form of `what' -раз | once -тоже | also -себе | to oneself -под | beneath -жизнь | life -будет | will be -ж | short form of intensifer particle `же' -тогда | then -кто | who -этот | this -говорил | was saying -того | genitive form of `that' -потому | for that reason -этого | genitive form of `this' -какой | which -совсем | altogether -ним | prepositional form of `его', `они' -здесь | here -этом | prepositional form of `этот' -один | one -почти | almost -мой | my -тем | instrumental/dative plural of `тот', `то' -чтобы | full form of `in order that' -нее | her (acc.) -кажется | it seems -сейчас | now -были | they were -куда | where to -зачем | why -сказать | to say -всех | all (acc., gen. preposn. 
plural) -никогда | never -сегодня | today -можно | possible, one can -при | by -наконец | finally -два | two -об | alternative form of `о', about -другой | another -хоть | even -после | after -над | above -больше | more -тот | that one (masc.) -через | across, in -эти | these -нас | us -про | about -всего | in all, only, of all -них | prepositional form of `они' (they) -какая | which, feminine -много | lots -разве | interrogative particle -сказала | she said -три | three -эту | this, acc. fem. sing. -моя | my, feminine -впрочем | moreover, besides -хорошо | good -свою | ones own, acc. fem. sing. -этой | oblique form of `эта', fem. `this' -перед | in front of -иногда | sometimes -лучше | better -чуть | a little -том | preposn. form of `that one' -нельзя | one must not -такой | such a one -им | to them -более | more -всегда | always -конечно | of course -всю | acc. fem. sing of `all' -между | between - - - | b: some paradigms - | - | personal pronouns - | - | я меня мне мной [мною] - | ты тебя тебе тобой [тобою] - | он его ему им [него, нему, ним] - | она ее эи ею [нее, нэи, нею] - | оно его ему им [него, нему, ним] - | - | мы нас нам нами - | вы вас вам вами - | они их им ими [них, ним, ними] - | - | себя себе собой [собою] - | - | demonstrative pronouns: этот (this), тот (that) - | - | этот эта это эти - | этого эты это эти - | этого этой этого этих - | этому этой этому этим - | этим этой этим [этою] этими - | этом этой этом этих - | - | тот та то те - | того ту то те - | того той того тех - | тому той тому тем - | тем той тем [тою] теми - | том той том тех - | - | determinative pronouns - | - | (a) весь (all) - | - | весь вся все все - | всего всю все все - | всего всей всего всех - | всему всей всему всем - | всем всей всем [всею] всеми - | всем всей всем всех - | - | (b) сам (himself etc) - | - | сам сама само сами - | самого саму само самих - | самого самой самого самих - | самому самой самому самим - | самим самой самим [самою] самими - | самом самой самом 
самих - | - | stems of verbs `to be', `to have', `to do' and modal - | - | быть бы буд быв есть суть - | име - | дел - | мог мож мочь - | уме - | хоч хот - | долж - | можн - | нужн - | нельзя -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.ru import RUSSIAN_STOPWORDS class SearchRussian(SearchLanguage): lang = 'ru' language_name = 'Russian' js_stemmer_rawcode = 'russian-stemmer.js' - stopwords = russian_stopwords + stopwords = RUSSIAN_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py index 9a8232ef2bf..bcfac2ba528 100644 --- a/sphinx/search/sv.py +++ b/sphinx/search/sv.py @@ -4,132 +4,15 @@ import snowballstemmer -from sphinx.search import SearchLanguage, parse_stop_word - -swedish_stopwords = parse_stop_word(""" -| source: https://snowballstem.org/algorithms/swedish/stop.txt -och | and -det | it, this/that -att | to (with infinitive) -i | in, at -en | a -jag | I -hon | she -som | who, that -han | he -på | on -den | it, this/that -med | with -var | where, each -sig | him(self) etc -för | for -så | so (also: seed) -till | to -är | is -men | but -ett | a -om | if; around, about -hade | had -de | they, these/those -av | of -icke | not, no -mig | me -du | you -henne | her -då | then, when -sin | his -nu | now -har | have -inte | inte någon = no one -hans | his -honom | him -skulle | 'sake' -hennes | her -där | there -min | my -man | one (pronoun) -ej | nor -vid | at, by, on (also: vast) -kunde | could -något | some etc -från | from, off -ut | out -när | when -efter | after, behind -upp | up -vi | we -dem | them -vara | be -vad | what -över | over -än | than -dig | you -kan | can -sina | his -här | here -ha | have -mot | towards -alla | all -under | under (also: wonder) -någon | some etc -eller | or (else) -allt | all -mycket | much -sedan | since -ju | why -denna | this/that -själv | myself, yourself etc -detta | this/that -åt | to -utan | 
without -varit | was -hur | how -ingen | no -mitt | my -ni | you -bli | to be, become -blev | from bli -oss | us -din | thy -dessa | these/those -några | some etc -deras | their -blir | from bli -mina | my -samma | (the) same -vilken | who, that -er | you, your -sådan | such a -vår | our -blivit | from bli -dess | its -inom | within -mellan | between -sådant | such a -varför | why -varje | each -vilka | who, that -ditt | thy -vem | who -vilket | who, that -sitta | his -sådana | such a -vart | each -dina | thy -vars | whose -vårt | our -våra | our -ert | your -era | your -vilkas | whose -""") +from sphinx.search import SearchLanguage +from sphinx.search._stopwords.sv import SWEDISH_STOPWORDS class SearchSwedish(SearchLanguage): lang = 'sv' language_name = 'Swedish' js_stemmer_rawcode = 'swedish-stemmer.js' - stopwords = swedish_stopwords + stopwords = SWEDISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py index 82080bf5c61..674264f1928 100644 --- a/sphinx/search/tr.py +++ b/sphinx/search/tr.py @@ -11,7 +11,7 @@ class SearchTurkish(SearchLanguage): lang = 'tr' language_name = 'Turkish' js_stemmer_rawcode = 'turkish-stemmer.js' - stopwords: set[str] = set() + stopwords = frozenset() def __init__(self, options: dict[str, str]) -> None: super().__init__(options) diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py index c063631f865..d22f765d520 100644 --- a/sphinx/search/zh.py +++ b/sphinx/search/zh.py @@ -9,6 +9,7 @@ import snowballstemmer from sphinx.search import SearchLanguage +from sphinx.search._stopwords.en import ENGLISH_STOPWORDS if TYPE_CHECKING: from collections.abc import Iterator @@ -32,18 +33,6 @@ def cut_for_search(sentence: str, HMM: bool = True) -> Iterator[str]: ) del jieba -english_stopwords = { - 'a', 'and', 'are', 'as', 'at', - 'be', 'but', 'by', - 'for', - 'if', 'in', 'into', 'is', 'it', - 'near', 'no', 'not', - 'of', 'on', 'or', - 'such', - 'that', 
'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to', - 'was', 'will', 'with', -} # fmt: skip - js_porter_stemmer = """ /** * Porter Stemmer @@ -237,7 +226,7 @@ class SearchChinese(SearchLanguage): lang = 'zh' language_name = 'Chinese' js_stemmer_code = js_porter_stemmer - stopwords = english_stopwords + stopwords = ENGLISH_STOPWORDS latin1_letters = re.compile(r'[a-zA-Z0-9_]+') def __init__(self, options: dict[str, str]) -> None: From 0296bbe3af1c4821752e60943a15a8b9fdf5667f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 18 May 2025 04:52:35 +0100 Subject: [PATCH 065/435] Bump types-defusedxml to 0.7.0.20250516 (#13563) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 15520dc1841..d2bc85e02b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,7 +96,7 @@ lint = [ "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", - "types-defusedxml==0.7.0.20240218", + "types-defusedxml==0.7.0.20250516", "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250514", @@ -164,7 +164,7 @@ types = [ type-stubs = [ # align with versions used elsewhere "types-colorama==0.4.15.20240311", - "types-defusedxml==0.7.0.20240218", + "types-defusedxml==0.7.0.20250516", "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250514", From 54ca93372f6e22f563071fb39c9827f15543f1c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 18 May 2025 04:52:48 +0100 Subject: [PATCH 066/435] Bump types-pygments to 2.19.0.20250516 (#13565) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d2bc85e02b8..3d166f9427a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ lint = [ "types-defusedxml==0.7.0.20250516", 
"types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", - "types-Pygments==2.19.0.20250514", + "types-Pygments==2.19.0.20250516", "types-requests==2.32.0.20250515", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.400", @@ -167,7 +167,7 @@ type-stubs = [ "types-defusedxml==0.7.0.20250516", "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", - "types-Pygments==2.19.0.20250514", + "types-Pygments==2.19.0.20250516", "types-requests==2.32.0.20250515", "types-urllib3==1.26.25.14", ] From 647d9bdbc6396c3d52d6690f989b3addcee8581d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 18 May 2025 04:53:20 +0100 Subject: [PATCH 067/435] Bump Ruff to 0.11.10 (#13566) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3d166f9427a..e18dbb8e91e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.9", + "ruff==0.11.10", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.9", + "ruff==0.11.10", "sphinx-lint>=0.9", ] package = [ From 63fdb590687ad431ef64c23c65492a3601ae5813 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 19 May 2025 00:48:32 +0100 Subject: [PATCH 068/435] Bump pypi-attestations to 0.0.26 --- pyproject.toml | 4 ++-- utils/convert_attestations.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e18dbb8e91e..39b18f23104 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,7 +104,7 @@ lint = [ "types-urllib3==1.26.25.14", "pyright==1.1.400", "pytest>=8.0", - "pypi-attestations==0.0.25", + "pypi-attestations==0.0.26", "betterproto==2.0.0b6", ] test = [ @@ -141,7 +141,7 @@ lint = [ package = [ 
"betterproto==2.0.0b6", # resolution fails without betterproto "build", - "pypi-attestations==0.0.25", + "pypi-attestations==0.0.26", "twine>=6.1", ] test = [ diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py index d4516c3c3f4..c62fd5a057c 100644 --- a/utils/convert_attestations.py +++ b/utils/convert_attestations.py @@ -7,7 +7,7 @@ # /// script # requires-python = ">=3.11" # dependencies = [ -# "pypi-attestations==0.0.25", +# "pypi-attestations==0.0.26", # "betterproto==2.0.0b6", # ] # /// From d4c036a90c25d5122215c1a2673db3debf631c27 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 19 May 2025 01:31:50 +0100 Subject: [PATCH 069/435] Update JavaScript sources for language stemmers to Snowball 3.0.1 (#13573) --- sphinx/search/minified-js/README.rst | 7 + sphinx/search/minified-js/arabic-stemmer.js | 1 + sphinx/search/minified-js/armenian-stemmer.js | 1 + sphinx/search/minified-js/base-stemmer.js | 2 +- sphinx/search/minified-js/basque-stemmer.js | 1 + sphinx/search/minified-js/catalan-stemmer.js | 1 + sphinx/search/minified-js/danish-stemmer.js | 2 +- sphinx/search/minified-js/dutch-stemmer.js | 2 +- .../minified-js/dutch_porter-stemmer.js | 1 + sphinx/search/minified-js/english-stemmer.js | 1 + .../search/minified-js/esperanto-stemmer.js | 1 + sphinx/search/minified-js/estonian-stemmer.js | 1 + sphinx/search/minified-js/finnish-stemmer.js | 2 +- sphinx/search/minified-js/french-stemmer.js | 2 +- sphinx/search/minified-js/german-stemmer.js | 2 +- sphinx/search/minified-js/greek-stemmer.js | 1 + sphinx/search/minified-js/hindi-stemmer.js | 1 + .../search/minified-js/hungarian-stemmer.js | 2 +- .../search/minified-js/indonesian-stemmer.js | 1 + sphinx/search/minified-js/irish-stemmer.js | 1 + sphinx/search/minified-js/italian-stemmer.js | 2 +- .../search/minified-js/lithuanian-stemmer.js | 1 + sphinx/search/minified-js/nepali-stemmer.js | 1 + .../search/minified-js/norwegian-stemmer.js | 2 +- 
sphinx/search/minified-js/porter-stemmer.js | 2 +- .../search/minified-js/portuguese-stemmer.js | 2 +- sphinx/search/minified-js/romanian-stemmer.js | 2 +- sphinx/search/minified-js/russian-stemmer.js | 2 +- sphinx/search/minified-js/serbian-stemmer.js | 1 + sphinx/search/minified-js/spanish-stemmer.js | 2 +- sphinx/search/minified-js/swedish-stemmer.js | 2 +- sphinx/search/minified-js/tamil-stemmer.js | 1 + sphinx/search/minified-js/turkish-stemmer.js | 2 +- sphinx/search/minified-js/yiddish-stemmer.js | 1 + .../search/non-minified-js/arabic-stemmer.js | 1612 ++++++ .../non-minified-js/armenian-stemmer.js | 350 ++ sphinx/search/non-minified-js/base-stemmer.js | 186 +- .../search/non-minified-js/basque-stemmer.js | 736 +++ .../search/non-minified-js/catalan-stemmer.js | 886 ++++ .../search/non-minified-js/danish-stemmer.js | 92 +- .../search/non-minified-js/dutch-stemmer.js | 2208 ++++++-- .../non-minified-js/dutch_porter-stemmer.js | 637 +++ .../search/non-minified-js/english-stemmer.js | 1066 ++++ .../non-minified-js/esperanto-stemmer.js | 762 +++ .../non-minified-js/estonian-stemmer.js | 1088 ++++ .../search/non-minified-js/finnish-stemmer.js | 223 +- .../search/non-minified-js/french-stemmer.js | 402 +- .../search/non-minified-js/german-stemmer.js | 438 +- .../search/non-minified-js/greek-stemmer.js | 2873 +++++++++++ .../search/non-minified-js/hindi-stemmer.js | 181 + .../non-minified-js/hungarian-stemmer.js | 152 +- .../non-minified-js/indonesian-stemmer.js | 409 ++ .../search/non-minified-js/irish-stemmer.js | 378 ++ .../search/non-minified-js/italian-stemmer.js | 227 +- .../non-minified-js/lithuanian-stemmer.js | 534 ++ .../search/non-minified-js/nepali-stemmer.js | 282 + .../non-minified-js/norwegian-stemmer.js | 154 +- .../search/non-minified-js/porter-stemmer.js | 213 +- .../non-minified-js/portuguese-stemmer.js | 225 +- .../non-minified-js/romanian-stemmer.js | 381 +- .../search/non-minified-js/russian-stemmer.js | 125 +- 
.../search/non-minified-js/serbian-stemmer.js | 4516 +++++++++++++++++ .../search/non-minified-js/spanish-stemmer.js | 225 +- .../search/non-minified-js/swedish-stemmer.js | 188 +- .../search/non-minified-js/tamil-stemmer.js | 1189 +++++ .../search/non-minified-js/turkish-stemmer.js | 649 +-- .../search/non-minified-js/yiddish-stemmer.js | 1160 +++++ 67 files changed, 22216 insertions(+), 2587 deletions(-) create mode 100644 sphinx/search/minified-js/README.rst create mode 100644 sphinx/search/minified-js/arabic-stemmer.js create mode 100644 sphinx/search/minified-js/armenian-stemmer.js create mode 100644 sphinx/search/minified-js/basque-stemmer.js create mode 100644 sphinx/search/minified-js/catalan-stemmer.js create mode 100644 sphinx/search/minified-js/dutch_porter-stemmer.js create mode 100644 sphinx/search/minified-js/english-stemmer.js create mode 100644 sphinx/search/minified-js/esperanto-stemmer.js create mode 100644 sphinx/search/minified-js/estonian-stemmer.js create mode 100644 sphinx/search/minified-js/greek-stemmer.js create mode 100644 sphinx/search/minified-js/hindi-stemmer.js create mode 100644 sphinx/search/minified-js/indonesian-stemmer.js create mode 100644 sphinx/search/minified-js/irish-stemmer.js create mode 100644 sphinx/search/minified-js/lithuanian-stemmer.js create mode 100644 sphinx/search/minified-js/nepali-stemmer.js create mode 100644 sphinx/search/minified-js/serbian-stemmer.js create mode 100644 sphinx/search/minified-js/tamil-stemmer.js create mode 100644 sphinx/search/minified-js/yiddish-stemmer.js create mode 100644 sphinx/search/non-minified-js/arabic-stemmer.js create mode 100644 sphinx/search/non-minified-js/armenian-stemmer.js create mode 100644 sphinx/search/non-minified-js/basque-stemmer.js create mode 100644 sphinx/search/non-minified-js/catalan-stemmer.js create mode 100644 sphinx/search/non-minified-js/dutch_porter-stemmer.js create mode 100644 sphinx/search/non-minified-js/english-stemmer.js create mode 100644 
sphinx/search/non-minified-js/esperanto-stemmer.js create mode 100644 sphinx/search/non-minified-js/estonian-stemmer.js create mode 100644 sphinx/search/non-minified-js/greek-stemmer.js create mode 100644 sphinx/search/non-minified-js/hindi-stemmer.js create mode 100644 sphinx/search/non-minified-js/indonesian-stemmer.js create mode 100644 sphinx/search/non-minified-js/irish-stemmer.js create mode 100644 sphinx/search/non-minified-js/lithuanian-stemmer.js create mode 100644 sphinx/search/non-minified-js/nepali-stemmer.js create mode 100644 sphinx/search/non-minified-js/serbian-stemmer.js create mode 100644 sphinx/search/non-minified-js/tamil-stemmer.js create mode 100644 sphinx/search/non-minified-js/yiddish-stemmer.js diff --git a/sphinx/search/minified-js/README.rst b/sphinx/search/minified-js/README.rst new file mode 100644 index 00000000000..e14b36aef3b --- /dev/null +++ b/sphinx/search/minified-js/README.rst @@ -0,0 +1,7 @@ +Regenerate minified files with:: + + npm install -g uglify-js + for f in $(ls sphinx/search/non-minified-js/); \ + do echo $f && \ + npx uglifyjs sphinx/search/non-minified-js/$f --compress --mangle --output sphinx/search/minified-js/$f; \ + done diff --git a/sphinx/search/minified-js/arabic-stemmer.js b/sphinx/search/minified-js/arabic-stemmer.js new file mode 100644 index 00000000000..c8e178a75b4 --- /dev/null +++ b/sphinx/search/minified-js/arabic-stemmer.js @@ -0,0 +1 @@ +var ArabicStemmer=function(){var o=new 
BaseStemmer,l=[["ـ",-1,1],["ً",-1,1],["ٌ",-1,1],["ٍ",-1,1],["َ",-1,1],["ُ",-1,1],["ِ",-1,1],["ّ",-1,1],["ْ",-1,1],["٠",-1,2],["١",-1,3],["٢",-1,4],["٣",-1,5],["٤",-1,6],["٥",-1,7],["٦",-1,8],["٧",-1,9],["٨",-1,10],["٩",-1,11],["ﺀ",-1,12],["ﺁ",-1,16],["ﺂ",-1,16],["ﺃ",-1,13],["ﺄ",-1,13],["ﺅ",-1,17],["ﺆ",-1,17],["ﺇ",-1,14],["ﺈ",-1,14],["ﺉ",-1,15],["ﺊ",-1,15],["ﺋ",-1,15],["ﺌ",-1,15],["ﺍ",-1,18],["ﺎ",-1,18],["ﺏ",-1,19],["ﺐ",-1,19],["ﺑ",-1,19],["ﺒ",-1,19],["ﺓ",-1,20],["ﺔ",-1,20],["ﺕ",-1,21],["ﺖ",-1,21],["ﺗ",-1,21],["ﺘ",-1,21],["ﺙ",-1,22],["ﺚ",-1,22],["ﺛ",-1,22],["ﺜ",-1,22],["ﺝ",-1,23],["ﺞ",-1,23],["ﺟ",-1,23],["ﺠ",-1,23],["ﺡ",-1,24],["ﺢ",-1,24],["ﺣ",-1,24],["ﺤ",-1,24],["ﺥ",-1,25],["ﺦ",-1,25],["ﺧ",-1,25],["ﺨ",-1,25],["ﺩ",-1,26],["ﺪ",-1,26],["ﺫ",-1,27],["ﺬ",-1,27],["ﺭ",-1,28],["ﺮ",-1,28],["ﺯ",-1,29],["ﺰ",-1,29],["ﺱ",-1,30],["ﺲ",-1,30],["ﺳ",-1,30],["ﺴ",-1,30],["ﺵ",-1,31],["ﺶ",-1,31],["ﺷ",-1,31],["ﺸ",-1,31],["ﺹ",-1,32],["ﺺ",-1,32],["ﺻ",-1,32],["ﺼ",-1,32],["ﺽ",-1,33],["ﺾ",-1,33],["ﺿ",-1,33],["ﻀ",-1,33],["ﻁ",-1,34],["ﻂ",-1,34],["ﻃ",-1,34],["ﻄ",-1,34],["ﻅ",-1,35],["ﻆ",-1,35],["ﻇ",-1,35],["ﻈ",-1,35],["ﻉ",-1,36],["ﻊ",-1,36],["ﻋ",-1,36],["ﻌ",-1,36],["ﻍ",-1,37],["ﻎ",-1,37],["ﻏ",-1,37],["ﻐ",-1,37],["ﻑ",-1,38],["ﻒ",-1,38],["ﻓ",-1,38],["ﻔ",-1,38],["ﻕ",-1,39],["ﻖ",-1,39],["ﻗ",-1,39],["ﻘ",-1,39],["ﻙ",-1,40],["ﻚ",-1,40],["ﻛ",-1,40],["ﻜ",-1,40],["ﻝ",-1,41],["ﻞ",-1,41],["ﻟ",-1,41],["ﻠ",-1,41],["ﻡ",-1,42],["ﻢ",-1,42],["ﻣ",-1,42],["ﻤ",-1,42],["ﻥ",-1,43],["ﻦ",-1,43],["ﻧ",-1,43],["ﻨ",-1,43],["ﻩ",-1,44],["ﻪ",-1,44],["ﻫ",-1,44],["ﻬ",-1,44],["ﻭ",-1,45],["ﻮ",-1,45],["ﻯ",-1,46],["ﻰ",-1,46],["ﻱ",-1,47],["ﻲ",-1,47],["ﻳ",-1,47],["ﻴ",-1,47],["ﻵ",-1,51],["ﻶ",-1,51],["ﻷ",-1,49],["ﻸ",-1,49],["ﻹ",-1,50],["ﻺ",-1,50],["ﻻ",-1,48],["ﻼ",-1,48]],b=[["آ",-1,1],["أ",-1,1],["ؤ",-1,1],["إ",-1,1],["ئ",-1,1]],m=[["آ",-1,1],["أ",-1,1],["ؤ",-1,2],["إ",-1,1],["ئ",-1,3]],_=[["ال",-1,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],k=[["أآ",-1,2],["أأ",-1,1],["أؤ",-1,1],["أإ",-1,4],["أا",-1,3]],g=[["ف",-1,1],["و",-1,1]],d=[["ال",-1
,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],h=[["ب",-1,1],["با",0,-1],["بب",0,2],["كك",-1,3]],v=[["سأ",-1,4],["ست",-1,2],["سن",-1,3],["سي",-1,1]],w=[["تست",-1,1],["نست",-1,1],["يست",-1,1]],C=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["هن",-1,2],["ه",-1,1],["ي",-1,1]],S=[["ن",-1,1]],r=[["ا",-1,1],["و",-1,1],["ي",-1,1]],e=[["ات",-1,1]],i=[["ت",-1,1]],q=[["ة",-1,1]],A=[["ي",-1,1]],B=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["كن",-1,2],["هن",-1,2],["ه",-1,1],["كمو",-1,3],["ني",-1,2]],c=[["ا",-1,1],["تا",0,2],["تما",0,4],["نا",0,2],["ت",-1,1],["ن",-1,1],["ان",5,3],["تن",5,2],["ون",5,3],["ين",5,3],["ي",-1,1]],W=[["وا",-1,1],["تم",-1,1]],j=[["و",-1,1],["تمو",0,2]],p=[["ى",-1,1]],x=!1,y=!1,z=!1;function D(){return o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,!(o.current.length<=4||!o.slice_del()))}function E(){return o.ket=o.cursor,0!=o.find_among_b(e)&&(o.bra=o.cursor,!(o.current.length<5||!o.slice_del()))}function F(){return o.ket=o.cursor,0!=o.find_among_b(i)&&(o.bra=o.cursor,!(o.current.length<4||!o.slice_del()))}function G(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(c))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<=5)return;if(o.slice_del())break;return;case 4:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}}this.stem=function(){x=!(y=z=!0);var r=o.cursor,r=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(_)))switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;x=!(y=!(z=!0));break;case 2:if(o.current.length<=3)return;x=!(y=!(z=!0))}})(),o.cursor=r,(()=>{for(var r,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(l)))switch(o.ket=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(o.slice_from("0"))break;return;case 3:if(o.slice_from("1"))break;return;case 
4:if(o.slice_from("2"))break;return;case 5:if(o.slice_from("3"))break;return;case 6:if(o.slice_from("4"))break;return;case 7:if(o.slice_from("5"))break;return;case 8:if(o.slice_from("6"))break;return;case 9:if(o.slice_from("7"))break;return;case 10:if(o.slice_from("8"))break;return;case 11:if(o.slice_from("9"))break;return;case 12:if(o.slice_from("ء"))break;return;case 13:if(o.slice_from("أ"))break;return;case 14:if(o.slice_from("إ"))break;return;case 15:if(o.slice_from("ئ"))break;return;case 16:if(o.slice_from("آ"))break;return;case 17:if(o.slice_from("ؤ"))break;return;case 18:if(o.slice_from("ا"))break;return;case 19:if(o.slice_from("ب"))break;return;case 20:if(o.slice_from("ة"))break;return;case 21:if(o.slice_from("ت"))break;return;case 22:if(o.slice_from("ث"))break;return;case 23:if(o.slice_from("ج"))break;return;case 24:if(o.slice_from("ح"))break;return;case 25:if(o.slice_from("خ"))break;return;case 26:if(o.slice_from("د"))break;return;case 27:if(o.slice_from("ذ"))break;return;case 28:if(o.slice_from("ر"))break;return;case 29:if(o.slice_from("ز"))break;return;case 30:if(o.slice_from("س"))break;return;case 31:if(o.slice_from("ش"))break;return;case 32:if(o.slice_from("ص"))break;return;case 33:if(o.slice_from("ض"))break;return;case 34:if(o.slice_from("ط"))break;return;case 35:if(o.slice_from("ظ"))break;return;case 36:if(o.slice_from("ع"))break;return;case 37:if(o.slice_from("غ"))break;return;case 38:if(o.slice_from("ف"))break;return;case 39:if(o.slice_from("ق"))break;return;case 40:if(o.slice_from("ك"))break;return;case 41:if(o.slice_from("ل"))break;return;case 42:if(o.slice_from("م"))break;return;case 43:if(o.slice_from("ن"))break;return;case 44:if(o.slice_from("ه"))break;return;case 45:if(o.slice_from("و"))break;return;case 46:if(o.slice_from("ى"))break;return;case 47:if(o.slice_from("ي"))break;return;case 48:if(o.slice_from("لا"))break;return;case 49:if(o.slice_from("لأ"))break;return;case 50:if(o.slice_from("لإ"))break;return;case 
51:if(o.slice_from("لآ"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),o.limit_backward=o.cursor,o.cursor=o.limit,o.limit-o.cursor);r:e:{var e=o.limit-o.cursor;i:if(y){c:{var i=o.limit-o.cursor;s:{for(var c=1;;){var s=o.limit-o.cursor;if(!(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(B))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-s;break}c--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(j))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})())){if(o.cursor=o.limit-t,o.cursor<=o.limit_backward)break s;o.cursor--}break c}}if(o.cursor=o.limit-i,(o.ket=o.cursor,0==o.find_among_b(W)||(o.bra=o.cursor,o.current.length<5)||!o.slice_del())&&(o.cursor=o.limit-i,!G()))break i}break e}if(o.cursor=o.limit-e,z){var u=o.limit-o.cursor;i:c:{var a=o.limit-o.cursor;if(o.ket=o.cursor,0==o.find_among_b(q)||(o.bra=o.cursor,o.current.length<4)||!o.slice_del()){o.cursor=o.limit-a;s:if(!x&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(C))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){var n=o.limit-o.cursor;if(!D()&&(o.cursor=o.limit-n,!E())&&(o.cursor=o.limit-n,!F())){if(o.cursor=o.limit-n,o.cursor<=o.limit_backward)break s;o.cursor--}break c}if(o.cursor=o.limit-a,o.ket=o.cursor,!(0==o.find_among_b(S)||(o.bra=o.cursor,o.current.length<=5))&&o.slice_del()){n=o.limit-o.cursor;if(D()||(o.cursor=o.limit-n,E())||(o.cursor=o.limit-n,F()))break 
c}if(o.cursor=o.limit-a,(x||!D())&&(o.cursor=o.limit-a,!E())){o.cursor=o.limit-u;break i}}}if(o.ket=o.cursor,!(0==o.find_among_b(A)||(o.bra=o.cursor,o.current.length<3))&&o.slice_del())break e}if(o.cursor=o.limit-e,o.ket=o.cursor,0==o.find_among_b(p)||(o.bra=o.cursor,!o.slice_from("ي")))break r}o.cursor=o.limit-r,o.cursor=o.limit_backward;r=o.cursor;r:{var f=o.cursor,f=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(k))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_from("أ"))break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("آ"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ا"))break;return;case 4:if(o.current.length<=3)return;if(o.slice_from("إ"))break;return}return 1}})()||(o.cursor=f),o.cursor),f=((()=>{var r;return o.bra=o.cursor,0==o.find_among(g)||(o.ket=o.cursor,o.current.length<=3)||(r=o.cursor,o.eq_s("ا"))?void 0:(o.cursor=r,!!o.slice_del())})()||(o.cursor=f),o.cursor);if(!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(d))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=5)return;if(o.slice_del())break;return;case 2:if(o.current.length<=4)return;if(o.slice_del())break;return}return 1}})()&&(o.cursor=f,!z||!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(h))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_del())break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("ب"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ك"))break;return}return 1}})())){if(o.cursor=f,!y)break r;f=o.cursor;if((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(v))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;if(o.slice_from("ي"))break;return;case 2:if(o.current.length<=4)return;if(o.slice_from("ت"))break;return;case 3:if(o.current.length<=4)return;if(o.slice_from("ن"))break;return;case 4:if(o.current.length<=4)return;if(o.slice_from("أ"))break;return}return 
1}})()||(o.cursor=f),o.bra=o.cursor,0==o.find_among(w)||(o.ket=o.cursor,o.current.length<=4)||(z=!(y=!0),!o.slice_from("است")))break r}}return o.cursor=r,(()=>{var r,e=o.cursor;if(o.limit_backward=o.cursor,o.cursor=o.limit,o.ket=o.cursor,0!=o.find_among_b(b)){if(o.bra=o.cursor,!o.slice_from("ء"))return;o.cursor=o.limit_backward}for(o.cursor=e,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(m)))switch(o.ket=o.cursor,r){case 1:if(o.slice_from("ا"))break;return;case 2:if(o.slice_from("و"))break;return;case 3:if(o.slice_from("ي"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/armenian-stemmer.js b/sphinx/search/minified-js/armenian-stemmer.js new file mode 100644 index 00000000000..6b5c33afba2 --- /dev/null +++ b/sphinx/search/minified-js/armenian-stemmer.js @@ -0,0 +1 @@ +var ArmenianStemmer=function(){var o=new 
BaseStemmer,u=[["րորդ",-1,1],["երորդ",0,1],["ալի",-1,1],["ակի",-1,1],["որակ",-1,1],["եղ",-1,1],["ական",-1,1],["արան",-1,1],["են",-1,1],["եկեն",8,1],["երեն",8,1],["որէն",-1,1],["ին",-1,1],["գին",12,1],["ովին",12,1],["լայն",-1,1],["վուն",-1,1],["պես",-1,1],["իվ",-1,1],["ատ",-1,1],["ավետ",-1,1],["կոտ",-1,1],["բար",-1,1]],c=[["ա",-1,1],["ացա",0,1],["եցա",0,1],["վե",-1,1],["ացրի",-1,1],["ացի",-1,1],["եցի",-1,1],["վեցի",6,1],["ալ",-1,1],["ըալ",8,1],["անալ",8,1],["ենալ",8,1],["ացնալ",8,1],["ել",-1,1],["ըել",13,1],["նել",13,1],["ցնել",15,1],["եցնել",16,1],["չել",13,1],["վել",13,1],["ացվել",19,1],["եցվել",19,1],["տել",13,1],["ատել",22,1],["ոտել",22,1],["կոտել",24,1],["ված",-1,1],["ում",-1,1],["վում",27,1],["ան",-1,1],["ցան",29,1],["ացան",30,1],["ացրին",-1,1],["ացին",-1,1],["եցին",-1,1],["վեցին",34,1],["ալիս",-1,1],["ելիս",-1,1],["ավ",-1,1],["ացավ",38,1],["եցավ",38,1],["ալով",-1,1],["ելով",-1,1],["ար",-1,1],["ացար",43,1],["եցար",43,1],["ացրիր",-1,1],["ացիր",-1,1],["եցիր",-1,1],["վեցիր",48,1],["աց",-1,1],["եց",-1,1],["ացրեց",51,1],["ալուց",-1,1],["ելուց",-1,1],["ալու",-1,1],["ելու",-1,1],["աք",-1,1],["ցաք",57,1],["ացաք",58,1],["ացրիք",-1,1],["ացիք",-1,1],["եցիք",-1,1],["վեցիք",62,1],["անք",-1,1],["ցանք",64,1],["ացանք",65,1],["ացրինք",-1,1],["ացինք",-1,1],["եցինք",-1,1],["վեցինք",69,1]],s=[["որդ",-1,1],["ույթ",-1,1],["ուհի",-1,1],["ցի",-1,1],["իլ",-1,1],["ակ",-1,1],["յակ",5,1],["անակ",5,1],["իկ",-1,1],["ուկ",-1,1],["ան",-1,1],["պան",10,1],["ստան",10,1],["արան",10,1],["եղէն",-1,1],["յուն",-1,1],["ություն",15,1],["ածո",-1,1],["իչ",-1,1],["ուս",-1,1],["ուստ",-1,1],["գար",-1,1],["վոր",-1,1],["ավոր",22,1],["ոց",-1,1],["անօց",-1,1],["ու",-1,1],["ք",-1,1],["չեք",27,1],["իք",27,1],["ալիք",29,1],["անիք",29,1],["վածք",27,1],["ույք",27,1],["ենք",27,1],["ոնք",27,1],["ունք",27,1],["մունք",36,1],["իչք",27,1],["արք",27,1]],r=[["սա",-1,1],["վա",-1,1],["ամբ",-1,1],["դ",-1,1],["անդ",3,1],["ությանդ",4,1],["վանդ",4,1],["ոջդ",3,1],["երդ",3,1],["ներդ",8,1],["ուդ",3,1],["ը",-1,1],["անը",11,1],["ությա
նը",12,1],["վանը",12,1],["ոջը",11,1],["երը",11,1],["ները",16,1],["ի",-1,1],["վի",18,1],["երի",18,1],["ների",20,1],["անում",-1,1],["երում",-1,1],["ներում",23,1],["ն",-1,1],["ան",25,1],["ության",26,1],["վան",26,1],["ին",25,1],["երին",29,1],["ներին",30,1],["ությանն",25,1],["երն",25,1],["ներն",33,1],["ուն",25,1],["ոջ",-1,1],["ությանս",-1,1],["վանս",-1,1],["ոջս",-1,1],["ով",-1,1],["անով",40,1],["վով",40,1],["երով",40,1],["ներով",43,1],["եր",-1,1],["ներ",45,1],["ց",-1,1],["ից",47,1],["վանից",48,1],["ոջից",48,1],["վից",48,1],["երից",48,1],["ներից",52,1],["ցից",48,1],["ոց",47,1],["ուց",47,1]],t=[209,4,128,0,18],n=0,e=0;function m(){o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,n<=o.cursor)&&o.slice_del()}this.stem=function(){var r,i;return e=o.limit,n=o.limit,r=o.cursor,o.go_out_grouping(t,1377,1413)&&(o.cursor++,e=o.cursor,o.go_in_grouping(t,1377,1413))&&(o.cursor++,o.go_out_grouping(t,1377,1413))&&(o.cursor++,o.go_in_grouping(t,1377,1413))&&(o.cursor++,n=o.cursor),o.cursor=r,o.limit_backward=o.cursor,o.cursor=o.limit,!(o.cursor=this.limit)return false;var s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor++;return true};this.in_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor--;return true};this.out_grouping=function(r,t,i){if(this.cursor>=this.limit)return false;var s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0){this.cursor++;return true}return false};this.out_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0){this.cursor--;return true}return false};this.eq_s=function(r){if(this.limit-this.cursor>>1);var a=0;var f=h0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s+l[0].length;if(l.length<4)return l[2];var 
v=l[3](this);this.cursor=s+l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.find_among_b=function(r){var t=0;var i=r.length;var s=this.cursor;var e=this.limit_backward;var h=0;var u=0;var n=false;while(true){var c=t+(i-t>>1);var a=0;var f=h=0;o--){if(s-f==e){a=-1;break}a=this.current.charCodeAt(s-1-f)-l[0].charCodeAt(o);if(a!=0)break;f++}if(a<0){i=c;u=f}else{t=c;h=f}if(i-t<=1){if(t>0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s-l[0].length;if(l.length<4)return l[2];var v=l[3](this);this.cursor=s-l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.replace_s=function(r,t,i){var s=i.length-(t-r);this.current=this.current.slice(0,r)+i+this.current.slice(t);this.limit+=s;if(this.cursor>=t)this.cursor+=s;else if(this.cursor>r)this.cursor=r;return s};this.slice_check=function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length){return false}return true};this.slice_from=function(r){var t=false;if(this.slice_check()){this.replace_s(this.bra,this.ket,r);t=true}return t};this.slice_del=function(){return this.slice_from("")};this.insert=function(r,t,i){var s=this.replace_s(r,t,i);if(r<=this.bra)this.bra+=s;if(r<=this.ket)this.ket+=s};this.slice_to=function(){var r="";if(this.slice_check()){r=this.current.slice(this.bra,this.ket)}return r};this.assign_to=function(){return this.current.slice(0,this.limit)}}; \ No newline at end of file +let BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return 
this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return 
this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/basque-stemmer.js b/sphinx/search/minified-js/basque-stemmer.js new file mode 100644 index 00000000000..3e1c4337a69 --- /dev/null +++ b/sphinx/search/minified-js/basque-stemmer.js @@ -0,0 +1 @@ +var BasqueStemmer=function(){var o=new 
BaseStemmer,u=[["idea",-1,1],["bidea",0,1],["kidea",0,1],["pidea",0,1],["kundea",-1,1],["galea",-1,1],["tailea",-1,1],["tzailea",-1,1],["gunea",-1,1],["kunea",-1,1],["tzaga",-1,1],["gaia",-1,1],["aldia",-1,1],["taldia",12,1],["karia",-1,1],["garria",-1,2],["karria",-1,1],["ka",-1,1],["tzaka",17,1],["la",-1,1],["mena",-1,1],["pena",-1,1],["kina",-1,1],["ezina",-1,1],["tezina",23,1],["kuna",-1,1],["tuna",-1,1],["kizuna",-1,1],["era",-1,1],["bera",28,1],["arabera",29,-1],["kera",28,1],["pera",28,1],["orra",-1,1],["korra",33,1],["dura",-1,1],["gura",-1,1],["kura",-1,1],["tura",-1,1],["eta",-1,1],["keta",39,1],["gailua",-1,1],["eza",-1,1],["erreza",42,1],["tza",-1,2],["gaitza",44,1],["kaitza",44,1],["kuntza",44,1],["ide",-1,1],["bide",48,1],["kide",48,1],["pide",48,1],["kunde",-1,1],["tzake",-1,1],["tzeke",-1,1],["le",-1,1],["gale",55,1],["taile",55,1],["tzaile",55,1],["gune",-1,1],["kune",-1,1],["tze",-1,1],["atze",61,1],["gai",-1,1],["aldi",-1,1],["taldi",64,1],["ki",-1,1],["ari",-1,1],["kari",67,1],["lari",67,1],["tari",67,1],["etari",70,1],["garri",-1,2],["karri",-1,1],["arazi",-1,1],["tarazi",74,1],["an",-1,1],["ean",76,1],["rean",77,1],["kan",76,1],["etan",76,1],["atseden",-1,-1],["men",-1,1],["pen",-1,1],["kin",-1,1],["rekin",84,1],["ezin",-1,1],["tezin",86,1],["tun",-1,1],["kizun",-1,1],["go",-1,1],["ago",90,1],["tio",-1,1],["dako",-1,1],["or",-1,1],["kor",94,1],["tzat",-1,1],["du",-1,1],["gailu",-1,1],["tu",-1,1],["atu",99,1],["aldatu",100,1],["tatu",100,1],["baditu",99,-1],["ez",-1,1],["errez",104,1],["tzez",104,1],["gaitz",-1,1],["kaitz",-1,1]],r=[["ada",-1,1],["kada",0,1],["anda",-1,1],["denda",-1,1],["gabea",-1,1],["kabea",-1,1],["aldea",-1,1],["kaldea",6,1],["taldea",6,1],["ordea",-1,1],["zalea",-1,1],["tzalea",10,1],["gilea",-1,1],["emea",-1,1],["kumea",-1,1],["nea",-1,1],["enea",15,1],["zionea",15,1],["unea",15,1],["gunea",18,1],["pea",-1,1],["aurrea",-1,1],["tea",-1,1],["kotea",22,1],["artea",22,1],["ostea",22,1],["etxea",-1,1],["ga",-1,1],["anga",27,1],
["gaia",-1,1],["aldia",-1,1],["taldia",30,1],["handia",-1,1],["mendia",-1,1],["geia",-1,1],["egia",-1,1],["degia",35,1],["tegia",35,1],["nahia",-1,1],["ohia",-1,1],["kia",-1,1],["tokia",40,1],["oia",-1,1],["koia",42,1],["aria",-1,1],["karia",44,1],["laria",44,1],["taria",44,1],["eria",-1,1],["keria",48,1],["teria",48,1],["garria",-1,2],["larria",-1,1],["kirria",-1,1],["duria",-1,1],["asia",-1,1],["tia",-1,1],["ezia",-1,1],["bizia",-1,1],["ontzia",-1,1],["ka",-1,1],["joka",60,3],["aurka",60,-1],["ska",60,1],["xka",60,1],["zka",60,1],["gibela",-1,1],["gela",-1,1],["kaila",-1,1],["skila",-1,1],["tila",-1,1],["ola",-1,1],["na",-1,1],["kana",72,1],["ena",72,1],["garrena",74,1],["gerrena",74,1],["urrena",74,1],["zaina",72,1],["tzaina",78,1],["kina",72,1],["mina",72,1],["garna",72,1],["una",72,1],["duna",83,1],["asuna",83,1],["tasuna",85,1],["ondoa",-1,1],["kondoa",87,1],["ngoa",-1,1],["zioa",-1,1],["koa",-1,1],["takoa",91,1],["zkoa",91,1],["noa",-1,1],["zinoa",94,1],["aroa",-1,1],["taroa",96,1],["zaroa",96,1],["eroa",-1,1],["oroa",-1,1],["osoa",-1,1],["toa",-1,1],["ttoa",102,1],["ztoa",102,1],["txoa",-1,1],["tzoa",-1,1],["ñoa",-1,1],["ra",-1,1],["ara",108,1],["dara",109,1],["liara",109,1],["tiara",109,1],["tara",109,1],["etara",113,1],["tzara",109,1],["bera",108,1],["kera",108,1],["pera",108,1],["ora",108,2],["tzarra",108,1],["korra",108,1],["tra",108,1],["sa",-1,1],["osa",123,1],["ta",-1,1],["eta",125,1],["keta",126,1],["sta",125,1],["dua",-1,1],["mendua",129,1],["ordua",129,1],["lekua",-1,1],["burua",-1,1],["durua",-1,1],["tsua",-1,1],["tua",-1,1],["mentua",136,1],["estua",136,1],["txua",-1,1],["zua",-1,1],["tzua",140,1],["za",-1,1],["eza",142,1],["eroza",142,1],["tza",142,2],["koitza",145,1],["antza",145,1],["gintza",145,1],["kintza",145,1],["kuntza",145,1],["gabe",-1,1],["kabe",-1,1],["kide",-1,1],["alde",-1,1],["kalde",154,1],["talde",154,1],["orde",-1,1],["ge",-1,1],["zale",-1,1],["tzale",159,1],["gile",-1,1],["eme",-1,1],["kume",-1,1],["ne",-1,1],["zione",164,1],["
une",164,1],["gune",166,1],["pe",-1,1],["aurre",-1,1],["te",-1,1],["kote",170,1],["arte",170,1],["oste",170,1],["etxe",-1,1],["gai",-1,1],["di",-1,1],["aldi",176,1],["taldi",177,1],["geldi",176,-1],["handi",176,1],["mendi",176,1],["gei",-1,1],["egi",-1,1],["degi",183,1],["tegi",183,1],["nahi",-1,1],["ohi",-1,1],["ki",-1,1],["toki",188,1],["oi",-1,1],["goi",190,1],["koi",190,1],["ari",-1,1],["kari",193,1],["lari",193,1],["tari",193,1],["garri",-1,2],["larri",-1,1],["kirri",-1,1],["duri",-1,1],["asi",-1,1],["ti",-1,1],["ontzi",-1,1],["ñi",-1,1],["ak",-1,1],["ek",-1,1],["tarik",-1,1],["gibel",-1,1],["ail",-1,1],["kail",209,1],["kan",-1,1],["tan",-1,1],["etan",212,1],["en",-1,4],["ren",214,2],["garren",215,1],["gerren",215,1],["urren",215,1],["ten",214,4],["tzen",214,4],["zain",-1,1],["tzain",221,1],["kin",-1,1],["min",-1,1],["dun",-1,1],["asun",-1,1],["tasun",226,1],["aizun",-1,1],["ondo",-1,1],["kondo",229,1],["go",-1,1],["ngo",231,1],["zio",-1,1],["ko",-1,1],["trako",234,5],["tako",234,1],["etako",236,1],["eko",234,1],["tariko",234,1],["sko",234,1],["tuko",234,1],["minutuko",241,6],["zko",234,1],["no",-1,1],["zino",244,1],["ro",-1,1],["aro",246,1],["igaro",247,-1],["taro",247,1],["zaro",247,1],["ero",246,1],["giro",246,1],["oro",246,1],["oso",-1,1],["to",-1,1],["tto",255,1],["zto",255,1],["txo",-1,1],["tzo",-1,1],["gintzo",259,1],["ño",-1,1],["zp",-1,1],["ar",-1,1],["dar",263,1],["behar",263,1],["zehar",263,-1],["liar",263,1],["tiar",263,1],["tar",263,1],["tzar",263,1],["or",-1,2],["kor",271,1],["os",-1,1],["ket",-1,1],["du",-1,1],["mendu",275,1],["ordu",275,1],["leku",-1,1],["buru",-1,2],["duru",-1,1],["tsu",-1,1],["tu",-1,1],["tatu",282,4],["mentu",282,1],["estu",282,1],["txu",-1,1],["zu",-1,1],["tzu",287,1],["gintzu",288,1],["z",-1,1],["ez",290,1],["eroz",290,1],["tz",290,1],["koitz",293,1]],n=[["zlea",-1,2],["keria",-1,1],["la",-1,1],["era",-1,1],["dade",-1,1],["tade",-1,1],["date",-1,1],["tate",-1,1],["gi",-1,1],["ki",-1,1],["ik",-1,1],["lanik",10,1],["rik",10,1
],["larik",12,1],["ztik",10,1],["go",-1,1],["ro",-1,1],["ero",16,1],["to",-1,1]],k=[17,65,16],g=0,s=0,z=0;function l(){return z<=o.cursor}function d(){return g<=o.cursor}function c(){var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(r))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return;case 3:if(o.slice_from("jok"))break;return;case 4:if(!(s<=o.cursor))return;if(o.slice_del())break;return;case 5:if(o.slice_from("tra"))break;return;case 6:if(o.slice_from("minutu"))break;return}return 1}}this.stem=function(){z=o.limit,s=o.limit,g=o.limit;var a=o.cursor;a:{r:{var r=o.cursor;i:if(o.in_grouping(k,97,117)){var i=o.cursor;if(!o.out_grouping(k,97,117)||!o.go_out_grouping(k,97,117)){if(o.cursor=i,!o.in_grouping(k,97,117))break i;if(!o.go_in_grouping(k,97,117))break i}o.cursor++;break r}if(o.cursor=r,!o.out_grouping(k,97,117))break a;i=o.cursor;if(o.out_grouping(k,97,117)&&o.go_out_grouping(k,97,117));else{if(o.cursor=i,!o.in_grouping(k,97,117))break a;if(o.cursor>=o.limit)break a}o.cursor++}z=o.cursor}for(o.cursor=a,a=o.cursor,o.go_out_grouping(k,97,117)&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,s=o.cursor,o.go_out_grouping(k,97,117))&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,g=o.cursor),o.cursor=a,o.limit_backward=o.cursor,o.cursor=o.limit;;){var e=o.limit-o.cursor;if(!(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(u))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-e;break}}for(;;){var t=o.limit-o.cursor;if(!c()){o.cursor=o.limit-t;break}}a=o.limit-o.cursor;return(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(n)))switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(o.slice_from("z"))break}})(),o.cursor=o.limit-a,o.cursor=o.limit_backward,!0},this.stemWord=function(a){return o.setCurrent(a),this.stem(),o.getCurrent()}}; \ 
No newline at end of file diff --git a/sphinx/search/minified-js/catalan-stemmer.js b/sphinx/search/minified-js/catalan-stemmer.js new file mode 100644 index 00000000000..75788216aa1 --- /dev/null +++ b/sphinx/search/minified-js/catalan-stemmer.js @@ -0,0 +1 @@ +var CatalanStemmer=function(){var e=new BaseStemmer,r=[["",-1,7],["·",0,6],["à",0,1],["á",0,1],["è",0,2],["é",0,2],["ì",0,3],["í",0,3],["ï",0,3],["ò",0,4],["ó",0,4],["ú",0,5],["ü",0,5]],a=[["la",-1,1],["-la",0,1],["sela",0,1],["le",-1,1],["me",-1,1],["-me",4,1],["se",-1,1],["-te",-1,1],["hi",-1,1],["'hi",8,1],["li",-1,1],["-li",10,1],["'l",-1,1],["'m",-1,1],["-m",-1,1],["'n",-1,1],["-n",-1,1],["ho",-1,1],["'ho",17,1],["lo",-1,1],["selo",19,1],["'s",-1,1],["las",-1,1],["selas",22,1],["les",-1,1],["-les",24,1],["'ls",-1,1],["-ls",-1,1],["'ns",-1,1],["-ns",-1,1],["ens",-1,1],["los",-1,1],["selos",31,1],["nos",-1,1],["-nos",33,1],["vos",-1,1],["us",-1,1],["-us",36,1],["'t",-1,1]],t=[["ica",-1,4],["lógica",0,3],["enca",-1,1],["ada",-1,2],["ancia",-1,1],["encia",-1,1],["ència",-1,1],["ícia",-1,1],["logia",-1,3],["inia",-1,1],["íinia",9,1],["eria",-1,1],["ària",-1,1],["atòria",-1,1],["alla",-1,1],["ella",-1,1],["ívola",-1,1],["ima",-1,1],["íssima",17,1],["quíssima",18,5],["ana",-1,1],["ina",-1,1],["era",-1,1],["sfera",22,1],["ora",-1,1],["dora",24,1],["adora",25,1],["adura",-1,1],["esa",-1,1],["osa",-1,1],["assa",-1,1],["essa",-1,1],["issa",-1,1],["eta",-1,1],["ita",-1,1],["ota",-1,1],["ista",-1,1],["ialista",36,1],["ionista",36,1],["iva",-1,1],["ativa",39,1],["nça",-1,1],["logía",-1,3],["ic",-1,4],["ístic",43,1],["enc",-1,1],["esc",-1,1],["ud",-1,1],["atge",-1,1],["ble",-1,1],["able",49,1],["ible",49,1],["isme",-1,1],["ialisme",52,1],["ionisme",52,1],["ivisme",52,1],["aire",-1,1],["icte",-1,1],["iste",-1,1],["ici",-1,1],["íci",-1,1],["logi",-1,3],["ari",-1,1],["tori",-1,1],["al",-1,1],["il",-1,1],["all",-1,1],["ell",-1,1],["ívol",-1,1],["isam",-1,1],["issem",-1,1],["ìssem",-1,1],["íssem",-1,1],["íssim",-1,1],["quí
ssim",73,5],["amen",-1,1],["ìssin",-1,1],["ar",-1,1],["ificar",77,1],["egar",77,1],["ejar",77,1],["itar",77,1],["itzar",77,1],["fer",-1,1],["or",-1,1],["dor",84,1],["dur",-1,1],["doras",-1,1],["ics",-1,4],["lógics",88,3],["uds",-1,1],["nces",-1,1],["ades",-1,2],["ancies",-1,1],["encies",-1,1],["ències",-1,1],["ícies",-1,1],["logies",-1,3],["inies",-1,1],["ínies",-1,1],["eries",-1,1],["àries",-1,1],["atòries",-1,1],["bles",-1,1],["ables",103,1],["ibles",103,1],["imes",-1,1],["íssimes",106,1],["quíssimes",107,5],["formes",-1,1],["ismes",-1,1],["ialismes",110,1],["ines",-1,1],["eres",-1,1],["ores",-1,1],["dores",114,1],["idores",115,1],["dures",-1,1],["eses",-1,1],["oses",-1,1],["asses",-1,1],["ictes",-1,1],["ites",-1,1],["otes",-1,1],["istes",-1,1],["ialistes",124,1],["ionistes",124,1],["iques",-1,4],["lógiques",127,3],["ives",-1,1],["atives",129,1],["logíes",-1,3],["allengües",-1,1],["icis",-1,1],["ícis",-1,1],["logis",-1,3],["aris",-1,1],["toris",-1,1],["ls",-1,1],["als",138,1],["ells",138,1],["ims",-1,1],["íssims",141,1],["quíssims",142,5],["ions",-1,1],["cions",144,1],["acions",145,2],["esos",-1,1],["osos",-1,1],["assos",-1,1],["issos",-1,1],["ers",-1,1],["ors",-1,1],["dors",152,1],["adors",153,1],["idors",153,1],["ats",-1,1],["itats",156,1],["bilitats",157,1],["ivitats",157,1],["ativitats",159,1],["ïtats",156,1],["ets",-1,1],["ants",-1,1],["ents",-1,1],["ments",164,1],["aments",165,1],["ots",-1,1],["uts",-1,1],["ius",-1,1],["trius",169,1],["atius",169,1],["ès",-1,1],["és",-1,1],["ís",-1,1],["dís",174,1],["ós",-1,1],["itat",-1,1],["bilitat",177,1],["ivitat",177,1],["ativitat",179,1],["ïtat",-1,1],["et",-1,1],["ant",-1,1],["ent",-1,1],["ient",184,1],["ment",184,1],["ament",186,1],["isament",187,1],["ot",-1,1],["isseu",-1,1],["ìsseu",-1,1],["ísseu",-1,1],["triu",-1,1],["íssiu",-1,1],["atiu",-1,1],["ó",-1,1],["ió",196,1],["ció",197,1],["ació",198,1]],n=[["aba",-1,1],["esca",-1,1],["isca",-1,1],["ïsca",-1,1],["ada",-1,1],["ida",-1,1],["uda",-1,1],["ïda",-1,1],["ia",-1
,1],["aria",8,1],["iria",8,1],["ara",-1,1],["iera",-1,1],["ira",-1,1],["adora",-1,1],["ïra",-1,1],["ava",-1,1],["ixa",-1,1],["itza",-1,1],["ía",-1,1],["aría",19,1],["ería",19,1],["iría",19,1],["ïa",-1,1],["isc",-1,1],["ïsc",-1,1],["ad",-1,1],["ed",-1,1],["id",-1,1],["ie",-1,1],["re",-1,1],["dre",30,1],["ase",-1,1],["iese",-1,1],["aste",-1,1],["iste",-1,1],["ii",-1,1],["ini",-1,1],["esqui",-1,1],["eixi",-1,1],["itzi",-1,1],["am",-1,1],["em",-1,1],["arem",42,1],["irem",42,1],["àrem",42,1],["írem",42,1],["àssem",42,1],["éssem",42,1],["iguem",42,1],["ïguem",42,1],["avem",42,1],["àvem",42,1],["ávem",42,1],["irìem",42,1],["íem",42,1],["aríem",55,1],["iríem",55,1],["assim",-1,1],["essim",-1,1],["issim",-1,1],["àssim",-1,1],["èssim",-1,1],["éssim",-1,1],["íssim",-1,1],["ïm",-1,1],["an",-1,1],["aban",66,1],["arian",66,1],["aran",66,1],["ieran",66,1],["iran",66,1],["ían",66,1],["arían",72,1],["erían",72,1],["irían",72,1],["en",-1,1],["ien",76,1],["arien",77,1],["irien",77,1],["aren",76,1],["eren",76,1],["iren",76,1],["àren",76,1],["ïren",76,1],["asen",76,1],["iesen",76,1],["assen",76,1],["essen",76,1],["issen",76,1],["éssen",76,1],["ïssen",76,1],["esquen",76,1],["isquen",76,1],["ïsquen",76,1],["aven",76,1],["ixen",76,1],["eixen",96,1],["ïxen",76,1],["ïen",76,1],["in",-1,1],["inin",100,1],["sin",100,1],["isin",102,1],["assin",102,1],["essin",102,1],["issin",102,1],["ïssin",102,1],["esquin",100,1],["eixin",100,1],["aron",-1,1],["ieron",-1,1],["arán",-1,1],["erán",-1,1],["irán",-1,1],["iïn",-1,1],["ado",-1,1],["ido",-1,1],["ando",-1,2],["iendo",-1,1],["io",-1,1],["ixo",-1,1],["eixo",121,1],["ïxo",-1,1],["itzo",-1,1],["ar",-1,1],["tzar",125,1],["er",-1,1],["eixer",127,1],["ir",-1,1],["ador",-1,1],["as",-1,1],["abas",131,1],["adas",131,1],["idas",131,1],["aras",131,1],["ieras",131,1],["ías",131,1],["arías",137,1],["erías",137,1],["irías",137,1],["ids",-1,1],["es",-1,1],["ades",142,1],["ides",142,1],["udes",142,1],["ïdes",142,1],["atges",142,1],["ies",142,1],["aries",148,1],["iries
",148,1],["ares",142,1],["ires",142,1],["adores",142,1],["ïres",142,1],["ases",142,1],["ieses",142,1],["asses",142,1],["esses",142,1],["isses",142,1],["ïsses",142,1],["ques",142,1],["esques",161,1],["ïsques",161,1],["aves",142,1],["ixes",142,1],["eixes",165,1],["ïxes",142,1],["ïes",142,1],["abais",-1,1],["arais",-1,1],["ierais",-1,1],["íais",-1,1],["aríais",172,1],["eríais",172,1],["iríais",172,1],["aseis",-1,1],["ieseis",-1,1],["asteis",-1,1],["isteis",-1,1],["inis",-1,1],["sis",-1,1],["isis",181,1],["assis",181,1],["essis",181,1],["issis",181,1],["ïssis",181,1],["esquis",-1,1],["eixis",-1,1],["itzis",-1,1],["áis",-1,1],["aréis",-1,1],["eréis",-1,1],["iréis",-1,1],["ams",-1,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["ábamos",197,1],["áramos",197,1],["iéramos",197,1],["íamos",197,1],["aríamos",201,1],["eríamos",201,1],["iríamos",201,1],["aremos",-1,1],["eremos",-1,1],["iremos",-1,1],["ásemos",-1,1],["iésemos",-1,1],["imos",-1,1],["adors",-1,1],["ass",-1,1],["erass",212,1],["ess",-1,1],["ats",-1,1],["its",-1,1],["ents",-1,1],["às",-1,1],["aràs",218,1],["iràs",218,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["és",-1,1],["arés",224,1],["ís",-1,1],["iïs",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ent",-1,1],["int",-1,1],["ut",-1,1],["ït",-1,1],["au",-1,1],["erau",235,1],["ieu",-1,1],["ineu",-1,1],["areu",-1,1],["ireu",-1,1],["àreu",-1,1],["íreu",-1,1],["asseu",-1,1],["esseu",-1,1],["eresseu",244,1],["àsseu",-1,1],["ésseu",-1,1],["igueu",-1,1],["ïgueu",-1,1],["àveu",-1,1],["áveu",-1,1],["itzeu",-1,1],["ìeu",-1,1],["irìeu",253,1],["íeu",-1,1],["aríeu",255,1],["iríeu",255,1],["assiu",-1,1],["issiu",-1,1],["àssiu",-1,1],["èssiu",-1,1],["éssiu",-1,1],["íssiu",-1,1],["ïu",-1,1],["ix",-1,1],["eix",265,1],["ïx",-1,1],["itz",-1,1],["ià",-1,1],["arà",-1,1],["irà",-1,1],["itzà",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1],["irè",-1,1],["aré",-1,1],["eré",-1,1],["iré",-1,1],["í",-1,1],["iï",-1,1],["ió",-1,1]],o=[["a",-1,1],["e",-1,1],["i",-1,1],["ïn",-1,1],["o",-1,1],["ir",-1
,1],["s",-1,1],["is",6,1],["os",6,1],["ïs",6,1],["it",-1,1],["eu",-1,1],["iu",-1,1],["iqu",-1,2],["itz",-1,1],["à",-1,1],["á",-1,1],["é",-1,1],["ì",-1,1],["í",-1,1],["ï",-1,1],["ó",-1,1]],u=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,129,81,6,10],c=0,m=0;function l(){return m<=e.cursor}function d(){return c<=e.cursor}this.stem=function(){m=e.limit,c=e.limit,s=e.cursor,e.go_out_grouping(u,97,252)&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,m=e.cursor,e.go_out_grouping(u,97,252))&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,c=e.cursor),e.cursor=s,e.limit_backward=e.cursor,e.cursor=e.limit;var s=e.limit-e.cursor,s=(e.ket=e.cursor,0!=e.find_among_b(a)&&(e.bra=e.cursor,l())&&e.slice_del(),e.cursor=e.limit-s,e.limit-e.cursor),i=e.limit-e.cursor,i=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(t))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return;case 3:if(!d())return;if(e.slice_from("log"))break;return;case 4:if(!d())return;if(e.slice_from("ic"))break;return;case 5:if(!l())return;if(e.slice_from("c"))break;return}return 1}})()||(e.cursor=e.limit-i,(()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(n))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return}}})()),e.cursor=e.limit-s,e.limit-e.cursor),s=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(o)))switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!l())return;if(e.slice_from("ic"))break}})(),e.cursor=e.limit-i,e.cursor=e.limit_backward,e.cursor);return(()=>{for(var s;;){var i=e.cursor;s:{switch(e.bra=e.cursor,s=e.find_among(r),e.ket=e.cursor,s){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("i"))break;return;case 4:if(e.slice_from("o"))break;return;case 5:if(e.slice_from("u"))break;return;case 6:if(e.slice_from("."))break;return;case 
7:if(e.cursor>=e.limit)break s;e.cursor++}continue}e.cursor=i;break}})(),e.cursor=s,!0},this.stemWord=function(s){return e.setCurrent(s),this.stem(),e.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/danish-stemmer.js b/sphinx/search/minified-js/danish-stemmer.js index f3fc600033c..7a577f3eb26 100644 --- a/sphinx/search/minified-js/danish-stemmer.js +++ b/sphinx/search/minified-js/danish-stemmer.js @@ -1 +1 @@ -DanishStemmer=function(){var r=new BaseStemmer;var e=[["hed",-1,1],["ethed",0,1],["ered",-1,1],["e",-1,1],["erede",3,1],["ende",3,1],["erende",5,1],["ene",3,1],["erne",3,1],["ere",3,1],["en",-1,1],["heden",10,1],["eren",10,1],["er",-1,1],["heder",13,1],["erer",13,1],["s",-1,2],["heds",16,1],["es",16,1],["endes",18,1],["erendes",19,1],["enes",18,1],["ernes",18,1],["eres",18,1],["ens",16,1],["hedens",24,1],["erens",24,1],["ers",16,1],["ets",16,1],["erets",28,1],["et",-1,1],["eret",30,1]];var i=[["gd",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1]];var s=[["ig",-1,1],["lig",0,1],["elig",1,1],["els",-1,1],["løst",-1,2]];var t=[119,223,119,1];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var u=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16];var c=0;var l=0;var n="";function o(){l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}c=r.cursor;r.cursor=e;r:while(true){var s=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=s;break r}r.cursor=s;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;r:{if(!(ls.limit||(s.cursor=i,l=s.cursor,s.cursor=e,s.go_out_grouping(o,97,248)&&(s.cursor++,s.go_in_grouping(o,97,248))&&(s.cursor++,m=s.cursor,l<=m||(m=l))),s.cursor=r,s.limit_backward=s.cursor,s.cursor=s.limit,s.limit-s.cursor),e=((()=>{var r;if(!(s.cursor{var 
r,i=s.limit-s.cursor;if(s.ket=s.cursor,(!s.eq_s_b("st")||(s.bra=s.cursor,!s.eq_s_b("ig"))||s.slice_del())&&(s.cursor=s.limit-i,!(s.cursor=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=u;var a=r.cursor;r:{r.bra=r.cursor;if(!r.eq_s("y")){r.cursor=a;break r}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}while(true){var t=r.cursor;r:{e:while(true){var o=r.cursor;i:{if(!r.in_grouping(c,97,232)){break i}r.bra=r.cursor;u:{var f=r.cursor;s:{if(!r.eq_s("i")){break s}r.ket=r.cursor;if(!r.in_grouping(c,97,232)){break s}if(!r.slice_from("I")){return false}break u}r.cursor=f;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}r.cursor=o;break e}r.cursor=o;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=t;break}return true}function _(){n=r.limit;l=r.limit;r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n<3)){break r}n=3}r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var e;while(true){var u=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("i")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=u;break}return true}function v(){if(!(n<=r.cursor)){return false}return true}function g(){if(!(l<=r.cursor)){return false}return true}function d(){var e=r.limit-r.cursor;if(r.find_among_b(u)==0){return false}r.cursor=r.limit-e;r.ket=r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}return true}function 
h(){b=false;r.ket=r.cursor;if(!r.eq_s_b("e")){return false}r.bra=r.cursor;if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return false}r.cursor=r.limit-e;if(!r.slice_del()){return false}b=true;if(!d()){return false}return true}function w(){if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return false}r.cursor=r.limit-e;{var i=r.limit-r.cursor;r:{if(!r.eq_s_b("gem")){break r}return false}r.cursor=r.limit-i}if(!r.slice_del()){return false}if(!d()){return false}return true}function p(){var e;var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!v()){break r}if(!r.slice_from("heid")){return false}break;case 2:if(!w()){break r}break;case 3:if(!v()){break r}if(!r.out_grouping_b(f,97,232)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-i;var u=r.limit-r.cursor;h();r.cursor=r.limit-u;var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("heid")){break r}r.bra=r.cursor;if(!g()){break r}{var n=r.limit-r.cursor;e:{if(!r.eq_s_b("c")){break e}break r}r.cursor=r.limit-n}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("en")){break r}r.bra=r.cursor;if(!w()){break r}}r.cursor=r.limit-l;var k=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!g()){break r}if(!r.slice_del()){return false}e:{var _=r.limit-r.cursor;i:{r.ket=r.cursor;if(!r.eq_s_b("ig")){break i}r.bra=r.cursor;if(!g()){break i}{var m=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break i}r.cursor=r.limit-m}if(!r.slice_del()){return false}break e}r.cursor=r.limit-_;if(!d()){break r}}break;case 2:if(!g()){break r}{var p=r.limit-r.cursor;e:{if(!r.eq_s_b("e")){break e}break r}r.cursor=r.limit-p}if(!r.slice_del()){return false}break;case 3:if(!g()){break r}if(!r.slice_del()){return false}if(!h()){break r}break;case 4:if(!g()){break r}if(!r.slice_del()){return false}break;case 5:if(!g()){break r}if(!b){break 
r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-k;var q=r.limit-r.cursor;r:{if(!r.out_grouping_b(o,73,232)){break r}var y=r.limit-r.cursor;if(r.find_among_b(t)==0){break r}if(!r.out_grouping_b(c,97,232)){break r}r.cursor=r.limit-y;r.ket=r.cursor;if(r.cursor<=r.limit_backward){break r}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}}r.cursor=r.limit-q;return true}this.stem=function(){var e=r.cursor;k();r.cursor=e;var i=r.cursor;_();r.cursor=i;r.limit_backward=r.cursor;r.cursor=r.limit;p();r.cursor=r.limit_backward;var u=r.cursor;m();r.cursor=u;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}}; \ No newline at end of file +var DutchStemmer=function(){var o=new BaseStemmer,a=[["a",-1,1],["e",-1,2],["o",-1,1],["u",-1,1],["à",-1,1],["á",-1,1],["â",-1,1],["ä",-1,1],["è",-1,2],["é",-1,2],["ê",-1,2],["eë",-1,3],["ië",-1,4],["ò",-1,1],["ó",-1,1],["ô",-1,1],["ö",-1,1],["ù",-1,1],["ú",-1,1],["û",-1,1],["ü",-1,1]],t=[["nde",-1,8],["en",-1,7],["s",-1,2],["'s",2,1],["es",2,4],["ies",4,3],["aus",2,6],["és",2,5]],e=[["de",-1,5],["ge",-1,2],["ische",-1,4],["je",-1,1],["lijke",-1,3],["le",-1,9],["ene",-1,10],["re",-1,8],["se",-1,7],["te",-1,6],["ieve",-1,11]],s=[["heid",-1,3],["fie",-1,7],["gie",-1,8],["atie",-1,1],["isme",-1,5],["ing",-1,5],["arij",-1,6],["erij",-1,5],["sel",-1,3],["rder",-1,4],["ster",-1,3],["iteit",-1,2],["dst",-1,10],["tst",-1,9]],c=[["end",-1,9],["atief",-1,2],["erig",-1,9],["achtig",-1,3],["ioneel",-1,1],["baar",-1,3],["laar",-1,5],["naar",-1,4],["raar",-1,6],["eriger",-1,9],["achtiger",-1,3],["lijker",-1,8],["tant",-1,7],["erigst",-1,9],["achtigst",-1,3],["lijkst",-1,8]],u=[["ig",-1,1],["iger",-1,1],["igst",-1,1]],f=[["ft",-1,2],["kt",-1,1],["pt",-1,3]],n=[["bb",-1,1],["cc",-1,2],["dd",-1,3],["ff",-1,4],["gg",-1,5],["hh",-1,6],["jj",-1,7],["kk",-1,8],["ll",-1,9],["mm",-1,10],["nn",-1,11],["pp",-1,12],["qq",-1,13],["rr",-1,14],["ss",-1,15],["tt",-1,16],["v",-1,4],["vv",16,17],["ww",-1,18],["xx",-1,
19],["z",-1,15],["zz",20,20]],l=[["d",-1,1],["t",-1,2]],_=[["",-1,-1],["eft",0,1],["vaa",0,1],["val",0,1],["vali",3,-1],["vare",0,1]],m=[["ë",-1,1],["ï",-1,2]],b=[["ë",-1,1],["ï",-1,2]],k=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,120],d=[1,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,11,120,46,15],g=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],v=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],q=[17,65,208,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],w=!1,h=!1,p=0,j=0,z="";function x(){return j<=o.cursor}function C(){return p<=o.cursor}function S(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return(o.in_grouping_b(v,97,252)||(o.cursor=o.limit-i,o.eq_s_b("ij")))&&(o.cursor=o.limit-r,1)}function B(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return!o.eq_s_b("ij")&&(o.cursor=o.limit-i,o.out_grouping_b(v,97,252))&&(o.cursor=o.limit-r,1)}function D(){var r,i=o.limit-o.cursor;r:if(o.out_grouping_b(q,97,252)&&(o.ket=o.cursor,0!=(r=o.find_among_b(a))))switch(o.bra=o.cursor,r){case 1:var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-s,!(o.cursor>o.limit_backward))){if(o.cursor=o.limit-e,""==(z=o.slice_to()))return;s=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=s}break;case 2:var c=o.limit-o.cursor,e=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-e,!(o.cursor>o.limit_backward))){var u=o.limit-o.cursor;i:{var t=o.limit-o.cursor;if(!o.in_grouping_b(d,97,252)){if(o.cursor=o.limit-t,!o.in_grouping_b(k,101,235))break i;if(o.cursor>o.limit_backward)break i}break r}o.cursor=o.limit-u;t=o.limit-o.cursor;if(o.cursor<=o.limit_backward||(o.cursor--,!o.in_grouping_b(d,97,252))||!o.out_grouping_b(g,97,252)){if(o.cursor=o.limit-t,o.cursor=o.limit-c,""==(z=o.slice_to()))return;t=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=t}}break;case 3:if(o.slice_from("eëe"))break;return;case 4:if(o.slice_from("iee"))break;return}o.cursor=o.limit-i}function W(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(e))){switch(o.bra=o.cursor,r){case 1:r:{var 
i=o.limit-o.cursor;if(o.eq_s_b("'t")){if(o.bra=o.cursor,o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("et")&&(o.bra=o.cursor,x())&&B()){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("rnt")){if(o.bra=o.cursor,o.slice_from("rn"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("t")&&(o.bra=o.cursor,x())&&(()=>{var r,i=o.limit-o.cursor;return!(o.cursor<=o.limit_backward)&&(o.cursor--,r=o.limit-o.cursor,o.in_grouping_b(v,97,252)||(o.cursor=o.limit-r,o.eq_s_b("ij")))?(o.cursor=o.limit-i,1):void 0})()){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("ink")){if(o.bra=o.cursor,o.slice_from("ing"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("mp")){if(o.bra=o.cursor,o.slice_from("m"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("'")&&(o.bra=o.cursor,x())){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.bra=o.cursor,!x())return;if(!B())return;if(!o.slice_del())return}break;case 2:if(!x())return;if(o.slice_from("g"))break;return;case 3:if(!x())return;if(o.slice_from("lijk"))break;return;case 4:if(!x())return;if(o.slice_from("isch"))break;return;case 5:if(!x())return;if(!B())return;if(o.slice_del())break;return;case 6:if(!x())return;if(o.slice_from("t"))break;return;case 7:if(!x())return;if(o.slice_from("s"))break;return;case 8:if(!x())return;if(o.slice_from("r"))break;return;case 9:if(!x())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"l"),D();break;case 10:if(!x())return;if(!B())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"en"),D();break;case 11:if(!x())return;if(!B())return;if(o.slice_from("ief"))break;return}return 1}}function y(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(l))&&(o.bra=o.cursor,x())&&B()){switch(r){case 1:var i=o.limit-o.cursor;if(o.eq_s_b("n")&&x())return;o.cursor=o.limit-i;r:{var e=o.limit-o.cursor;if(o.eq_s_b("in")&&!(o.cursor>o.limit_backward)){if(o.slice_from("n"))break r;return}if(o.cursor=o.limit-e,!o.slice_del())return}break;case 
2:i=o.limit-o.cursor;if(o.eq_s_b("h")&&x())return;o.cursor=o.limit-i;i=o.limit-o.cursor;if(o.eq_s_b("en")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_del())break;return}return 1}}function A(){j=o.limit,p=o.limit;for(var r=o.cursor;o.out_grouping(v,97,252););for(var i=1;;){var e=o.cursor,s=o.cursor;if(!o.eq_s("ij")&&(o.cursor=s,!o.in_grouping(v,97,252))){o.cursor=e;break}i--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(t))){switch(o.bra=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("t")&&x())return;if(o.cursor=o.limit-i,!B())return;if(o.slice_del())break;return;case 3:if(!x())return;if(o.slice_from("ie"))break;return;case 4:r:{var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("ar")&&x()&&B()){if(o.cursor=o.limit-s,!o.slice_del())return;D()}else{o.cursor=o.limit-e;s=o.limit-o.cursor;if(o.eq_s_b("er")&&x()&&B()){if(o.cursor=o.limit-s,o.slice_del())break r;return}if(o.cursor=o.limit-e,!x())return;if(!B())return;if(!o.slice_from("e"))return}}break;case 5:if(!x())return;if(o.slice_from("é"))break;return;case 6:if(!x())return;if(!S())return;if(o.slice_from("au"))break;return;case 7:r:{var c=o.limit-o.cursor;if(o.eq_s_b("hed")&&x()){if(o.bra=o.cursor,o.slice_from("heid"))break r;return}if(o.cursor=o.limit-c,o.eq_s_b("nd")){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,o.eq_s_b("d")&&x()&&B()){if(o.bra=o.cursor,o.slice_del())break r;return}o.cursor=o.limit-c;var u=o.limit-o.cursor;if((o.eq_s_b("i")||(o.cursor=o.limit-u,o.eq_s_b("j")))&&S()){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,!x())return;if(!B())return;if(!o.slice_del())return;D()}break;case 8:if(o.slice_from("nd"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=(W()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(s))){switch(o.bra=o.cursor,r){case 1:if(!x())return;if(o.slice_from("eer"))break;return;case 
2:if(!x())return;if(!o.slice_del())return;D();break;case 3:if(!x())return;if(o.slice_del())break;return;case 4:if(o.slice_from("r"))break;return;case 5:r:{var i=o.limit-o.cursor;if(o.eq_s_b("ild")){if(o.slice_from("er"))break r;return}if(o.cursor=o.limit-i,!x())return;if(!o.slice_del())return;D()}break;case 6:if(!x())return;if(!B())return;if(o.slice_from("aar"))break;return;case 7:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"f"),D();break;case 8:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"g"),D();break;case 9:if(!x())return;if(!B())return;if(o.slice_from("t"))break;return;case 10:if(!x())return;if(!B())return;if(o.slice_from("d"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{r:{var r=o.limit-o.cursor;i:if(o.ket=o.cursor,0!=(i=o.find_among_b(c))){switch(o.bra=o.cursor,i){case 1:if(!x())break i;if(o.slice_from("ie"))break;return;case 2:if(!x())break i;if(o.slice_from("eer"))break;return;case 3:if(!x())break i;if(o.slice_del())break;return;case 4:if(!x())break i;if(!S())break i;if(o.slice_from("n"))break;return;case 5:if(!x())break i;if(!S())break i;if(o.slice_from("l"))break;return;case 6:if(!x())break i;if(!S())break i;if(o.slice_from("r"))break;return;case 7:if(!x())break i;if(o.slice_from("teer"))break;return;case 8:if(!x())break i;if(o.slice_from("lijk"))break;return;case 9:if(!x())break i;if(!B())break i;if(!o.slice_del())return;D()}break r}if(o.cursor=o.limit-r,o.ket=o.cursor,0==o.find_among_b(u))return;if(o.bra=o.cursor,!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("inn")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,!B())return;if(!o.slice_del())return;D()}return 1})()&&(h=!0),o.cursor=o.limit-r,o.cursor=o.limit_backward,w=!1,o.cursor),i=o.cursor,i=((()=>{if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var 
e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{if(!(o.cursor>=o.limit)){for(o.cursor++;;){if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;break}if(o.cursor>=o.limit)return;o.cursor++}var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(f))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("k"))break;return;case 2:if(o.slice_from("f"))break;return;case 3:if(o.slice_from("p"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor);return h&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(n))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("b"))break;return;case 2:if(o.slice_from("c"))break;return;case 3:if(o.slice_from("d"))break;return;case 4:if(o.slice_from("f"))break;return;case 5:if(o.slice_from("g"))break;return;case 6:if(o.slice_from("h"))break;return;case 7:if(o.slice_from("j"))break;return;case 8:if(o.slice_from("k"))break;return;case 9:if(o.slice_from("l"))break;return;case 10:if(o.slice_from("m"))break;return;case 11:var i=o.limit-o.cursor;if(o.eq_s_b("i")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_from("n"))break;return;case 12:if(o.slice_from("p"))break;return;case 13:if(o.slice_from("q"))break;return;case 14:if(o.slice_from("r"))break;return;case 15:if(o.slice_from("s"))break;return;case 16:if(o.slice_from("t"))break;return;case 17:if(o.slice_from("v"))break;return;case 18:if(o.slice_from("w"))break;return;case 19:if(o.slice_from("x"))break;return;case 
20:if(o.slice_from("z"))break;return}}})(),o.cursor=o.limit-i,o.cursor=o.limit_backward,!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/dutch_porter-stemmer.js b/sphinx/search/minified-js/dutch_porter-stemmer.js new file mode 100644 index 00000000000..32f195914c2 --- /dev/null +++ b/sphinx/search/minified-js/dutch_porter-stemmer.js @@ -0,0 +1 @@ +var DutchPorterStemmer=function(){var t=new BaseStemmer,a=[["",-1,6],["á",0,1],["ä",0,1],["é",0,2],["ë",0,2],["í",0,3],["ï",0,3],["ó",0,4],["ö",0,4],["ú",0,5],["ü",0,5]],s=[["",-1,3],["I",0,2],["Y",0,1]],i=[["dd",-1,-1],["kk",-1,-1],["tt",-1,-1]],c=[["ene",-1,2],["se",-1,3],["en",-1,2],["heden",2,1],["s",-1,3]],n=[["end",-1,1],["ig",-1,2],["ing",-1,1],["lijk",-1,3],["baar",-1,4],["bar",-1,5]],_=[["aa",-1,-1],["ee",-1,-1],["oo",-1,-1],["uu",-1,-1]],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],f=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],b=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],u=0,o=0,m=0,k=!1;function g(){return m<=t.cursor}function d(){return o<=t.cursor}function v(){var r=t.limit-t.cursor;return 0!=t.find_among_b(i)&&(t.cursor=t.limit-r,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,!t.slice_del())))}function h(){var r;if((k=!1,t.ket=t.cursor,t.eq_s_b("e"))&&(t.bra=t.cursor,g()))return r=t.limit-t.cursor,t.out_grouping_b(l,97,232)?(t.cursor=t.limit-r,t.slice_del()&&(k=!0,!!v())):void 0}function p(){if(g()){var r=t.limit-t.cursor;if(t.out_grouping_b(l,97,232))return t.cursor=t.limit-r,r=t.limit-t.cursor,t.eq_s_b("gem")?void 0:(t.cursor=t.limit-r,t.slice_del()&&!!v())}}this.stem=function(){var r,i=t.cursor,i=((()=>{for(var r,i=t.cursor;;){var e=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(a),t.ket=t.cursor,r){case 1:if(t.slice_from("a"))break;return;case 2:if(t.slice_from("e"))break;return;case 3:if(t.slice_from("i"))break;return;case 4:if(t.slice_from("o"))break;return;case 
5:if(t.slice_from("u"))break;return;case 6:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=e;break}if(t.cursor=i,i=t.cursor,t.bra=t.cursor,t.eq_s("y")){if(t.ket=t.cursor,!t.slice_from("Y"))return}else t.cursor=i;for(;;){var s=t.cursor;if(!t.go_out_grouping(l,97,232)){t.cursor=s;break}t.cursor++;var u=t.cursor;r:{t.bra=t.cursor;var o=t.cursor;if(t.eq_s("i")){t.ket=t.cursor;var c=t.cursor;if(t.in_grouping(l,97,232)&&!t.slice_from("I"))return;t.cursor=c}else{if(t.cursor=o,!t.eq_s("y")){t.cursor=u;break r}if(t.ket=t.cursor,!t.slice_from("Y"))return}}}})(),t.cursor=i,t.cursor),e=(m=t.limit,o=t.limit,r=t.cursor,(e=t.cursor+3)>t.limit||(t.cursor=e,u=t.cursor,t.cursor=r,t.go_out_grouping(l,97,232)&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,m=t.cursor,u<=m||(m=u),t.go_out_grouping(l,97,232))&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,o=t.cursor)),t.cursor=i,t.limit_backward=t.cursor,t.cursor=t.limit,(()=>{var r,i=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(c)))switch(t.bra=t.cursor,r){case 1:if(!g())break r;if(t.slice_from("heid"))break;return;case 2:p();break;case 3:if(!g())break r;if(!t.out_grouping_b(b,97,232))break r;if(t.slice_del())break;return}if(t.cursor=t.limit-i,i=t.limit-t.cursor,h(),t.cursor=t.limit-i,i=t.limit-t.cursor,t.ket=t.cursor,t.eq_s_b("heid")&&(t.bra=t.cursor,d())){var e=t.limit-t.cursor;if(!t.eq_s_b("c")){if(t.cursor=t.limit-e,!t.slice_del())return;t.ket=t.cursor,t.eq_s_b("en")&&(t.bra=t.cursor,p())}}t.cursor=t.limit-i,e=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(n)))switch(t.bra=t.cursor,r){case 1:if(d()){if(!t.slice_del())return;i:{var s=t.limit-t.cursor;if(t.ket=t.cursor,t.eq_s_b("ig")&&(t.bra=t.cursor,d())){var u=t.limit-t.cursor;if(!t.eq_s_b("e")){if(t.cursor=t.limit-u,t.slice_del())break i;return}}if(t.cursor=t.limit-s,!v())break r}}break;case 2:if(!d())break r;var o=t.limit-t.cursor;if(t.eq_s_b("e"))break r;if(t.cursor=t.limit-o,t.slice_del())break;return;case 
3:if(d()){if(!t.slice_del())return;h()}break;case 4:if(!d())break r;if(t.slice_del())break;return;case 5:if(!d())break r;if(!k)break r;if(t.slice_del())break;return}if(t.cursor=t.limit-e,i=t.limit-t.cursor,t.out_grouping_b(f,73,232)){e=t.limit-t.cursor;if(0!=t.find_among_b(_)&&t.out_grouping_b(l,97,232)&&(t.cursor=t.limit-e,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,t.slice_del()))))return}t.cursor=t.limit-i})(),t.cursor=t.limit_backward,t.cursor);return(()=>{for(var r;;){var i=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(s),t.ket=t.cursor,r){case 1:if(t.slice_from("y"))break;return;case 2:if(t.slice_from("i"))break;return;case 3:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=i;break}})(),t.cursor=e,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/english-stemmer.js b/sphinx/search/minified-js/english-stemmer.js new file mode 100644 index 00000000000..e005f991e6a --- /dev/null +++ b/sphinx/search/minified-js/english-stemmer.js @@ -0,0 +1 @@ +var EnglishStemmer=function(){var a=new 
BaseStemmer,c=[["arsen",-1,-1],["commun",-1,-1],["emerg",-1,-1],["gener",-1,-1],["later",-1,-1],["organ",-1,-1],["past",-1,-1],["univers",-1,-1]],o=[["'",-1,1],["'s'",0,1],["'s",-1,1]],u=[["ied",-1,2],["s",-1,3],["ies",1,2],["sses",1,1],["ss",1,-1],["us",1,-1]],t=[["succ",-1,1],["proc",-1,1],["exc",-1,1]],l=[["even",-1,2],["cann",-1,2],["inn",-1,2],["earr",-1,2],["herr",-1,2],["out",-1,2],["y",-1,1]],n=[["",-1,-1],["ed",0,2],["eed",1,1],["ing",0,3],["edly",0,2],["eedly",4,1],["ingly",0,2]],f=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],_=[["anci",-1,3],["enci",-1,2],["ogi",-1,14],["li",-1,16],["bli",3,12],["abli",4,4],["alli",3,8],["fulli",3,9],["lessli",3,15],["ousli",3,10],["entli",3,5],["aliti",-1,8],["biliti",-1,12],["iviti",-1,11],["tional",-1,1],["ational",14,7],["alism",-1,8],["ation",-1,7],["ization",17,6],["izer",-1,6],["ator",-1,7],["iveness",-1,11],["fulness",-1,9],["ousness",-1,10],["ogist",-1,13]],m=[["icate",-1,4],["ative",-1,6],["alize",-1,3],["iciti",-1,4],["ical",-1,4],["tional",-1,1],["ational",5,2],["ful",-1,5],["ness",-1,5]],b=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1]],k=[["e",-1,1],["l",-1,2]],g=[["andes",-1,-1],["atlas",-1,-1],["bias",-1,-1],["cosmos",-1,-1],["early",-1,5],["gently",-1,3],["howe",-1,-1],["idly",-1,2],["news",-1,-1],["only",-1,6],["singly",-1,7],["skies",-1,1],["sky",-1,-1],["ugly",-1,4]],d=[17,64],v=[17,65,16,1],i=[1,17,65,208,1],w=[55,141,2],p=!1,y=0,h=0;function q(){var r=a.limit-a.cursor;return!!(a.out_grouping_b(i,89,121)&&a.in_grouping_b(v,97,121)&&a.out_grouping_b(v,97,121)||(a.cursor=a.limit-r,a.out_grouping_b(v,97,121)&&a.in_grouping_b(v,97,121)&&!(a.cursor>a.limit_backward))||(a.cursor=a.limit-r,a.eq_s_b("past")))}function 
z(){return h<=a.cursor}function Y(){return y<=a.cursor}this.stem=function(){var r=a.cursor;if(!(()=>{var r;if(a.bra=a.cursor,0!=(r=a.find_among(g))&&(a.ket=a.cursor,!(a.cursora.limit)a.cursor=i;else{a.cursor=e,a.cursor=r,(()=>{p=!1;var r=a.cursor;if(a.bra=a.cursor,!a.eq_s("'")||(a.ket=a.cursor,a.slice_del())){a.cursor=r;r=a.cursor;if(a.bra=a.cursor,a.eq_s("y")){if(a.ket=a.cursor,!a.slice_from("Y"))return;p=!0}a.cursor=r;for(r=a.cursor;;){var i=a.cursor;r:{for(;;){var e=a.cursor;if(a.in_grouping(v,97,121)&&(a.bra=a.cursor,a.eq_s("y"))){a.ket=a.cursor,a.cursor=e;break}if(a.cursor=e,a.cursor>=a.limit)break r;a.cursor++}if(!a.slice_from("Y"))return;p=!0;continue}a.cursor=i;break}a.cursor=r}})(),h=a.limit,y=a.limit;i=a.cursor;r:{var s=a.cursor;if(0==a.find_among(c)){if(a.cursor=s,!a.go_out_grouping(v,97,121))break r;if(a.cursor++,!a.go_in_grouping(v,97,121))break r;a.cursor++}h=a.cursor,a.go_out_grouping(v,97,121)&&(a.cursor++,a.go_in_grouping(v,97,121))&&(a.cursor++,y=a.cursor)}a.cursor=i,a.limit_backward=a.cursor,a.cursor=a.limit;var e=a.limit-a.cursor,r=((()=>{var r=a.limit-a.cursor;if(a.ket=a.cursor,0==a.find_among_b(o))a.cursor=a.limit-r;else if(a.bra=a.cursor,!a.slice_del())return;if(a.ket=a.cursor,0!=(r=a.find_among_b(u)))switch(a.bra=a.cursor,r){case 1:if(a.slice_from("ss"))break;return;case 2:r:{var i=a.limit-a.cursor,e=a.cursor-2;if(!(e{a.ket=a.cursor,o=a.find_among_b(n),a.bra=a.cursor;r:{var r=a.limit-a.cursor;i:{switch(o){case 1:var i=a.limit-a.cursor;e:{var e=a.limit-a.cursor;if(0==a.find_among_b(t)||a.cursor>a.limit_backward){if(a.cursor=a.limit-e,!z())break e;if(!a.slice_from("ee"))return}}a.cursor=a.limit-i;break;case 2:break i;case 3:if(0==(o=a.find_among_b(l)))break i;switch(o){case 1:var s=a.limit-a.cursor;if(!a.out_grouping_b(v,97,121))break i;if(a.cursor>a.limit_backward)break i;if(a.cursor=a.limit-s,a.bra=a.cursor,a.slice_from("ie"))break;return;case 2:if(a.cursor>a.limit_backward)break i}}break r}a.cursor=a.limit-r;var 
c=a.limit-a.cursor;if(!a.go_out_grouping_b(v,97,121))return;if(a.cursor--,a.cursor=a.limit-c,!a.slice_del())return;a.ket=a.cursor,a.bra=a.cursor;var o,c=a.limit-a.cursor;switch(o=a.find_among_b(f)){case 1:return a.slice_from("e");case 2:var u=a.limit-a.cursor;if(a.in_grouping_b(d,97,111)&&!(a.cursor>a.limit_backward))return;a.cursor=a.limit-u;break;case 3:return a.cursor!=h||(u=a.limit-a.cursor,q()&&(a.cursor=a.limit-u,a.slice_from("e")))}if(a.cursor=a.limit-c,a.ket=a.cursor,a.cursor<=a.limit_backward)return;if(a.cursor--,a.bra=a.cursor,!a.slice_del())return}})(),a.cursor=a.limit-r,a.limit-a.cursor),r=(a.ket=a.cursor,e=a.limit-a.cursor,(a.eq_s_b("y")||(a.cursor=a.limit-e,a.eq_s_b("Y")))&&(a.bra=a.cursor,a.out_grouping_b(v,97,121))&&a.cursor>a.limit_backward&&a.slice_from("i"),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ence"))break;return;case 3:if(a.slice_from("ance"))break;return;case 4:if(a.slice_from("able"))break;return;case 5:if(a.slice_from("ent"))break;return;case 6:if(a.slice_from("ize"))break;return;case 7:if(a.slice_from("ate"))break;return;case 8:if(a.slice_from("al"))break;return;case 9:if(a.slice_from("ful"))break;return;case 10:if(a.slice_from("ous"))break;return;case 11:if(a.slice_from("ive"))break;return;case 12:if(a.slice_from("ble"))break;return;case 13:if(a.slice_from("og"))break;return;case 14:if(!a.eq_s_b("l"))return;if(a.slice_from("og"))break;return;case 15:if(a.slice_from("less"))break;return;case 16:if(!a.in_grouping_b(w,99,116))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),i=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(m))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ate"))break;return;case 3:if(a.slice_from("al"))break;return;case 4:if(a.slice_from("ic"))break;return;case 
5:if(a.slice_del())break;return;case 6:if(!Y())return;if(a.slice_del())break}})(),a.cursor=a.limit-e,a.limit-a.cursor),r=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(b))&&(a.bra=a.cursor,Y()))switch(r){case 1:if(a.slice_del())break;return;case 2:var i=a.limit-a.cursor;if(!a.eq_s_b("s")&&(a.cursor=a.limit-i,!a.eq_s_b("t")))return;if(a.slice_del())break}})(),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(k)))switch(a.bra=a.cursor,r){case 1:if(!Y()){if(!z())return;var i=a.limit-a.cursor;if(q())return;a.cursor=a.limit-i}if(a.slice_del())break;return;case 2:if(!Y())return;if(!a.eq_s_b("l"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.cursor=a.limit_backward,a.cursor);(()=>{if(p)for(;;){var r=a.cursor;r:{for(;;){var i=a.cursor;if(a.bra=a.cursor,a.eq_s("Y")){a.ket=a.cursor,a.cursor=i;break}if(a.cursor=i,a.cursor>=a.limit)break r;a.cursor++}if(a.slice_from("y"))continue;return}a.cursor=r;break}})(),a.cursor=e}}return!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/esperanto-stemmer.js b/sphinx/search/minified-js/esperanto-stemmer.js new file mode 100644 index 00000000000..44353e6a150 --- /dev/null +++ b/sphinx/search/minified-js/esperanto-stemmer.js @@ -0,0 +1 @@ +var EsperantoStemmer=function(){var o=new 
BaseStemmer,s=[["",-1,14],["-",0,13],["cx",0,1],["gx",0,2],["hx",0,3],["jx",0,4],["q",0,12],["sx",0,5],["ux",0,6],["w",0,12],["x",0,12],["y",0,12],["á",0,7],["é",0,8],["í",0,9],["ó",0,10],["ú",0,11]],i=[["as",-1,-1],["i",-1,-1],["is",1,-1],["os",-1,-1],["u",-1,-1],["us",4,-1]],u=[["ci",-1,-1],["gi",-1,-1],["hi",-1,-1],["li",-1,-1],["ili",3,-1],["ŝli",3,-1],["mi",-1,-1],["ni",-1,-1],["oni",7,-1],["ri",-1,-1],["si",-1,-1],["vi",-1,-1],["ivi",11,-1],["ĝi",-1,-1],["ŝi",-1,-1],["iŝi",14,-1],["malŝi",14,-1]],e=[["amb",-1,-1],["bald",-1,-1],["malbald",1,-1],["morg",-1,-1],["postmorg",3,-1],["adi",-1,-1],["hodi",-1,-1],["ank",-1,-1],["ĉirk",-1,-1],["tutĉirk",8,-1],["presk",-1,-1],["almen",-1,-1],["apen",-1,-1],["hier",-1,-1],["antaŭhier",13,-1],["malgr",-1,-1],["ankor",-1,-1],["kontr",-1,-1],["anstat",-1,-1],["kvaz",-1,-1]],c=[["aliu",-1,-1],["unu",-1,-1]],a=[["aha",-1,-1],["haha",0,-1],["haleluja",-1,-1],["hola",-1,-1],["hosana",-1,-1],["maltra",-1,-1],["hura",-1,-1],["ĥaĥa",-1,-1],["ekde",-1,-1],["elde",-1,-1],["disde",-1,-1],["ehe",-1,-1],["maltre",-1,-1],["dirlididi",-1,-1],["malpli",-1,-1],["malĉi",-1,-1],["malkaj",-1,-1],["amen",-1,-1],["tamen",17,-1],["oho",-1,-1],["maltro",-1,-1],["minus",-1,-1],["uhu",-1,-1],["muu",-1,-1]],t=[["tri",-1,-1],["du",-1,-1],["unu",-1,-1]],m=[["dek",-1,-1],["cent",-1,-1]],l=[["k",-1,-1],["kelk",0,-1],["nen",-1,-1],["t",-1,-1],["mult",3,-1],["samt",3,-1],["ĉ",-1,-1]],n=[["a",-1,-1],["e",-1,-1],["i",-1,-1],["j",-1,-1,r],["aj",3,-1],["oj",3,-1],["n",-1,-1,r],["an",6,-1],["en",6,-1],["jn",6,-1,r],["ajn",9,-1],["ojn",9,-1],["on",6,-1],["o",-1,-1],["as",-1,-1],["is",-1,-1],["os",-1,-1],["us",-1,-1],["u",-1,-1]],_=[17,65,16],b=[1,64,16],f=[255,3],k=!1;function r(){var r=o.limit-o.cursor;return!(!o.eq_s_b("-")&&(o.cursor=o.limit-r,!o.in_grouping_b(f,48,57)))}this.stem=function(){var r=o.cursor;if(!(()=>{var r;for(k=!1;;){var i=o.cursor;r:{switch(o.bra=o.cursor,r=o.find_among(s),o.ket=o.cursor,r){case 1:if(o.slice_from("ĉ"))break;return;case 
2:if(o.slice_from("ĝ"))break;return;case 3:if(o.slice_from("ĥ"))break;return;case 4:if(o.slice_from("ĵ"))break;return;case 5:if(o.slice_from("ŝ"))break;return;case 6:if(o.slice_from("ŭ"))break;return;case 7:if(!o.slice_from("a"))return;k=!0;break;case 8:if(!o.slice_from("e"))return;k=!0;break;case 9:if(!o.slice_from("i"))return;k=!0;break;case 10:if(!o.slice_from("o"))return;k=!0;break;case 11:if(!o.slice_from("u"))return;k=!0;break;case 12:k=!0;break;case 13:k=!1;break;case 14:if(o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}return!k})())return!1;o.cursor=r;r=o.cursor,o.bra=o.cursor,o.eq_s("'")&&(o.ket=o.cursor,!o.eq_s("st")||0==o.find_among(i)||o.cursor{o.ket=o.cursor;var r=o.limit-o.cursor;return o.eq_s_b("n")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(u)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor,(()=>{if(o.ket=o.cursor,o.eq_s_b("'")){o.bra=o.cursor;r:{var r=o.limit-o.cursor;if(o.eq_s_b("l")&&!(o.cursor>o.limit_backward)){if(o.slice_from("a"))break r;return}if(o.cursor=o.limit-r,o.eq_s_b("un")&&!(o.cursor>o.limit_backward)){if(o.slice_from("u"))break r;return}if(o.cursor=o.limit-r,0!=o.find_among_b(e)){var i=o.limit-o.cursor;if(!(o.cursor>o.limit_backward)||(o.cursor=o.limit-i,o.eq_s_b("-"))){if(o.slice_from("aŭ"))break r;return}}if(o.cursor=o.limit-r,!o.slice_from("o"))return}}})(),o.cursor=o.limit-r,r=o.limit-o.cursor;if((()=>{o.ket=o.cursor,o.bra=o.cursor;var r=o.limit-o.cursor,i=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("n")||(o.cursor=o.limit-s),o.bra=o.cursor,!o.eq_s_b("e")){o.cursor=o.limit-i;var 
s=o.limit-o.cursor,i=(o.eq_s_b("n")||(o.cursor=o.limit-s),o.limit-o.cursor);if(o.eq_s_b("j")||(o.cursor=o.limit-i),o.bra=o.cursor,!o.in_grouping_b(b,97,117))return}return!o.eq_s_b("i")||(s=o.limit-o.cursor,0==o.find_among_b(l)&&(o.cursor=o.limit-s),i=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-i,!o.eq_s_b("-")))?void 0:(o.cursor=o.limit-r,!!o.slice_del())})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{var r;return 0==o.find_among_b(a)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:1})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if(0!=o.find_among_b(t)&&0!=o.find_among_b(m))return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{o.ket=o.cursor;var r=o.limit-o.cursor,r=(o.eq_s_b("n")||(o.cursor=o.limit-r),o.limit-o.cursor);return o.eq_s_b("j")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(c)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;return!!(()=>{r:{var r=o.limit-o.cursor;i:{for(var i=2;0{var r;return o.ket=o.cursor,0!=o.find_among_b(n)&&(r=o.limit-o.cursor,o.eq_s_b("-")||(o.cursor=o.limit-r),o.bra=o.cursor,o.slice_del())?1:void 0})())&&(o.cursor=o.limit_backward,!0)},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/estonian-stemmer.js b/sphinx/search/minified-js/estonian-stemmer.js new file mode 100644 index 00000000000..d27e90fcd3b --- /dev/null +++ b/sphinx/search/minified-js/estonian-stemmer.js @@ -0,0 +1 @@ +var EstonianStemmer=function(){var t=new 
BaseStemmer,a=[["gi",-1,1],["ki",-1,2]],r=[["da",-1,3],["mata",-1,1],["b",-1,3],["ksid",-1,1],["nuksid",3,1],["me",-1,3],["sime",5,1],["ksime",6,1],["nuksime",7,1],["akse",-1,2],["dakse",9,1],["takse",9,1],["site",-1,1],["ksite",12,1],["nuksite",13,1],["n",-1,3],["sin",15,1],["ksin",16,1],["nuksin",17,1],["daks",-1,1],["taks",-1,1]],i=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["õõ",-1,-1],["öö",-1,-1],["üü",-1,-1]],s=[["i",-1,1]],o=[["lane",-1,1],["line",-1,3],["mine",-1,2],["lasse",-1,1],["lisse",-1,3],["misse",-1,2],["lasi",-1,1],["lisi",-1,3],["misi",-1,2],["last",-1,1],["list",-1,3],["mist",-1,2]],k=[["ga",-1,1],["ta",-1,1],["le",-1,1],["sse",-1,1],["l",-1,1],["s",-1,1],["ks",5,1],["t",-1,2],["lt",7,1],["st",7,1]],m=[["",-1,2],["las",0,1],["lis",0,1],["mis",0,1],["t",0,-1]],l=[["d",-1,4],["sid",0,2],["de",-1,4],["ikkude",2,1],["ike",-1,1],["ikke",-1,1],["te",-1,3]],c=[["va",-1,-1],["du",-1,-1],["nu",-1,-1],["tu",-1,-1]],n=[["kk",-1,1],["pp",-1,2],["tt",-1,3]],u=[["ma",-1,2],["mai",-1,1],["m",-1,1]],d=[["joob",-1,1],["jood",-1,1],["joodakse",1,1],["jooma",-1,1],["joomata",3,1],["joome",-1,1],["joon",-1,1],["joote",-1,1],["joovad",-1,1],["juua",-1,1],["juuakse",9,1],["jäi",-1,12],["jäid",11,12],["jäime",11,12],["jäin",11,12],["jäite",11,12],["jääb",-1,12],["jääd",-1,12],["jääda",17,12],["jäädakse",18,12],["jäädi",17,12],["jääks",-1,12],["jääksid",21,12],["jääksime",21,12],["jääksin",21,12],["jääksite",21,12],["jääma",-1,12],["jäämata",26,12],["jääme",-1,12],["jään",-1,12],["jääte",-1,12],["jäävad",-1,12],["jõi",-1,1],["jõid",32,1],["jõime",32,1],["jõin",32,1],["jõite",32,1],["keeb",-1,4],["keed",-1,4],["keedakse",38,4],["keeks",-1,4],["keeksid",40,4],["keeksime",40,4],["keeksin",40,4],["keeksite",40,4],["keema",-1,4],["keemata",45,4],["keeme",-1,4],["keen",-1,4],["kees",-1,4],["keeta",-1,4],["keete",-1,4],["keevad",-1,4],["käia",-1,8],["käiakse",53,8],["käib",-1,8],["käid",-1,8],["käidi",56,8],["käiks",-1,8],["käiksid",58,8],["
käiksime",58,8],["käiksin",58,8],["käiksite",58,8],["käima",-1,8],["käimata",63,8],["käime",-1,8],["käin",-1,8],["käis",-1,8],["käite",-1,8],["käivad",-1,8],["laob",-1,16],["laod",-1,16],["laoks",-1,16],["laoksid",72,16],["laoksime",72,16],["laoksin",72,16],["laoksite",72,16],["laome",-1,16],["laon",-1,16],["laote",-1,16],["laovad",-1,16],["loeb",-1,14],["loed",-1,14],["loeks",-1,14],["loeksid",83,14],["loeksime",83,14],["loeksin",83,14],["loeksite",83,14],["loeme",-1,14],["loen",-1,14],["loete",-1,14],["loevad",-1,14],["loob",-1,7],["lood",-1,7],["loodi",93,7],["looks",-1,7],["looksid",95,7],["looksime",95,7],["looksin",95,7],["looksite",95,7],["looma",-1,7],["loomata",100,7],["loome",-1,7],["loon",-1,7],["loote",-1,7],["loovad",-1,7],["luua",-1,7],["luuakse",106,7],["lõi",-1,6],["lõid",108,6],["lõime",108,6],["lõin",108,6],["lõite",108,6],["lööb",-1,5],["lööd",-1,5],["löödakse",114,5],["löödi",114,5],["lööks",-1,5],["lööksid",117,5],["lööksime",117,5],["lööksin",117,5],["lööksite",117,5],["lööma",-1,5],["löömata",122,5],["lööme",-1,5],["löön",-1,5],["lööte",-1,5],["löövad",-1,5],["lüüa",-1,5],["lüüakse",128,5],["müüa",-1,13],["müüakse",130,13],["müüb",-1,13],["müüd",-1,13],["müüdi",133,13],["müüks",-1,13],["müüksid",135,13],["müüksime",135,13],["müüksin",135,13],["müüksite",135,13],["müüma",-1,13],["müümata",140,13],["müüme",-1,13],["müün",-1,13],["müüs",-1,13],["müüte",-1,13],["müüvad",-1,13],["näeb",-1,18],["näed",-1,18],["näeks",-1,18],["näeksid",149,18],["näeksime",149,18],["näeksin",149,18],["näeksite",149,18],["näeme",-1,18],["näen",-1,18],["näete",-1,18],["näevad",-1,18],["nägema",-1,18],["nägemata",158,18],["näha",-1,18],["nähakse",160,18],["nähti",-1,18],["põeb",-1,15],["põed",-1,15],["põeks",-1,15],["põeksid",165,15],["põeksime",165,15],["põeksin",165,15],["põeksite",165,15],["põeme",-1,15],["põen",-1,15],["põete",-1,15],["põevad",-1,15],["saab",-1,2],["saad",-1,2],["saada",175,2],["saadakse",176,2],["saadi",175,2],["saaks",-1,2],["saaksid",179,2],["saak
sime",179,2],["saaksin",179,2],["saaksite",179,2],["saama",-1,2],["saamata",184,2],["saame",-1,2],["saan",-1,2],["saate",-1,2],["saavad",-1,2],["sai",-1,2],["said",190,2],["saime",190,2],["sain",190,2],["saite",190,2],["sõi",-1,9],["sõid",195,9],["sõime",195,9],["sõin",195,9],["sõite",195,9],["sööb",-1,9],["sööd",-1,9],["söödakse",201,9],["söödi",201,9],["sööks",-1,9],["sööksid",204,9],["sööksime",204,9],["sööksin",204,9],["sööksite",204,9],["sööma",-1,9],["söömata",209,9],["sööme",-1,9],["söön",-1,9],["sööte",-1,9],["söövad",-1,9],["süüa",-1,9],["süüakse",215,9],["teeb",-1,17],["teed",-1,17],["teeks",-1,17],["teeksid",219,17],["teeksime",219,17],["teeksin",219,17],["teeksite",219,17],["teeme",-1,17],["teen",-1,17],["teete",-1,17],["teevad",-1,17],["tegema",-1,17],["tegemata",228,17],["teha",-1,17],["tehakse",230,17],["tehti",-1,17],["toob",-1,10],["tood",-1,10],["toodi",234,10],["tooks",-1,10],["tooksid",236,10],["tooksime",236,10],["tooksin",236,10],["tooksite",236,10],["tooma",-1,10],["toomata",241,10],["toome",-1,10],["toon",-1,10],["toote",-1,10],["toovad",-1,10],["tuua",-1,10],["tuuakse",247,10],["tõi",-1,10],["tõid",249,10],["tõime",249,10],["tõin",249,10],["tõite",249,10],["viia",-1,3],["viiakse",254,3],["viib",-1,3],["viid",-1,3],["viidi",257,3],["viiks",-1,3],["viiksid",259,3],["viiksime",259,3],["viiksin",259,3],["viiksite",259,3],["viima",-1,3],["viimata",264,3],["viime",-1,3],["viin",-1,3],["viisime",-1,3],["viisin",-1,3],["viisite",-1,3],["viite",-1,3],["viivad",-1,3],["võib",-1,11],["võid",-1,11],["võida",274,11],["võidakse",275,11],["võidi",274,11],["võiks",-1,11],["võiksid",278,11],["võiksime",278,11],["võiksin",278,11],["võiksite",278,11],["võima",-1,11],["võimata",283,11],["võime",-1,11],["võin",-1,11],["võis",-1,11],["võite",-1,11],["võivad",-1,11]],b=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],_=[17,65,16],f=[117,66,6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,0,0,0,16],v=[21,123,243,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],w=0;fun
ction g(){return 0!=t.find_among_b(i)}function j(){var i=t.limit-t.cursor,i=((()=>{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.bra=t.cursor,0!=(i=t.find_among(d))&&(t.ket=t.cursor,!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.in_grouping_b(b,97,252)&&!(w>t.cursor)&&(t.ket=t.cursor,0!=(i=t.find_among_b(n))))switch(t.bra=t.cursor,i){case 1:if(t.slice_from("k"))break;return;case 2:if(t.slice_from("p"))break;return;case 3:if(t.slice_from("t"))break}})(),t.cursor=t.limit-e,t.cursor=t.limit_backward,!0},this.stemWord=function(i){return t.setCurrent(i),this.stem(),t.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/finnish-stemmer.js b/sphinx/search/minified-js/finnish-stemmer.js index f6301fdfa56..160fb267a05 100644 --- a/sphinx/search/minified-js/finnish-stemmer.js +++ b/sphinx/search/minified-js/finnish-stemmer.js @@ -1 +1 @@ -FinnishStemmer=function(){var r=new BaseStemmer;var i=[["pa",-1,1],["sti",-1,2],["kaan",-1,1],["han",-1,1],["kin",-1,1],["hän",-1,1],["kään",-1,1],["ko",-1,1],["pä",-1,1],["kö",-1,1]];var e=[["lla",-1,-1],["na",-1,-1],["ssa",-1,-1],["ta",-1,-1],["lta",3,-1],["sta",3,-1]];var a=[["llä",-1,-1],["nä",-1,-1],["ssä",-1,-1],["tä",-1,-1],["ltä",3,-1],["stä",3,-1]];var s=[["lle",-1,-1],["ine",-1,-1]];var t=[["nsa",-1,3],["mme",-1,3],["nne",-1,3],["ni",-1,2],["si",-1,1],["an",-1,4],["en",-1,6],["än",-1,5],["nsä",-1,3]];var u=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["öö",-1,-1]];var l=[["a",-1,8],["lla",0,-1],["na",0,-1],["ssa",0,-1],["ta",0,-1],["lta",4,-1],["sta",4,-1],["tta",4,2],["lle",-1,-1],["ine",-1,-1],["ksi",-1,-1],["n",-1,7],["han",11,1],["den",11,-1,S],["seen",11,-1,C],["hen",11,2],["tten",11,-1,S],["hin",11,3],["siin",11,-1,S],["hon",11,4],["hän",11,5],["hön",11,6],["ä",-1,8],["llä",22,-1],["nä",22,-1],["ssä",22,-1],["tä",22,-1],["ltä",26,-1],["stä",26,-1],["ttä",26,2]];var 
c=[["eja",-1,-1],["mma",-1,1],["imma",1,-1],["mpa",-1,1],["impa",3,-1],["mmi",-1,1],["immi",5,-1],["mpi",-1,1],["impi",7,-1],["ejä",-1,-1],["mmä",-1,1],["immä",10,-1],["mpä",-1,1],["impä",12,-1]];var n=[["i",-1,-1],["j",-1,-1]];var f=[["mma",-1,1],["imma",0,-1]];var o=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8];var b=[119,223,119,1];var _=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var k=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var d=false;var v="";var w=0;var g=0;function p(){g=r.limit;w=r.limit;r:while(true){var i=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}g=r.cursor;r:while(true){var e=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=e;break r}r.cursor=e;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}w=r.cursor;return true}function h(){if(!(w<=r.cursor)){return false}return true}function q(){var e;if(r.cursor{var r;if(!(c.cursor{var r;if(!(c.cursor{var r;if(!(c.cursor{var r;if(!(c.cursor{if(!(c.cursor=r.limit){break r}r.cursor++}continue}r.cursor=e;break}return true}function v(){m=r.limit;k=r.limit;b=r.limit;var i=r.cursor;r:{e:{var s=r.cursor;i:{if(!r.in_grouping(o,97,251)){break i}if(!r.in_grouping(o,97,251)){break i}if(r.cursor>=r.limit){break i}r.cursor++;break e}r.cursor=s;i:{if(r.find_among(e)==0){break i}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++;i:while(true){s:{if(!r.in_grouping(o,97,251)){break s}break i}if(r.cursor>=r.limit){break r}r.cursor++}}m=r.cursor}r.cursor=i;var a=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break 
r}r.cursor++}k=r.cursor;e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=a;return true}function d(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("y")){return false}break;case 4:if(!r.slice_from("ë")){return false}break;case 5:if(!r.slice_from("ï")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(k<=r.cursor)){return false}return true}function q(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(u);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!q()){return false}if(!r.slice_del()){return false}break;case 2:if(!q()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;e:{var t=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-t;if(!r.slice_from("iqU")){return false}}}break;case 3:if(!q()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!q()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!q()){return false}if(!r.slice_from("ent")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;switch(e){case 1:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break 
r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 2:e:{var f=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-f;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_from("eux")){return false}}break;case 3:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 4:if(!g()){r.cursor=r.limit-c;break r}if(!r.slice_from("i")){return false}break}}break;case 7:if(!q()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;switch(e){case 1:e:{var n=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-n;if(!r.slice_from("abl")){return false}}break;case 2:e:{var b=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-b;if(!r.slice_from("iqU")){return false}}break;case 3:if(!q()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!q()){return false}if(!r.slice_del()){return false}var k=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-k;break r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-k;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-k;break r}r.bra=r.cursor;e:{var m=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-m;if(!r.slice_from("iqU")){return false}}}break;case 9:if(!r.slice_from("eau")){return false}break;case 10:if(!w()){return false}if(!r.slice_from("al")){return false}break;case 11:r:{var _=r.limit-r.cursor;e:{if(!q()){break e}if(!r.slice_del()){return false}break r}r.cursor=r.limit-_;if(!w()){return false}if(!r.slice_from("eux")){return false}}break;case 12:if(!w()){return false}if(!r.out_grouping_b(o,97,251)){return false}if(!r.slice_del()){return false}break;case 13:if(!g()){return false}if(!r.slice_from("ant")){return false}return 
false;case 14:if(!g()){return false}if(!r.slice_from("ent")){return false}return false;case 15:var v=r.limit-r.cursor;if(!r.in_grouping_b(o,97,251)){return false}if(!g()){return false}r.cursor=r.limit-v;if(!r.slice_del()){return false}return false}return true}function p(){if(r.cursor0){return false}}r.ket=r.cursor;r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("é")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("è")){return false}}r.bra=r.cursor;if(!r.slice_from("e")){return false}return true}this.stem=function(){var e=r.cursor;_();r.cursor=e;v();r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;r:{e:{var s=r.limit-r.cursor;i:{var a=r.limit-r.cursor;s:{var u=r.limit-r.cursor;a:{if(!h()){break a}break s}r.cursor=r.limit-u;a:{if(!p()){break a}break s}r.cursor=r.limit-u;if(!z()){break i}}r.cursor=r.limit-a;var t=r.limit-r.cursor;s:{r.ket=r.cursor;a:{var c=r.limit-r.cursor;u:{if(!r.eq_s_b("Y")){break u}r.bra=r.cursor;if(!r.slice_from("i")){return false}break a}r.cursor=r.limit-c;if(!r.eq_s_b("ç")){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_from("c")){return false}}}break e}r.cursor=r.limit-s;if(!I()){break r}}}r.cursor=r.limit-i;var f=r.limit-r.cursor;U();r.cursor=r.limit-f;var l=r.limit-r.cursor;H();r.cursor=r.limit-l;r.cursor=r.limit_backward;var o=r.cursor;d();r.cursor=o;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}}; \ No newline at end of file +var FrenchStemmer=function(){var n=new 
BaseStemmer,f=[["col",-1,-1],["ni",-1,1],["par",-1,-1],["tap",-1,-1]],_=[["",-1,7],["H",0,6],["He",1,4],["Hi",1,5],["I",0,1],["U",0,2],["Y",0,3]],m=[["iqU",-1,3],["abl",-1,3],["Ièr",-1,4],["ièr",-1,4],["eus",-1,2],["iv",-1,1]],b=[["ic",-1,2],["abil",-1,1],["iv",-1,3]],k=[["iqUe",-1,1],["atrice",-1,2],["ance",-1,1],["ence",-1,5],["logie",-1,3],["able",-1,1],["isme",-1,1],["euse",-1,12],["iste",-1,1],["ive",-1,8],["if",-1,8],["usion",-1,4],["ation",-1,2],["ution",-1,4],["ateur",-1,2],["iqUes",-1,1],["atrices",-1,2],["ances",-1,1],["ences",-1,5],["logies",-1,3],["ables",-1,1],["ismes",-1,1],["euses",-1,12],["istes",-1,1],["ives",-1,8],["ifs",-1,8],["usions",-1,4],["ations",-1,2],["utions",-1,4],["ateurs",-1,2],["ments",-1,16],["ements",30,6],["issements",31,13],["ités",-1,7],["ment",-1,16],["ement",34,6],["issement",35,13],["amment",34,14],["emment",34,15],["aux",-1,10],["eaux",39,9],["eux",-1,1],["oux",-1,11],["ité",-1,7]],d=[["ira",-1,1],["ie",-1,1],["isse",-1,1],["issante",-1,1],["i",-1,1],["irai",4,1],["ir",-1,1],["iras",-1,1],["ies",-1,1],["îmes",-1,1],["isses",-1,1],["issantes",-1,1],["îtes",-1,1],["is",-1,1],["irais",13,1],["issais",13,1],["irions",-1,1],["issions",-1,1],["irons",-1,1],["issons",-1,1],["issants",-1,1],["it",-1,1],["irait",21,1],["issait",21,1],["issant",-1,1],["iraIent",-1,1],["issaIent",-1,1],["irent",-1,1],["issent",-1,1],["iront",-1,1],["ît",-1,1],["iriez",-1,1],["issiez",-1,1],["irez",-1,1],["issez",-1,1]],g=[["al",-1,1],["épl",-1,-1],["auv",-1,-1]],v=[["a",-1,3],["era",0,2],["aise",-1,4],["asse",-1,3],["ante",-1,3],["ée",-1,2],["ai",-1,3],["erai",6,2],["er",-1,2],["as",-1,3],["eras",9,2],["âmes",-1,3],["aises",-1,4],["asses",-1,3],["antes",-1,3],["âtes",-1,3],["ées",-1,2],["ais",-1,4],["eais",17,2],["erais",17,2],["ions",-1,1],["erions",20,2],["assions",20,3],["erons",-1,2],["ants",-1,3],["és",-1,2],["ait",-1,3],["erait",26,2],["ant",-1,3],["aIent",-1,3],["eraIent",29,2],["èrent",-1,2],["assent",-1,3],["eront",-1,2],["ât",-1,3],["ez",-1,2],
["iez",35,2],["eriez",36,2],["assiez",36,3],["erez",35,2],["é",-1,2]],q=[["e",-1,3],["Ière",0,2],["ière",0,2],["ion",-1,1],["Ier",-1,2],["ier",-1,2]],w=[["ell",-1,-1],["eill",-1,-1],["enn",-1,-1],["onn",-1,-1],["ett",-1,-1]],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],h=[65,85],z=[131,14,3],I=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],U=0,H=0,x=0;function Y(){return x<=n.cursor}function y(){return H<=n.cursor}function C(){return U<=n.cursor}this.stem=function(){var r=n.cursor,i=(n.bra=n.cursor,i=n.cursor,(n.in_grouping(z,99,116)||(n.cursor=i,n.eq_s("qu")))&&n.eq_s("'")&&(n.ket=n.cursor,n.cursor{for(;;){var r=n.cursor;r:{for(;;){var i=n.cursor;i:{e:{var e=n.cursor;s:if(n.in_grouping(p,97,251)){n.bra=n.cursor;c:{var s=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("U"))break c;return}if(n.cursor=s,n.eq_s("i")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("I"))break c;return}if(n.cursor=s,!n.eq_s("y"))break s;if(n.ket=n.cursor,!n.slice_from("Y"))return}break e}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ë")){if(n.ket=n.cursor,n.slice_from("He"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ï")){if(n.ket=n.cursor,n.slice_from("Hi"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("y")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("Y"))break e;return}if(n.cursor=e,!n.eq_s("q"))break i;if(n.bra=n.cursor,!n.eq_s("u"))break i;if(n.ket=n.cursor,!n.slice_from("U"))return}n.cursor=i;break}if(n.cursor=i,n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=r;break}})(),n.cursor=i,x=n.limit,H=n.limit,U=n.limit;var e,r=n.cursor;r:{i:{var s=n.cursor;if(!n.in_grouping(p,97,251)||!n.in_grouping(p,97,251)||n.cursor>=n.limit){n.cursor=s;e:if(0!=(e=n.find_among(f))){switch(e){case 1:if(n.in_grouping(p,97,251))break;break e}break i}if(n.cursor=s,n.cursor>=n.limit)break r;if(n.cursor++,!n.go_out_grouping(p,97,251))break 
r}n.cursor++}x=n.cursor}n.cursor=r,r=n.cursor,n.go_out_grouping(p,97,251)&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,H=n.cursor,n.go_out_grouping(p,97,251))&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,U=n.cursor),n.cursor=r,n.limit_backward=n.cursor,n.cursor=n.limit;i=n.limit-n.cursor;r:{var c=n.limit-n.cursor,u=n.limit-n.cursor,t=n.limit-n.cursor;if((()=>{var r;if(n.ket=n.cursor,0!=(r=n.find_among_b(k))){switch(n.bra=n.cursor,r){case 1:if(!C())return;if(n.slice_del())break;return;case 2:if(!C())return;if(!n.slice_del())return;var i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var e=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-e,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i;break;case 3:if(!C())return;if(n.slice_from("log"))break;return;case 4:if(!C())return;if(n.slice_from("u"))break;return;case 5:if(!C())return;if(n.slice_from("ent"))break;return;case 6:if(!Y())return;if(!n.slice_del())return;var s=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(m)))n.cursor=n.limit-s;else switch(n.bra=n.cursor,r){case 1:if(!C()){n.cursor=n.limit-s;break i}if(!n.slice_del())return;if(n.ket=n.cursor,!n.eq_s_b("at")){n.cursor=n.limit-s;break i}if(n.bra=n.cursor,!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 2:e:{var c=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-c,!y()){n.cursor=n.limit-s;break i}if(!n.slice_from("eux"))return}break;case 3:if(!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 4:if(!Y()){n.cursor=n.limit-s;break i}if(n.slice_from("i"))break;return}break;case 7:if(!C())return;if(!n.slice_del())return;var u=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(b)))n.cursor=n.limit-u;else switch(n.bra=n.cursor,r){case 1:e:{var t=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-t,!n.slice_from("abl"))return}break;case 2:e:{var o=n.limit-n.cursor;if(C()){if(n.slice_del())break 
e;return}if(n.cursor=n.limit-o,!n.slice_from("iqU"))return}break;case 3:if(!C()){n.cursor=n.limit-u;break i}if(n.slice_del())break;return}break;case 8:if(!C())return;if(!n.slice_del())return;i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("at"))if(n.bra=n.cursor,C()){if(!n.slice_del())return;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var a=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-a,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i}else n.cursor=n.limit-i;else n.cursor=n.limit-i;break;case 9:if(n.slice_from("eau"))break;return;case 10:if(!y())return;if(n.slice_from("al"))break;return;case 11:if(!n.in_grouping_b(h,98,112))return;if(n.slice_from("ou"))break;return;case 12:i:{var l=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-l,!y())return;if(!n.slice_from("eux"))return}break;case 13:if(!y())return;if(!n.out_grouping_b(p,97,251))return;if(n.slice_del())break;return;case 14:return Y()?void n.slice_from("ant"):void 0;case 15:return Y()?void n.slice_from("ent"):void 0;case 16:i=n.limit-n.cursor;return n.in_grouping_b(p,97,251)?Y()&&(n.cursor=n.limit-i,void n.slice_del()):void 0}return 1}})()||(n.cursor=n.limit-t,(()=>{if(!(n.cursor{var r;if(!(n.cursorn.limit_backward)break i}return}if(n.cursor=n.limit-e,n.slice_del())break;return}return 1}n.limit_backward=i}})())){n.cursor=n.limit-u;var o=n.limit-n.cursor;i:{n.ket=n.cursor;e:{var a=n.limit-n.cursor;if(n.eq_s_b("Y")){if(n.bra=n.cursor,n.slice_from("i"))break e;return!1}if(n.cursor=n.limit-a,!n.eq_s_b("ç")){n.cursor=n.limit-o;break i}if(n.bra=n.cursor,!n.slice_from("c"))return!1}}}else if(n.cursor=n.limit-c,!(()=>{var r=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s")){n.bra=n.cursor;var i=n.limit-n.cursor,e=n.limit-n.cursor;if(n.eq_s_b("Hi")||(n.cursor=n.limit-e,n.out_grouping_b(I,97,232))){if(n.cursor=n.limit-i,!n.slice_del())return}else n.cursor=n.limit-r}else n.cursor=n.limit-r;if(!(n.cursor{for(var r;;){var 
i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(_),n.ket=n.cursor,r){case 1:if(n.slice_from("i"))break;return;case 2:if(n.slice_from("u"))break;return;case 3:if(n.slice_from("y"))break;return;case 4:if(n.slice_from("ë"))break;return;case 5:if(n.slice_from("ï"))break;return;case 6:if(n.slice_del())break;return;case 7:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=r,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/german-stemmer.js b/sphinx/search/minified-js/german-stemmer.js index e2a335d15e0..da15f9e8f71 100644 --- a/sphinx/search/minified-js/german-stemmer.js +++ b/sphinx/search/minified-js/german-stemmer.js @@ -1 +1 @@ -GermanStemmer=function(){var r=new BaseStemmer;var e=[["",-1,5],["U",0,2],["Y",0,1],["ä",0,3],["ö",0,4],["ü",0,2]];var i=[["e",-1,2],["em",-1,1],["en",-1,2],["ern",-1,1],["er",-1,1],["s",-1,3],["es",5,2]];var s=[["en",-1,1],["er",-1,1],["st",-1,2],["est",2,1]];var u=[["ig",-1,1],["lich",-1,1]];var a=[["end",-1,1],["ig",-1,2],["ung",-1,1],["lich",-1,3],["isch",-1,2],["ik",-1,2],["heit",-1,3],["keit",-1,4]];var c=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8];var t=[117,30,5];var o=[117,30,4];var f=0;var l=0;var n=0;function b(){var e=r.cursor;while(true){var i=r.cursor;r:{e:{var s=r.cursor;i:{r.bra=r.cursor;if(!r.eq_s("ß")){break i}r.ket=r.cursor;if(!r.slice_from("ss")){return false}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}r.cursor=e;while(true){var u=r.cursor;r:{e:while(true){var a=r.cursor;i:{if(!r.in_grouping(c,97,252)){break i}r.bra=r.cursor;s:{var t=r.cursor;u:{if(!r.eq_s("u")){break u}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break u}if(!r.slice_from("U")){return false}break s}r.cursor=t;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break i}if(!r.slice_from("Y")){return false}}r.cursor=a;break e}r.cursor=a;if(r.cursor>=r.limit){break 
r}r.cursor++}continue}r.cursor=u;break}return true}function k(){n=r.limit;l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}f=r.cursor;r.cursor=e;r:while(true){e:{if(!r.in_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("a")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){var e;var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(i);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}var f=r.limit-r.cursor;e:{r.ket=r.cursor;if(!r.eq_s_b("s")){r.cursor=r.limit-f;break e}r.bra=r.cursor;if(!r.eq_s_b("nis")){r.cursor=r.limit-f;break e}if(!r.slice_del()){return false}}break;case 3:if(!r.in_grouping_b(t,98,116)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-c;var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.in_grouping_b(o,98,116)){break r}{var n=r.cursor-3;if(n{for(var r,i=n.cursor;;){var e=n.cursor;r:{for(;;){var s=n.cursor;i:if(n.in_grouping(t,97,252)){n.bra=n.cursor;e:{var 
c=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(t,97,252))){if(n.slice_from("U"))break e;return}if(n.cursor=c,!n.eq_s("y"))break i;if(n.ket=n.cursor,!n.in_grouping(t,97,252))break i;if(!n.slice_from("Y"))return}n.cursor=s;break}if(n.cursor=s,n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=e;break}for(n.cursor=i;;){var u=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(o),n.ket=n.cursor,r){case 1:if(n.slice_from("ss"))break;return;case 2:if(n.slice_from("ä"))break;return;case 3:if(n.slice_from("ö"))break;return;case 4:if(n.slice_from("ü"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=u;break}})(),n.cursor=i,n.cursor),e=(a=n.limit,u=n.limit,r=n.cursor,(e=n.cursor+3)>n.limit||(n.cursor=e,c=n.cursor,n.cursor=r,n.go_out_grouping(t,97,252)&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,a=n.cursor,c<=a||(a=c),n.go_out_grouping(t,97,252))&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,u=n.cursor)),n.cursor=i,n.limit_backward=n.cursor,n.cursor=n.limit,(()=>{var r,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(l))&&(n.bra=n.cursor,v()))switch(r){case 1:var e=n.limit-n.cursor;if(n.eq_s_b("syst"))break r;if(n.cursor=n.limit-e,n.slice_del())break;return;case 2:if(n.slice_del())break;return;case 3:if(!n.slice_del())return;e=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s"))if(n.bra=n.cursor,n.eq_s_b("nis")){if(!n.slice_del())return}else n.cursor=n.limit-e;else n.cursor=n.limit-e;break;case 4:if(!n.in_grouping_b(g,98,116))break r;if(n.slice_del())break;return;case 5:if(n.slice_from("l"))break;return}n.cursor=n.limit-i,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(_))&&(n.bra=n.cursor,v()))switch(r){case 1:if(n.slice_del())break;return;case 2:if(!n.in_grouping_b(d,98,116))break r;var s=n.cursor-3;if(s{for(var r;;){var i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(s),n.ket=n.cursor,r){case 1:if(n.slice_from("y"))break;return;case 2:if(n.slice_from("u"))break;return;case 
3:if(n.slice_from("a"))break;return;case 4:if(n.slice_from("o"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=e,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/greek-stemmer.js b/sphinx/search/minified-js/greek-stemmer.js new file mode 100644 index 00000000000..d02a30bf1fe --- /dev/null +++ b/sphinx/search/minified-js/greek-stemmer.js @@ -0,0 +1 @@ +var GreekStemmer=function(){var s=new BaseStemmer,e=[["",-1,25],["Ά",0,1],["Έ",0,5],["Ή",0,7],["Ί",0,9],["Ό",0,15],["Ύ",0,20],["Ώ",0,24],["ΐ",0,7],["Α",0,1],["Β",0,2],["Γ",0,3],["Δ",0,4],["Ε",0,5],["Ζ",0,6],["Η",0,7],["Θ",0,8],["Ι",0,9],["Κ",0,10],["Λ",0,11],["Μ",0,12],["Ν",0,13],["Ξ",0,14],["Ο",0,15],["Π",0,16],["Ρ",0,17],["Σ",0,18],["Τ",0,19],["Υ",0,20],["Φ",0,21],["Χ",0,22],["Ψ",0,23],["Ω",0,24],["Ϊ",0,9],["Ϋ",0,20],["ά",0,1],["έ",0,5],["ή",0,7],["ί",0,9],["ΰ",0,20],["ς",0,18],["ϊ",0,7],["ϋ",0,20],["ό",0,15],["ύ",0,20],["ώ",0,24]],o=[["σκαγια",-1,2],["φαγια",-1,1],["ολογια",-1,3],["σογια",-1,4],["τατογια",-1,5],["κρεατα",-1,6],["περατα",-1,7],["τερατα",-1,8],["γεγονοτα",-1,11],["καθεστωτα",-1,10],["φωτα",-1,9],["περατη",-1,7],["σκαγιων",-1,2],["φαγιων",-1,1],["ολογιων",-1,3],["σογιων",-1,4],["τατογιων",-1,5],["κρεατων",-1,6],["περατων",-1,7],["τερατων",-1,8],["γεγονοτων",-1,11],["καθεστωτων",-1,10],["φωτων",-1,9],["κρεασ",-1,6],["περασ",-1,7],["τερασ",-1,8],["γεγονοσ",-1,11],["κρεατοσ",-1,6],["περατοσ",-1,7],["τερατοσ",-1,8],["γεγονοτοσ",-1,11],["καθεστωτοσ",-1,10],["φωτοσ",-1,9],["καθεστωσ",-1,10],["φωσ",-1,9],["σκαγιου",-1,2],["φαγιου",-1,1],["ολογιου",-1,3],["σογιου",-1,4],["τατογιου",-1,5]],u=[["πα",-1,1],["ξαναπα",0,1],["επα",0,1],["περιπα",0,1],["αναμπα",0,1],["εμπα",0,1],["β",-1,2],["δανε",-1,1],["βαθυρι",-1,2],["βαρκ",-1,2],["μαρκ",-1,2],["λ",-1,2],["μ",-1,2],["κορν",-1,2],["αθρο",-1,1],["συναθρο",14,1],["π",-1,2],["ιμπ",16,2],["ρ",-1,2],["μαρ",18,2],["αμπα
ρ",18,2],["γκρ",18,2],["βολβορ",18,2],["γλυκορ",18,2],["πιπερορ",18,2],["πρ",18,2],["μπρ",25,2],["αρρ",18,2],["γλυκυρ",18,2],["πολυρ",18,2],["λου",-1,2]],a=[["ιζα",-1,1],["ιζε",-1,1],["ιζαμε",-1,1],["ιζουμε",-1,1],["ιζανε",-1,1],["ιζουνε",-1,1],["ιζατε",-1,1],["ιζετε",-1,1],["ιζει",-1,1],["ιζαν",-1,1],["ιζουν",-1,1],["ιζεσ",-1,1],["ιζεισ",-1,1],["ιζω",-1,1]],t=[["βι",-1,1],["λι",-1,1],["αλ",-1,1],["εν",-1,1],["σ",-1,1],["χ",-1,1],["υψ",-1,1],["ζω",-1,1]],_=[["ωθηκα",-1,1],["ωθηκε",-1,1],["ωθηκαμε",-1,1],["ωθηκανε",-1,1],["ωθηκατε",-1,1],["ωθηκαν",-1,1],["ωθηκεσ",-1,1]],l=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["γε",-1,2],["γκε",-1,2],["κλε",-1,1],["εκλε",9,1],["απεκλε",10,1],["αποκλε",9,1],["εσωκλε",9,1],["δανε",-1,1],["πε",-1,1],["επε",15,1],["μετεπε",16,1],["εσε",-1,1],["γκ",-1,2],["μ",-1,2],["πουκαμ",20,2],["κομ",20,2],["αν",-1,2],["ολο",-1,2],["αθρο",-1,1],["συναθρο",25,1],["π",-1,2],["λαρ",-1,2],["δημοκρατ",-1,2],["αφ",-1,2],["γιγαντοαφ",30,2]],m=[["ισα",-1,1],["ισαμε",-1,1],["ισανε",-1,1],["ισε",-1,1],["ισατε",-1,1],["ισαν",-1,1],["ισεσ",-1,1]],f=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["κλε",-1,1],["εκλε",7,1],["απεκλε",8,1],["αποκλε",7,1],["εσωκλε",7,1],["δανε",-1,1],["πε",-1,1],["επε",13,1],["μετεπε",14,1],["εσε",-1,1],["αθρο",-1,1],["συναθρο",17,1]],b=[["ισουμε",-1,1],["ισουνε",-1,1],["ισετε",-1,1],["ισει",-1,1],["ισουν",-1,1],["ισεισ",-1,1],["ισω",-1,1]],n=[["ατα",-1,2],["φα",-1,2],["ηφα",1,2],["μεγ",-1,2],["λυγ",-1,2],["ηδ",-1,2],["κλε",-1,1],["εσωκλε",6,1],["πλε",-1,1],["δανε",-1,1],["σε",-1,1],["ασε",10,1],["καθ",-1,2],["εχθ",-1,2],["κακ",-1,2],["μακ",-1,2],["σκ",-1,2],["φιλ",-1,2],["κυλ",-1,2],["μ",-1,2],["γεμ",19,2],["αχν",-1,2],["συναθρο",-1,1],["π",-1,2],["απ",23,2],["εμπ",23,2],["ευπ",23,2],["αρ",-1,2],["αορ",-1,2],["γυρ",-1,2],["χρ",-1,2],["χωρ",-1,2],["κτ",-1,2],["ακτ",32,2],["χτ",-1,2],["αχτ",34,2],["ταχ",-1
,2],["σχ",-1,2],["ασχ",37,2],["υψ",-1,2]],k=[["ιστα",-1,1],["ιστε",-1,1],["ιστη",-1,1],["ιστοι",-1,1],["ιστων",-1,1],["ιστο",-1,1],["ιστεσ",-1,1],["ιστησ",-1,1],["ιστοσ",-1,1],["ιστουσ",-1,1],["ιστου",-1,1]],d=[["εγκλε",-1,1],["αποκλε",-1,1],["δανε",-1,2],["αντιδανε",2,2],["σε",-1,1],["μετασε",4,1],["μικροσε",4,1]],g=[["ατομικ",-1,2],["εθνικ",-1,4],["τοπικ",-1,7],["εκλεκτικ",-1,5],["σκεπτικ",-1,6],["γνωστικ",-1,3],["αγνωστικ",5,1],["αλεξανδριν",-1,8],["θεατριν",-1,10],["βυζαντιν",-1,9]],w=[["ισμοι",-1,1],["ισμων",-1,1],["ισμο",-1,1],["ισμοσ",-1,1],["ισμουσ",-1,1],["ισμου",-1,1]],v=[["σ",-1,1],["χ",-1,1]],h=[["ουδακια",-1,1],["αρακια",-1,1],["ουδακι",-1,1],["αρακι",-1,1]],q=[["β",-1,2],["βαμβ",0,1],["σλοβ",0,1],["τσεχοσλοβ",2,1],["καρδ",-1,2],["ζ",-1,2],["τζ",5,1],["κ",-1,1],["καπακ",7,1],["σοκ",7,1],["σκ",7,1],["βαλ",-1,2],["μαλ",-1,1],["γλ",-1,2],["τριπολ",-1,2],["πλ",-1,1],["λουλ",-1,1],["φυλ",-1,1],["καιμ",-1,1],["κλιμ",-1,1],["φαρμ",-1,1],["γιαν",-1,2],["σπαν",-1,1],["ηγουμεν",-1,2],["κον",-1,1],["μακρυν",-1,2],["π",-1,2],["κατραπ",26,1],["ρ",-1,1],["βρ",28,1],["λαβρ",29,1],["αμβρ",29,1],["μερ",28,1],["πατερ",28,2],["ανθρ",28,1],["κορ",28,1],["σ",-1,1],["ναγκασ",36,1],["τοσ",36,2],["μουστ",-1,1],["ρυ",-1,1],["φ",-1,1],["σφ",41,1],["αλισφ",42,1],["νυφ",41,2],["χ",-1,1]],p=[["ακια",-1,1],["αρακια",0,1],["ιτσα",-1,1],["ακι",-1,1],["αρακι",3,1],["ιτσων",-1,1],["ιτσασ",-1,1],["ιτσεσ",-1,1]],C=[["ψαλ",-1,1],["αιφν",-1,1],["ολο",-1,1],["ιρ",-1,1]],S=[["ε",-1,1],["παιχν",-1,1]],B=[["ιδια",-1,1],["ιδιων",-1,1],["ιδιο",-1,1]],G=[["ιβ",-1,1],["δ",-1,1],["φραγκ",-1,1],["λυκ",-1,1],["οβελ",-1,1],["μην",-1,1],["ρ",-1,1]],W=[["ισκε",-1,1],["ισκο",-1,1],["ισκοσ",-1,1],["ισκου",-1,1]],j=[["αδων",-1,1],["αδεσ",-1,1]],x=[["γιαγι",-1,-1],["θει",-1,-1],["οκ",-1,-1],["μαμ",-1,-1],["μαν",-1,-1],["μπαμπ",-1,-1],["πεθερ",-1,-1],["πατερ",-1,-1],["κυρ",-1,-1],["νταντ",-1,-1]],y=[["εδων",-1,1],["εδεσ",-1,1]],z=[["μιλ",-1,1],["δαπ",-1,1],["γηπ",-1,1],["ιπ",-1,1],["εμπ",-1,1],["οπ",-1,1],["κ
ρασπ",-1,1],["υπ",-1,1]],A=[["ουδων",-1,1],["ουδεσ",-1,1]],D=[["τραγ",-1,1],["φε",-1,1],["καλιακ",-1,1],["αρκ",-1,1],["σκ",-1,1],["πεταλ",-1,1],["βελ",-1,1],["λουλ",-1,1],["φλ",-1,1],["χν",-1,1],["πλεξ",-1,1],["σπ",-1,1],["φρ",-1,1],["σ",-1,1],["λιχ",-1,1]],E=[["εων",-1,1],["εωσ",-1,1]],F=[["δ",-1,1],["ιδ",0,1],["θ",-1,1],["γαλ",-1,1],["ελ",-1,1],["ν",-1,1],["π",-1,1],["παρ",-1,1]],K=[["ια",-1,1],["ιων",-1,1],["ιου",-1,1]],L=[["ικα",-1,1],["ικων",-1,1],["ικο",-1,1],["ικου",-1,1]],M=[["αδ",-1,1],["συναδ",0,1],["καταδ",0,1],["αντιδ",-1,1],["ενδ",-1,1],["φυλοδ",-1,1],["υποδ",-1,1],["πρωτοδ",-1,1],["εξωδ",-1,1],["ηθ",-1,1],["ανηθ",9,1],["ξικ",-1,1],["αλ",-1,1],["αμμοχαλ",12,1],["συνομηλ",-1,1],["μπολ",-1,1],["μουλ",-1,1],["τσαμ",-1,1],["βρωμ",-1,1],["αμαν",-1,1],["μπαν",-1,1],["καλλιν",-1,1],["ποστελν",-1,1],["φιλον",-1,1],["καλπ",-1,1],["γερ",-1,1],["χασ",-1,1],["μποσ",-1,1],["πλιατσ",-1,1],["πετσ",-1,1],["πιτσ",-1,1],["φυσ",-1,1],["μπαγιατ",-1,1],["νιτ",-1,1],["πικαντ",-1,1],["σερτ",-1,1]],N=[["αγαμε",-1,1],["ηκαμε",-1,1],["ηθηκαμε",1,1],["ησαμε",-1,1],["ουσαμε",-1,1]],O=[["βουβ",-1,1],["ξεθ",-1,1],["πεθ",-1,1],["αποθ",-1,1],["αποκ",-1,1],["ουλ",-1,1],["αναπ",-1,1],["πικρ",-1,1],["ποτ",-1,1],["αποστ",-1,1],["χ",-1,1],["σιχ",10,1]],P=[["τρ",-1,1],["τσ",-1,1]],Q=[["αγανε",-1,1],["ηκανε",-1,1],["ηθηκανε",1,1],["ησανε",-1,1],["ουσανε",-1,1],["οντανε",-1,1],["ιοντανε",5,1],["ουντανε",-1,1],["ιουντανε",7,1],["οτανε",-1,1],["ιοτανε",9,1]],R=[["ταβ",-1,1],["νταβ",0,1],["ψηλοταβ",0,1],["λιβ",-1,1],["κλιβ",3,1],["ξηροκλιβ",4,1],["γ",-1,1],["αγ",6,1],["τραγ",7,1],["τσαγ",7,1],["αθιγγ",6,1],["τσιγγ",6,1],["ατσιγγ",11,1],["στεγ",6,1],["απηγ",6,1],["σιγ",6,1],["ανοργ",6,1],["ενοργ",6,1],["καλπουζ",-1,1],["θ",-1,1],["μωαμεθ",19,1],["πιθ",19,1],["απιθ",21,1],["δεκ",-1,1],["πελεκ",-1,1],["ικ",-1,1],["ανικ",25,1],["βουλκ",-1,1],["βασκ",-1,1],["βραχυκ",-1,1],["γαλ",-1,1],["καταγαλ",30,1],["ολογαλ",30,1],["βαθυγαλ",30,1],["μελ",-1,1],["καστελ",-1,1],["πορτολ",-1,1],["πλ",-1,1],["διπλ",37
,1],["λαοπλ",37,1],["ψυχοπλ",37,1],["ουλ",-1,1],["μ",-1,1],["ολιγοδαμ",42,1],["μουσουλμ",42,1],["δραδουμ",42,1],["βραχμ",42,1],["ν",-1,1],["αμερικαν",47,1],["π",-1,1],["αδαπ",49,1],["χαμηλοδαπ",49,1],["πολυδαπ",49,1],["κοπ",49,1],["υποκοπ",53,1],["τσοπ",49,1],["σπ",49,1],["ερ",-1,1],["γερ",57,1],["βετερ",57,1],["λουθηρ",-1,1],["κορμορ",-1,1],["περιτρ",-1,1],["ουρ",-1,1],["σ",-1,1],["βασ",64,1],["πολισ",64,1],["σαρακατσ",64,1],["θυσ",64,1],["διατ",-1,1],["πλατ",-1,1],["τσαρλατ",-1,1],["τετ",-1,1],["πουριτ",-1,1],["σουλτ",-1,1],["μαιντ",-1,1],["ζωντ",-1,1],["καστ",-1,1],["φ",-1,1],["διαφ",78,1],["στεφ",78,1],["φωτοστεφ",80,1],["περηφ",78,1],["υπερηφ",82,1],["κοιλαρφ",78,1],["πενταρφ",78,1],["ορφ",78,1],["χ",-1,1],["αμηχ",87,1],["βιομηχ",87,1],["μεγλοβιομηχ",89,1],["καπνοβιομηχ",89,1],["μικροβιομηχ",89,1],["πολυμηχ",87,1],["λιχ",87,1]],T=[["ησετε",-1,1]],U=[["ενδ",-1,1],["συνδ",-1,1],["οδ",-1,1],["διαθ",-1,1],["καθ",-1,1],["ραθ",-1,1],["ταθ",-1,1],["τιθ",-1,1],["εκθ",-1,1],["ενθ",-1,1],["συνθ",-1,1],["ροθ",-1,1],["υπερθ",-1,1],["σθ",-1,1],["ευθ",-1,1],["αρκ",-1,1],["ωφελ",-1,1],["βολ",-1,1],["αιν",-1,1],["πον",-1,1],["ρον",-1,1],["συν",-1,1],["βαρ",-1,1],["βρ",-1,1],["αιρ",-1,1],["φορ",-1,1],["ευρ",-1,1],["πυρ",-1,1],["χωρ",-1,1],["νετ",-1,1],["σχ",-1,1]],V=[["παγ",-1,1],["δ",-1,1],["αδ",1,1],["θ",-1,1],["αθ",3,1],["τοκ",-1,1],["σκ",-1,1],["παρακαλ",-1,1],["σκελ",-1,1],["απλ",-1,1],["εμ",-1,1],["αν",-1,1],["βεν",-1,1],["βαρον",-1,1],["κοπ",-1,1],["σερπ",-1,1],["αβαρ",-1,1],["εναρ",-1,1],["αβρ",-1,1],["μπορ",-1,1],["θαρρ",-1,1],["ντρ",-1,1],["υ",-1,1],["νιφ",-1,1],["συρφ",-1,1]],X=[["οντασ",-1,1],["ωντασ",-1,1]],Y=[["ομαστε",-1,1],["ιομαστε",0,1]],Z=[["π",-1,1],["απ",0,1],["ακαταπ",1,1],["συμπ",0,1],["ασυμπ",3,1],["αμεταμφ",-1,1]],$=[["ζ",-1,1],["αλ",-1,1],["παρακαλ",1,1],["εκτελ",-1,1],["μ",-1,1],["ξ",-1,1],["προ",-1,1],["αρ",-1,1],["νισ",-1,1]],r1=[["ηθηκα",-1,1],["ηθηκε",-1,1],["ηθηκεσ",-1,1]],i1=[["πιθ",-1,1],["οθ",-1,1],["ναρθ",-1,1],["σκουλ",-1,1],["σκωλ",-1,1],["
σφ",-1,1]],c1=[["θ",-1,1],["διαθ",0,1],["παρακαταθ",0,1],["συνθ",0,1],["προσθ",0,1]],s1=[["ηκα",-1,1],["ηκε",-1,1],["ηκεσ",-1,1]],e1=[["φαγ",-1,1],["ληγ",-1,1],["φρυδ",-1,1],["μαντιλ",-1,1],["μαλλ",-1,1],["ομ",-1,1],["βλεπ",-1,1],["ποδαρ",-1,1],["κυματ",-1,1],["πρωτ",-1,1],["λαχ",-1,1],["πανταχ",-1,1]],o1=[["τσα",-1,1],["χαδ",-1,1],["μεδ",-1,1],["λαμπιδ",-1,1],["δε",-1,1],["πλε",-1,1],["μεσαζ",-1,1],["δεσποζ",-1,1],["αιθ",-1,1],["φαρμακ",-1,1],["αγκ",-1,1],["ανηκ",-1,1],["λ",-1,1],["μ",-1,1],["αμ",13,1],["βρομ",13,1],["υποτειν",-1,1],["εκλιπ",-1,1],["ρ",-1,1],["ενδιαφερ",18,1],["αναρρ",18,1],["πατ",-1,1],["καθαρευ",-1,1],["δευτερευ",-1,1],["λεχ",-1,1]],u1=[["ουσα",-1,1],["ουσε",-1,1],["ουσεσ",-1,1]],a1=[["πελ",-1,1],["λλ",-1,1],["σμην",-1,1],["ρπ",-1,1],["πρ",-1,1],["φρ",-1,1],["χορτ",-1,1],["οφ",-1,1],["ψοφ",7,-1],["σφ",-1,1],["λοχ",-1,1],["ναυλοχ",10,-1]],t1=[["αμαλλι",-1,1],["λ",-1,1],["αμαλ",1,1],["μ",-1,1],["ουλαμ",3,1],["εν",-1,1],["δερβεν",5,1],["π",-1,1],["αειπ",7,1],["αρτιπ",7,1],["συμπ",7,1],["νεοπ",7,1],["κροκαλοπ",7,1],["ολοπ",7,1],["προσωποπ",7,1],["σιδηροπ",7,1],["δροσοπ",7,1],["ασπ",7,1],["ανυπ",7,1],["ρ",-1,1],["ασπαρ",19,1],["χαρ",19,1],["αχαρ",21,1],["απερ",19,1],["τρ",19,1],["ουρ",19,1],["τ",-1,1],["διατ",26,1],["επιτ",26,1],["συντ",26,1],["ομοτ",26,1],["νομοτ",30,1],["αποτ",26,1],["υποτ",26,1],["αβαστ",26,1],["αιμοστ",26,1],["προστ",26,1],["ανυστ",26,1],["ναυ",-1,1],["αφ",-1,1],["ξεφ",-1,1],["αδηφ",-1,1],["παμφ",-1,1],["πολυφ",-1,1]],_1=[["αγα",-1,1],["αγε",-1,1],["αγεσ",-1,1]],l1=[["ησα",-1,1],["ησε",-1,1],["ησου",-1,1]],m1=[["ν",-1,1],["δωδεκαν",0,1],["επταν",0,1],["μεγαλον",0,1],["ερημον",0,1],["χερσον",0,1]],f1=[["ηστε",-1,1]],b1=[["σβ",-1,1],["ασβ",0,1],["απλ",-1,1],["αειμν",-1,1],["χρ",-1,1],["αχρ",4,1],["κοινοχρ",4,1],["δυσχρ",4,1],["ευχρ",4,1],["παλιμψ",-1,1]],n1=[["ουνε",-1,1],["ηθουνε",0,1],["ησουνε",0,1]],k1=[["σπι",-1,1],["ν",-1,1],["εξων",1,1],["ρ",-1,1],["στραβομουτσ",-1,1],["κακομουτσ",-1,1]],d1=[["ουμε",-1,1],["ηθουμε",0,1],["ησου
με",0,1]],g1=[["αζ",-1,1],["ωριοπλ",-1,1],["ασουσ",-1,1],["παρασουσ",2,1],["αλλοσουσ",-1,1],["φ",-1,1],["χ",-1,1]],w1=[["ματα",-1,1],["ματων",-1,1],["ματοσ",-1,1]],v1=[["α",-1,1],["ιουμα",0,1],["ομουνα",0,1],["ιομουνα",2,1],["οσουνα",0,1],["ιοσουνα",4,1],["ε",-1,1],["αγατε",6,1],["ηκατε",6,1],["ηθηκατε",8,1],["ησατε",6,1],["ουσατε",6,1],["ειτε",6,1],["ηθειτε",12,1],["ιεμαστε",6,1],["ουμαστε",6,1],["ιουμαστε",15,1],["ιεσαστε",6,1],["οσαστε",6,1],["ιοσαστε",18,1],["η",-1,1],["ι",-1,1],["αμαι",21,1],["ιεμαι",21,1],["ομαι",21,1],["ουμαι",21,1],["ασαι",21,1],["εσαι",21,1],["ιεσαι",27,1],["αται",21,1],["εται",21,1],["ιεται",30,1],["ονται",21,1],["ουνται",21,1],["ιουνται",33,1],["ει",21,1],["αει",35,1],["ηθει",35,1],["ησει",35,1],["οι",21,1],["αν",-1,1],["αγαν",40,1],["ηκαν",40,1],["ηθηκαν",42,1],["ησαν",40,1],["ουσαν",40,1],["οντουσαν",45,1],["ιοντουσαν",46,1],["ονταν",40,1],["ιονταν",48,1],["ουνταν",40,1],["ιουνταν",50,1],["οταν",40,1],["ιοταν",52,1],["ομασταν",40,1],["ιομασταν",54,1],["οσασταν",40,1],["ιοσασταν",56,1],["ουν",-1,1],["ηθουν",58,1],["ομουν",58,1],["ιομουν",60,1],["ησουν",58,1],["οσουν",58,1],["ιοσουν",63,1],["ων",-1,1],["ηδων",65,1],["ο",-1,1],["ασ",-1,1],["εσ",-1,1],["ηδεσ",69,1],["ησεσ",69,1],["ησ",-1,1],["εισ",-1,1],["ηθεισ",73,1],["οσ",-1,1],["υσ",-1,1],["ουσ",76,1],["υ",-1,1],["ου",78,1],["ω",-1,1],["αω",80,1],["ηθω",80,1],["ησω",80,1]],h1=[["οτερ",-1,1],["εστερ",-1,1],["υτερ",-1,1],["ωτερ",-1,1],["οτατ",-1,1],["εστατ",-1,1],["υτατ",-1,1],["ωτατ",-1,1]],H=[81,65,16,1],I=[81,65,0,1],J=!1;this.stem=function(){s.limit_backward=s.cursor,s.cursor=s.limit;var r=s.limit-s.cursor;if((()=>{for(var r;;){var i=s.limit-s.cursor;r:{switch(s.ket=s.cursor,r=s.find_among_b(e),s.bra=s.cursor,r){case 1:if(s.slice_from("α"))break;return;case 2:if(s.slice_from("β"))break;return;case 3:if(s.slice_from("γ"))break;return;case 4:if(s.slice_from("δ"))break;return;case 5:if(s.slice_from("ε"))break;return;case 6:if(s.slice_from("ζ"))break;return;case 
7:if(s.slice_from("η"))break;return;case 8:if(s.slice_from("θ"))break;return;case 9:if(s.slice_from("ι"))break;return;case 10:if(s.slice_from("κ"))break;return;case 11:if(s.slice_from("λ"))break;return;case 12:if(s.slice_from("μ"))break;return;case 13:if(s.slice_from("ν"))break;return;case 14:if(s.slice_from("ξ"))break;return;case 15:if(s.slice_from("ο"))break;return;case 16:if(s.slice_from("π"))break;return;case 17:if(s.slice_from("ρ"))break;return;case 18:if(s.slice_from("σ"))break;return;case 19:if(s.slice_from("τ"))break;return;case 20:if(s.slice_from("υ"))break;return;case 21:if(s.slice_from("φ"))break;return;case 22:if(s.slice_from("χ"))break;return;case 23:if(s.slice_from("ψ"))break;return;case 24:if(s.slice_from("ω"))break;return;case 25:if(s.cursor<=s.limit_backward)break r;s.cursor--}continue}s.cursor=s.limit-i;break}})(),s.cursor=s.limit-r,!(3<=s.current.length))return!1;J=!0;var r=s.limit-s.cursor,r=((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(o))){switch(s.bra=s.cursor,r){case 1:if(s.slice_from("φα"))break;return;case 2:if(s.slice_from("σκα"))break;return;case 3:if(s.slice_from("ολο"))break;return;case 4:if(s.slice_from("σο"))break;return;case 5:if(s.slice_from("τατο"))break;return;case 6:if(s.slice_from("κρε"))break;return;case 7:if(s.slice_from("περ"))break;return;case 8:if(s.slice_from("τερ"))break;return;case 9:if(s.slice_from("φω"))break;return;case 10:if(s.slice_from("καθεστ"))break;return;case 11:if(s.slice_from("γεγον"))break;return}J=!1}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(a)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(u)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 
2:if(s.slice_from("ιζ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(_)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t)||s.cursor>s.limit_backward||s.slice_from("ων")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ισα")&&(s.bra=s.cursor,!(s.cursor>s.limit_backward))){if(s.slice_from("ισ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}if(0!=s.find_among_b(m)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(l)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ισ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(b)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(f)||s.cursor>s.limit_backward||s.slice_from("ι")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(k)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(n)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ιστ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(w)&&(s.bra=s.cursor,s.slice_del())){J=!1;var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(d))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,0==(r=s.find_among_b(g)))return;switch(s.bra=s.cursor,r){case 1:if(s.slice_from("αγνωστ"))break;return;case 2:if(s.slice_from("ατομ"))break;return;case 3:if(s.slice_from("γνωστ"))break;return;case 4:if(s.slice_from("εθν"))break;return;case 5:if(s.slice_from("εκλεκτ"))break;return;case 6:if(s.slice_from("σκεπτ"))break;return;case 7:if(s.slice_from("τοπ"))break;return;case 8:if(s.slice_from("αλεξανδρ"))break;return;case 9:if(s.slice_from("βυζαντ"))break;return;case 
10:if(s.slice_from("θεατρ"))break}}else switch(r){case 1:if(s.slice_from("ισμ"))break;return;case 2:if(s.slice_from("ι"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(h)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(v)||s.cursor>s.limit_backward||s.slice_from("αρακ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(p)&&(s.bra=s.cursor,s.slice_del())){J=!1;var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(q))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κορ"))return;if(!s.slice_from("ιτσ"));}else switch(r){case 1:if(s.slice_from("ακ"))break;return;case 2:if(s.slice_from("ιτσ"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(B)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(C)&&!(s.cursor>s.limit_backward)){if(s.slice_from("ιδ"))break 
r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(S))return;if(!s.slice_from("ιδ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(W)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(G)||s.cursor>s.limit_backward||s.slice_from("ισκ")),s.cursor=s.limit-r,s.limit-s.cursor),i=(s.ket=s.cursor,0!=s.find_among_b(j)&&(s.bra=s.cursor,s.slice_del())&&(i=s.limit-s.cursor,0==s.find_among_b(x))&&(s.cursor=s.limit-i,i=s.cursor,s.insert(s.cursor,s.cursor,"αδ"),s.cursor=i),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(y)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(z))&&s.slice_from("εδ"),s.cursor=s.limit-i,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(A)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(D))&&s.slice_from("ουδ"),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(E)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(F)||s.cursor>s.limit_backward||s.slice_from("ε")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(K)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969))&&s.slice_from("ι"),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(L)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969)){if(s.slice_from("ικ"))break r;return}s.cursor=s.limit-r,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(M)||s.cursor>s.limit_backward||s.slice_from("ικ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var 
r=s.limit-s.cursor;if(s.ket=s.cursor,!s.eq_s_b("αγαμε")||(s.bra=s.cursor,s.cursor>s.limit_backward||s.slice_from("αγαμ"))){s.cursor=s.limit-r;r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(N)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("αμε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(O)||s.cursor>s.limit_backward||s.slice_from("αμ"))}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(Q)){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(P)&&!(s.cursor>s.limit_backward||s.slice_from("αγαν")))return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ανε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("αν"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(R)||s.cursor>s.limit_backward||s.slice_from("αν")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(T)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ετε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("ετ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(U)){if(s.slice_from("ετ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(V)||s.cursor>s.limit_backward||s.slice_from("ετ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(X)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("αρχ")&&!(s.cursor>s.limit_backward)){if(s.slice_from("οντ"))break 
r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κρε"))return;if(!s.slice_from("ωντ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(Y)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("ον")||s.cursor>s.limit_backward||s.slice_from("ομαστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ιεστε")){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(Z)&&!(s.cursor>s.limit_backward||s.slice_from("ιεστ")))return}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("εστε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b($)||s.cursor>s.limit_backward||s.slice_from("ιεστ"))})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(r1)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,0!=s.find_among_b(s1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(i1)){if(s.slice_from("ηκ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(c1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ηκ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(u1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(e1)){if(s.slice_from("ουσ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(o1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ουσ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(l1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(m1)||s.cursor>s.limit_backward||s.slice_from("ησ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var 
r;if(s.ket=s.cursor,0!=s.find_among_b(_1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("κολλ")){if(s.slice_from("αγ"))break r;return}s.cursor=s.limit-i;i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(a1)))switch(r){case 1:if(s.slice_from("αγ"))break;return}else{if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("αγ"))return}}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(f1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(b1)||s.cursor>s.limit_backward||s.slice_from("ηστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(n1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(k1)||s.cursor>s.limit_backward||s.slice_from("ουν")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(d1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(g1)||s.cursor>s.limit_backward||s.slice_from("ουμ")),s.cursor=s.limit-r,s.limit-s.cursor),c=(c=s.limit-s.cursor,s.ket=s.cursor,0!=s.find_among_b(w1)&&(s.bra=s.cursor,!s.slice_from("μα"))||(s.cursor=s.limit-c,J&&(s.ket=s.cursor,0!=s.find_among_b(v1))&&(s.bra=s.cursor,s.slice_del())),s.cursor=s.limit-r,s.limit-s.cursor);return s.ket=s.cursor,0!=s.find_among_b(h1)&&(s.bra=s.cursor,s.slice_del()),s.cursor=s.limit-c,s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/hindi-stemmer.js b/sphinx/search/minified-js/hindi-stemmer.js new file mode 100644 index 00000000000..850b0430cdd --- /dev/null +++ b/sphinx/search/minified-js/hindi-stemmer.js @@ -0,0 +1 @@ +var HindiStemmer=function(){var t=new 
BaseStemmer,r=[["आँ",-1,-1],["ाँ",-1,-1],["इयाँ",1,-1],["आइयाँ",2,-1],["ाइयाँ",2,-1],["ियाँ",1,-1],["आं",-1,-1],["उआं",6,-1],["ुआं",6,-1],["ईं",-1,-1],["आईं",9,-1],["ाईं",9,-1],["एं",-1,-1],["आएं",12,-1],["उएं",12,-1],["ाएं",12,-1],["ताएं",15,-1,e],["अताएं",16,-1],["नाएं",15,-1,e],["अनाएं",18,-1],["ुएं",12,-1],["ओं",-1,-1],["आओं",21,-1],["उओं",21,-1],["ाओं",21,-1],["ताओं",24,-1,e],["अताओं",25,-1],["नाओं",24,-1,e],["अनाओं",27,-1],["ुओं",21,-1],["ां",-1,-1],["इयां",30,-1],["आइयां",31,-1],["ाइयां",31,-1],["ियां",30,-1],["ीं",-1,-1],["तीं",35,-1,e],["अतीं",36,-1],["आतीं",36,-1],["ातीं",36,-1],["ें",-1,-1],["ों",-1,-1],["इयों",41,-1],["आइयों",42,-1],["ाइयों",42,-1],["ियों",41,-1],["अ",-1,-1],["आ",-1,-1],["इ",-1,-1],["ई",-1,-1],["आई",49,-1],["ाई",49,-1],["उ",-1,-1],["ऊ",-1,-1],["ए",-1,-1],["आए",54,-1],["इए",54,-1],["आइए",56,-1],["ाइए",56,-1],["ाए",54,-1],["िए",54,-1],["ओ",-1,-1],["आओ",61,-1],["ाओ",61,-1],["कर",-1,-1,e],["अकर",64,-1],["आकर",64,-1],["ाकर",64,-1],["ा",-1,-1],["ऊंगा",68,-1],["आऊंगा",69,-1],["ाऊंगा",69,-1],["ूंगा",68,-1],["एगा",68,-1],["आएगा",73,-1],["ाएगा",73,-1],["ेगा",68,-1],["ता",68,-1,e],["अता",77,-1],["आता",77,-1],["ाता",77,-1],["ना",68,-1,e],["अना",81,-1],["आना",81,-1],["ाना",81,-1],["आया",68,-1],["ाया",68,-1],["ि",-1,-1],["ी",-1,-1],["ऊंगी",88,-1],["आऊंगी",89,-1],["ाऊंगी",89,-1],["एंगी",88,-1],["आएंगी",92,-1],["ाएंगी",92,-1],["ूंगी",88,-1],["ेंगी",88,-1],["एगी",88,-1],["आएगी",97,-1],["ाएगी",97,-1],["ओगी",88,-1],["आओगी",100,-1],["ाओगी",100,-1],["ेगी",88,-1],["ोगी",88,-1],["ती",88,-1,e],["अती",105,-1],["आती",105,-1],["ाती",105,-1],["नी",88,-1,e],["अनी",109,-1],["ु",-1,-1],["ू",-1,-1],["े",-1,-1],["एंगे",113,-1],["आएंगे",114,-1],["ाएंगे",114,-1],["ेंगे",113,-1],["ओगे",113,-1],["आओगे",118,-1],["ाओगे",118,-1],["ोगे",113,-1],["ते",113,-1,e],["अते",122,-1],["आते",122,-1],["ाते",122,-1],["ने",113,-1,e],["अने",126,-1],["आने",126,-1],["ाने",126,-1],["ो",-1,-1],["्",-1,-1]],i=[255,255,255,255,159,0,0,0,248,7];function 
e(){return!!t.in_grouping_b(i,2325,2399)}this.stem=function(){return!(t.cursor>=t.limit||(t.cursor++,t.limit_backward=t.cursor,t.cursor=t.limit,t.ket=t.cursor,0==t.find_among_b(r))||(t.bra=t.cursor,!t.slice_del())||(t.cursor=t.limit_backward,0))},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/hungarian-stemmer.js b/sphinx/search/minified-js/hungarian-stemmer.js index e1fca971f79..a7f3926cacf 100644 --- a/sphinx/search/minified-js/hungarian-stemmer.js +++ b/sphinx/search/minified-js/hungarian-stemmer.js @@ -1 +1 @@ -HungarianStemmer=function(){var r=new BaseStemmer;var e=[["cs",-1,-1],["dzs",-1,-1],["gy",-1,-1],["ly",-1,-1],["ny",-1,-1],["sz",-1,-1],["ty",-1,-1],["zs",-1,-1]];var i=[["á",-1,1],["é",-1,2]];var a=[["bb",-1,-1],["cc",-1,-1],["dd",-1,-1],["ff",-1,-1],["gg",-1,-1],["jj",-1,-1],["kk",-1,-1],["ll",-1,-1],["mm",-1,-1],["nn",-1,-1],["pp",-1,-1],["rr",-1,-1],["ccs",-1,-1],["ss",-1,-1],["zzs",-1,-1],["tt",-1,-1],["vv",-1,-1],["ggy",-1,-1],["lly",-1,-1],["nny",-1,-1],["tty",-1,-1],["ssz",-1,-1],["zz",-1,-1]];var t=[["al",-1,1],["el",-1,1]];var s=[["ba",-1,-1],["ra",-1,-1],["be",-1,-1],["re",-1,-1],["ig",-1,-1],["nak",-1,-1],["nek",-1,-1],["val",-1,-1],["vel",-1,-1],["ul",-1,-1],["nál",-1,-1],["nél",-1,-1],["ból",-1,-1],["ról",-1,-1],["tól",-1,-1],["ül",-1,-1],["ből",-1,-1],["ről",-1,-1],["től",-1,-1],["n",-1,-1],["an",19,-1],["ban",20,-1],["en",19,-1],["ben",22,-1],["képpen",22,-1],["on",19,-1],["ön",19,-1],["képp",-1,-1],["kor",-1,-1],["t",-1,-1],["at",29,-1],["et",29,-1],["ként",29,-1],["anként",32,-1],["enként",32,-1],["onként",32,-1],["ot",29,-1],["ért",29,-1],["öt",29,-1],["hez",-1,-1],["hoz",-1,-1],["höz",-1,-1],["vá",-1,-1],["vé",-1,-1]];var u=[["án",-1,2],["én",-1,1],["ánként",-1,2]];var n=[["stul",-1,1],["astul",0,1],["ástul",0,2],["stül",-1,1],["estül",3,1],["éstül",3,3]];var f=[["á",-1,1],["é",-1,1]];var 
c=[["k",-1,3],["ak",0,3],["ek",0,3],["ok",0,3],["ák",0,1],["ék",0,2],["ök",0,3]];var l=[["éi",-1,1],["áéi",0,3],["ééi",0,2],["é",-1,1],["ké",3,1],["aké",4,1],["eké",4,1],["oké",4,1],["áké",4,3],["éké",4,2],["öké",4,1],["éé",3,2]];var o=[["a",-1,1],["ja",0,1],["d",-1,1],["ad",2,1],["ed",2,1],["od",2,1],["ád",2,2],["éd",2,3],["öd",2,1],["e",-1,1],["je",9,1],["nk",-1,1],["unk",11,1],["ánk",11,2],["énk",11,3],["ünk",11,1],["uk",-1,1],["juk",16,1],["ájuk",17,2],["ük",-1,1],["jük",19,1],["éjük",20,3],["m",-1,1],["am",22,1],["em",22,1],["om",22,1],["ám",22,2],["ém",22,3],["o",-1,1],["á",-1,2],["é",-1,3]];var k=[["id",-1,1],["aid",0,1],["jaid",1,1],["eid",0,1],["jeid",3,1],["áid",0,2],["éid",0,3],["i",-1,1],["ai",7,1],["jai",8,1],["ei",7,1],["jei",10,1],["ái",7,2],["éi",7,3],["itek",-1,1],["eitek",14,1],["jeitek",15,1],["éitek",14,3],["ik",-1,1],["aik",18,1],["jaik",19,1],["eik",18,1],["jeik",21,1],["áik",18,2],["éik",18,3],["ink",-1,1],["aink",25,1],["jaink",26,1],["eink",25,1],["jeink",28,1],["áink",25,2],["éink",25,3],["aitok",-1,1],["jaitok",32,1],["áitok",-1,2],["im",-1,1],["aim",35,1],["jaim",36,1],["eim",35,1],["jeim",38,1],["áim",35,2],["éim",35,3]];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,36,10,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1];var b=0;function _(){b=r.limit;r:{var i=r.cursor;e:{if(!r.in_grouping(m,97,369)){break e}i:while(true){var a=r.cursor;a:{if(!r.out_grouping(m,97,369)){break a}r.cursor=a;break i}r.cursor=a;if(r.cursor>=r.limit){break e}r.cursor++}i:{var t=r.cursor;a:{if(r.find_among(e)==0){break a}break i}r.cursor=t;if(r.cursor>=r.limit){break e}r.cursor++}b=r.cursor;break r}r.cursor=i;if(!r.out_grouping(m,97,369)){return false}e:while(true){i:{if(!r.in_grouping(m,97,369)){break i}break e}if(r.cursor>=r.limit){return false}r.cursor++}b=r.cursor}return true}function d(){if(!(b<=r.cursor)){return false}return true}function v(){var e;r.ket=r.cursor;e=r.find_among_b(i);if(e==0){return false}r.bra=r.cursor;if(!d()){return false}switch(e){case 
1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function g(){var e=r.limit-r.cursor;if(r.find_among_b(a)==0){return false}r.cursor=r.limit-e;return true}function j(){if(r.cursor<=r.limit_backward){return false}r.cursor--;r.ket=r.cursor;{var e=r.cursor-1;if(e{_=e.limit;var r=e.cursor;if(e.in_grouping(f,97,369)){var i=e.cursor;e.go_in_grouping(f,97,369)&&(e.cursor++,_=e.cursor),e.cursor=i}else{if(e.cursor=r,!e.go_out_grouping(f,97,369))return;e.cursor++,_=e.cursor}})(),e.cursor=r,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(c)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=(j(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(t))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("e"))break;return;case 2:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(o))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(n)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(k))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(l))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(m))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 
2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor);return(()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(u))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_del())break}})(),e.cursor=e.limit-r,e.cursor=e.limit_backward,!0},this.stemWord=function(r){return e.setCurrent(r),this.stem(),e.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/indonesian-stemmer.js b/sphinx/search/minified-js/indonesian-stemmer.js new file mode 100644 index 00000000000..89339d9783d --- /dev/null +++ b/sphinx/search/minified-js/indonesian-stemmer.js @@ -0,0 +1 @@ +var IndonesianStemmer=function(){var s=new BaseStemmer,c=[["kah",-1,1],["lah",-1,1],["pun",-1,1]],o=[["nya",-1,1],["ku",-1,1],["mu",-1,1]],r=[["i",-1,1,function(){if(2{var r;if(s.bra=s.cursor,0!=(r=s.find_among(n))){switch(s.ket=s.cursor,r){case 1:if(!s.slice_del())return;a=1,--l;break;case 2:if(!s.slice_del())return;a=3,--l;break;case 3:if(a=1,!s.slice_from("s"))return;--l;break;case 4:if(a=3,!s.slice_from("s"))return;--l;break;case 5:a=1,--l;r:{var e=s.cursor,i=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=i,s.slice_from("p"))break r;return}if(s.cursor=e,!s.slice_del())return}break;case 6:a=3,--l;r:{var u=s.cursor,c=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=c,s.slice_from("p"))break r;return}if(s.cursor=u,!s.slice_del())return}}return 1}})()?(u=s.cursor,i=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward,s.cursor=i,l<=2||m())),s.cursor=u,s.cursor=e):(s.cursor=r,i=s.cursor,m(),s.cursor=i,u=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward)),s.cursor=u),0))},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/irish-stemmer.js b/sphinx/search/minified-js/irish-stemmer.js 
new file mode 100644 index 00000000000..c90c92292cc --- /dev/null +++ b/sphinx/search/minified-js/irish-stemmer.js @@ -0,0 +1 @@ +var IrishStemmer=function(){var i=new BaseStemmer,e=[["b'",-1,1],["bh",-1,4],["bhf",1,2],["bp",-1,8],["ch",-1,5],["d'",-1,1],["d'fh",5,2],["dh",-1,6],["dt",-1,9],["fh",-1,2],["gc",-1,5],["gh",-1,7],["h-",-1,1],["m'",-1,1],["mb",-1,4],["mh",-1,10],["n-",-1,1],["nd",-1,6],["ng",-1,7],["ph",-1,8],["sh",-1,3],["t-",-1,1],["th",-1,9],["ts",-1,3]],a=[["íochta",-1,1],["aíochta",0,1],["ire",-1,2],["aire",2,2],["abh",-1,1],["eabh",4,1],["ibh",-1,1],["aibh",6,1],["amh",-1,1],["eamh",8,1],["imh",-1,1],["aimh",10,1],["íocht",-1,1],["aíocht",12,1],["irí",-1,2],["airí",14,2]],c=[["óideacha",-1,6],["patacha",-1,5],["achta",-1,1],["arcachta",2,2],["eachta",2,1],["grafaíochta",-1,4],["paite",-1,5],["ach",-1,1],["each",7,1],["óideach",8,6],["gineach",8,3],["patach",7,5],["grafaíoch",-1,4],["pataigh",-1,5],["óidigh",-1,6],["achtúil",-1,1],["eachtúil",15,1],["gineas",-1,3],["ginis",-1,3],["acht",-1,1],["arcacht",19,2],["eacht",19,1],["grafaíocht",-1,4],["arcachtaí",-1,2],["grafaíochtaí",-1,4]],t=[["imid",-1,1],["aimid",0,1],["ímid",-1,1],["aímid",2,1],["adh",-1,2],["eadh",4,2],["faidh",-1,1],["fidh",-1,1],["áil",-1,2],["ain",-1,2],["tear",-1,2],["tar",-1,2]],s=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,2],o=0,u=0,f=0;function n(){return u<=i.cursor}function h(){return o<=i.cursor}function m(){var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(t)))switch(i.bra=i.cursor,r){case 1:if(!(f<=i.cursor))return;if(i.slice_del())break;return;case 2:if(!n())return;if(i.slice_del())break;return}}this.stem=function(){var r=i.cursor,r=((()=>{var r;if(i.bra=i.cursor,0!=(r=i.find_among(e)))switch(i.ket=i.cursor,r){case 1:if(i.slice_del())break;return;case 2:if(i.slice_from("f"))break;return;case 3:if(i.slice_from("s"))break;return;case 4:if(i.slice_from("b"))break;return;case 5:if(i.slice_from("c"))break;return;case 6:if(i.slice_from("d"))break;return;case 
7:if(i.slice_from("g"))break;return;case 8:if(i.slice_from("p"))break;return;case 9:if(i.slice_from("t"))break;return;case 10:if(i.slice_from("m"))break}})(),i.cursor=r,f=i.limit,u=i.limit,o=i.limit,r=i.cursor,i.go_out_grouping(s,97,250)&&(i.cursor++,f=i.cursor,i.go_in_grouping(s,97,250))&&(i.cursor++,u=i.cursor,i.go_out_grouping(s,97,250))&&(i.cursor++,i.go_in_grouping(s,97,250))&&(i.cursor++,o=i.cursor),i.cursor=r,i.limit_backward=i.cursor,i.cursor=i.limit,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(a)))switch(i.bra=i.cursor,r){case 1:if(!n())return;if(i.slice_del())break;return;case 2:if(!h())return;if(i.slice_del())break}})(),i.cursor=i.limit-r,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(c)))switch(i.bra=i.cursor,r){case 1:if(!h())return;if(i.slice_del())break;return;case 2:if(i.slice_from("arc"))break;return;case 3:if(i.slice_from("gin"))break;return;case 4:if(i.slice_from("graf"))break;return;case 5:if(i.slice_from("paite"))break;return;case 6:if(i.slice_from("óid"))break}})(),i.cursor=i.limit-r,i.limit-i.cursor);return m(),i.cursor=i.limit-r,i.cursor=i.limit_backward,!0},this.stemWord=function(r){return i.setCurrent(r),this.stem(),i.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/italian-stemmer.js b/sphinx/search/minified-js/italian-stemmer.js index a3a5c4265e4..ac46b1d415e 100644 --- a/sphinx/search/minified-js/italian-stemmer.js +++ b/sphinx/search/minified-js/italian-stemmer.js @@ -1 +1 @@ -ItalianStemmer=function(){var r=new BaseStemmer;var e=[["",-1,7],["qu",0,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["",-1,3],["I",0,1],["U",0,2]];var 
a=[["la",-1,-1],["cela",0,-1],["gliela",0,-1],["mela",0,-1],["tela",0,-1],["vela",0,-1],["le",-1,-1],["cele",6,-1],["gliele",6,-1],["mele",6,-1],["tele",6,-1],["vele",6,-1],["ne",-1,-1],["cene",12,-1],["gliene",12,-1],["mene",12,-1],["sene",12,-1],["tene",12,-1],["vene",12,-1],["ci",-1,-1],["li",-1,-1],["celi",20,-1],["glieli",20,-1],["meli",20,-1],["teli",20,-1],["veli",20,-1],["gli",20,-1],["mi",-1,-1],["si",-1,-1],["ti",-1,-1],["vi",-1,-1],["lo",-1,-1],["celo",31,-1],["glielo",31,-1],["melo",31,-1],["telo",31,-1],["velo",31,-1]];var s=[["ando",-1,1],["endo",-1,1],["ar",-1,2],["er",-1,2],["ir",-1,2]];var o=[["ic",-1,-1],["abil",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["logia",-1,3],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["enza",-1,5],["ice",-1,1],["atrice",7,1],["iche",-1,1],["logie",-1,3],["abile",-1,1],["ibile",-1,1],["usione",-1,4],["azione",-1,2],["uzione",-1,4],["atore",-1,2],["ose",-1,1],["ante",-1,1],["mente",-1,1],["amente",19,7],["iste",-1,1],["ive",-1,9],["anze",-1,1],["enze",-1,5],["ici",-1,1],["atrici",25,1],["ichi",-1,1],["abili",-1,1],["ibili",-1,1],["ismi",-1,1],["usioni",-1,4],["azioni",-1,2],["uzioni",-1,4],["atori",-1,2],["osi",-1,1],["anti",-1,1],["amenti",-1,6],["imenti",-1,6],["isti",-1,1],["ivi",-1,9],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,6],["imento",-1,6],["ivo",-1,9],["ità",-1,8],["istà",-1,1],["istè",-1,1],["istì",-1,1]];var 
c=[["isca",-1,1],["enda",-1,1],["ata",-1,1],["ita",-1,1],["uta",-1,1],["ava",-1,1],["eva",-1,1],["iva",-1,1],["erebbe",-1,1],["irebbe",-1,1],["isce",-1,1],["ende",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["asse",-1,1],["ate",-1,1],["avate",16,1],["evate",16,1],["ivate",16,1],["ete",-1,1],["erete",20,1],["irete",20,1],["ite",-1,1],["ereste",-1,1],["ireste",-1,1],["ute",-1,1],["erai",-1,1],["irai",-1,1],["isci",-1,1],["endi",-1,1],["erei",-1,1],["irei",-1,1],["assi",-1,1],["ati",-1,1],["iti",-1,1],["eresti",-1,1],["iresti",-1,1],["uti",-1,1],["avi",-1,1],["evi",-1,1],["ivi",-1,1],["isco",-1,1],["ando",-1,1],["endo",-1,1],["Yamo",-1,1],["iamo",-1,1],["avamo",-1,1],["evamo",-1,1],["ivamo",-1,1],["eremo",-1,1],["iremo",-1,1],["assimo",-1,1],["ammo",-1,1],["emmo",-1,1],["eremmo",54,1],["iremmo",54,1],["immo",-1,1],["ano",-1,1],["iscano",58,1],["avano",58,1],["evano",58,1],["ivano",58,1],["eranno",-1,1],["iranno",-1,1],["ono",-1,1],["iscono",65,1],["arono",65,1],["erono",65,1],["irono",65,1],["erebbero",-1,1],["irebbero",-1,1],["assero",-1,1],["essero",-1,1],["issero",-1,1],["ato",-1,1],["ito",-1,1],["uto",-1,1],["avo",-1,1],["evo",-1,1],["ivo",-1,1],["ar",-1,1],["ir",-1,1],["erà",-1,1],["irà",-1,1],["erò",-1,1],["irò",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1];var n=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2];var f=[17];var b=0;var m=0;var k=0;function _(){var i;var a=r.cursor;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("à")){return false}break;case 2:if(!r.slice_from("è")){return false}break;case 3:if(!r.slice_from("ì")){return false}break;case 4:if(!r.slice_from("ò")){return false}break;case 5:if(!r.slice_from("ù")){return false}break;case 6:if(!r.slice_from("qU")){return false}break;case 7:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=a;while(true){var o=r.cursor;r:{e:while(true){var 
u=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}r.bra=r.cursor;a:{var t=r.cursor;s:{if(!r.eq_s("u")){break s}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break s}if(!r.slice_from("U")){return false}break a}r.cursor=t;if(!r.eq_s("i")){break i}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break i}if(!r.slice_from("I")){return false}}r.cursor=u;break e}r.cursor=u;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=o;break}return true}function v(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(l,97,249)){break s}o:while(true){u:{if(!r.in_grouping(l,97,249)){break u}break o}if(r.cursor>=r.limit){break s}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(l,97,249)){break i}s:while(true){o:{if(!r.out_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,249)){break r}i:{var s=r.cursor;a:{if(!r.out_grouping(l,97,249)){break a}s:while(true){o:{if(!r.in_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(l,97,249)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var o=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=o;return true}function g(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break 
r}r.cursor++;break}continue}r.cursor=a;break}return true}function d(){if(!(k<=r.cursor)){return false}return true}function w(){if(!(m<=r.cursor)){return false}return true}function h(){if(!(b<=r.cursor)){return false}return true}function p(){var e;r.ket=r.cursor;if(r.find_among_b(a)==0){return false}r.bra=r.cursor;e=r.find_among_b(s);if(e==0){return false}if(!d()){return false}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!h()){return false}if(!r.slice_del()){return false}break;case 2:if(!h()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!h()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!h()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!h()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!d()){return false}if(!r.slice_del()){return false}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(o);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!h()){return false}if(!r.slice_del()){return false}var s=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-s;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-s;break r}if(!r.slice_del()){return false}}break;case 9:if(!h()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break 
r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break}return true}function z(){if(r.cursor{for(var r,i=u.cursor;;){var e=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(a),u.ket=u.cursor,r){case 1:if(u.slice_from("à"))break;return;case 2:if(u.slice_from("è"))break;return;case 3:if(u.slice_from("ì"))break;return;case 4:if(u.slice_from("ò"))break;return;case 5:if(u.slice_from("ù"))break;return;case 6:if(u.slice_from("qU"))break;return;case 7:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=e;break}for(u.cursor=i;;){var o=u.cursor;r:{for(;;){var s=u.cursor;i:if(u.in_grouping(m,97,249)){u.bra=u.cursor;e:{var c=u.cursor;if(u.eq_s("u")&&(u.ket=u.cursor,u.in_grouping(m,97,249))){if(u.slice_from("U"))break e;return}if(u.cursor=c,!u.eq_s("i"))break i;if(u.ket=u.cursor,!u.in_grouping(m,97,249))break i;if(!u.slice_from("I"))return}u.cursor=s;break}if(u.cursor=s,u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=o;break}})(),u.cursor=r,v=u.limit,k=u.limit,g=u.limit,u.cursor);r:{i:{var i=u.cursor;e:if(u.in_grouping(m,97,249)){var e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break e;if(!u.go_in_grouping(m,97,249))break e}u.cursor++;break i}if(u.cursor=i,!u.eq_s("divan")){if(u.cursor=i,!u.out_grouping(m,97,249))break r;e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break r;if(u.cursor>=u.limit)break r}u.cursor++}}v=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(m,97,249)&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,k=u.cursor,u.go_out_grouping(m,97,249))&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,g=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;var r=u.limit-u.cursor,r=((()=>{var 
r;if(u.ket=u.cursor,0!=u.find_among_b(c)&&(u.bra=u.cursor,0!=(r=u.find_among_b(t)))&&d())switch(r){case 1:if(u.slice_del())break;return;case 2:if(u.slice_from("e"))break}})(),u.cursor=u.limit-r,u.limit-u.cursor),o=u.limit-u.cursor,o=(w()||(u.cursor=u.limit-o,(()=>{if(!(u.cursor{var r=u.limit-u.cursor;if(u.ket=u.cursor,u.in_grouping_b(_,97,242))if(u.bra=u.cursor,d()){if(!u.slice_del())return;if(u.ket=u.cursor,u.eq_s_b("i"))if(u.bra=u.cursor,d()){if(!u.slice_del())return}else u.cursor=u.limit-r;else u.cursor=u.limit-r}else u.cursor=u.limit-r;else u.cursor=u.limit-r;if(r=u.limit-u.cursor,u.ket=u.cursor,u.eq_s_b("h"))if(u.bra=u.cursor,u.in_grouping_b(b,99,103))if(d()){if(!u.slice_del());}else u.cursor=u.limit-r;else u.cursor=u.limit-r;else u.cursor=u.limit-r})(),u.cursor=u.limit-o,u.cursor=u.limit_backward,u.cursor);return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(s),u.ket=u.cursor,r){case 1:if(u.slice_from("i"))break;return;case 2:if(u.slice_from("u"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/lithuanian-stemmer.js b/sphinx/search/minified-js/lithuanian-stemmer.js new file mode 100644 index 00000000000..6d48ddac94c --- /dev/null +++ b/sphinx/search/minified-js/lithuanian-stemmer.js @@ -0,0 +1 @@ +var LithuanianStemmer=function(){var e=new 
BaseStemmer,t=[["a",-1,-1],["ia",0,-1],["eria",1,-1],["osna",0,-1],["iosna",3,-1],["uosna",3,-1],["iuosna",5,-1],["ysna",0,-1],["ėsna",0,-1],["e",-1,-1],["ie",9,-1],["enie",10,-1],["erie",10,-1],["oje",9,-1],["ioje",13,-1],["uje",9,-1],["iuje",15,-1],["yje",9,-1],["enyje",17,-1],["eryje",17,-1],["ėje",9,-1],["ame",9,-1],["iame",21,-1],["sime",9,-1],["ome",9,-1],["ėme",9,-1],["tumėme",25,-1],["ose",9,-1],["iose",27,-1],["uose",27,-1],["iuose",29,-1],["yse",9,-1],["enyse",31,-1],["eryse",31,-1],["ėse",9,-1],["ate",9,-1],["iate",35,-1],["ite",9,-1],["kite",37,-1],["site",37,-1],["ote",9,-1],["tute",9,-1],["ėte",9,-1],["tumėte",42,-1],["i",-1,-1],["ai",44,-1],["iai",45,-1],["eriai",46,-1],["ei",44,-1],["tumei",48,-1],["ki",44,-1],["imi",44,-1],["erimi",51,-1],["umi",44,-1],["iumi",53,-1],["si",44,-1],["asi",55,-1],["iasi",56,-1],["esi",55,-1],["iesi",58,-1],["siesi",59,-1],["isi",55,-1],["aisi",61,-1],["eisi",61,-1],["tumeisi",63,-1],["uisi",61,-1],["osi",55,-1],["ėjosi",66,-1],["uosi",66,-1],["iuosi",68,-1],["siuosi",69,-1],["usi",55,-1],["ausi",71,-1],["čiausi",72,-1],["ąsi",55,-1],["ėsi",55,-1],["ųsi",55,-1],["tųsi",76,-1],["ti",44,-1],["enti",78,-1],["inti",78,-1],["oti",78,-1],["ioti",81,-1],["uoti",81,-1],["iuoti",83,-1],["auti",78,-1],["iauti",85,-1],["yti",78,-1],["ėti",78,-1],["telėti",88,-1],["inėti",88,-1],["terėti",88,-1],["ui",44,-1],["iui",92,-1],["eniui",93,-1],["oj",-1,-1],["ėj",-1,-1],["k",-1,-1],["am",-1,-1],["iam",98,-1],["iem",-1,-1],["im",-1,-1],["sim",101,-1],["om",-1,-1],["tum",-1,-1],["ėm",-1,-1],["tumėm",105,-1],["an",-1,-1],["on",-1,-1],["ion",108,-1],["un",-1,-1],["iun",110,-1],["ėn",-1,-1],["o",-1,-1],["io",113,-1],["enio",114,-1],["ėjo",113,-1],["uo",113,-1],["s",-1,-1],["as",118,-1],["ias",119,-1],["es",118,-1],["ies",121,-1],["is",118,-1],["ais",123,-1],["iais",124,-1],["tumeis",123,-1],["imis",123,-1],["enimis",127,-1],["omis",123,-1],["iomis",129,-1],["umis",123,-1],["ėmis",123,-1],["enis",123,-1],["asis",123,-1],["ysis",123,-1],["ams",1
18,-1],["iams",136,-1],["iems",118,-1],["ims",118,-1],["enims",139,-1],["erims",139,-1],["oms",118,-1],["ioms",142,-1],["ums",118,-1],["ėms",118,-1],["ens",118,-1],["os",118,-1],["ios",147,-1],["uos",147,-1],["iuos",149,-1],["ers",118,-1],["us",118,-1],["aus",152,-1],["iaus",153,-1],["ius",152,-1],["ys",118,-1],["enys",156,-1],["erys",156,-1],["ąs",118,-1],["iąs",159,-1],["ės",118,-1],["amės",161,-1],["iamės",162,-1],["imės",161,-1],["kimės",164,-1],["simės",164,-1],["omės",161,-1],["ėmės",161,-1],["tumėmės",168,-1],["atės",161,-1],["iatės",170,-1],["sitės",161,-1],["otės",161,-1],["ėtės",161,-1],["tumėtės",174,-1],["įs",118,-1],["ūs",118,-1],["tųs",118,-1],["at",-1,-1],["iat",179,-1],["it",-1,-1],["sit",181,-1],["ot",-1,-1],["ėt",-1,-1],["tumėt",184,-1],["u",-1,-1],["au",186,-1],["iau",187,-1],["čiau",188,-1],["iu",186,-1],["eniu",190,-1],["siu",190,-1],["y",-1,-1],["ą",-1,-1],["ią",194,-1],["ė",-1,-1],["ę",-1,-1],["į",-1,-1],["enį",198,-1],["erį",198,-1],["ų",-1,-1],["ių",201,-1],["erų",201,-1]],a=[["ing",-1,-1],["aj",-1,-1],["iaj",1,-1],["iej",-1,-1],["oj",-1,-1],["ioj",4,-1],["uoj",4,-1],["iuoj",6,-1],["auj",-1,-1],["ąj",-1,-1],["iąj",9,-1],["ėj",-1,-1],["ųj",-1,-1],["iųj",12,-1],["ok",-1,-1],["iok",14,-1],["iuk",-1,-1],["uliuk",16,-1],["učiuk",16,-1],["išk",-1,-1],["iul",-1,-1],["yl",-1,-1],["ėl",-1,-1],["am",-1,-1],["dam",23,-1],["jam",23,-1],["zgan",-1,-1],["ain",-1,-1],["esn",-1,-1],["op",-1,-1],["iop",29,-1],["ias",-1,-1],["ies",-1,-1],["ais",-1,-1],["iais",33,-1],["os",-1,-1],["ios",35,-1],["uos",35,-1],["iuos",37,-1],["aus",-1,-1],["iaus",39,-1],["ąs",-1,-1],["iąs",41,-1],["ęs",-1,-1],["utėait",-1,-1],["ant",-1,-1],["iant",45,-1],["siant",46,-1],["int",-1,-1],["ot",-1,-1],["uot",49,-1],["iuot",50,-1],["yt",-1,-1],["ėt",-1,-1],["ykšt",-1,-1],["iau",-1,-1],["dav",-1,-1],["sv",-1,-1],["šv",-1,-1],["ykšč",-1,-1],["ę",-1,-1],["ėję",60,-1]],u=[["ojime",-1,7],["ėjime",-1,3],["avime",-1,6],["okate",-1,8],["aite",-1,1],["uote",-1,2],["asius",-1,5],["okatės",-1,8],
["aitės",-1,1],["uotės",-1,2],["esiu",-1,4]],s=[["č",-1,1],["dž",-1,2]],o=[["gd",-1,1]],m=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,64,1,0,64,0,0,0,0,0,0,0,4,4],c=0;function n(){var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(s)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("t"))break;return;case 2:if(e.slice_from("d"))break;return}}this.stem=function(){c=e.limit;var i=e.cursor,s=e.cursor,r=e.cursor,r=(e.eq_s("a")?(e.cursor=r,e.current.length<=6||e.cursor>=e.limit?e.cursor=s:e.cursor++):e.cursor=s,e.go_out_grouping(m,97,371)&&(e.cursor++,e.go_in_grouping(m,97,371))&&(e.cursor++,c=e.cursor),e.cursor=i,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),s=((()=>{var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(u)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("aitė"))break;return;case 2:if(e.slice_from("uotė"))break;return;case 3:if(e.slice_from("ėjimas"))break;return;case 4:if(e.slice_from("esys"))break;return;case 5:if(e.slice_from("asys"))break;return;case 6:if(e.slice_from("avimas"))break;return;case 7:if(e.slice_from("ojimas"))break;return;case 8:if(e.slice_from("okatė"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.cursor{for(;;){var i=e.limit-e.cursor;if(!(e.cursor{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(t)))switch(s.bra=s.cursor,r){case 1:if(s.slice_del())break;return;case 2:var i=s.limit-s.cursor;if(s.eq_s_b("ए")||(s.cursor=s.limit-i,s.eq_s_b("े"))||(s.cursor=s.limit-i,s.slice_del()))break}})(),s.cursor=s.limit-r;;){var i=s.limit-s.cursor,e=s.limit-s.cursor;if((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(c)))switch(s.bra=s.cursor,r){case 1:var i=s.limit-s.cursor;if(!s.eq_s_b("यौ")&&(s.cursor=s.limit-i,!s.eq_s_b("छौ")&&(s.cursor=s.limit-i,!s.eq_s_b("नौ"))&&(s.cursor=s.limit-i,!s.eq_s_b("थे"))))return;if(s.slice_del())break;return;case 2:if(!s.eq_s_b("त्र"))return;if(s.slice_del())break}})(),s.cursor=s.limit-e,s.ket=s.cursor,0==s.find_among_b(u)||(s.bra=s.cursor,!s.slice_del())){s.cursor=s.limit-i;break}}return 
s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/norwegian-stemmer.js b/sphinx/search/minified-js/norwegian-stemmer.js index c8ec76cc1ca..5cf580e3ed8 100644 --- a/sphinx/search/minified-js/norwegian-stemmer.js +++ b/sphinx/search/minified-js/norwegian-stemmer.js @@ -1 +1 @@ -NorwegianStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["e",-1,1],["ede",1,1],["ande",1,1],["ende",1,1],["ane",1,1],["ene",1,1],["hetene",6,1],["erte",1,3],["en",-1,1],["heten",9,1],["ar",-1,1],["er",-1,1],["heter",12,1],["s",-1,2],["as",14,1],["es",14,1],["edes",16,1],["endes",16,1],["enes",16,1],["hetenes",19,1],["ens",14,1],["hetens",21,1],["ers",14,1],["ets",14,1],["et",-1,1],["het",25,1],["ert",-1,3],["ast",-1,1]];var i=[["dt",-1,-1],["vt",-1,-1]];var t=[["leg",-1,1],["eleg",0,1],["ig",-1,1],["eig",2,1],["lig",2,1],["elig",4,1],["els",-1,1],["lov",-1,1],["elov",7,1],["slov",7,1],["hetslov",9,1]];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var s=[119,125,149,1];var u=0;var c=0;function l(){c=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}u=r.cursor;r.cursor=e;r:while(true){var t=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=t;break r}r.cursor=t;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}c=r.cursor;r:{if(!(ct.limit||(t.cursor=i,m=t.cursor,t.cursor=e,t.go_out_grouping(u,97,248)&&(t.cursor++,t.go_in_grouping(u,97,248))&&(t.cursor++,n=t.cursor,m<=n||(n=m))),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,t.limit-t.cursor),e=((()=>{var r;if(!(t.cursor=r.limit){break e}r.cursor++}if(!r.slice_from("Y")){return false}l=true;continue}r.cursor=s;break}}r.cursor=i;n=r.limit;o=r.limit;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break 
r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=u;r.limit_backward=r.cursor;r.cursor=r.limit;var t=r.limit-r.cursor;k();r.cursor=r.limit-t;var f=r.limit-r.cursor;v();r.cursor=r.limit-f;var b=r.limit-r.cursor;g();r.cursor=r.limit-b;var m=r.limit-r.cursor;d();r.cursor=r.limit-m;var _=r.limit-r.cursor;w();r.cursor=r.limit-_;var z=r.limit-r.cursor;h();r.cursor=r.limit-z;var y=r.limit-r.cursor;p();r.cursor=r.limit-y;var Y=r.limit-r.cursor;q();r.cursor=r.limit-Y;r.cursor=r.limit_backward;var C=r.cursor;r:{if(!l){break r}while(true){var S=r.cursor;e:{i:while(true){var B=r.cursor;s:{r.bra=r.cursor;if(!r.eq_s("Y")){break s}r.ket=r.cursor;r.cursor=B;break i}r.cursor=B;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("y")){return false}continue}r.cursor=S;break}}r.cursor=C;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}}; \ No newline at end of file +var PorterStemmer=function(){var u=new 
BaseStemmer,t=[["s",-1,3],["ies",0,2],["sses",0,1],["ss",0,-1]],a=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],n=[["ed",-1,2],["eed",0,1],["ing",-1,2]],l=[["anci",-1,3],["enci",-1,2],["abli",-1,4],["eli",-1,6],["alli",-1,9],["ousli",-1,11],["entli",-1,5],["aliti",-1,9],["biliti",-1,13],["iviti",-1,12],["tional",-1,1],["ational",10,8],["alism",-1,9],["ation",-1,8],["ization",13,7],["izer",-1,7],["ator",-1,8],["iveness",-1,12],["fulness",-1,10],["ousness",-1,11]],f=[["icate",-1,2],["ative",-1,3],["alize",-1,1],["iciti",-1,2],["ical",-1,2],["ful",-1,3],["ness",-1,3]],_=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1],["ou",-1,1]],m=[17,65,16,1],r=[1,17,65,208,1],b=!1,k=0,g=0;function d(){return u.out_grouping_b(r,89,121)&&u.in_grouping_b(m,97,121)&&!!u.out_grouping_b(m,97,121)}function v(){return g<=u.cursor}function p(){return k<=u.cursor}this.stem=function(){b=!1;var r=u.cursor;if(u.bra=u.cursor,u.eq_s("y")){if(u.ket=u.cursor,!u.slice_from("Y"))return!1;b=!0}u.cursor=r;for(r=u.cursor;;){var i=u.cursor;r:{for(;;){var e=u.cursor;if(u.in_grouping(m,97,121)&&(u.bra=u.cursor,u.eq_s("y"))){u.ket=u.cursor,u.cursor=e;break}if(u.cursor=e,u.cursor>=u.limit)break r;u.cursor++}if(!u.slice_from("Y"))return!1;b=!0;continue}u.cursor=i;break}u.cursor=r,g=u.limit,k=u.limit;var r=u.cursor,r=(u.go_out_grouping(m,97,121)&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,g=u.cursor,u.go_out_grouping(m,97,121))&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,k=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(t)))switch(u.bra=u.cursor,r){case 1:if(u.slice_from("ss"))break;return;case 
2:if(u.slice_from("i"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{if(u.ket=u.cursor,0!=(i=u.find_among_b(n)))switch(u.bra=u.cursor,i){case 1:if(!v())return;if(u.slice_from("ee"))break;return;case 2:var r=u.limit-u.cursor;if(!u.go_out_grouping_b(m,97,121))return;if(u.cursor--,u.cursor=u.limit-r,!u.slice_del())return;var r=u.limit-u.cursor,i=u.find_among_b(a);switch(u.cursor=u.limit-r,i){case 1:var e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e;break;case 2:if(u.ket=u.cursor,u.cursor<=u.limit_backward)return;if(u.cursor--,u.bra=u.cursor,u.slice_del())break;return;case 3:if(u.cursor!=g)return;e=u.limit-u.cursor;if(!d())return;u.cursor=u.limit-e;e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e}}})(),u.cursor=u.limit-r,u.limit-u.cursor),s=(u.ket=u.cursor,s=u.limit-u.cursor,(u.eq_s_b("y")||(u.cursor=u.limit-s,u.eq_s_b("Y")))&&(u.bra=u.cursor,u.go_out_grouping_b(m,97,121))&&(u.cursor--,u.slice_from("i")),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(l))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("tion"))break;return;case 2:if(u.slice_from("ence"))break;return;case 3:if(u.slice_from("ance"))break;return;case 4:if(u.slice_from("able"))break;return;case 5:if(u.slice_from("ent"))break;return;case 6:if(u.slice_from("e"))break;return;case 7:if(u.slice_from("ize"))break;return;case 8:if(u.slice_from("ate"))break;return;case 9:if(u.slice_from("al"))break;return;case 10:if(u.slice_from("ful"))break;return;case 11:if(u.slice_from("ous"))break;return;case 12:if(u.slice_from("ive"))break;return;case 13:if(u.slice_from("ble"))break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("al"))break;return;case 2:if(u.slice_from("ic"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var 
r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_))&&(u.bra=u.cursor,p()))switch(r){case 1:if(u.slice_del())break;return;case 2:var i=u.limit-u.cursor;if(!u.eq_s_b("s")&&(u.cursor=u.limit-i,!u.eq_s_b("t")))return;if(u.slice_del())break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{if(u.ket=u.cursor,u.eq_s_b("e")){if(u.bra=u.cursor,!p()){if(!v())return;var r=u.limit-u.cursor;if(d())return;u.cursor=u.limit-r}u.slice_del()}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=(u.ket=u.cursor,u.eq_s_b("l")&&(u.bra=u.cursor,p())&&u.eq_s_b("l")&&u.slice_del(),u.cursor=u.limit-s,u.cursor=u.limit_backward,u.cursor);if(b)for(;;){var c=u.cursor;r:{for(;;){var o=u.cursor;if(u.bra=u.cursor,u.eq_s("Y")){u.ket=u.cursor,u.cursor=o;break}if(u.cursor=o,u.cursor>=u.limit)break r;u.cursor++}if(u.slice_from("y"))continue;return!1}u.cursor=c;break}return u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/portuguese-stemmer.js b/sphinx/search/minified-js/portuguese-stemmer.js index 022d860e6b3..9cc42155d49 100644 --- a/sphinx/search/minified-js/portuguese-stemmer.js +++ b/sphinx/search/minified-js/portuguese-stemmer.js @@ -1 +1 @@ -PortugueseStemmer=function(){var r=new BaseStemmer;var e=[["",-1,3],["ã",0,1],["õ",0,2]];var i=[["",-1,3],["a~",0,1],["o~",0,2]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var a=[["ante",-1,1],["avel",-1,1],["ível",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var 
o=[["ica",-1,1],["ância",-1,1],["ência",-1,4],["logia",-1,2],["ira",-1,9],["adora",-1,1],["osa",-1,1],["ista",-1,1],["iva",-1,8],["eza",-1,1],["idade",-1,7],["ante",-1,1],["mente",-1,6],["amente",12,5],["ável",-1,1],["ível",-1,1],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,1],["imento",-1,1],["ivo",-1,8],["aça~o",-1,1],["uça~o",-1,3],["ador",-1,1],["icas",-1,1],["ências",-1,4],["logias",-1,2],["iras",-1,9],["adoras",-1,1],["osas",-1,1],["istas",-1,1],["ivas",-1,8],["ezas",-1,1],["idades",-1,7],["adores",-1,1],["antes",-1,1],["aço~es",-1,1],["uço~es",-1,3],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amentos",-1,1],["imentos",-1,1],["ivos",-1,8]];var t=[["ada",-1,1],["ida",-1,1],["ia",-1,1],["aria",2,1],["eria",2,1],["iria",2,1],["ara",-1,1],["era",-1,1],["ira",-1,1],["ava",-1,1],["asse",-1,1],["esse",-1,1],["isse",-1,1],["aste",-1,1],["este",-1,1],["iste",-1,1],["ei",-1,1],["arei",16,1],["erei",16,1],["irei",16,1],["am",-1,1],["iam",20,1],["ariam",21,1],["eriam",21,1],["iriam",21,1],["aram",20,1],["eram",20,1],["iram",20,1],["avam",20,1],["em",-1,1],["arem",29,1],["erem",29,1],["irem",29,1],["assem",29,1],["essem",29,1],["issem",29,1],["ado",-1,1],["ido",-1,1],["ando",-1,1],["endo",-1,1],["indo",-1,1],["ara~o",-1,1],["era~o",-1,1],["ira~o",-1,1],["ar",-1,1],["er",-1,1],["ir",-1,1],["as",-1,1],["adas",47,1],["idas",47,1],["ias",47,1],["arias",50,1],["erias",50,1],["irias",50,1],["aras",47,1],["eras",47,1],["iras",47,1],["avas",47,1],["es",-1,1],["ardes",58,1],["erdes",58,1],["irdes",58,1],["ares",58,1],["eres",58,1],["ires",58,1],["asses",58,1],["esses",58,1],["isses",58,1],["astes",58,1],["estes",58,1],["istes",58,1],["is",-1,1],["ais",71,1],["eis",71,1],["areis",73,1],["ereis",73,1],["ireis",73,1],["áreis",73,1],["éreis",73,1],["íreis",73,1],["ásseis",73,1],["ésseis",73,1],["ísseis",73,1],["áveis",73,1],["íeis",73,1],["aríeis",84,1],["eríeis",84,1],["iríeis",84,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["áramos",90,1],["éramos",90,1],["íramos",90,1],["á
vamos",90,1],["íamos",90,1],["aríamos",95,1],["eríamos",95,1],["iríamos",95,1],["emos",-1,1],["aremos",99,1],["eremos",99,1],["iremos",99,1],["ássemos",99,1],["êssemos",99,1],["íssemos",99,1],["imos",-1,1],["armos",-1,1],["ermos",-1,1],["irmos",-1,1],["ámos",-1,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["eu",-1,1],["iu",-1,1],["ou",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1]];var c=[["a",-1,1],["i",-1,1],["o",-1,1],["os",-1,1],["á",-1,1],["í",-1,1],["ó",-1,1]];var f=[["e",-1,1],["ç",-1,2],["é",-1,1],["ê",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2];var n=0;var m=0;var b=0;function k(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a~")){return false}break;case 2:if(!r.slice_from("o~")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){b=r.limit;m=r.limit;n=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,250)){break i}s:{var s=r.cursor;a:{if(!r.out_grouping(l,97,250)){break a}u:while(true){o:{if(!r.in_grouping(l,97,250)){break o}break u}if(r.cursor>=r.limit){break a}r.cursor++}break s}r.cursor=s;if(!r.in_grouping(l,97,250)){break i}a:while(true){u:{if(!r.out_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,250)){break r}i:{var a=r.cursor;s:{if(!r.out_grouping(l,97,250)){break s}a:while(true){u:{if(!r.in_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break s}r.cursor++}break i}r.cursor=a;if(!r.in_grouping(l,97,250)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}b=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break 
e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor}r.cursor=u;return true}function v(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("ã")){return false}break;case 2:if(!r.slice_from("õ")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function d(){if(!(b<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(n<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(o);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 3:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 5:if(!g()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}break}}break;case 6:if(!w()){return false}if(!r.slice_del()){return false}var t=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(a)==0){r.cursor=r.limit-t;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-t;break r}if(!r.slice_del()){return false}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return 
false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break r}if(!r.slice_del()){return false}}break;case 9:if(!d()){return false}if(!r.eq_s_b("e")){return false}if(!r.slice_from("ir")){return false}break}return true}function p(){if(r.cursor{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(c),u.ket=u.cursor,r){case 1:if(u.slice_from("a~"))break;return;case 2:if(u.slice_from("o~"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,k=u.limit,d=u.limit,b=u.limit,u.cursor);r:{i:{var i=u.cursor;s:if(u.in_grouping(_,97,250)){var s=u.cursor;if(!u.out_grouping(_,97,250)||!u.go_out_grouping(_,97,250)){if(u.cursor=s,!u.in_grouping(_,97,250))break s;if(!u.go_in_grouping(_,97,250))break s}u.cursor++;break i}if(u.cursor=i,!u.out_grouping(_,97,250))break r;s=u.cursor;if(u.out_grouping(_,97,250)&&u.go_out_grouping(_,97,250));else{if(u.cursor=s,!u.in_grouping(_,97,250))break r;if(u.cursor>=u.limit)break r}u.cursor++}k=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(_,97,250)&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,d=u.cursor,u.go_out_grouping(_,97,250))&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,b=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;r=u.limit-u.cursor;r:{var e=u.limit-u.cursor,o=u.limit-u.cursor,a=u.limit-u.cursor;if(p()||(u.cursor=u.limit-a,(()=>{if(!(u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f)))switch(u.bra=u.cursor,r){case 1:if(!g())return;if(!u.slice_del())return;u.ket=u.cursor;r:{var i=u.limit-u.cursor;if(u.eq_s_b("u")){u.bra=u.cursor;var s=u.limit-u.cursor;if(u.eq_s_b("g")){u.cursor=u.limit-s;break r}}if(u.cursor=u.limit-i,!u.eq_s_b("i"))return;u.bra=u.cursor;s=u.limit-u.cursor;if(!u.eq_s_b("c"))return;u.cursor=u.limit-s}if(!g())return;if(u.slice_del())break;return;case 
2:if(u.slice_from("c"))break}})(),u.cursor=u.limit-r,u.cursor=u.limit_backward,r=u.cursor;return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(t),u.ket=u.cursor,r){case 1:if(u.slice_from("ã"))break;return;case 2:if(u.slice_from("õ"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/romanian-stemmer.js b/sphinx/search/minified-js/romanian-stemmer.js index 01c54d0185c..aefb071550d 100644 --- a/sphinx/search/minified-js/romanian-stemmer.js +++ b/sphinx/search/minified-js/romanian-stemmer.js @@ -1 +1 @@ -RomanianStemmer=function(){var r=new BaseStemmer;var i=[["",-1,3],["I",0,1],["U",0,2]];var e=[["ea",-1,3],["aţia",-1,7],["aua",-1,2],["iua",-1,4],["aţie",-1,7],["ele",-1,3],["ile",-1,5],["iile",6,4],["iei",-1,4],["atei",-1,6],["ii",-1,4],["ului",-1,1],["ul",-1,1],["elor",-1,3],["ilor",-1,4],["iilor",14,4]];var a=[["icala",-1,4],["iciva",-1,4],["ativa",-1,5],["itiva",-1,6],["icale",-1,4],["aţiune",-1,5],["iţiune",-1,6],["atoare",-1,5],["itoare",-1,6],["ătoare",-1,5],["icitate",-1,4],["abilitate",-1,1],["ibilitate",-1,2],["ivitate",-1,3],["icive",-1,4],["ative",-1,5],["itive",-1,6],["icali",-1,4],["atori",-1,5],["icatori",18,4],["itori",-1,6],["ători",-1,5],["icitati",-1,4],["abilitati",-1,1],["ivitati",-1,3],["icivi",-1,4],["ativi",-1,5],["itivi",-1,6],["icităi",-1,4],["abilităi",-1,1],["ivităi",-1,3],["icităţi",-1,4],["abilităţi",-1,1],["ivităţi",-1,3],["ical",-1,4],["ator",-1,5],["icator",35,4],["itor",-1,6],["ător",-1,5],["iciv",-1,4],["ativ",-1,5],["itiv",-1,6],["icală",-1,4],["icivă",-1,4],["ativă",-1,5],["itivă",-1,6]];var 
t=[["ica",-1,1],["abila",-1,1],["ibila",-1,1],["oasa",-1,1],["ata",-1,1],["ita",-1,1],["anta",-1,1],["ista",-1,3],["uta",-1,1],["iva",-1,1],["ic",-1,1],["ice",-1,1],["abile",-1,1],["ibile",-1,1],["isme",-1,3],["iune",-1,2],["oase",-1,1],["ate",-1,1],["itate",17,1],["ite",-1,1],["ante",-1,1],["iste",-1,3],["ute",-1,1],["ive",-1,1],["ici",-1,1],["abili",-1,1],["ibili",-1,1],["iuni",-1,2],["atori",-1,1],["osi",-1,1],["ati",-1,1],["itati",30,1],["iti",-1,1],["anti",-1,1],["isti",-1,3],["uti",-1,1],["işti",-1,3],["ivi",-1,1],["ităi",-1,1],["oşi",-1,1],["ităţi",-1,1],["abil",-1,1],["ibil",-1,1],["ism",-1,3],["ator",-1,1],["os",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ist",-1,3],["ut",-1,1],["iv",-1,1],["ică",-1,1],["abilă",-1,1],["ibilă",-1,1],["oasă",-1,1],["ată",-1,1],["ită",-1,1],["antă",-1,1],["istă",-1,3],["ută",-1,1],["ivă",-1,1]];var s=[["ea",-1,1],["ia",-1,1],["esc",-1,1],["ăsc",-1,1],["ind",-1,1],["ând",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["âre",-1,1],["se",-1,2],["ase",10,1],["sese",10,2],["ise",10,1],["use",10,1],["âse",10,1],["eşte",-1,1],["ăşte",-1,1],["eze",-1,1],["ai",-1,1],["eai",19,1],["iai",19,1],["sei",-1,2],["eşti",-1,1],["ăşti",-1,1],["ui",-1,1],["ezi",-1,1],["âi",-1,1],["aşi",-1,1],["seşi",-1,2],["aseşi",29,1],["seseşi",29,2],["iseşi",29,1],["useşi",29,1],["âseşi",29,1],["işi",-1,1],["uşi",-1,1],["âşi",-1,1],["aţi",-1,2],["eaţi",38,1],["iaţi",38,1],["eţi",-1,2],["iţi",-1,2],["âţi",-1,2],["arăţi",-1,1],["serăţi",-1,2],["aserăţi",45,1],["seserăţi",45,2],["iserăţi",45,1],["userăţi",45,1],["âserăţi",45,1],["irăţi",-1,1],["urăţi",-1,1],["ârăţi",-1,1],["am",-1,1],["eam",54,1],["iam",54,1],["em",-1,2],["asem",57,1],["sesem",57,2],["isem",57,1],["usem",57,1],["âsem",57,1],["im",-1,2],["âm",-1,2],["ăm",-1,2],["arăm",65,1],["serăm",65,2],["aserăm",67,1],["seserăm",67,2],["iserăm",67,1],["userăm",67,1],["âserăm",67,1],["irăm",65,1],["urăm",65,1],["ârăm",65,1],["au",-1,1],["eau",76,1],["iau",76,1],["indu",-1,1],["ându",-1,1],["ez",-1,1],["ească",-1,
1],["ară",-1,1],["seră",-1,2],["aseră",84,1],["seseră",84,2],["iseră",84,1],["useră",84,1],["âseră",84,1],["iră",-1,1],["ură",-1,1],["âră",-1,1],["ează",-1,1]];var u=[["a",-1,1],["e",-1,1],["ie",1,1],["i",-1,1],["ă",-1,1]];var c=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4];var o=false;var f=0;var l=0;var n=0;function b(){while(true){var i=r.cursor;r:{i:while(true){var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}r.bra=r.cursor;a:{var a=r.cursor;t:{if(!r.eq_s("u")){break t}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break t}if(!r.slice_from("U")){return false}break a}r.cursor=a;if(!r.eq_s("i")){break e}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break e}if(!r.slice_from("I")){return false}}r.cursor=e;break i}r.cursor=e;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}return true}function m(){n=r.limit;l=r.limit;f=r.limit;var i=r.cursor;r:{i:{var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}a:{var a=r.cursor;t:{if(!r.out_grouping(c,97,259)){break t}s:while(true){u:{if(!r.in_grouping(c,97,259)){break u}break s}if(r.cursor>=r.limit){break t}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(c,97,259)){break e}t:while(true){s:{if(!r.out_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break e}r.cursor++}}break i}r.cursor=e;if(!r.out_grouping(c,97,259)){break r}e:{var t=r.cursor;a:{if(!r.out_grouping(c,97,259)){break a}t:while(true){s:{if(!r.in_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break a}r.cursor++}break e}r.cursor=t;if(!r.in_grouping(c,97,259)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}n=r.cursor}r.cursor=i;var s=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}l=r.cursor;i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break 
i}if(r.cursor>=r.limit){break r}r.cursor++}f=r.cursor}r.cursor=s;return true}function k(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){if(!(f<=r.cursor)){return false}return true}function w(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("a")){return false}break;case 3:if(!r.slice_from("e")){return false}break;case 4:if(!r.slice_from("i")){return false}break;case 5:{var a=r.limit-r.cursor;r:{if(!r.eq_s_b("ab")){break r}return false}r.cursor=r.limit-a}if(!r.slice_from("i")){return false}break;case 6:if(!r.slice_from("at")){return false}break;case 7:if(!r.slice_from("aţi")){return false}break}return true}function d(){var i;var e=r.limit-r.cursor;r.ket=r.cursor;i=r.find_among_b(a);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_from("abil")){return false}break;case 2:if(!r.slice_from("ibil")){return false}break;case 3:if(!r.slice_from("iv")){return false}break;case 4:if(!r.slice_from("ic")){return false}break;case 5:if(!r.slice_from("at")){return false}break;case 6:if(!r.slice_from("it")){return false}break}o=true;r.cursor=r.limit-e;return true}function h(){var i;o=false;while(true){var e=r.limit-r.cursor;r:{if(!d()){break r}continue}r.cursor=r.limit-e;break}r.ket=r.cursor;i=r.find_among_b(t);if(i==0){return false}r.bra=r.cursor;if(!g()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("ţ")){return false}r.bra=r.cursor;if(!r.slice_from("t")){return false}break;case 
3:if(!r.slice_from("ist")){return false}break}o=true;return true}function p(){var i;if(r.cursor{var i,r=s.limit-s.cursor;if(s.ket=s.cursor,0!=(i=s.find_among_b(e))&&(s.bra=s.cursor,_())){switch(i){case 1:if(s.slice_from("abil"))break;return;case 2:if(s.slice_from("ibil"))break;return;case 3:if(s.slice_from("iv"))break;return;case 4:if(s.slice_from("ic"))break;return;case 5:if(s.slice_from("at"))break;return;case 6:if(s.slice_from("it"))break;return}return l=!0,s.cursor=s.limit-r,1}})()){s.cursor=s.limit-r;break}}if(s.ket=s.cursor,0!=(i=s.find_among_b(a))&&(s.bra=s.cursor,m<=s.cursor)){switch(i){case 1:if(s.slice_del())break;return;case 2:if(!s.eq_s_b("ț"))return;if(s.bra=s.cursor,s.slice_from("t"))break;return;case 3:if(s.slice_from("ist"))break;return}l=!0}}function g(){s.ket=s.cursor,0!=s.find_among_b(i)&&(s.bra=s.cursor,b<=s.cursor)&&s.slice_del()}this.stem=function(){(()=>{for(var i,r=s.cursor;;){var e=s.cursor;i:{for(;;){var a=s.cursor;if(s.bra=s.cursor,0!=(i=s.find_among(t))){switch(s.ket=s.cursor,i){case 1:if(s.slice_from("ș"))break;return;case 2:if(s.slice_from("ț"))break;return}s.cursor=a;break}if(s.cursor=a,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=e;break}s.cursor=r})();var i=s.cursor,i=((()=>{for(;;){var i=s.cursor;i:{for(;;){var r=s.cursor;r:if(s.in_grouping(n,97,259)){s.bra=s.cursor;e:{var e=s.cursor;if(s.eq_s("u")&&(s.ket=s.cursor,s.in_grouping(n,97,259))){if(s.slice_from("U"))break e;return}if(s.cursor=e,!s.eq_s("i"))break r;if(s.ket=s.cursor,!s.in_grouping(n,97,259))break r;if(!s.slice_from("I"))return}s.cursor=r;break}if(s.cursor=r,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=i;break}})(),s.cursor=i,b=s.limit,f=s.limit,m=s.limit,s.cursor);i:{r:{var r=s.cursor;e:if(s.in_grouping(n,97,259)){var e=s.cursor;if(!s.out_grouping(n,97,259)||!s.go_out_grouping(n,97,259)){if(s.cursor=e,!s.in_grouping(n,97,259))break e;if(!s.go_in_grouping(n,97,259))break e}s.cursor++;break r}if(s.cursor=r,!s.out_grouping(n,97,259))break 
i;e=s.cursor;if(s.out_grouping(n,97,259)&&s.go_out_grouping(n,97,259));else{if(s.cursor=e,!s.in_grouping(n,97,259))break i;if(s.cursor>=s.limit)break i}s.cursor++}b=s.cursor}s.cursor=i,i=s.cursor,s.go_out_grouping(n,97,259)&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,f=s.cursor,s.go_out_grouping(n,97,259))&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,m=s.cursor),s.cursor=i,s.limit_backward=s.cursor,s.cursor=s.limit;var i=s.limit-s.cursor,i=((()=>{var i;if(s.ket=s.cursor,0!=(i=s.find_among_b(o))&&(s.bra=s.cursor,_()))switch(i){case 1:if(s.slice_del())break;return;case 2:if(s.slice_from("a"))break;return;case 3:if(s.slice_from("e"))break;return;case 4:if(s.slice_from("i"))break;return;case 5:var r=s.limit-s.cursor;if(s.eq_s_b("ab"))return;if(s.cursor=s.limit-r,s.slice_from("i"))break;return;case 6:if(s.slice_from("at"))break;return;case 7:if(s.slice_from("ați"))break}})(),s.cursor=s.limit-i,s.limit-s.cursor),i=(k(),s.cursor=s.limit-i,s.limit-s.cursor),a=s.limit-s.cursor,a=(l||(s.cursor=s.limit-a,(()=>{var i;if(!(s.cursor{for(var i;;){var r=s.cursor;i:{switch(s.bra=s.cursor,i=s.find_among(u),s.ket=s.cursor,i){case 1:if(s.slice_from("i"))break;return;case 2:if(s.slice_from("u"))break;return;case 3:if(s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=r;break}})(),s.cursor=i,!0},this.stemWord=function(i){return s.setCurrent(i),this.stem(),s.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/russian-stemmer.js b/sphinx/search/minified-js/russian-stemmer.js index 698d92bcdb8..7b5410c5e82 100644 --- a/sphinx/search/minified-js/russian-stemmer.js +++ b/sphinx/search/minified-js/russian-stemmer.js @@ -1 +1 @@ -RussianStemmer=function(){var r=new BaseStemmer;var e=[["в",-1,1],["ив",0,2],["ыв",0,2],["вши",-1,1],["ивши",3,2],["ывши",3,2],["вшись",-1,1],["ившись",6,2],["ывшись",6,2]];var 
i=[["ее",-1,1],["ие",-1,1],["ое",-1,1],["ые",-1,1],["ими",-1,1],["ыми",-1,1],["ей",-1,1],["ий",-1,1],["ой",-1,1],["ый",-1,1],["ем",-1,1],["им",-1,1],["ом",-1,1],["ым",-1,1],["его",-1,1],["ого",-1,1],["ему",-1,1],["ому",-1,1],["их",-1,1],["ых",-1,1],["ею",-1,1],["ою",-1,1],["ую",-1,1],["юю",-1,1],["ая",-1,1],["яя",-1,1]];var u=[["ем",-1,1],["нн",-1,1],["вш",-1,1],["ивш",2,2],["ывш",2,2],["щ",-1,1],["ющ",5,1],["ующ",6,2]];var s=[["сь",-1,1],["ся",-1,1]];var a=[["ла",-1,1],["ила",0,2],["ыла",0,2],["на",-1,1],["ена",3,2],["ете",-1,1],["ите",-1,2],["йте",-1,1],["ейте",7,2],["уйте",7,2],["ли",-1,1],["или",10,2],["ыли",10,2],["й",-1,1],["ей",13,2],["уй",13,2],["л",-1,1],["ил",16,2],["ыл",16,2],["ем",-1,1],["им",-1,2],["ым",-1,2],["н",-1,1],["ен",22,2],["ло",-1,1],["ило",24,2],["ыло",24,2],["но",-1,1],["ено",27,2],["нно",27,1],["ет",-1,1],["ует",30,2],["ит",-1,2],["ыт",-1,2],["ют",-1,1],["уют",34,2],["ят",-1,2],["ны",-1,1],["ены",37,2],["ть",-1,1],["ить",39,2],["ыть",39,2],["ешь",-1,1],["ишь",-1,2],["ю",-1,2],["ую",44,2]];var t=[["а",-1,1],["ев",-1,1],["ов",-1,1],["е",-1,1],["ие",3,1],["ье",3,1],["и",-1,1],["еи",6,1],["ии",6,1],["ами",6,1],["ями",6,1],["иями",10,1],["й",-1,1],["ей",12,1],["ией",13,1],["ий",12,1],["ой",12,1],["ам",-1,1],["ем",-1,1],["ием",18,1],["ом",-1,1],["ям",-1,1],["иям",21,1],["о",-1,1],["у",-1,1],["ах",-1,1],["ях",-1,1],["иях",26,1],["ы",-1,1],["ь",-1,1],["ю",-1,1],["ию",30,1],["ью",30,1],["я",-1,1],["ия",33,1],["ья",33,1]];var c=[["ост",-1,1],["ость",-1,1]];var f=[["ейше",-1,1],["н",-1,2],["ейш",-1,1],["ь",-1,3]];var l=[33,65,8,232];var o=0;var n=0;function b(){n=r.limit;o=r.limit;var e=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break 
r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=e;return true}function _(){if(!(o<=r.cursor)){return false}return true}function k(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:r:{var u=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-u;if(!r.eq_s_b("я")){return false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function m(){r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function v(){var e;if(!m()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(u);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;switch(e){case 1:e:{var s=r.limit-r.cursor;i:{if(!r.eq_s_b("а")){break i}break e}r.cursor=r.limit-s;if(!r.eq_s_b("я")){r.cursor=r.limit-i;break r}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}}return true}function d(){r.ket=r.cursor;if(r.find_among_b(s)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function g(){var e;r.ket=r.cursor;e=r.find_among_b(a);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("я")){return false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function w(){r.ket=r.cursor;if(r.find_among_b(t)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function h(){r.ket=r.cursor;if(r.find_among_b(c)==0){return false}r.bra=r.cursor;if(!_()){return false}if(!r.slice_del()){return false}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(f);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("н")){return false}r.bra=r.cursor;if(!r.eq_s_b("н")){return 
false}if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("н")){return false}if(!r.slice_del()){return false}break;case 3:if(!r.slice_del()){return false}break}return true}this.stem=function(){var e=r.cursor;r:{while(true){var i=r.cursor;e:{i:while(true){var u=r.cursor;u:{r.bra=r.cursor;if(!r.eq_s("ё")){break u}r.ket=r.cursor;r.cursor=u;break i}r.cursor=u;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("е")){return false}continue}r.cursor=i;break}}r.cursor=e;b();r.limit_backward=r.cursor;r.cursor=r.limit;if(r.cursor=u.limit)break r;u.cursor++}if(u.slice_from("е"))continue;return!1}u.cursor=i;break}if(u.cursor=r,b=u.limit,m=u.limit,r=u.cursor,u.go_out_grouping(n,1072,1103)&&(u.cursor++,b=u.cursor,u.go_in_grouping(n,1072,1103))&&(u.cursor++,u.go_out_grouping(n,1072,1103))&&(u.cursor++,u.go_in_grouping(n,1072,1103))&&(u.cursor++,m=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(o))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})()){u.cursor=u.limit-c;c=u.limit-u.cursor,c=(u.ket=u.cursor,0!=u.find_among_b(t)&&(u.bra=u.cursor,u.slice_del())||(u.cursor=u.limit-c),u.limit-u.cursor);if(!f()&&(u.cursor=u.limit-c,!(()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(a))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})())&&(u.cursor=u.limit-c,u.ket=u.cursor,0==u.find_among_b(l)||(u.bra=u.cursor,!u.slice_del())))break r}}u.cursor=u.limit-e;e=u.limit-u.cursor;if(u.ket=u.cursor,u.eq_s_b("и")){if(u.bra=u.cursor,!u.slice_del())return!1}else u.cursor=u.limit-e;e=u.limit-u.cursor,k(),u.cursor=u.limit-e,e=u.limit-u.cursor;return(()=>{var 
r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_)))switch(u.bra=u.cursor,r){case 1:if(!u.slice_del())return;if(u.ket=u.cursor,!u.eq_s_b("н"))return;if(u.bra=u.cursor,!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 2:if(!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-e,u.limit_backward=r,u.cursor=u.limit_backward,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/serbian-stemmer.js b/sphinx/search/minified-js/serbian-stemmer.js new file mode 100644 index 00000000000..0ac2621eb22 --- /dev/null +++ b/sphinx/search/minified-js/serbian-stemmer.js @@ -0,0 +1 @@ +var SerbianStemmer=function(){var m=new BaseStemmer,s=[["а",-1,1],["б",-1,2],["в",-1,3],["г",-1,4],["д",-1,5],["е",-1,7],["ж",-1,8],["з",-1,9],["и",-1,10],["к",-1,12],["л",-1,13],["м",-1,15],["н",-1,16],["о",-1,18],["п",-1,19],["р",-1,20],["с",-1,21],["т",-1,22],["у",-1,24],["ф",-1,25],["х",-1,26],["ц",-1,27],["ч",-1,28],["ш",-1,30],["ђ",-1,6],["ј",-1,11],["љ",-1,14],["њ",-1,17],["ћ",-1,23],["џ",-1,29]],r=[["daba",-1,73],["ajaca",-1,12],["ejaca",-1,14],["ljaca",-1,13],["njaca",-1,85],["ojaca",-1,15],["alaca",-1,82],["elaca",-1,83],["olaca",-1,84],["maca",-1,75],["naca",-1,76],["raca",-1,81],["saca",-1,80],["vaca",-1,79],["šaca",-1,18],["aoca",-1,82],["acaka",-1,55],["ajaka",-1,16],["ojaka",-1,17],["anaka",-1,78],["ataka",-1,58],["etaka",-1,59],["itaka",-1,60],["otaka",-1,61],["utaka",-1,62],["ačaka",-1,54],["esama",-1,67],["izama",-1,87],["jacima",-1,5],["nicima",-1,23],["ticima",-1,24],["teticima",30,21],["zicima",-1,25],["atcima",-1,58],["utcima",-1,62],["čcima",-1,74],["pesima",-1,2],["inzima",-1,19],["lozima",-1,1],["metara",-1,68],["centara",-1,69],["istara",-1,70],["ekata",-1,86],["anata",-1,53],["nstava",-1,22],["kustava",-1,29],["ajac",-1,12],["ejac",-1,14],["ljac",-1,13],["njac",-1,85],["anjac",49,11],["ojac",-1,15],["alac",-1,82],["elac",-1,83],["ol
ac",-1,84],["mac",-1,75],["nac",-1,76],["rac",-1,81],["sac",-1,80],["vac",-1,79],["šac",-1,18],["jebe",-1,88],["olce",-1,84],["kuse",-1,27],["rave",-1,42],["save",-1,52],["šave",-1,51],["baci",-1,89],["jaci",-1,5],["tvenici",-1,20],["snici",-1,26],["tetici",-1,21],["bojci",-1,4],["vojci",-1,3],["ojsci",-1,66],["atci",-1,58],["itci",-1,60],["utci",-1,62],["čci",-1,74],["pesi",-1,2],["inzi",-1,19],["lozi",-1,1],["acak",-1,55],["usak",-1,57],["atak",-1,58],["etak",-1,59],["itak",-1,60],["otak",-1,61],["utak",-1,62],["ačak",-1,54],["ušak",-1,56],["izam",-1,87],["tican",-1,65],["cajan",-1,7],["čajan",-1,6],["voljan",-1,77],["eskan",-1,63],["alan",-1,40],["bilan",-1,33],["gilan",-1,37],["nilan",-1,39],["rilan",-1,38],["silan",-1,36],["tilan",-1,34],["avilan",-1,35],["laran",-1,9],["eran",-1,8],["asan",-1,91],["esan",-1,10],["dusan",-1,31],["kusan",-1,28],["atan",-1,47],["pletan",-1,50],["tetan",-1,49],["antan",-1,32],["pravan",-1,44],["stavan",-1,43],["sivan",-1,46],["tivan",-1,45],["ozan",-1,41],["tičan",-1,64],["ašan",-1,90],["dušan",-1,30],["metar",-1,68],["centar",-1,69],["istar",-1,70],["ekat",-1,86],["enat",-1,48],["oscu",-1,72],["ošću",-1,71]],o=[["aca",-1,124],["eca",-1,125],["uca",-1,126],["ga",-1,20],["acega",3,124],["ecega",3,125],["ucega",3,126],["anjijega",3,84],["enjijega",3,85],["snjijega",3,122],["šnjijega",3,86],["kijega",3,95],["skijega",11,1],["škijega",11,2],["elijega",3,83],["nijega",3,13],["osijega",3,123],["atijega",3,120],["evitijega",3,92],["ovitijega",3,93],["astijega",3,94],["avijega",3,77],["evijega",3,78],["ivijega",3,79],["ovijega",3,80],["ošijega",3,91],["anjega",3,84],["enjega",3,85],["snjega",3,122],["šnjega",3,86],["kega",3,95],["skega",30,1],["škega",30,2],["elega",3,83],["nega",3,13],["anega",34,10],["enega",34,87],["snega",34,159],["šnega",34,88],["osega",3,123],["atega",3,120],["evitega",3,92],["ovitega",3,93],["astega",3,94],["avega",3,77],["evega",3,78],["ivega",3,79],["ovega",3,80],["aćega",3,14],["ećega",3,15],["ućega",3,16],["oše
ga",3,91],["acoga",3,124],["ecoga",3,125],["ucoga",3,126],["anjoga",3,84],["enjoga",3,85],["snjoga",3,122],["šnjoga",3,86],["koga",3,95],["skoga",59,1],["škoga",59,2],["loga",3,19],["eloga",62,83],["noga",3,13],["cinoga",64,137],["činoga",64,89],["osoga",3,123],["atoga",3,120],["evitoga",3,92],["ovitoga",3,93],["astoga",3,94],["avoga",3,77],["evoga",3,78],["ivoga",3,79],["ovoga",3,80],["aćoga",3,14],["ećoga",3,15],["ućoga",3,16],["ošoga",3,91],["uga",3,18],["aja",-1,109],["caja",81,26],["laja",81,30],["raja",81,31],["ćaja",81,28],["čaja",81,27],["đaja",81,29],["bija",-1,32],["cija",-1,33],["dija",-1,34],["fija",-1,40],["gija",-1,39],["anjija",-1,84],["enjija",-1,85],["snjija",-1,122],["šnjija",-1,86],["kija",-1,95],["skija",97,1],["škija",97,2],["lija",-1,24],["elija",100,83],["mija",-1,37],["nija",-1,13],["ganija",103,9],["manija",103,6],["panija",103,7],["ranija",103,8],["tanija",103,5],["pija",-1,41],["rija",-1,42],["rarija",110,21],["sija",-1,23],["osija",112,123],["tija",-1,44],["atija",114,120],["evitija",114,92],["ovitija",114,93],["otija",114,22],["astija",114,94],["avija",-1,77],["evija",-1,78],["ivija",-1,79],["ovija",-1,80],["zija",-1,45],["ošija",-1,91],["žija",-1,38],["anja",-1,84],["enja",-1,85],["snja",-1,122],["šnja",-1,86],["ka",-1,95],["ska",131,1],["ška",131,2],["ala",-1,104],["acala",134,128],["astajala",134,106],["istajala",134,107],["ostajala",134,108],["ijala",134,47],["injala",134,114],["nala",134,46],["irala",134,100],["urala",134,105],["tala",134,113],["astala",144,110],["istala",144,111],["ostala",144,112],["avala",134,97],["evala",134,96],["ivala",134,98],["ovala",134,76],["uvala",134,99],["ačala",134,102],["ela",-1,83],["ila",-1,116],["acila",155,124],["lucila",155,121],["nila",155,103],["astanila",158,110],["istanila",158,111],["ostanila",158,112],["rosila",155,127],["jetila",155,118],["ozila",155,48],["ačila",155,101],["lučila",155,117],["rošila",155,90],["ola",-1,50],["asla",-1,115],["nula",-1,13],["gama",-1,20],["logama",171,19],["ug
ama",171,18],["ajama",-1,109],["cajama",174,26],["lajama",174,30],["rajama",174,31],["ćajama",174,28],["čajama",174,27],["đajama",174,29],["bijama",-1,32],["cijama",-1,33],["dijama",-1,34],["fijama",-1,40],["gijama",-1,39],["lijama",-1,35],["mijama",-1,37],["nijama",-1,36],["ganijama",188,9],["manijama",188,6],["panijama",188,7],["ranijama",188,8],["tanijama",188,5],["pijama",-1,41],["rijama",-1,42],["sijama",-1,43],["tijama",-1,44],["zijama",-1,45],["žijama",-1,38],["alama",-1,104],["ijalama",200,47],["nalama",200,46],["elama",-1,119],["ilama",-1,116],["ramama",-1,52],["lemama",-1,51],["inama",-1,11],["cinama",207,137],["činama",207,89],["rama",-1,52],["arama",210,53],["drama",210,54],["erama",210,55],["orama",210,56],["basama",-1,135],["gasama",-1,131],["jasama",-1,129],["kasama",-1,133],["nasama",-1,132],["tasama",-1,130],["vasama",-1,134],["esama",-1,152],["isama",-1,154],["etama",-1,70],["estama",-1,71],["istama",-1,72],["kstama",-1,73],["ostama",-1,74],["avama",-1,77],["evama",-1,78],["ivama",-1,79],["bašama",-1,63],["gašama",-1,64],["jašama",-1,61],["kašama",-1,62],["našama",-1,60],["tašama",-1,59],["vašama",-1,65],["ešama",-1,66],["išama",-1,67],["lema",-1,51],["acima",-1,124],["ecima",-1,125],["ucima",-1,126],["ajima",-1,109],["cajima",245,26],["lajima",245,30],["rajima",245,31],["ćajima",245,28],["čajima",245,27],["đajima",245,29],["bijima",-1,32],["cijima",-1,33],["dijima",-1,34],["fijima",-1,40],["gijima",-1,39],["anjijima",-1,84],["enjijima",-1,85],["snjijima",-1,122],["šnjijima",-1,86],["kijima",-1,95],["skijima",261,1],["škijima",261,2],["lijima",-1,35],["elijima",264,83],["mijima",-1,37],["nijima",-1,13],["ganijima",267,9],["manijima",267,6],["panijima",267,7],["ranijima",267,8],["tanijima",267,5],["pijima",-1,41],["rijima",-1,42],["sijima",-1,43],["osijima",275,123],["tijima",-1,44],["atijima",277,120],["evitijima",277,92],["ovitijima",277,93],["astijima",277,94],["avijima",-1,77],["evijima",-1,78],["ivijima",-1,79],["ovijima",-1,80],["zijima",-1,45
],["ošijima",-1,91],["žijima",-1,38],["anjima",-1,84],["enjima",-1,85],["snjima",-1,122],["šnjima",-1,86],["kima",-1,95],["skima",293,1],["škima",293,2],["alima",-1,104],["ijalima",296,47],["nalima",296,46],["elima",-1,83],["ilima",-1,116],["ozilima",300,48],["olima",-1,50],["lemima",-1,51],["nima",-1,13],["anima",304,10],["inima",304,11],["cinima",306,137],["činima",306,89],["onima",304,12],["arima",-1,53],["drima",-1,54],["erima",-1,55],["orima",-1,56],["basima",-1,135],["gasima",-1,131],["jasima",-1,129],["kasima",-1,133],["nasima",-1,132],["tasima",-1,130],["vasima",-1,134],["esima",-1,57],["isima",-1,58],["osima",-1,123],["atima",-1,120],["ikatima",324,68],["latima",324,69],["etima",-1,70],["evitima",-1,92],["ovitima",-1,93],["astima",-1,94],["estima",-1,71],["istima",-1,72],["kstima",-1,73],["ostima",-1,74],["ištima",-1,75],["avima",-1,77],["evima",-1,78],["ajevima",337,109],["cajevima",338,26],["lajevima",338,30],["rajevima",338,31],["ćajevima",338,28],["čajevima",338,27],["đajevima",338,29],["ivima",-1,79],["ovima",-1,80],["govima",346,20],["ugovima",347,17],["lovima",346,82],["olovima",349,49],["movima",346,81],["onovima",346,12],["stvima",-1,3],["štvima",-1,4],["aćima",-1,14],["ećima",-1,15],["ućima",-1,16],["bašima",-1,63],["gašima",-1,64],["jašima",-1,61],["kašima",-1,62],["našima",-1,60],["tašima",-1,59],["vašima",-1,65],["ešima",-1,66],["išima",-1,67],["ošima",-1,91],["na",-1,13],["ana",368,10],["acana",369,128],["urana",369,105],["tana",369,113],["avana",369,97],["evana",369,96],["ivana",369,98],["uvana",369,99],["ačana",369,102],["acena",368,124],["lucena",368,121],["ačena",368,101],["lučena",368,117],["ina",368,11],["cina",382,137],["anina",382,10],["čina",382,89],["ona",368,12],["ara",-1,53],["dra",-1,54],["era",-1,55],["ora",-1,56],["basa",-1,135],["gasa",-1,131],["jasa",-1,129],["kasa",-1,133],["nasa",-1,132],["tasa",-1,130],["vasa",-1,134],["esa",-1,57],["isa",-1,58],["osa",-1,123],["ata",-1,120],["ikata",401,68],["lata",401,69],["eta",-1,70],["
evita",-1,92],["ovita",-1,93],["asta",-1,94],["esta",-1,71],["ista",-1,72],["ksta",-1,73],["osta",-1,74],["nuta",-1,13],["išta",-1,75],["ava",-1,77],["eva",-1,78],["ajeva",415,109],["cajeva",416,26],["lajeva",416,30],["rajeva",416,31],["ćajeva",416,28],["čajeva",416,27],["đajeva",416,29],["iva",-1,79],["ova",-1,80],["gova",424,20],["ugova",425,17],["lova",424,82],["olova",427,49],["mova",424,81],["onova",424,12],["stva",-1,3],["štva",-1,4],["aća",-1,14],["eća",-1,15],["uća",-1,16],["baša",-1,63],["gaša",-1,64],["jaša",-1,61],["kaša",-1,62],["naša",-1,60],["taša",-1,59],["vaša",-1,65],["eša",-1,66],["iša",-1,67],["oša",-1,91],["ace",-1,124],["ece",-1,125],["uce",-1,126],["luce",448,121],["astade",-1,110],["istade",-1,111],["ostade",-1,112],["ge",-1,20],["loge",453,19],["uge",453,18],["aje",-1,104],["caje",456,26],["laje",456,30],["raje",456,31],["astaje",456,106],["istaje",456,107],["ostaje",456,108],["ćaje",456,28],["čaje",456,27],["đaje",456,29],["ije",-1,116],["bije",466,32],["cije",466,33],["dije",466,34],["fije",466,40],["gije",466,39],["anjije",466,84],["enjije",466,85],["snjije",466,122],["šnjije",466,86],["kije",466,95],["skije",476,1],["škije",476,2],["lije",466,35],["elije",479,83],["mije",466,37],["nije",466,13],["ganije",482,9],["manije",482,6],["panije",482,7],["ranije",482,8],["tanije",482,5],["pije",466,41],["rije",466,42],["sije",466,43],["osije",490,123],["tije",466,44],["atije",492,120],["evitije",492,92],["ovitije",492,93],["astije",492,94],["avije",466,77],["evije",466,78],["ivije",466,79],["ovije",466,80],["zije",466,45],["ošije",466,91],["žije",466,38],["anje",-1,84],["enje",-1,85],["snje",-1,122],["šnje",-1,86],["uje",-1,25],["lucuje",508,121],["iruje",508,100],["lučuje",508,117],["ke",-1,95],["ske",512,1],["ške",512,2],["ale",-1,104],["acale",515,128],["astajale",515,106],["istajale",515,107],["ostajale",515,108],["ijale",515,47],["injale",515,114],["nale",515,46],["irale",515,100],["urale",515,105],["tale",515,113],["astale",525,110],["istale
",525,111],["ostale",525,112],["avale",515,97],["evale",515,96],["ivale",515,98],["ovale",515,76],["uvale",515,99],["ačale",515,102],["ele",-1,83],["ile",-1,116],["acile",536,124],["lucile",536,121],["nile",536,103],["rosile",536,127],["jetile",536,118],["ozile",536,48],["ačile",536,101],["lučile",536,117],["rošile",536,90],["ole",-1,50],["asle",-1,115],["nule",-1,13],["rame",-1,52],["leme",-1,51],["acome",-1,124],["ecome",-1,125],["ucome",-1,126],["anjome",-1,84],["enjome",-1,85],["snjome",-1,122],["šnjome",-1,86],["kome",-1,95],["skome",558,1],["škome",558,2],["elome",-1,83],["nome",-1,13],["cinome",562,137],["činome",562,89],["osome",-1,123],["atome",-1,120],["evitome",-1,92],["ovitome",-1,93],["astome",-1,94],["avome",-1,77],["evome",-1,78],["ivome",-1,79],["ovome",-1,80],["aćome",-1,14],["ećome",-1,15],["ućome",-1,16],["ošome",-1,91],["ne",-1,13],["ane",578,10],["acane",579,128],["urane",579,105],["tane",579,113],["astane",582,110],["istane",582,111],["ostane",582,112],["avane",579,97],["evane",579,96],["ivane",579,98],["uvane",579,99],["ačane",579,102],["acene",578,124],["lucene",578,121],["ačene",578,101],["lučene",578,117],["ine",578,11],["cine",595,137],["anine",595,10],["čine",595,89],["one",578,12],["are",-1,53],["dre",-1,54],["ere",-1,55],["ore",-1,56],["ase",-1,161],["base",604,135],["acase",604,128],["gase",604,131],["jase",604,129],["astajase",608,138],["istajase",608,139],["ostajase",608,140],["injase",608,150],["kase",604,133],["nase",604,132],["irase",604,155],["urase",604,156],["tase",604,130],["vase",604,134],["avase",618,144],["evase",618,145],["ivase",618,146],["ovase",618,148],["uvase",618,147],["ese",-1,57],["ise",-1,58],["acise",625,124],["lucise",625,121],["rosise",625,127],["jetise",625,149],["ose",-1,123],["astadose",630,141],["istadose",630,142],["ostadose",630,143],["ate",-1,104],["acate",634,128],["ikate",634,68],["late",634,69],["irate",634,100],["urate",634,105],["tate",634,113],["avate",634,97],["evate",634,96],["ivate",634,98],["uv
ate",634,99],["ačate",634,102],["ete",-1,70],["astadete",646,110],["istadete",646,111],["ostadete",646,112],["astajete",646,106],["istajete",646,107],["ostajete",646,108],["ijete",646,116],["injete",646,114],["ujete",646,25],["lucujete",655,121],["irujete",655,100],["lučujete",655,117],["nete",646,13],["astanete",659,110],["istanete",659,111],["ostanete",659,112],["astete",646,115],["ite",-1,116],["acite",664,124],["lucite",664,121],["nite",664,13],["astanite",667,110],["istanite",667,111],["ostanite",667,112],["rosite",664,127],["jetite",664,118],["astite",664,115],["evite",664,92],["ovite",664,93],["ačite",664,101],["lučite",664,117],["rošite",664,90],["ajte",-1,104],["urajte",679,105],["tajte",679,113],["astajte",681,106],["istajte",681,107],["ostajte",681,108],["avajte",679,97],["evajte",679,96],["ivajte",679,98],["uvajte",679,99],["ijte",-1,116],["lucujte",-1,121],["irujte",-1,100],["lučujte",-1,117],["aste",-1,94],["acaste",693,128],["astajaste",693,106],["istajaste",693,107],["ostajaste",693,108],["injaste",693,114],["iraste",693,100],["uraste",693,105],["taste",693,113],["avaste",693,97],["evaste",693,96],["ivaste",693,98],["ovaste",693,76],["uvaste",693,99],["ačaste",693,102],["este",-1,71],["iste",-1,72],["aciste",709,124],["luciste",709,121],["niste",709,103],["rosiste",709,127],["jetiste",709,118],["ačiste",709,101],["lučiste",709,117],["rošiste",709,90],["kste",-1,73],["oste",-1,74],["astadoste",719,110],["istadoste",719,111],["ostadoste",719,112],["nuste",-1,13],["ište",-1,75],["ave",-1,77],["eve",-1,78],["ajeve",726,109],["cajeve",727,26],["lajeve",727,30],["rajeve",727,31],["ćajeve",727,28],["čajeve",727,27],["đajeve",727,29],["ive",-1,79],["ove",-1,80],["gove",735,20],["ugove",736,17],["love",735,82],["olove",738,49],["move",735,81],["onove",735,12],["aće",-1,14],["eće",-1,15],["uće",-1,16],["ače",-1,101],["luče",-1,117],["aše",-1,104],["baše",747,63],["gaše",747,64],["jaše",747,61],["astajaše",750,106],["istajaše",750,107],["ostajaše",750,108],["in
jaše",750,114],["kaše",747,62],["naše",747,60],["iraše",747,100],["uraše",747,105],["taše",747,59],["vaše",747,65],["avaše",760,97],["evaše",760,96],["ivaše",760,98],["ovaše",760,76],["uvaše",760,99],["ačaše",747,102],["eše",-1,66],["iše",-1,67],["jetiše",768,118],["ačiše",768,101],["lučiše",768,117],["rošiše",768,90],["oše",-1,91],["astadoše",773,110],["istadoše",773,111],["ostadoše",773,112],["aceg",-1,124],["eceg",-1,125],["uceg",-1,126],["anjijeg",-1,84],["enjijeg",-1,85],["snjijeg",-1,122],["šnjijeg",-1,86],["kijeg",-1,95],["skijeg",784,1],["škijeg",784,2],["elijeg",-1,83],["nijeg",-1,13],["osijeg",-1,123],["atijeg",-1,120],["evitijeg",-1,92],["ovitijeg",-1,93],["astijeg",-1,94],["avijeg",-1,77],["evijeg",-1,78],["ivijeg",-1,79],["ovijeg",-1,80],["ošijeg",-1,91],["anjeg",-1,84],["enjeg",-1,85],["snjeg",-1,122],["šnjeg",-1,86],["keg",-1,95],["eleg",-1,83],["neg",-1,13],["aneg",805,10],["eneg",805,87],["sneg",805,159],["šneg",805,88],["oseg",-1,123],["ateg",-1,120],["aveg",-1,77],["eveg",-1,78],["iveg",-1,79],["oveg",-1,80],["aćeg",-1,14],["ećeg",-1,15],["ućeg",-1,16],["ošeg",-1,91],["acog",-1,124],["ecog",-1,125],["ucog",-1,126],["anjog",-1,84],["enjog",-1,85],["snjog",-1,122],["šnjog",-1,86],["kog",-1,95],["skog",827,1],["škog",827,2],["elog",-1,83],["nog",-1,13],["cinog",831,137],["činog",831,89],["osog",-1,123],["atog",-1,120],["evitog",-1,92],["ovitog",-1,93],["astog",-1,94],["avog",-1,77],["evog",-1,78],["ivog",-1,79],["ovog",-1,80],["aćog",-1,14],["ećog",-1,15],["ućog",-1,16],["ošog",-1,91],["ah",-1,104],["acah",847,128],["astajah",847,106],["istajah",847,107],["ostajah",847,108],["injah",847,114],["irah",847,100],["urah",847,105],["tah",847,113],["avah",847,97],["evah",847,96],["ivah",847,98],["ovah",847,76],["uvah",847,99],["ačah",847,102],["ih",-1,116],["acih",862,124],["ecih",862,125],["ucih",862,126],["lucih",865,121],["anjijih",862,84],["enjijih",862,85],["snjijih",862,122],["šnjijih",862,86],["kijih",862,95],["skijih",871,1],["škijih",871,2],["eliji
h",862,83],["nijih",862,13],["osijih",862,123],["atijih",862,120],["evitijih",862,92],["ovitijih",862,93],["astijih",862,94],["avijih",862,77],["evijih",862,78],["ivijih",862,79],["ovijih",862,80],["ošijih",862,91],["anjih",862,84],["enjih",862,85],["snjih",862,122],["šnjih",862,86],["kih",862,95],["skih",890,1],["ških",890,2],["elih",862,83],["nih",862,13],["cinih",894,137],["činih",894,89],["osih",862,123],["rosih",897,127],["atih",862,120],["jetih",862,118],["evitih",862,92],["ovitih",862,93],["astih",862,94],["avih",862,77],["evih",862,78],["ivih",862,79],["ovih",862,80],["aćih",862,14],["ećih",862,15],["ućih",862,16],["ačih",862,101],["lučih",862,117],["oših",862,91],["roših",913,90],["astadoh",-1,110],["istadoh",-1,111],["ostadoh",-1,112],["acuh",-1,124],["ecuh",-1,125],["ucuh",-1,126],["aćuh",-1,14],["ećuh",-1,15],["ućuh",-1,16],["aci",-1,124],["aceci",-1,124],["ieci",-1,162],["ajuci",-1,161],["irajuci",927,155],["urajuci",927,156],["astajuci",927,138],["istajuci",927,139],["ostajuci",927,140],["avajuci",927,144],["evajuci",927,145],["ivajuci",927,146],["uvajuci",927,147],["ujuci",-1,157],["lucujuci",937,121],["irujuci",937,155],["luci",-1,121],["nuci",-1,164],["etuci",-1,153],["astuci",-1,136],["gi",-1,20],["ugi",944,18],["aji",-1,109],["caji",946,26],["laji",946,30],["raji",946,31],["ćaji",946,28],["čaji",946,27],["đaji",946,29],["biji",-1,32],["ciji",-1,33],["diji",-1,34],["fiji",-1,40],["giji",-1,39],["anjiji",-1,84],["enjiji",-1,85],["snjiji",-1,122],["šnjiji",-1,86],["kiji",-1,95],["skiji",962,1],["škiji",962,2],["liji",-1,35],["eliji",965,83],["miji",-1,37],["niji",-1,13],["ganiji",968,9],["maniji",968,6],["paniji",968,7],["raniji",968,8],["taniji",968,5],["piji",-1,41],["riji",-1,42],["siji",-1,43],["osiji",976,123],["tiji",-1,44],["atiji",978,120],["evitiji",978,92],["ovitiji",978,93],["astiji",978,94],["aviji",-1,77],["eviji",-1,78],["iviji",-1,79],["oviji",-1,80],["ziji",-1,45],["ošiji",-1,91],["žiji",-1,38],["anji",-1,84],["enji",-1,85],["snji",-1
,122],["šnji",-1,86],["ki",-1,95],["ski",994,1],["ški",994,2],["ali",-1,104],["acali",997,128],["astajali",997,106],["istajali",997,107],["ostajali",997,108],["ijali",997,47],["injali",997,114],["nali",997,46],["irali",997,100],["urali",997,105],["tali",997,113],["astali",1007,110],["istali",1007,111],["ostali",1007,112],["avali",997,97],["evali",997,96],["ivali",997,98],["ovali",997,76],["uvali",997,99],["ačali",997,102],["eli",-1,83],["ili",-1,116],["acili",1018,124],["lucili",1018,121],["nili",1018,103],["rosili",1018,127],["jetili",1018,118],["ozili",1018,48],["ačili",1018,101],["lučili",1018,117],["rošili",1018,90],["oli",-1,50],["asli",-1,115],["nuli",-1,13],["rami",-1,52],["lemi",-1,51],["ni",-1,13],["ani",1033,10],["acani",1034,128],["urani",1034,105],["tani",1034,113],["avani",1034,97],["evani",1034,96],["ivani",1034,98],["uvani",1034,99],["ačani",1034,102],["aceni",1033,124],["luceni",1033,121],["ačeni",1033,101],["lučeni",1033,117],["ini",1033,11],["cini",1047,137],["čini",1047,89],["oni",1033,12],["ari",-1,53],["dri",-1,54],["eri",-1,55],["ori",-1,56],["basi",-1,135],["gasi",-1,131],["jasi",-1,129],["kasi",-1,133],["nasi",-1,132],["tasi",-1,130],["vasi",-1,134],["esi",-1,152],["isi",-1,154],["osi",-1,123],["avsi",-1,161],["acavsi",1065,128],["iravsi",1065,155],["tavsi",1065,160],["etavsi",1068,153],["astavsi",1068,141],["istavsi",1068,142],["ostavsi",1068,143],["ivsi",-1,162],["nivsi",1073,158],["rosivsi",1073,127],["nuvsi",-1,164],["ati",-1,104],["acati",1077,128],["astajati",1077,106],["istajati",1077,107],["ostajati",1077,108],["injati",1077,114],["ikati",1077,68],["lati",1077,69],["irati",1077,100],["urati",1077,105],["tati",1077,113],["astati",1087,110],["istati",1087,111],["ostati",1087,112],["avati",1077,97],["evati",1077,96],["ivati",1077,98],["ovati",1077,76],["uvati",1077,99],["ačati",1077,102],["eti",-1,70],["iti",-1,116],["aciti",1098,124],["luciti",1098,121],["niti",1098,103],["rositi",1098,127],["jetiti",1098,118],["eviti",1098,92],["oviti"
,1098,93],["ačiti",1098,101],["lučiti",1098,117],["rošiti",1098,90],["asti",-1,94],["esti",-1,71],["isti",-1,72],["ksti",-1,73],["osti",-1,74],["nuti",-1,13],["avi",-1,77],["evi",-1,78],["ajevi",1116,109],["cajevi",1117,26],["lajevi",1117,30],["rajevi",1117,31],["ćajevi",1117,28],["čajevi",1117,27],["đajevi",1117,29],["ivi",-1,79],["ovi",-1,80],["govi",1125,20],["ugovi",1126,17],["lovi",1125,82],["olovi",1128,49],["movi",1125,81],["onovi",1125,12],["ieći",-1,116],["ačeći",-1,101],["ajući",-1,104],["irajući",1134,100],["urajući",1134,105],["astajući",1134,106],["istajući",1134,107],["ostajući",1134,108],["avajući",1134,97],["evajući",1134,96],["ivajući",1134,98],["uvajući",1134,99],["ujući",-1,25],["irujući",1144,100],["lučujući",1144,117],["nući",-1,13],["etući",-1,70],["astući",-1,115],["ači",-1,101],["luči",-1,117],["baši",-1,63],["gaši",-1,64],["jaši",-1,61],["kaši",-1,62],["naši",-1,60],["taši",-1,59],["vaši",-1,65],["eši",-1,66],["iši",-1,67],["oši",-1,91],["avši",-1,104],["iravši",1162,100],["tavši",1162,113],["etavši",1164,70],["astavši",1164,110],["istavši",1164,111],["ostavši",1164,112],["ačavši",1162,102],["ivši",-1,116],["nivši",1170,103],["rošivši",1170,90],["nuvši",-1,13],["aj",-1,104],["uraj",1174,105],["taj",1174,113],["avaj",1174,97],["evaj",1174,96],["ivaj",1174,98],["uvaj",1174,99],["ij",-1,116],["acoj",-1,124],["ecoj",-1,125],["ucoj",-1,126],["anjijoj",-1,84],["enjijoj",-1,85],["snjijoj",-1,122],["šnjijoj",-1,86],["kijoj",-1,95],["skijoj",1189,1],["škijoj",1189,2],["elijoj",-1,83],["nijoj",-1,13],["osijoj",-1,123],["evitijoj",-1,92],["ovitijoj",-1,93],["astijoj",-1,94],["avijoj",-1,77],["evijoj",-1,78],["ivijoj",-1,79],["ovijoj",-1,80],["ošijoj",-1,91],["anjoj",-1,84],["enjoj",-1,85],["snjoj",-1,122],["šnjoj",-1,86],["koj",-1,95],["skoj",1207,1],["škoj",1207,2],["aloj",-1,104],["eloj",-1,83],["noj",-1,13],["cinoj",1212,137],["činoj",1212,89],["osoj",-1,123],["atoj",-1,120],["evitoj",-1,92],["ovitoj",-1,93],["astoj",-1,94],["avoj",-1,77],["evoj",-1
,78],["ivoj",-1,79],["ovoj",-1,80],["aćoj",-1,14],["ećoj",-1,15],["ućoj",-1,16],["ošoj",-1,91],["lucuj",-1,121],["iruj",-1,100],["lučuj",-1,117],["al",-1,104],["iral",1231,100],["ural",1231,105],["el",-1,119],["il",-1,116],["am",-1,104],["acam",1236,128],["iram",1236,100],["uram",1236,105],["tam",1236,113],["avam",1236,97],["evam",1236,96],["ivam",1236,98],["uvam",1236,99],["ačam",1236,102],["em",-1,119],["acem",1246,124],["ecem",1246,125],["ucem",1246,126],["astadem",1246,110],["istadem",1246,111],["ostadem",1246,112],["ajem",1246,104],["cajem",1253,26],["lajem",1253,30],["rajem",1253,31],["astajem",1253,106],["istajem",1253,107],["ostajem",1253,108],["ćajem",1253,28],["čajem",1253,27],["đajem",1253,29],["ijem",1246,116],["anjijem",1263,84],["enjijem",1263,85],["snjijem",1263,123],["šnjijem",1263,86],["kijem",1263,95],["skijem",1268,1],["škijem",1268,2],["lijem",1263,24],["elijem",1271,83],["nijem",1263,13],["rarijem",1263,21],["sijem",1263,23],["osijem",1275,123],["atijem",1263,120],["evitijem",1263,92],["ovitijem",1263,93],["otijem",1263,22],["astijem",1263,94],["avijem",1263,77],["evijem",1263,78],["ivijem",1263,79],["ovijem",1263,80],["ošijem",1263,91],["anjem",1246,84],["enjem",1246,85],["injem",1246,114],["snjem",1246,122],["šnjem",1246,86],["ujem",1246,25],["lucujem",1292,121],["irujem",1292,100],["lučujem",1292,117],["kem",1246,95],["skem",1296,1],["škem",1296,2],["elem",1246,83],["nem",1246,13],["anem",1300,10],["astanem",1301,110],["istanem",1301,111],["ostanem",1301,112],["enem",1300,87],["snem",1300,159],["šnem",1300,88],["basem",1246,135],["gasem",1246,131],["jasem",1246,129],["kasem",1246,133],["nasem",1246,132],["tasem",1246,130],["vasem",1246,134],["esem",1246,152],["isem",1246,154],["osem",1246,123],["atem",1246,120],["etem",1246,70],["evitem",1246,92],["ovitem",1246,93],["astem",1246,94],["istem",1246,151],["ištem",1246,75],["avem",1246,77],["evem",1246,78],["ivem",1246,79],["aćem",1246,14],["ećem",1246,15],["ućem",1246,16],["bašem",1246,63],["gaš
em",1246,64],["jašem",1246,61],["kašem",1246,62],["našem",1246,60],["tašem",1246,59],["vašem",1246,65],["ešem",1246,66],["išem",1246,67],["ošem",1246,91],["im",-1,116],["acim",1341,124],["ecim",1341,125],["ucim",1341,126],["lucim",1344,121],["anjijim",1341,84],["enjijim",1341,85],["snjijim",1341,122],["šnjijim",1341,86],["kijim",1341,95],["skijim",1350,1],["škijim",1350,2],["elijim",1341,83],["nijim",1341,13],["osijim",1341,123],["atijim",1341,120],["evitijim",1341,92],["ovitijim",1341,93],["astijim",1341,94],["avijim",1341,77],["evijim",1341,78],["ivijim",1341,79],["ovijim",1341,80],["ošijim",1341,91],["anjim",1341,84],["enjim",1341,85],["snjim",1341,122],["šnjim",1341,86],["kim",1341,95],["skim",1369,1],["škim",1369,2],["elim",1341,83],["nim",1341,13],["cinim",1373,137],["činim",1373,89],["osim",1341,123],["rosim",1376,127],["atim",1341,120],["jetim",1341,118],["evitim",1341,92],["ovitim",1341,93],["astim",1341,94],["avim",1341,77],["evim",1341,78],["ivim",1341,79],["ovim",1341,80],["aćim",1341,14],["ećim",1341,15],["ućim",1341,16],["ačim",1341,101],["lučim",1341,117],["ošim",1341,91],["rošim",1392,90],["acom",-1,124],["ecom",-1,125],["ucom",-1,126],["gom",-1,20],["logom",1397,19],["ugom",1397,18],["bijom",-1,32],["cijom",-1,33],["dijom",-1,34],["fijom",-1,40],["gijom",-1,39],["lijom",-1,35],["mijom",-1,37],["nijom",-1,36],["ganijom",1407,9],["manijom",1407,6],["panijom",1407,7],["ranijom",1407,8],["tanijom",1407,5],["pijom",-1,41],["rijom",-1,42],["sijom",-1,43],["tijom",-1,44],["zijom",-1,45],["žijom",-1,38],["anjom",-1,84],["enjom",-1,85],["snjom",-1,122],["šnjom",-1,86],["kom",-1,95],["skom",1423,1],["škom",1423,2],["alom",-1,104],["ijalom",1426,47],["nalom",1426,46],["elom",-1,83],["ilom",-1,116],["ozilom",1430,48],["olom",-1,50],["ramom",-1,52],["lemom",-1,51],["nom",-1,13],["anom",1435,10],["inom",1435,11],["cinom",1437,137],["aninom",1437,10],["činom",1437,89],["onom",1435,12],["arom",-1,53],["drom",-1,54],["erom",-1,55],["orom",-1,56],["basom",-1,135],["g
asom",-1,131],["jasom",-1,129],["kasom",-1,133],["nasom",-1,132],["tasom",-1,130],["vasom",-1,134],["esom",-1,57],["isom",-1,58],["osom",-1,123],["atom",-1,120],["ikatom",1456,68],["latom",1456,69],["etom",-1,70],["evitom",-1,92],["ovitom",-1,93],["astom",-1,94],["estom",-1,71],["istom",-1,72],["kstom",-1,73],["ostom",-1,74],["avom",-1,77],["evom",-1,78],["ivom",-1,79],["ovom",-1,80],["lovom",1470,82],["movom",1470,81],["stvom",-1,3],["štvom",-1,4],["aćom",-1,14],["ećom",-1,15],["ućom",-1,16],["bašom",-1,63],["gašom",-1,64],["jašom",-1,61],["kašom",-1,62],["našom",-1,60],["tašom",-1,59],["vašom",-1,65],["ešom",-1,66],["išom",-1,67],["ošom",-1,91],["an",-1,104],["acan",1488,128],["iran",1488,100],["uran",1488,105],["tan",1488,113],["avan",1488,97],["evan",1488,96],["ivan",1488,98],["uvan",1488,99],["ačan",1488,102],["acen",-1,124],["lucen",-1,121],["ačen",-1,101],["lučen",-1,117],["anin",-1,10],["ao",-1,104],["acao",1503,128],["astajao",1503,106],["istajao",1503,107],["ostajao",1503,108],["injao",1503,114],["irao",1503,100],["urao",1503,105],["tao",1503,113],["astao",1511,110],["istao",1511,111],["ostao",1511,112],["avao",1503,97],["evao",1503,96],["ivao",1503,98],["ovao",1503,76],["uvao",1503,99],["ačao",1503,102],["go",-1,20],["ugo",1521,18],["io",-1,116],["acio",1523,124],["lucio",1523,121],["lio",1523,24],["nio",1523,103],["rario",1523,21],["sio",1523,23],["rosio",1529,127],["jetio",1523,118],["otio",1523,22],["ačio",1523,101],["lučio",1523,117],["rošio",1523,90],["bijo",-1,32],["cijo",-1,33],["dijo",-1,34],["fijo",-1,40],["gijo",-1,39],["lijo",-1,35],["mijo",-1,37],["nijo",-1,36],["pijo",-1,41],["rijo",-1,42],["sijo",-1,43],["tijo",-1,44],["zijo",-1,45],["žijo",-1,38],["anjo",-1,84],["enjo",-1,85],["snjo",-1,122],["šnjo",-1,86],["ko",-1,95],["sko",1554,1],["ško",1554,2],["alo",-1,104],["acalo",1557,128],["astajalo",1557,106],["istajalo",1557,107],["ostajalo",1557,108],["ijalo",1557,47],["injalo",1557,114],["nalo",1557,46],["iralo",1557,100],["uralo",1557,105],["
talo",1557,113],["astalo",1567,110],["istalo",1567,111],["ostalo",1567,112],["avalo",1557,97],["evalo",1557,96],["ivalo",1557,98],["ovalo",1557,76],["uvalo",1557,99],["ačalo",1557,102],["elo",-1,83],["ilo",-1,116],["acilo",1578,124],["lucilo",1578,121],["nilo",1578,103],["rosilo",1578,127],["jetilo",1578,118],["ačilo",1578,101],["lučilo",1578,117],["rošilo",1578,90],["aslo",-1,115],["nulo",-1,13],["amo",-1,104],["acamo",1589,128],["ramo",1589,52],["iramo",1591,100],["uramo",1591,105],["tamo",1589,113],["avamo",1589,97],["evamo",1589,96],["ivamo",1589,98],["uvamo",1589,99],["ačamo",1589,102],["emo",-1,119],["astademo",1600,110],["istademo",1600,111],["ostademo",1600,112],["astajemo",1600,106],["istajemo",1600,107],["ostajemo",1600,108],["ijemo",1600,116],["injemo",1600,114],["ujemo",1600,25],["lucujemo",1609,121],["irujemo",1609,100],["lučujemo",1609,117],["lemo",1600,51],["nemo",1600,13],["astanemo",1614,110],["istanemo",1614,111],["ostanemo",1614,112],["etemo",1600,70],["astemo",1600,115],["imo",-1,116],["acimo",1620,124],["lucimo",1620,121],["nimo",1620,13],["astanimo",1623,110],["istanimo",1623,111],["ostanimo",1623,112],["rosimo",1620,127],["etimo",1620,70],["jetimo",1628,118],["astimo",1620,115],["ačimo",1620,101],["lučimo",1620,117],["rošimo",1620,90],["ajmo",-1,104],["urajmo",1634,105],["tajmo",1634,113],["astajmo",1636,106],["istajmo",1636,107],["ostajmo",1636,108],["avajmo",1634,97],["evajmo",1634,96],["ivajmo",1634,98],["uvajmo",1634,99],["ijmo",-1,116],["ujmo",-1,25],["lucujmo",1645,121],["irujmo",1645,100],["lučujmo",1645,117],["asmo",-1,104],["acasmo",1649,128],["astajasmo",1649,106],["istajasmo",1649,107],["ostajasmo",1649,108],["injasmo",1649,114],["irasmo",1649,100],["urasmo",1649,105],["tasmo",1649,113],["avasmo",1649,97],["evasmo",1649,96],["ivasmo",1649,98],["ovasmo",1649,76],["uvasmo",1649,99],["ačasmo",1649,102],["ismo",-1,116],["acismo",1664,124],["lucismo",1664,121],["nismo",1664,103],["rosismo",1664,127],["jetismo",1664,118],["ačismo",1664,10
1],["lučismo",1664,117],["rošismo",1664,90],["astadosmo",-1,110],["istadosmo",-1,111],["ostadosmo",-1,112],["nusmo",-1,13],["no",-1,13],["ano",1677,104],["acano",1678,128],["urano",1678,105],["tano",1678,113],["avano",1678,97],["evano",1678,96],["ivano",1678,98],["uvano",1678,99],["ačano",1678,102],["aceno",1677,124],["luceno",1677,121],["ačeno",1677,101],["lučeno",1677,117],["ino",1677,11],["cino",1691,137],["čino",1691,89],["ato",-1,120],["ikato",1694,68],["lato",1694,69],["eto",-1,70],["evito",-1,92],["ovito",-1,93],["asto",-1,94],["esto",-1,71],["isto",-1,72],["ksto",-1,73],["osto",-1,74],["nuto",-1,13],["nuo",-1,13],["avo",-1,77],["evo",-1,78],["ivo",-1,79],["ovo",-1,80],["stvo",-1,3],["štvo",-1,4],["as",-1,161],["acas",1713,128],["iras",1713,155],["uras",1713,156],["tas",1713,160],["avas",1713,144],["evas",1713,145],["ivas",1713,146],["uvas",1713,147],["es",-1,163],["astades",1722,141],["istades",1722,142],["ostades",1722,143],["astajes",1722,138],["istajes",1722,139],["ostajes",1722,140],["ijes",1722,162],["injes",1722,150],["ujes",1722,157],["lucujes",1731,121],["irujes",1731,155],["nes",1722,164],["astanes",1734,141],["istanes",1734,142],["ostanes",1734,143],["etes",1722,153],["astes",1722,136],["is",-1,162],["acis",1740,124],["lucis",1740,121],["nis",1740,158],["rosis",1740,127],["jetis",1740,149],["at",-1,104],["acat",1746,128],["astajat",1746,106],["istajat",1746,107],["ostajat",1746,108],["injat",1746,114],["irat",1746,100],["urat",1746,105],["tat",1746,113],["astat",1754,110],["istat",1754,111],["ostat",1754,112],["avat",1746,97],["evat",1746,96],["ivat",1746,98],["irivat",1760,100],["ovat",1746,76],["uvat",1746,99],["ačat",1746,102],["it",-1,116],["acit",1765,124],["lucit",1765,121],["rosit",1765,127],["jetit",1765,118],["ačit",1765,101],["lučit",1765,117],["rošit",1765,90],["nut",-1,13],["astadu",-1,110],["istadu",-1,111],["ostadu",-1,112],["gu",-1,20],["logu",1777,19],["ugu",1777,18],["ahu",-1,104],["acahu",1780,128],["astajahu",1780,106],["istajahu
",1780,107],["ostajahu",1780,108],["injahu",1780,114],["irahu",1780,100],["urahu",1780,105],["avahu",1780,97],["evahu",1780,96],["ivahu",1780,98],["ovahu",1780,76],["uvahu",1780,99],["ačahu",1780,102],["aju",-1,104],["caju",1794,26],["acaju",1795,128],["laju",1794,30],["raju",1794,31],["iraju",1798,100],["uraju",1798,105],["taju",1794,113],["astaju",1801,106],["istaju",1801,107],["ostaju",1801,108],["avaju",1794,97],["evaju",1794,96],["ivaju",1794,98],["uvaju",1794,99],["ćaju",1794,28],["čaju",1794,27],["ačaju",1810,102],["đaju",1794,29],["iju",-1,116],["biju",1813,32],["ciju",1813,33],["diju",1813,34],["fiju",1813,40],["giju",1813,39],["anjiju",1813,84],["enjiju",1813,85],["snjiju",1813,122],["šnjiju",1813,86],["kiju",1813,95],["liju",1813,24],["eliju",1824,83],["miju",1813,37],["niju",1813,13],["ganiju",1827,9],["maniju",1827,6],["paniju",1827,7],["raniju",1827,8],["taniju",1827,5],["piju",1813,41],["riju",1813,42],["rariju",1834,21],["siju",1813,23],["osiju",1836,123],["tiju",1813,44],["atiju",1838,120],["otiju",1838,22],["aviju",1813,77],["eviju",1813,78],["iviju",1813,79],["oviju",1813,80],["ziju",1813,45],["ošiju",1813,91],["žiju",1813,38],["anju",-1,84],["enju",-1,85],["snju",-1,122],["šnju",-1,86],["uju",-1,25],["lucuju",1852,121],["iruju",1852,100],["lučuju",1852,117],["ku",-1,95],["sku",1856,1],["šku",1856,2],["alu",-1,104],["ijalu",1859,47],["nalu",1859,46],["elu",-1,83],["ilu",-1,116],["ozilu",1863,48],["olu",-1,50],["ramu",-1,52],["acemu",-1,124],["ecemu",-1,125],["ucemu",-1,126],["anjijemu",-1,84],["enjijemu",-1,85],["snjijemu",-1,122],["šnjijemu",-1,86],["kijemu",-1,95],["skijemu",1874,1],["škijemu",1874,2],["elijemu",-1,83],["nijemu",-1,13],["osijemu",-1,123],["atijemu",-1,120],["evitijemu",-1,92],["ovitijemu",-1,93],["astijemu",-1,94],["avijemu",-1,77],["evijemu",-1,78],["ivijemu",-1,79],["ovijemu",-1,80],["ošijemu",-1,91],["anjemu",-1,84],["enjemu",-1,85],["snjemu",-1,122],["šnjemu",-1,86],["kemu",-1,95],["skemu",1893,1],["škemu",1893,2],["lemu",-1
,51],["elemu",1896,83],["nemu",-1,13],["anemu",1898,10],["enemu",1898,87],["snemu",1898,159],["šnemu",1898,88],["osemu",-1,123],["atemu",-1,120],["evitemu",-1,92],["ovitemu",-1,93],["astemu",-1,94],["avemu",-1,77],["evemu",-1,78],["ivemu",-1,79],["ovemu",-1,80],["aćemu",-1,14],["ećemu",-1,15],["ućemu",-1,16],["ošemu",-1,91],["acomu",-1,124],["ecomu",-1,125],["ucomu",-1,126],["anjomu",-1,84],["enjomu",-1,85],["snjomu",-1,122],["šnjomu",-1,86],["komu",-1,95],["skomu",1923,1],["škomu",1923,2],["elomu",-1,83],["nomu",-1,13],["cinomu",1927,137],["činomu",1927,89],["osomu",-1,123],["atomu",-1,120],["evitomu",-1,92],["ovitomu",-1,93],["astomu",-1,94],["avomu",-1,77],["evomu",-1,78],["ivomu",-1,79],["ovomu",-1,80],["aćomu",-1,14],["ećomu",-1,15],["ućomu",-1,16],["ošomu",-1,91],["nu",-1,13],["anu",1943,10],["astanu",1944,110],["istanu",1944,111],["ostanu",1944,112],["inu",1943,11],["cinu",1948,137],["aninu",1948,10],["činu",1948,89],["onu",1943,12],["aru",-1,53],["dru",-1,54],["eru",-1,55],["oru",-1,56],["basu",-1,135],["gasu",-1,131],["jasu",-1,129],["kasu",-1,133],["nasu",-1,132],["tasu",-1,130],["vasu",-1,134],["esu",-1,57],["isu",-1,58],["osu",-1,123],["atu",-1,120],["ikatu",1967,68],["latu",1967,69],["etu",-1,70],["evitu",-1,92],["ovitu",-1,93],["astu",-1,94],["estu",-1,71],["istu",-1,72],["kstu",-1,73],["ostu",-1,74],["ištu",-1,75],["avu",-1,77],["evu",-1,78],["ivu",-1,79],["ovu",-1,80],["lovu",1982,82],["movu",1982,81],["stvu",-1,3],["štvu",-1,4],["bašu",-1,63],["gašu",-1,64],["jašu",-1,61],["kašu",-1,62],["našu",-1,60],["tašu",-1,59],["vašu",-1,65],["ešu",-1,66],["išu",-1,67],["ošu",-1,91],["avav",-1,97],["evav",-1,96],["ivav",-1,98],["uvav",-1,99],["kov",-1,95],["aš",-1,104],["iraš",2002,100],["uraš",2002,105],["taš",2002,113],["avaš",2002,97],["evaš",2002,96],["ivaš",2002,98],["uvaš",2002,99],["ačaš",2002,102],["eš",-1,119],["astadeš",2011,110],["istadeš",2011,111],["ostadeš",2011,112],["astaješ",2011,106],["istaješ",2011,107],["ostaješ",2011,108],["iješ",2011,116]
,["inješ",2011,114],["uješ",2011,25],["iruješ",2020,100],["lučuješ",2020,117],["neš",2011,13],["astaneš",2023,110],["istaneš",2023,111],["ostaneš",2023,112],["eteš",2011,70],["asteš",2011,115],["iš",-1,116],["niš",2029,103],["jetiš",2029,118],["ačiš",2029,101],["lučiš",2029,117],["rošiš",2029,90]],t=[["a",-1,1],["oga",0,1],["ama",0,1],["ima",0,1],["ena",0,1],["e",-1,1],["og",-1,1],["anog",6,1],["enog",6,1],["anih",-1,1],["enih",-1,1],["i",-1,1],["ani",11,1],["eni",11,1],["anoj",-1,1],["enoj",-1,1],["anim",-1,1],["enim",-1,1],["om",-1,1],["enom",18,1],["o",-1,1],["ano",20,1],["eno",20,1],["ost",-1,1],["u",-1,1],["enu",24,1]],u=[17,65,16],n=[65,4,0,0,0,0,0,0,0,0,0,4,0,0,128],j=[119,95,23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,136,0,0,0,0,0,0,0,0,0,128,0,0,0,16],c=[1],f=0,l=!1;function v(){return f<=m.cursor}this.stem=function(){(()=>{for(var a,e=m.cursor;;){var i=m.cursor;a:{for(;;){var r=m.cursor;if(m.bra=m.cursor,0!=(a=m.find_among(s))){switch(m.ket=m.cursor,a){case 1:if(m.slice_from("a"))break;return;case 2:if(m.slice_from("b"))break;return;case 3:if(m.slice_from("v"))break;return;case 4:if(m.slice_from("g"))break;return;case 5:if(m.slice_from("d"))break;return;case 6:if(m.slice_from("đ"))break;return;case 7:if(m.slice_from("e"))break;return;case 8:if(m.slice_from("ž"))break;return;case 9:if(m.slice_from("z"))break;return;case 10:if(m.slice_from("i"))break;return;case 11:if(m.slice_from("j"))break;return;case 12:if(m.slice_from("k"))break;return;case 13:if(m.slice_from("l"))break;return;case 14:if(m.slice_from("lj"))break;return;case 15:if(m.slice_from("m"))break;return;case 16:if(m.slice_from("n"))break;return;case 17:if(m.slice_from("nj"))break;return;case 18:if(m.slice_from("o"))break;return;case 19:if(m.slice_from("p"))break;return;case 20:if(m.slice_from("r"))break;return;case 21:if(m.slice_from("s"))break;return;case 22:if(m.slice_from("t"))break;return;case 23:if(m.slice_from("ć"))break;return;case 24:if(m.slice_from("u"))break;return;case 
25:if(m.slice_from("f"))break;return;case 26:if(m.slice_from("h"))break;return;case 27:if(m.slice_from("c"))break;return;case 28:if(m.slice_from("č"))break;return;case 29:if(m.slice_from("dž"))break;return;case 30:if(m.slice_from("š"))break;return}m.cursor=r;break}if(m.cursor=r,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=i;break}m.cursor=e})(),(()=>{for(var a=m.cursor;;){var e=m.cursor;a:{for(;;){var i=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("ije"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=i;break}if(m.cursor=i,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=e;break}for(m.cursor=a,a=m.cursor;;){var r=m.cursor;a:{for(;;){var s=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("je"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=s;break}if(m.cursor=s,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=r;break}for(m.cursor=a,a=m.cursor;;){var o=m.cursor;a:{for(;;){var t=m.cursor;if(m.bra=m.cursor,m.eq_s("dj")){if(m.ket=m.cursor,!m.slice_from("đ"))return;m.cursor=t;break}if(m.cursor=t,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=o;break}m.cursor=a})(),l=!0;var a=m.cursor,a=(m.go_out_grouping(n,263,382)&&(m.cursor++,l=!1),m.cursor=a,f=m.limit,m.cursor),a=(m.go_out_grouping(u,97,117)&&(m.cursor++,2<=(f=m.cursor)||m.go_in_grouping(u,97,117)&&(m.cursor++,f=m.cursor)),m.cursor=a,m.cursor);a:{for(;;){if(m.eq_s("r"))break;if(m.cursor>=m.limit)break a;m.cursor++}var e=m.cursor;if(m.cursor<2){if(m.cursor=e,!m.go_in_grouping(c,114,114))break a;m.cursor++}f-m.cursor<=1||(f=m.cursor)}m.cursor=a,m.limit_backward=m.cursor,m.cursor=m.limit;var a=m.limit-m.cursor,a=((()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(r)))switch(m.bra=m.cursor,a){case 1:if(m.slice_from("loga"))break;return;case 2:if(m.slice_from("peh"))break;return;case 3:if(m.slice_from("vojka"))break;return;case 4:if(m.slice_from("bojka"))break;return;case 
5:if(m.slice_from("jak"))break;return;case 6:if(m.slice_from("čajni"))break;return;case 7:if(!l)return;if(m.slice_from("cajni"))break;return;case 8:if(m.slice_from("erni"))break;return;case 9:if(m.slice_from("larni"))break;return;case 10:if(m.slice_from("esni"))break;return;case 11:if(m.slice_from("anjca"))break;return;case 12:if(m.slice_from("ajca"))break;return;case 13:if(m.slice_from("ljca"))break;return;case 14:if(m.slice_from("ejca"))break;return;case 15:if(m.slice_from("ojca"))break;return;case 16:if(m.slice_from("ajka"))break;return;case 17:if(m.slice_from("ojka"))break;return;case 18:if(m.slice_from("šca"))break;return;case 19:if(m.slice_from("ing"))break;return;case 20:if(m.slice_from("tvenik"))break;return;case 21:if(m.slice_from("tetika"))break;return;case 22:if(m.slice_from("nstva"))break;return;case 23:if(m.slice_from("nik"))break;return;case 24:if(m.slice_from("tik"))break;return;case 25:if(m.slice_from("zik"))break;return;case 26:if(m.slice_from("snik"))break;return;case 27:if(m.slice_from("kusi"))break;return;case 28:if(m.slice_from("kusni"))break;return;case 29:if(m.slice_from("kustva"))break;return;case 30:if(m.slice_from("dušni"))break;return;case 31:if(!l)return;if(m.slice_from("dusni"))break;return;case 32:if(m.slice_from("antni"))break;return;case 33:if(m.slice_from("bilni"))break;return;case 34:if(m.slice_from("tilni"))break;return;case 35:if(m.slice_from("avilni"))break;return;case 36:if(m.slice_from("silni"))break;return;case 37:if(m.slice_from("gilni"))break;return;case 38:if(m.slice_from("rilni"))break;return;case 39:if(m.slice_from("nilni"))break;return;case 40:if(m.slice_from("alni"))break;return;case 41:if(m.slice_from("ozni"))break;return;case 42:if(m.slice_from("ravi"))break;return;case 43:if(m.slice_from("stavni"))break;return;case 44:if(m.slice_from("pravni"))break;return;case 45:if(m.slice_from("tivni"))break;return;case 46:if(m.slice_from("sivni"))break;return;case 47:if(m.slice_from("atni"))break;return;case 
48:if(m.slice_from("enta"))break;return;case 49:if(m.slice_from("tetni"))break;return;case 50:if(m.slice_from("pletni"))break;return;case 51:if(m.slice_from("šavi"))break;return;case 52:if(!l)return;if(m.slice_from("savi"))break;return;case 53:if(m.slice_from("anta"))break;return;case 54:if(m.slice_from("ačka"))break;return;case 55:if(!l)return;if(m.slice_from("acka"))break;return;case 56:if(m.slice_from("uška"))break;return;case 57:if(!l)return;if(m.slice_from("uska"))break;return;case 58:if(m.slice_from("atka"))break;return;case 59:if(m.slice_from("etka"))break;return;case 60:if(m.slice_from("itka"))break;return;case 61:if(m.slice_from("otka"))break;return;case 62:if(m.slice_from("utka"))break;return;case 63:if(m.slice_from("eskna"))break;return;case 64:if(m.slice_from("tični"))break;return;case 65:if(!l)return;if(m.slice_from("ticni"))break;return;case 66:if(m.slice_from("ojska"))break;return;case 67:if(m.slice_from("esma"))break;return;case 68:if(m.slice_from("metra"))break;return;case 69:if(m.slice_from("centra"))break;return;case 70:if(m.slice_from("istra"))break;return;case 71:if(m.slice_from("osti"))break;return;case 72:if(!l)return;if(m.slice_from("osti"))break;return;case 73:if(m.slice_from("dba"))break;return;case 74:if(m.slice_from("čka"))break;return;case 75:if(m.slice_from("mca"))break;return;case 76:if(m.slice_from("nca"))break;return;case 77:if(m.slice_from("voljni"))break;return;case 78:if(m.slice_from("anki"))break;return;case 79:if(m.slice_from("vca"))break;return;case 80:if(m.slice_from("sca"))break;return;case 81:if(m.slice_from("rca"))break;return;case 82:if(m.slice_from("alca"))break;return;case 83:if(m.slice_from("elca"))break;return;case 84:if(m.slice_from("olca"))break;return;case 85:if(m.slice_from("njca"))break;return;case 86:if(m.slice_from("ekta"))break;return;case 87:if(m.slice_from("izma"))break;return;case 88:if(m.slice_from("jebi"))break;return;case 89:if(m.slice_from("baci"))break;return;case 
90:if(m.slice_from("ašni"))break;return;case 91:if(!l)return;if(m.slice_from("asni"))break}})(),m.cursor=m.limit-a,m.limit-m.cursor),i=m.limit-m.cursor;return(()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(o))&&(m.bra=m.cursor,v())){switch(a){case 1:if(m.slice_from("sk"))break;return;case 2:if(m.slice_from("šk"))break;return;case 3:if(m.slice_from("stv"))break;return;case 4:if(m.slice_from("štv"))break;return;case 5:if(m.slice_from("tanij"))break;return;case 6:if(m.slice_from("manij"))break;return;case 7:if(m.slice_from("panij"))break;return;case 8:if(m.slice_from("ranij"))break;return;case 9:if(m.slice_from("ganij"))break;return;case 10:if(m.slice_from("an"))break;return;case 11:if(m.slice_from("in"))break;return;case 12:if(m.slice_from("on"))break;return;case 13:if(m.slice_from("n"))break;return;case 14:if(m.slice_from("ać"))break;return;case 15:if(m.slice_from("eć"))break;return;case 16:if(m.slice_from("uć"))break;return;case 17:if(m.slice_from("ugov"))break;return;case 18:if(m.slice_from("ug"))break;return;case 19:if(m.slice_from("log"))break;return;case 20:if(m.slice_from("g"))break;return;case 21:if(m.slice_from("rari"))break;return;case 22:if(m.slice_from("oti"))break;return;case 23:if(m.slice_from("si"))break;return;case 24:if(m.slice_from("li"))break;return;case 25:if(m.slice_from("uj"))break;return;case 26:if(m.slice_from("caj"))break;return;case 27:if(m.slice_from("čaj"))break;return;case 28:if(m.slice_from("ćaj"))break;return;case 29:if(m.slice_from("đaj"))break;return;case 30:if(m.slice_from("laj"))break;return;case 31:if(m.slice_from("raj"))break;return;case 32:if(m.slice_from("bij"))break;return;case 33:if(m.slice_from("cij"))break;return;case 34:if(m.slice_from("dij"))break;return;case 35:if(m.slice_from("lij"))break;return;case 36:if(m.slice_from("nij"))break;return;case 37:if(m.slice_from("mij"))break;return;case 38:if(m.slice_from("žij"))break;return;case 39:if(m.slice_from("gij"))break;return;case 40:if(m.slice_from("fij"))break;return;case 
41:if(m.slice_from("pij"))break;return;case 42:if(m.slice_from("rij"))break;return;case 43:if(m.slice_from("sij"))break;return;case 44:if(m.slice_from("tij"))break;return;case 45:if(m.slice_from("zij"))break;return;case 46:if(m.slice_from("nal"))break;return;case 47:if(m.slice_from("ijal"))break;return;case 48:if(m.slice_from("ozil"))break;return;case 49:if(m.slice_from("olov"))break;return;case 50:if(m.slice_from("ol"))break;return;case 51:if(m.slice_from("lem"))break;return;case 52:if(m.slice_from("ram"))break;return;case 53:if(m.slice_from("ar"))break;return;case 54:if(m.slice_from("dr"))break;return;case 55:if(m.slice_from("er"))break;return;case 56:if(m.slice_from("or"))break;return;case 57:if(m.slice_from("es"))break;return;case 58:if(m.slice_from("is"))break;return;case 59:if(m.slice_from("taš"))break;return;case 60:if(m.slice_from("naš"))break;return;case 61:if(m.slice_from("jaš"))break;return;case 62:if(m.slice_from("kaš"))break;return;case 63:if(m.slice_from("baš"))break;return;case 64:if(m.slice_from("gaš"))break;return;case 65:if(m.slice_from("vaš"))break;return;case 66:if(m.slice_from("eš"))break;return;case 67:if(m.slice_from("iš"))break;return;case 68:if(m.slice_from("ikat"))break;return;case 69:if(m.slice_from("lat"))break;return;case 70:if(m.slice_from("et"))break;return;case 71:if(m.slice_from("est"))break;return;case 72:if(m.slice_from("ist"))break;return;case 73:if(m.slice_from("kst"))break;return;case 74:if(m.slice_from("ost"))break;return;case 75:if(m.slice_from("išt"))break;return;case 76:if(m.slice_from("ova"))break;return;case 77:if(m.slice_from("av"))break;return;case 78:if(m.slice_from("ev"))break;return;case 79:if(m.slice_from("iv"))break;return;case 80:if(m.slice_from("ov"))break;return;case 81:if(m.slice_from("mov"))break;return;case 82:if(m.slice_from("lov"))break;return;case 83:if(m.slice_from("el"))break;return;case 84:if(m.slice_from("anj"))break;return;case 85:if(m.slice_from("enj"))break;return;case 
86:if(m.slice_from("šnj"))break;return;case 87:if(m.slice_from("en"))break;return;case 88:if(m.slice_from("šn"))break;return;case 89:if(m.slice_from("čin"))break;return;case 90:if(m.slice_from("roši"))break;return;case 91:if(m.slice_from("oš"))break;return;case 92:if(m.slice_from("evit"))break;return;case 93:if(m.slice_from("ovit"))break;return;case 94:if(m.slice_from("ast"))break;return;case 95:if(m.slice_from("k"))break;return;case 96:if(m.slice_from("eva"))break;return;case 97:if(m.slice_from("ava"))break;return;case 98:if(m.slice_from("iva"))break;return;case 99:if(m.slice_from("uva"))break;return;case 100:if(m.slice_from("ir"))break;return;case 101:if(m.slice_from("ač"))break;return;case 102:if(m.slice_from("ača"))break;return;case 103:if(m.slice_from("ni"))break;return;case 104:if(m.slice_from("a"))break;return;case 105:if(m.slice_from("ur"))break;return;case 106:if(m.slice_from("astaj"))break;return;case 107:if(m.slice_from("istaj"))break;return;case 108:if(m.slice_from("ostaj"))break;return;case 109:if(m.slice_from("aj"))break;return;case 110:if(m.slice_from("asta"))break;return;case 111:if(m.slice_from("ista"))break;return;case 112:if(m.slice_from("osta"))break;return;case 113:if(m.slice_from("ta"))break;return;case 114:if(m.slice_from("inj"))break;return;case 115:if(m.slice_from("as"))break;return;case 116:if(m.slice_from("i"))break;return;case 117:if(m.slice_from("luč"))break;return;case 118:if(m.slice_from("jeti"))break;return;case 119:if(m.slice_from("e"))break;return;case 120:if(m.slice_from("at"))break;return;case 121:if(!l)return;if(m.slice_from("luc"))break;return;case 122:if(!l)return;if(m.slice_from("snj"))break;return;case 123:if(!l)return;if(m.slice_from("os"))break;return;case 124:if(!l)return;if(m.slice_from("ac"))break;return;case 125:if(!l)return;if(m.slice_from("ec"))break;return;case 126:if(!l)return;if(m.slice_from("uc"))break;return;case 127:if(!l)return;if(m.slice_from("rosi"))break;return;case 
128:if(!l)return;if(m.slice_from("aca"))break;return;case 129:if(!l)return;if(m.slice_from("jas"))break;return;case 130:if(!l)return;if(m.slice_from("tas"))break;return;case 131:if(!l)return;if(m.slice_from("gas"))break;return;case 132:if(!l)return;if(m.slice_from("nas"))break;return;case 133:if(!l)return;if(m.slice_from("kas"))break;return;case 134:if(!l)return;if(m.slice_from("vas"))break;return;case 135:if(!l)return;if(m.slice_from("bas"))break;return;case 136:if(!l)return;if(m.slice_from("as"))break;return;case 137:if(!l)return;if(m.slice_from("cin"))break;return;case 138:if(!l)return;if(m.slice_from("astaj"))break;return;case 139:if(!l)return;if(m.slice_from("istaj"))break;return;case 140:if(!l)return;if(m.slice_from("ostaj"))break;return;case 141:if(!l)return;if(m.slice_from("asta"))break;return;case 142:if(!l)return;if(m.slice_from("ista"))break;return;case 143:if(!l)return;if(m.slice_from("osta"))break;return;case 144:if(!l)return;if(m.slice_from("ava"))break;return;case 145:if(!l)return;if(m.slice_from("eva"))break;return;case 146:if(!l)return;if(m.slice_from("iva"))break;return;case 147:if(!l)return;if(m.slice_from("uva"))break;return;case 148:if(!l)return;if(m.slice_from("ova"))break;return;case 149:if(!l)return;if(m.slice_from("jeti"))break;return;case 150:if(!l)return;if(m.slice_from("inj"))break;return;case 151:if(!l)return;if(m.slice_from("ist"))break;return;case 152:if(!l)return;if(m.slice_from("es"))break;return;case 153:if(!l)return;if(m.slice_from("et"))break;return;case 154:if(!l)return;if(m.slice_from("is"))break;return;case 155:if(!l)return;if(m.slice_from("ir"))break;return;case 156:if(!l)return;if(m.slice_from("ur"))break;return;case 157:if(!l)return;if(m.slice_from("uj"))break;return;case 158:if(!l)return;if(m.slice_from("ni"))break;return;case 159:if(!l)return;if(m.slice_from("sn"))break;return;case 160:if(!l)return;if(m.slice_from("ta"))break;return;case 161:if(!l)return;if(m.slice_from("a"))break;return;case 
162:if(!l)return;if(m.slice_from("i"))break;return;case 163:if(!l)return;if(m.slice_from("e"))break;return;case 164:if(!l)return;if(m.slice_from("n"))break;return}return 1}})()||(m.cursor=m.limit-i,m.ket=m.cursor,0!=m.find_among_b(t)&&(m.bra=m.cursor,v())&&m.slice_from("")),m.cursor=m.limit-a,m.cursor=m.limit_backward,!0},this.stemWord=function(a){return m.setCurrent(a),this.stem(),m.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/spanish-stemmer.js b/sphinx/search/minified-js/spanish-stemmer.js index ef634a8b75c..c47ed2e414a 100644 --- a/sphinx/search/minified-js/spanish-stemmer.js +++ b/sphinx/search/minified-js/spanish-stemmer.js @@ -1 +1 @@ -SpanishStemmer=function(){var r=new BaseStemmer;var e=[["",-1,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["la",-1,-1],["sela",0,-1],["le",-1,-1],["me",-1,-1],["se",-1,-1],["lo",-1,-1],["selo",5,-1],["las",-1,-1],["selas",7,-1],["les",-1,-1],["los",-1,-1],["selos",10,-1],["nos",-1,-1]];var a=[["ando",-1,6],["iendo",-1,6],["yendo",-1,7],["ándo",-1,2],["iéndo",-1,1],["ar",-1,6],["er",-1,6],["ir",-1,6],["ár",-1,3],["ér",-1,4],["ír",-1,5]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["able",-1,1],["ible",-1,1],["ante",-1,1]];var o=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["ancia",-1,2],["encia",-1,5],["adora",-1,2],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["logía",-1,3],["idad",-1,8],["able",-1,1],["ible",-1,1],["ante",-1,2],["mente",-1,7],["amente",13,6],["ación",-1,2],["ución",-1,4],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amiento",-1,1],["imiento",-1,1],["ivo",-1,9],["ador",-1,2],["icas",-1,1],["ancias",-1,2],["encias",-1,5],["adoras",-1,2],["osas",-1,1],["istas",-1,1],["ivas",-1,9],["anzas",-1,1],["logías",-1,3],["idades",-1,8],["ables",-1,1],["ibles",-1,1],["aciones",-1,2],["uciones",-1,4],["adores",-1,2],["antes",-1,2],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amientos",-1,1],["imientos",-1,1],["ivos",-1,9]];var 
c=[["ya",-1,1],["ye",-1,1],["yan",-1,1],["yen",-1,1],["yeron",-1,1],["yendo",-1,1],["yo",-1,1],["yas",-1,1],["yes",-1,1],["yais",-1,1],["yamos",-1,1],["yó",-1,1]];var l=[["aba",-1,2],["ada",-1,2],["ida",-1,2],["ara",-1,2],["iera",-1,2],["ía",-1,2],["aría",5,2],["ería",5,2],["iría",5,2],["ad",-1,2],["ed",-1,2],["id",-1,2],["ase",-1,2],["iese",-1,2],["aste",-1,2],["iste",-1,2],["an",-1,2],["aban",16,2],["aran",16,2],["ieran",16,2],["ían",16,2],["arían",20,2],["erían",20,2],["irían",20,2],["en",-1,1],["asen",24,2],["iesen",24,2],["aron",-1,2],["ieron",-1,2],["arán",-1,2],["erán",-1,2],["irán",-1,2],["ado",-1,2],["ido",-1,2],["ando",-1,2],["iendo",-1,2],["ar",-1,2],["er",-1,2],["ir",-1,2],["as",-1,2],["abas",39,2],["adas",39,2],["idas",39,2],["aras",39,2],["ieras",39,2],["ías",39,2],["arías",45,2],["erías",45,2],["irías",45,2],["es",-1,1],["ases",49,2],["ieses",49,2],["abais",-1,2],["arais",-1,2],["ierais",-1,2],["íais",-1,2],["aríais",55,2],["eríais",55,2],["iríais",55,2],["aseis",-1,2],["ieseis",-1,2],["asteis",-1,2],["isteis",-1,2],["áis",-1,2],["éis",-1,1],["aréis",64,2],["eréis",64,2],["iréis",64,2],["ados",-1,2],["idos",-1,2],["amos",-1,2],["ábamos",70,2],["áramos",70,2],["iéramos",70,2],["íamos",70,2],["aríamos",74,2],["eríamos",74,2],["iríamos",74,2],["emos",-1,1],["aremos",78,2],["eremos",78,2],["iremos",78,2],["ásemos",78,2],["iésemos",78,2],["imos",-1,2],["arás",-1,2],["erás",-1,2],["irás",-1,2],["ís",-1,2],["ará",-1,2],["erá",-1,2],["irá",-1,2],["aré",-1,2],["eré",-1,2],["iré",-1,2],["ió",-1,2]];var f=[["a",-1,1],["e",-1,2],["o",-1,1],["os",-1,1],["á",-1,1],["é",-1,2],["í",-1,1],["ó",-1,1]];var n=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10];var b=0;var m=0;var k=0;function _(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(n,97,252)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(n,97,252)){break s}u:while(true){o:{if(!r.in_grouping(n,97,252)){break o}break u}if(r.cursor>=r.limit){break s}r.cursor++}break 
a}r.cursor=a;if(!r.in_grouping(n,97,252)){break i}s:while(true){u:{if(!r.out_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(n,97,252)){break r}i:{var s=r.cursor;a:{if(!r.out_grouping(n,97,252)){break a}s:while(true){u:{if(!r.in_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(n,97,252)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=u;return true}function d(){var i;while(true){var a=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break;case 3:if(!r.slice_from("i")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(!r.slice_from("u")){return false}break;case 6:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function v(){if(!(k<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;e=r.find_among_b(a);if(e==0){return false}if(!v()){return false}switch(e){case 1:r.bra=r.cursor;if(!r.slice_from("iendo")){return false}break;case 2:r.bra=r.cursor;if(!r.slice_from("ando")){return false}break;case 3:r.bra=r.cursor;if(!r.slice_from("ar")){return false}break;case 
4:r.bra=r.cursor;if(!r.slice_from("er")){return false}break;case 5:r.bra=r.cursor;if(!r.slice_from("ir")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(!r.eq_s_b("u")){return false}if(!r.slice_del()){return false}break}return true}function p(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(o)==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}}break;case 9:if(!w()){return false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break 
r}if(!r.slice_del()){return false}}break}return true}function y(){if(r.cursor=a.limit)break r}a.cursor++}g=a.cursor}a.cursor=r,r=a.cursor,a.go_out_grouping(b,97,252)&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,k=a.cursor,a.go_out_grouping(b,97,252))&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,d=a.cursor),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit;var r=a.limit-a.cursor,r=((()=>{var r;if(a.ket=a.cursor,0!=a.find_among_b(c)&&(a.bra=a.cursor,0!=(r=a.find_among_b(u)))&&v())switch(r){case 1:if(a.bra=a.cursor,a.slice_from("iendo"))break;return;case 2:if(a.bra=a.cursor,a.slice_from("ando"))break;return;case 3:if(a.bra=a.cursor,a.slice_from("ar"))break;return;case 4:if(a.bra=a.cursor,a.slice_from("er"))break;return;case 5:if(a.bra=a.cursor,a.slice_from("ir"))break;return;case 6:if(a.slice_del())break;return;case 7:if(!a.eq_s_b("u"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),s=a.limit-a.cursor,s=(p()||(a.cursor=a.limit-s,(()=>{if(!(a.cursor{var r;if(!(a.cursor{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_)))switch(a.bra=a.cursor,r){case 1:if(!v())return;if(a.slice_del())break;return;case 2:if(!v())return;if(!a.slice_del())return;var i=a.limit-a.cursor;if(a.ket=a.cursor,a.eq_s_b("u")){a.bra=a.cursor;var e=a.limit-a.cursor;if(a.eq_s_b("g"))if(a.cursor=a.limit-e,v()){if(!a.slice_del());}else a.cursor=a.limit-i;else a.cursor=a.limit-i}else a.cursor=a.limit-i}})(),a.cursor=a.limit-s,a.cursor=a.limit_backward,a.cursor);return(()=>{for(var r;;){var i=a.cursor;r:{switch(a.bra=a.cursor,r=a.find_among(o),a.ket=a.cursor,r){case 1:if(a.slice_from("a"))break;return;case 2:if(a.slice_from("e"))break;return;case 3:if(a.slice_from("i"))break;return;case 4:if(a.slice_from("o"))break;return;case 5:if(a.slice_from("u"))break;return;case 6:if(a.cursor>=a.limit)break r;a.cursor++}continue}a.cursor=i;break}})(),a.cursor=r,!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}}; \ No newline at end of file diff 
--git a/sphinx/search/minified-js/swedish-stemmer.js b/sphinx/search/minified-js/swedish-stemmer.js index b975f54284d..d66010809c6 100644 --- a/sphinx/search/minified-js/swedish-stemmer.js +++ b/sphinx/search/minified-js/swedish-stemmer.js @@ -1 +1 @@ -SwedishStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["arna",0,1],["erna",0,1],["heterna",2,1],["orna",0,1],["ad",-1,1],["e",-1,1],["ade",6,1],["ande",6,1],["arne",6,1],["are",6,1],["aste",6,1],["en",-1,1],["anden",12,1],["aren",12,1],["heten",12,1],["ern",-1,1],["ar",-1,1],["er",-1,1],["heter",18,1],["or",-1,1],["s",-1,2],["as",21,1],["arnas",22,1],["ernas",22,1],["ornas",22,1],["es",21,1],["ades",26,1],["andes",26,1],["ens",21,1],["arens",29,1],["hetens",29,1],["erns",21,1],["at",-1,1],["andet",-1,1],["het",-1,1],["ast",-1,1]];var a=[["dd",-1,-1],["gd",-1,-1],["nn",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1],["tt",-1,-1]];var i=[["ig",-1,1],["lig",0,1],["els",-1,1],["fullt",-1,3],["löst",-1,2]];var t=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32];var s=[119,127,149];var u=0;var n=0;function c(){n=r.limit;var e=r.cursor;{var a=r.cursor+3;if(a>r.limit){return false}r.cursor=a}u=r.cursor;r.cursor=e;r:while(true){var i=r.cursor;e:{if(!r.in_grouping(t,97,246)){break e}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(t,97,246)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(na.limit_backward))return a.cursor=a.limit-r,r=a.limit-a.cursor,0==a.find_among_b(i)?(a.cursor=a.limit-r,1):void 0}this.stem=function(){var r=a.cursor,i=(m=a.limit,t=a.cursor,(i=a.cursor+3)>a.limit||(a.cursor=i,l=a.cursor,a.cursor=t,a.go_out_grouping(u,97,246)&&(a.cursor++,a.go_in_grouping(u,97,246))&&(a.cursor++,m=a.cursor,l<=m||(m=l))),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit,a.limit-a.cursor),t=((()=>{var r;if(!(a.cursor{var r;if(!(a.cursor{var r;if(z=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;var 
i=l.limit-l.cursor;r:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(p))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:var e=l.limit-l.cursor;if(0!=l.find_among_b(W))break r;if(l.cursor=l.limit-e,l.slice_del())break;return;case 3:var s=l.limit-l.cursor;if(0!=l.find_among_b(j))break r;if(l.cursor=l.limit-s,l.slice_del())break;return;case 4:var o=l.limit-l.cursor;if(l.eq_s_b("ச"))break r;if(l.cursor=l.limit-o,l.slice_from("்"))break;return;case 5:if(l.slice_from("்"))break;return;case 6:var u=l.limit-l.cursor;if(!l.eq_s_b("்"))break r;if(l.cursor=l.limit-u,l.slice_del())break;return}z=!0,l.cursor=l.limit-c}}if(l.cursor=l.limit-i,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(x)){if(l.bra=l.cursor,!l.slice_del())return;z=!0}return l.cursor=l.limit-i,l.cursor=l.limit_backward,E(),!!z}})()){l.cursor=r;break}}}this.stem=function(){y=!1;var r,i,c=l.cursor;return F(),l.cursor=c,!!A()&&(c=l.cursor,l.bra=l.cursor,l.eq_s("எ")&&0!=l.find_among(e)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(r=l.cursor,D(),l.cursor=r),l.cursor=c,r=l.cursor,l.bra=l.cursor,0!=l.find_among(s)&&0!=l.find_among(o)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(c=l.cursor,D(),l.cursor=c),l.cursor=r,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(v)&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-i,l.cursor=l.limit_backward,E())),l.cursor=c,i=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,l.eq_s_b("ும்"))&&(l.bra=l.cursor,l.slice_from("்"))&&(l.cursor=l.limit_backward,c=l.cursor,F(),l.cursor=c),l.cursor=i,c=l.cursor,(()=>{var r;if(A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(q)))){switch(l.bra=l.cursor,r){case 1:if(l.slice_from("்"))break;return;case 2:var i=l.limit-l.cursor;if(0!=l.find_among_b(w))return;if(l.cursor=l.limit-i,l.slice_from("்"))break;return;case 
3:if(l.slice_del())break;return}l.cursor=l.limit_backward,E()}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(y=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;r:{var i=l.limit-l.cursor;i:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(S))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:if(l.slice_from("்"))break;return;case 3:var e=l.limit-l.cursor;if(l.eq_s_b("ம"))break i;if(l.cursor=l.limit-e,l.slice_from("்"))break;return;case 4:if(l.current.length<7)break i;if(l.slice_from("்"))break;return;case 5:var s=l.limit-l.cursor;if(0!=l.find_among_b(h))break i;if(l.cursor=l.limit-s,l.slice_from("்"))break;return;case 6:var o=l.limit-l.cursor;if(0!=l.find_among_b(C))break i;if(l.cursor=l.limit-o,l.slice_del())break;return;case 7:if(l.slice_from("ி"))break;return}l.cursor=l.limit-c;break r}}l.cursor=l.limit-i;i=l.limit-l.cursor;if(l.ket=l.cursor,!l.eq_s_b("ை"))return;var u=l.limit-l.cursor,a=l.limit-l.cursor;if(0==l.find_among_b(B))l.cursor=l.limit-a;else{l.cursor=l.limit-u;a=l.limit-l.cursor;if(0==l.find_among_b(T))return;if(!l.eq_s_b("்"))return;l.cursor=l.limit-a}if(l.bra=l.cursor,!l.slice_from("்"))return;l.cursor=l.limit-i}y=!0;var t=l.limit-l.cursor;l.ket=l.cursor,l.eq_s_b("ின்")&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-t,l.cursor=l.limit_backward,E())}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(d))){switch(l.bra=l.cursor,r){case 1:r:{var i=l.limit-l.cursor;if(0!=l.find_among_b(u)){if(l.slice_from("ுங்"))break r;return}if(l.cursor=l.limit-i,!l.slice_from("்"))return}break;case 2:if(l.slice_from("ல்"))break;return;case 3:if(l.slice_from("ள்"))break;return;case 
4:if(l.slice_del())break;return}l.cursor=l.limit_backward}})(),l.cursor=c,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=l.find_among_b(g))&&(l.bra=l.cursor,l.slice_del())&&(l.cursor=l.limit_backward),l.cursor=c,c=l.cursor,G(),l.cursor=c,!0)},this.stemWord=function(r){return l.setCurrent(r),this.stem(),l.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/minified-js/turkish-stemmer.js b/sphinx/search/minified-js/turkish-stemmer.js index 4c0a699bfbe..476e4abc4ad 100644 --- a/sphinx/search/minified-js/turkish-stemmer.js +++ b/sphinx/search/minified-js/turkish-stemmer.js @@ -1 +1 @@ -TurkishStemmer=function(){var r=new BaseStemmer;var i=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]];var e=[["leri",-1,-1],["ları",-1,-1]];var u=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]];var a=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]];var s=[["a",-1,-1],["e",-1,-1]];var t=[["na",-1,-1],["ne",-1,-1]];var l=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]];var c=[["nda",-1,-1],["nde",-1,-1]];var o=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]];var f=[["ndan",-1,-1],["nden",-1,-1]];var n=[["la",-1,-1],["le",-1,-1]];var b=[["ca",-1,-1],["ce",-1,-1]];var m=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]];var k=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]];var _=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]];var v=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]];var d=[["lar",-1,-1],["ler",-1,-1]];var g=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]];var w=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]];var q=[["casına",-1,-1],["cesine",-1,-1]];var 
p=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]];var h=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]];var z=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]];var y=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]];var C=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1];var S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1];var B=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var T=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130];var W=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var j=[17];var x=[65];var A=[65];var D=false;function E(){var i=r.limit-r.cursor;r:while(true){var e=r.limit-r.cursor;i:{if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-e;if(r.cursor<=r.limit_backward){return false}r.cursor--}r:{var u=r.limit-r.cursor;i:{if(!r.eq_s_b("a")){break i}e:while(true){var a=r.limit-r.cursor;u:{if(!r.in_grouping_b(B,97,305)){break u}r.cursor=r.limit-a;break e}r.cursor=r.limit-a;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("e")){break i}e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(T,101,252)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ı")){break i}e:while(true){var t=r.limit-r.cursor;u:{if(!r.in_grouping_b(W,97,305)){break u}r.cursor=r.limit-t;break e}r.cursor=r.limit-t;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("i")){break 
i}e:while(true){var l=r.limit-r.cursor;u:{if(!r.in_grouping_b(j,101,105)){break u}r.cursor=r.limit-l;break e}r.cursor=r.limit-l;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("o")){break i}e:while(true){var c=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-c;break e}r.cursor=r.limit-c;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ö")){break i}e:while(true){var o=r.limit-r.cursor;u:{if(!r.in_grouping_b(A,246,252)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("u")){break i}e:while(true){var f=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-f;break e}r.cursor=r.limit-f;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;if(!r.eq_s_b("ü")){return false}i:while(true){var n=r.limit-r.cursor;e:{if(!r.in_grouping_b(A,246,252)){break e}r.cursor=r.limit-n;break i}r.cursor=r.limit-n;if(r.cursor<=r.limit_backward){return false}r.cursor--}}r.cursor=r.limit-i;return true}function F(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("n")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("n")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function G(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("s")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("s")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return 
false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function H(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("y")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("y")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function I(){r:{var i=r.limit-r.cursor;i:{if(!r.in_grouping_b(S,105,305)){break i}var e=r.limit-r.cursor;if(!r.out_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.in_grouping_b(S,105,305)){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.out_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function J(){if(r.find_among_b(i)==0){return false}if(!I()){return false}return true}function K(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!G()){return false}return true}function L(){if(r.find_among_b(e)==0){return false}return true}function M(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!H()){return false}return true}function N(){if(!E()){return false}if(r.find_among_b(u)==0){return false}return true}function O(){if(!E()){return false}if(r.find_among_b(a)==0){return false}if(!F()){return false}return true}function P(){if(!E()){return false}if(r.find_among_b(s)==0){return false}if(!H()){return false}return true}function Q(){if(!E()){return false}if(r.find_among_b(t)==0){return false}return true}function R(){if(!E()){return false}if(r.find_among_b(l)==0){return false}return true}function U(){if(!E()){return false}if(r.find_among_b(c)==0){return false}return true}function V(){if(!E()){return 
false}if(r.find_among_b(o)==0){return false}return true}function X(){if(!E()){return false}if(r.find_among_b(f)==0){return false}return true}function Y(){if(!E()){return false}if(r.find_among_b(n)==0){return false}if(!H()){return false}return true}function Z(){if(!r.eq_s_b("ki")){return false}return true}function $(){if(!E()){return false}if(r.find_among_b(b)==0){return false}if(!F()){return false}return true}function rr(){if(!E()){return false}if(r.find_among_b(m)==0){return false}if(!H()){return false}return true}function ir(){if(!E()){return false}if(r.find_among_b(k)==0){return false}return true}function er(){if(!E()){return false}if(r.find_among_b(_)==0){return false}if(!H()){return false}return true}function ur(){if(r.find_among_b(v)==0){return false}return true}function ar(){if(!E()){return false}if(r.find_among_b(d)==0){return false}return true}function sr(){if(!E()){return false}if(r.find_among_b(g)==0){return false}return true}function tr(){if(!E()){return false}if(r.find_among_b(w)==0){return false}return true}function lr(){if(r.find_among_b(q)==0){return false}return true}function cr(){if(!E()){return false}if(r.find_among_b(p)==0){return false}if(!H()){return false}return true}function or(){if(r.find_among_b(h)==0){return false}if(!H()){return false}return true}function fr(){if(!E()){return false}if(r.find_among_b(z)==0){return false}if(!H()){return false}return true}function nr(){if(!r.eq_s_b("ken")){return false}if(!H()){return false}return true}function br(){r.ket=r.cursor;D=true;r:{var i=r.limit-r.cursor;i:{e:{var e=r.limit-r.cursor;u:{if(!fr()){break u}break e}r.cursor=r.limit-e;u:{if(!cr()){break u}break e}r.cursor=r.limit-e;u:{if(!or()){break u}break e}r.cursor=r.limit-e;if(!nr()){break i}}break r}r.cursor=r.limit-i;i:{if(!lr()){break i}e:{var u=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-u;u:{if(!ar()){break u}break e}r.cursor=r.limit-u;u:{if(!rr()){break u}break e}r.cursor=r.limit-u;u:{if(!ir()){break u}break 
e}r.cursor=r.limit-u;u:{if(!er()){break u}break e}r.cursor=r.limit-u}if(!fr()){break i}break r}r.cursor=r.limit-i;i:{if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var s=r.limit-r.cursor;a:{if(!tr()){break a}break u}r.cursor=r.limit-s;a:{if(!cr()){break a}break u}r.cursor=r.limit-s;a:{if(!or()){break a}break u}r.cursor=r.limit-s;if(!fr()){r.cursor=r.limit-a;break e}}}D=false;break r}r.cursor=r.limit-i;i:{if(!sr()){break i}e:{var t=r.limit-r.cursor;u:{if(!cr()){break u}break e}r.cursor=r.limit-t;if(!or()){break i}}break r}r.cursor=r.limit-i;i:{e:{var l=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-l;u:{if(!er()){break u}break e}r.cursor=r.limit-l;u:{if(!ir()){break u}break e}r.cursor=r.limit-l;if(!rr()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var c=r.limit-r.cursor;e:{r.ket=r.cursor;if(!fr()){r.cursor=r.limit-c;break e}}break r}r.cursor=r.limit-i;if(!tr()){return false}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-f;u:{if(!ar()){break u}break e}r.cursor=r.limit-f;u:{if(!rr()){break u}break e}r.cursor=r.limit-f;u:{if(!ir()){break u}break e}r.cursor=r.limit-f;u:{if(!er()){break u}break e}r.cursor=r.limit-f}if(!fr()){r.cursor=r.limit-o;break i}}}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function mr(){r.ket=r.cursor;if(!Z()){return false}r:{var i=r.limit-r.cursor;i:{if(!R()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var u=r.limit-r.cursor;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-a;break s}}break u}r.cursor=r.limit-u;if(!J()){r.cursor=r.limit-e;break e}r.bra=r.cursor;if(!r.slice_del()){return false}var s=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-s;break a}r.bra=r.cursor;if(!r.slice_del()){return 
false}if(!mr()){r.cursor=r.limit-s;break a}}}}break r}r.cursor=r.limit-i;i:{if(!O()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var l=r.limit-r.cursor;a:{if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-l;a:{r.ket=r.cursor;s:{var c=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-c;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break s}}break u}r.cursor=r.limit-l;if(!mr()){r.cursor=r.limit-t;break e}}}break r}r.cursor=r.limit-i;if(!U()){return false}i:{var f=r.limit-r.cursor;e:{if(!L()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}break i}r.cursor=r.limit-f;e:{if(!K()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}var n=r.limit-r.cursor;u:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-n;break u}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-n;break u}}break i}r.cursor=r.limit-f;if(!mr()){return false}}}return true}function kr(){r:{var i=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{if(!mr()){r.cursor=r.limit-e;break e}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!$()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var u=r.limit-r.cursor;e:{u:{var a=r.limit-r.cursor;a:{r.ket=r.cursor;if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-a;a:{r.ket=r.cursor;s:{var s=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-s;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-t;break s}}break 
u}r.cursor=r.limit-a;r.ket=r.cursor;if(!ar()){r.cursor=r.limit-u;break e}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-u;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var l=r.limit-r.cursor;u:{if(!U()){break u}break e}r.cursor=r.limit-l;if(!Q()){break i}}e:{var c=r.limit-r.cursor;u:{if(!L()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}break e}r.cursor=r.limit-c;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break a}}break e}r.cursor=r.limit-c;if(!mr()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!X()){break u}break e}r.cursor=r.limit-f;if(!N()){break i}}e:{var n=r.limit-r.cursor;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var b=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-b;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-b;break a}}break e}r.cursor=r.limit-n;if(!L()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!V()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var m=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var k=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var _=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-_;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-_;break s}}break u}r.cursor=r.limit-k;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var v=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-v;break s}}break u}r.cursor=r.limit-k;if(!mr()){r.cursor=r.limit-m;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var d=r.limit-r.cursor;u:{if(!O()){break u}break e}r.cursor=r.limit-d;if(!Y()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var g=r.limit-r.cursor;e:{u:{var 
w=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){break a}break u}r.cursor=r.limit-w;a:{r.ket=r.cursor;s:{var q=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-q;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var p=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-p;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-p;break s}}break u}r.cursor=r.limit-w;if(!mr()){r.cursor=r.limit-g;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!L()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}break r}r.cursor=r.limit-i;i:{if(!mr()){break i}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var h=r.limit-r.cursor;u:{if(!R()){break u}break e}r.cursor=r.limit-h;u:{if(!M()){break u}break e}r.cursor=r.limit-h;if(!P()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var z=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var y=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var C=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-C;break s}}break u}r.cursor=r.limit-y;if(!ar()){r.cursor=r.limit-z;break e}}r.bra=r.cursor;if(!r.slice_del()){return false}r.ket=r.cursor;if(!mr()){r.cursor=r.limit-z;break e}}break r}r.cursor=r.limit-i;r.ket=r.cursor;i:{var S=r.limit-r.cursor;e:{if(!J()){break e}break i}r.cursor=r.limit-S;if(!K()){return false}}r.bra=r.cursor;if(!r.slice_del()){return false}var B=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-B;break i}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-B;break i}}}return true}function _r(){var i;r.ket=r.cursor;i=r.find_among_b(y);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:if(!r.slice_from("p")){return false}break;case 2:if(!r.slice_from("ç")){return false}break;case 3:if(!r.slice_from("t")){return false}break;case 4:if(!r.slice_from("k")){return false}break}return true}function vr(){var 
i=r.limit-r.cursor;r:{var e=r.limit-r.cursor;i:{if(!r.eq_s_b("d")){break i}break r}r.cursor=r.limit-e;if(!r.eq_s_b("g")){return false}}r.cursor=r.limit-i;r:{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var t=r.limit-r.cursor;u:{if(!r.eq_s_b("a")){break u}break e}r.cursor=r.limit-t;if(!r.eq_s_b("ı")){break i}}r.cursor=r.limit-a;{var l=r.cursor;r.insert(r.cursor,r.cursor,"ı");r.cursor=l}break r}r.cursor=r.limit-u;i:{var c=r.limit-r.cursor;e:while(true){var o=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var f=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break e}r.cursor=r.limit-f;if(!r.eq_s_b("i")){break i}}r.cursor=r.limit-c;{var n=r.cursor;r.insert(r.cursor,r.cursor,"i");r.cursor=n}break r}r.cursor=r.limit-u;i:{var b=r.limit-r.cursor;e:while(true){var m=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-m;break e}r.cursor=r.limit-m;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var k=r.limit-r.cursor;u:{if(!r.eq_s_b("o")){break u}break e}r.cursor=r.limit-k;if(!r.eq_s_b("u")){break i}}r.cursor=r.limit-b;{var _=r.cursor;r.insert(r.cursor,r.cursor,"u");r.cursor=_}break r}r.cursor=r.limit-u;var v=r.limit-r.cursor;i:while(true){var d=r.limit-r.cursor;e:{if(!r.in_grouping_b(C,97,305)){break e}r.cursor=r.limit-d;break i}r.cursor=r.limit-d;if(r.cursor<=r.limit_backward){return false}r.cursor--}i:{var g=r.limit-r.cursor;e:{if(!r.eq_s_b("ö")){break e}break i}r.cursor=r.limit-g;if(!r.eq_s_b("ü")){return false}}r.cursor=r.limit-v;{var w=r.cursor;r.insert(r.cursor,r.cursor,"ü");r.cursor=w}}return true}function dr(){if(!r.eq_s_b("ad")){return false}var i=r.limit-r.cursor;r:{if(!r.eq_s_b("soy")){r.cursor=r.limit-i;break r}}if(r.cursor>r.limit_backward){return 
false}return true}function gr(){var i=r.cursor;{var e=2;while(true){var u=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(C,97,305)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}e--;continue}r.cursor=u;break}if(e>0){return false}}r.cursor=i;return true}function wr(){r.limit_backward=r.cursor;r.cursor=r.limit;{var i=r.limit-r.cursor;r:{if(!dr()){break r}return false}r.cursor=r.limit-i}var e=r.limit-r.cursor;vr();r.cursor=r.limit-e;var u=r.limit-r.cursor;_r();r.cursor=r.limit-u;r.cursor=r.limit_backward;return true}this.stem=function(){if(!gr()){return false}r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;br();r.cursor=r.limit-i;if(!D){return false}var e=r.limit-r.cursor;kr();r.cursor=r.limit-e;r.cursor=r.limit_backward;if(!wr()){return false}return true};this["stemWord"]=function(i){r.setCurrent(i);this.stem();return r.getCurrent()}}; \ No newline at end of file +var TurkishStemmer=function(){var q=new BaseStemmer,u=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]],r=[["leri",-1,-1],["ları",-1,-1]],p=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]],i=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]],z=[["a",-1,-1],["e",-1,-1]],w=[["na",-1,-1],["ne",-1,-1]],s=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]],o=[["nda",-1,-1],["nde",-1,-1]],h=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]],y=[["ndan",-1,-1],["nden",-1,-1]],C=[["la",-1,-1],["le",-1,-1]],I=[["ca",-1,-1],["ce",-1,-1]],g=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]],v=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]],J=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]],K=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]],L=[["lar",-1,-1],["ler",-1,-1]],M=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]],N=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]],O=[["casın
a",-1,-1],["cesine",-1,-1]],P=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]],Q=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]],R=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]],U=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]],t=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],V=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],X=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],Y=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],Z=[17],c=[65],e=[65],l=!1;function B(){var r=q.limit-q.cursor;if(q.go_out_grouping_b(t,97,305)){var i=q.limit-q.cursor;if(!(q.eq_s_b("a")&&q.go_out_grouping_b(V,97,305)||(q.cursor=q.limit-i,q.eq_s_b("e")&&q.go_out_grouping_b(X,101,252))||(q.cursor=q.limit-i,q.eq_s_b("ı")&&q.go_out_grouping_b(Y,97,305))||(q.cursor=q.limit-i,q.eq_s_b("i")&&q.go_out_grouping_b(Z,101,105))||(q.cursor=q.limit-i,q.eq_s_b("o")&&q.go_out_grouping_b(c,111,117))||(q.cursor=q.limit-i,q.eq_s_b("ö")&&q.go_out_grouping_b(e,246,252))||(q.cursor=q.limit-i,q.eq_s_b("u")&&q.go_out_grouping_b(c,111,117)))){if(q.cursor=q.limit-i,!q.eq_s_b("ü"))return;if(!q.go_out_grouping_b(e,246,252))return}return q.cursor=q.limit-r,1}}function T(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("n")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("n"))return 
void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 1}function W(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("y")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("y"))return void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 1}function j(){if(0!=q.find_among_b(u)){r:{var r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305)){var i=q.limit-q.cursor;if(q.out_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.out_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function x(){if(B()&&q.in_grouping_b(S,105,305)){r:{var r=q.limit-q.cursor;if(q.eq_s_b("s")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("s"))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.in_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function A(){return 0!=q.find_among_b(r)}function D(){return B()&&0!=q.find_among_b(i)&&!!T()}function E(){return B()&&0!=q.find_among_b(s)}function F(){return B()&&0!=q.find_among_b(o)}function m(){return B()&&0!=q.find_among_b(g)&&!!W()}function n(){return B()&&0!=q.find_among_b(v)}function _(){return B()&&0!=q.find_among_b(J)&&!!W()}function f(){return 0!=q.find_among_b(K)}function G(){return B()&&0!=q.find_among_b(L)}function a(){return B()&&0!=q.find_among_b(N)}function b(){return 
B()&&0!=q.find_among_b(P)&&!!W()}function d(){return 0!=q.find_among_b(Q)&&!!W()}function k(){return B()&&0!=q.find_among_b(R)&&!!W()}function $(){q.ket=q.cursor,l=!0;r:{var r=q.limit-q.cursor,i=q.limit-q.cursor;if(!(k()||(q.cursor=q.limit-i,b())||(q.cursor=q.limit-i,d())||(q.cursor=q.limit-i,q.eq_s_b("ken")&&W()))){if(q.cursor=q.limit-r,0!=q.find_among_b(O)){i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,G())||(q.cursor=q.limit-i,m())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i),k())break r}if(q.cursor=q.limit-r,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);a()||(q.cursor=q.limit-u,b())||(q.cursor=q.limit-u,d())||(q.cursor=q.limit-u,k())||(q.cursor=q.limit-i),l=!1}else{if(q.cursor=q.limit-r,B()&&0!=q.find_among_b(M)){u=q.limit-q.cursor;if(b()||(q.cursor=q.limit-u,d()))break r}q.cursor=q.limit-r;i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,m())){if(q.bra=q.cursor,!q.slice_del())return;u=q.limit-q.cursor;q.ket=q.cursor,k()||(q.cursor=q.limit-u)}else{if(q.cursor=q.limit-r,!a())return;if(q.bra=q.cursor,!q.slice_del())return;i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);f()||(q.cursor=q.limit-u,G())||(q.cursor=q.limit-u,m())||(q.cursor=q.limit-u,n())||(q.cursor=q.limit-u,_())||(q.cursor=q.limit-u),k()||(q.cursor=q.limit-i)}}}}q.bra=q.cursor,q.slice_del()}function H(){if(q.ket=q.cursor,q.eq_s_b("ki")){var r=q.limit-q.cursor;if(E()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;r:{q.ket=q.cursor;var u=q.limit-q.cursor;if(G()){if(q.bra=q.cursor,!q.slice_del())return;var s=q.limit-q.cursor;H()||(q.cursor=q.limit-s)}else{if(q.cursor=q.limit-u,!j()){q.cursor=q.limit-i;break r}if(q.bra=q.cursor,!q.slice_del())return;s=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-s)}else q.cursor=q.limit-s}}}else 
if(q.cursor=q.limit-r,D()){if(q.bra=q.cursor,!q.slice_del())return;var o=q.limit-q.cursor;r:{q.ket=q.cursor;i:{var t=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break i;return}q.cursor=q.limit-t,q.ket=q.cursor;var c=q.limit-q.cursor;if(j()||(q.cursor=q.limit-c,x())){if(q.bra=q.cursor,!q.slice_del())return;c=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-c)}else q.cursor=q.limit-c}else if(q.cursor=q.limit-t,!H()){q.cursor=q.limit-o;break r}}}}else{if(q.cursor=q.limit-r,!F())return;r:{var e=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break r;return}if(q.cursor=q.limit-e,x()){if(q.bra=q.cursor,!q.slice_del())return;var l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}else if(q.cursor=q.limit-e,!H())return}}return 1}}function rr(){r:{var r=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;H()||(q.cursor=q.limit-i)}else if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(I)&&T()){if(q.bra=q.cursor,!q.slice_del())return;var u=q.limit-q.cursor;i:u:{var s=q.limit-q.cursor;if(q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break u;return}q.cursor=q.limit-s,q.ket=q.cursor;var o=q.limit-q.cursor;if(j()||(q.cursor=q.limit-o,x())){if(q.bra=q.cursor,!q.slice_del())return;o=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-o)}else q.cursor=q.limit-o}else{if(q.cursor=q.limit-s,q.ket=q.cursor,!G()){q.cursor=q.limit-u;break i}if(q.bra=q.cursor,!q.slice_del())return;if(!H()){q.cursor=q.limit-u;break i}}}}else{q.cursor=q.limit-r;i:{q.ket=q.cursor;var t=q.limit-q.cursor;if(F()||(q.cursor=q.limit-t,B()&&0!=q.find_among_b(w))){u:{var c=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break u;return}if(q.cursor=q.limit-c,x()){if(q.bra=q.cursor,!q.slice_del())return;var 
e=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-e)}else q.cursor=q.limit-e}else if(q.cursor=q.limit-c,!H())break i}break r}}q.cursor=q.limit-r;i:{q.ket=q.cursor;var l=q.limit-q.cursor;if(B()&&0!=q.find_among_b(y)||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(p))){l=q.limit-q.cursor;if(x()){if(q.bra=q.cursor,!q.slice_del())return;var m=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-m)}else q.cursor=q.limit-m}else if(q.cursor=q.limit-l,!A())break i;break r}}if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(h)){if(q.bra=q.cursor,!q.slice_del())return;var n=q.limit-q.cursor;i:{q.ket=q.cursor;var _=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var f=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-f)}else q.cursor=q.limit-f}else if(q.cursor=q.limit-_,G()){if(q.bra=q.cursor,!q.slice_del())return;f=q.limit-q.cursor;H()||(q.cursor=q.limit-f)}else if(q.cursor=q.limit-_,!H()){q.cursor=q.limit-n;break i}}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(D()||(q.cursor=q.limit-m,B()&&0!=q.find_among_b(C)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var a=q.limit-q.cursor;i:u:{var b=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;if(H())break u}q.cursor=q.limit-b,q.ket=q.cursor;var d=q.limit-q.cursor;if(j()||(q.cursor=q.limit-d,x())){if(q.bra=q.cursor,!q.slice_del())return;d=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-d)}else q.cursor=q.limit-d}else if(q.cursor=q.limit-b,!H()){q.cursor=q.limit-a;break i}}}else{if(q.cursor=q.limit-r,q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break 
r;return}if(q.cursor=q.limit-r,!H()){q.cursor=q.limit-r,q.ket=q.cursor;l=q.limit-q.cursor;if(E()||(q.cursor=q.limit-l,B()&&q.in_grouping_b(S,105,305)&&W())||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(z)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var k=q.limit-q.cursor;i:{q.ket=q.cursor;var g=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var v=q.limit-q.cursor;q.ket=q.cursor,G()||(q.cursor=q.limit-v)}else if(q.cursor=q.limit-g,!G()){q.cursor=q.limit-k;break i}if(q.bra=q.cursor,!q.slice_del())return;q.ket=q.cursor,H()||(q.cursor=q.limit-k)}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(!j()&&(q.cursor=q.limit-m,!x()))return;if(q.bra=q.cursor,!q.slice_del())return;l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}}}}}}}function ir(){q.limit_backward=q.cursor,q.cursor=q.limit;var r=q.limit-q.cursor;if(!(()=>{var r;return!q.eq_s_b("ad")||(r=q.limit-q.cursor,q.eq_s_b("soy")||(q.cursor=q.limit-r),q.cursor>q.limit_backward)?void 0:1})())return q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{q.ket=q.cursor,q.bra=q.cursor;var r=q.limit-q.cursor;if((q.eq_s_b("d")||(q.cursor=q.limit-r,q.eq_s_b("g")))&&q.go_out_grouping_b(t,97,305))r:{var i=q.limit-q.cursor,u=q.limit-q.cursor;if(q.eq_s_b("a")||(q.cursor=q.limit-u,q.eq_s_b("ı"))){if(q.slice_from("ı"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("e")||(q.cursor=q.limit-u,q.eq_s_b("i"))){if(q.slice_from("i"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("o")||(q.cursor=q.limit-u,q.eq_s_b("u"))){if(q.slice_from("u"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(!q.eq_s_b("ö")&&(q.cursor=q.limit-u,!q.eq_s_b("ü")))return;if(!q.slice_from("ü"))return}})(),q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{var r;if(q.ket=q.cursor,0!=(r=q.find_among_b(U)))switch(q.bra=q.cursor,r){case 1:if(q.slice_from("p"))break;return;case 2:if(q.slice_from("ç"))break;return;case 
3:if(q.slice_from("t"))break;return;case 4:if(q.slice_from("k"))break}})(),q.cursor=q.limit-r,q.cursor=q.limit_backward,1}this.stem=function(){if((()=>{var r=q.cursor;r:{for(q.bra=q.cursor;;){var i=q.cursor,u=q.cursor;if(!q.eq_s("'")){q.cursor=u,q.cursor=i;break}if(q.cursor=i,q.cursor>=q.limit)break r;q.cursor++}if(q.ket=q.cursor,!q.slice_del())return}q.cursor=r,r=q.cursor;r:{var s=q.cursor+2;if(!(s>q.limit)){for(q.cursor=s;;){var o=q.cursor;if(q.eq_s("'")){q.cursor=o;break}if(q.cursor=o,q.cursor>=q.limit)break r;q.cursor++}if(q.bra=q.cursor,q.cursor=q.limit,q.ket=q.cursor,!q.slice_del())return}}q.cursor=r})(),!(()=>{for(var r=q.cursor,i=2;0{for(var r,e=t.cursor;;){var s=t.cursor;r:{for(;;){var i=t.cursor;e:if(t.bra=t.cursor,0!=(r=t.find_among(l))){switch(t.ket=t.cursor,r){case 1:var c=t.cursor;if(t.eq_s("ּ"))break e;if(t.cursor=c,t.slice_from("װ"))break;return;case 2:var o=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=o,t.slice_from("ױ"))break;return;case 3:var u=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=u,t.slice_from("ײ"))break;return;case 4:if(t.slice_from("כ"))break;return;case 5:if(t.slice_from("מ"))break;return;case 6:if(t.slice_from("נ"))break;return;case 7:if(t.slice_from("פ"))break;return;case 8:if(t.slice_from("צ"))break;return}t.cursor=i;break}if(t.cursor=i,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=s;break}for(t.cursor=e,e=t.cursor;;){var a=t.cursor;r:{for(;;){var f=t.cursor;if(t.bra=t.cursor,t.in_grouping(k,1456,1474)){if(t.ket=t.cursor,!t.slice_del())return;t.cursor=f;break}if(t.cursor=f,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=a;break}t.cursor=e})();var r=t.cursor;return(()=>{q=t.limit;var r=t.cursor;if(t.bra=t.cursor,t.eq_s("גע")){t.ket=t.cursor;var 
e=t.cursor,s=t.cursor;if(!t.eq_s("לט")&&(t.cursor=s,!t.eq_s("בנ"))&&(t.cursor=s,t.cursort.limit||(t.cursor=e,v=t.cursor,t.cursor=s,r=t.cursor,0==t.find_among(f)&&(t.cursor=r),e=t.cursor,t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)?q=t.cursor:(t.cursor=e,t.go_out_grouping(g,1488,1522)&&(t.cursor++,t.go_in_grouping(g,1488,1522))&&(q=t.cursor,v<=q||(q=v))))})(),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,e(),t.cursor=t.limit_backward,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}}; \ No newline at end of file diff --git a/sphinx/search/non-minified-js/arabic-stemmer.js b/sphinx/search/non-minified-js/arabic-stemmer.js new file mode 100644 index 00000000000..dbab12d81e1 --- /dev/null +++ b/sphinx/search/non-minified-js/arabic-stemmer.js @@ -0,0 +1,1612 @@ +// Generated from arabic.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var ArabicStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0640", -1, 1], + ["\u064B", -1, 1], + ["\u064C", -1, 1], + ["\u064D", -1, 1], + ["\u064E", -1, 1], + ["\u064F", -1, 1], + ["\u0650", -1, 1], + ["\u0651", -1, 1], + ["\u0652", -1, 1], + ["\u0660", -1, 2], + ["\u0661", -1, 3], + ["\u0662", -1, 4], + ["\u0663", -1, 5], + ["\u0664", -1, 6], + ["\u0665", -1, 7], + ["\u0666", -1, 8], + ["\u0667", -1, 9], + ["\u0668", -1, 10], + ["\u0669", -1, 11], + ["\uFE80", -1, 12], + ["\uFE81", -1, 16], + ["\uFE82", -1, 16], + ["\uFE83", -1, 13], + ["\uFE84", -1, 13], + ["\uFE85", -1, 17], + ["\uFE86", -1, 17], + ["\uFE87", -1, 14], + ["\uFE88", -1, 14], + ["\uFE89", -1, 15], + ["\uFE8A", -1, 15], + ["\uFE8B", -1, 15], + ["\uFE8C", -1, 15], + ["\uFE8D", -1, 18], + ["\uFE8E", -1, 18], + ["\uFE8F", -1, 19], + ["\uFE90", -1, 19], + ["\uFE91", -1, 19], + ["\uFE92", -1, 19], + ["\uFE93", -1, 20], + ["\uFE94", -1, 20], + ["\uFE95", -1, 21], + ["\uFE96", -1, 21], + ["\uFE97", -1, 21], + ["\uFE98", -1, 21], + 
["\uFE99", -1, 22], + ["\uFE9A", -1, 22], + ["\uFE9B", -1, 22], + ["\uFE9C", -1, 22], + ["\uFE9D", -1, 23], + ["\uFE9E", -1, 23], + ["\uFE9F", -1, 23], + ["\uFEA0", -1, 23], + ["\uFEA1", -1, 24], + ["\uFEA2", -1, 24], + ["\uFEA3", -1, 24], + ["\uFEA4", -1, 24], + ["\uFEA5", -1, 25], + ["\uFEA6", -1, 25], + ["\uFEA7", -1, 25], + ["\uFEA8", -1, 25], + ["\uFEA9", -1, 26], + ["\uFEAA", -1, 26], + ["\uFEAB", -1, 27], + ["\uFEAC", -1, 27], + ["\uFEAD", -1, 28], + ["\uFEAE", -1, 28], + ["\uFEAF", -1, 29], + ["\uFEB0", -1, 29], + ["\uFEB1", -1, 30], + ["\uFEB2", -1, 30], + ["\uFEB3", -1, 30], + ["\uFEB4", -1, 30], + ["\uFEB5", -1, 31], + ["\uFEB6", -1, 31], + ["\uFEB7", -1, 31], + ["\uFEB8", -1, 31], + ["\uFEB9", -1, 32], + ["\uFEBA", -1, 32], + ["\uFEBB", -1, 32], + ["\uFEBC", -1, 32], + ["\uFEBD", -1, 33], + ["\uFEBE", -1, 33], + ["\uFEBF", -1, 33], + ["\uFEC0", -1, 33], + ["\uFEC1", -1, 34], + ["\uFEC2", -1, 34], + ["\uFEC3", -1, 34], + ["\uFEC4", -1, 34], + ["\uFEC5", -1, 35], + ["\uFEC6", -1, 35], + ["\uFEC7", -1, 35], + ["\uFEC8", -1, 35], + ["\uFEC9", -1, 36], + ["\uFECA", -1, 36], + ["\uFECB", -1, 36], + ["\uFECC", -1, 36], + ["\uFECD", -1, 37], + ["\uFECE", -1, 37], + ["\uFECF", -1, 37], + ["\uFED0", -1, 37], + ["\uFED1", -1, 38], + ["\uFED2", -1, 38], + ["\uFED3", -1, 38], + ["\uFED4", -1, 38], + ["\uFED5", -1, 39], + ["\uFED6", -1, 39], + ["\uFED7", -1, 39], + ["\uFED8", -1, 39], + ["\uFED9", -1, 40], + ["\uFEDA", -1, 40], + ["\uFEDB", -1, 40], + ["\uFEDC", -1, 40], + ["\uFEDD", -1, 41], + ["\uFEDE", -1, 41], + ["\uFEDF", -1, 41], + ["\uFEE0", -1, 41], + ["\uFEE1", -1, 42], + ["\uFEE2", -1, 42], + ["\uFEE3", -1, 42], + ["\uFEE4", -1, 42], + ["\uFEE5", -1, 43], + ["\uFEE6", -1, 43], + ["\uFEE7", -1, 43], + ["\uFEE8", -1, 43], + ["\uFEE9", -1, 44], + ["\uFEEA", -1, 44], + ["\uFEEB", -1, 44], + ["\uFEEC", -1, 44], + ["\uFEED", -1, 45], + ["\uFEEE", -1, 45], + ["\uFEEF", -1, 46], + ["\uFEF0", -1, 46], + ["\uFEF1", -1, 47], + ["\uFEF2", -1, 47], + ["\uFEF3", -1, 47], 
+ ["\uFEF4", -1, 47], + ["\uFEF5", -1, 51], + ["\uFEF6", -1, 51], + ["\uFEF7", -1, 49], + ["\uFEF8", -1, 49], + ["\uFEF9", -1, 50], + ["\uFEFA", -1, 50], + ["\uFEFB", -1, 48], + ["\uFEFC", -1, 48] + ]; + + /** @const */ var a_1 = [ + ["\u0622", -1, 1], + ["\u0623", -1, 1], + ["\u0624", -1, 1], + ["\u0625", -1, 1], + ["\u0626", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["\u0622", -1, 1], + ["\u0623", -1, 1], + ["\u0624", -1, 2], + ["\u0625", -1, 1], + ["\u0626", -1, 3] + ]; + + /** @const */ var a_3 = [ + ["\u0627\u0644", -1, 2], + ["\u0628\u0627\u0644", -1, 1], + ["\u0643\u0627\u0644", -1, 1], + ["\u0644\u0644", -1, 2] + ]; + + /** @const */ var a_4 = [ + ["\u0623\u0622", -1, 2], + ["\u0623\u0623", -1, 1], + ["\u0623\u0624", -1, 1], + ["\u0623\u0625", -1, 4], + ["\u0623\u0627", -1, 3] + ]; + + /** @const */ var a_5 = [ + ["\u0641", -1, 1], + ["\u0648", -1, 1] + ]; + + /** @const */ var a_6 = [ + ["\u0627\u0644", -1, 2], + ["\u0628\u0627\u0644", -1, 1], + ["\u0643\u0627\u0644", -1, 1], + ["\u0644\u0644", -1, 2] + ]; + + /** @const */ var a_7 = [ + ["\u0628", -1, 1], + ["\u0628\u0627", 0, -1], + ["\u0628\u0628", 0, 2], + ["\u0643\u0643", -1, 3] + ]; + + /** @const */ var a_8 = [ + ["\u0633\u0623", -1, 4], + ["\u0633\u062A", -1, 2], + ["\u0633\u0646", -1, 3], + ["\u0633\u064A", -1, 1] + ]; + + /** @const */ var a_9 = [ + ["\u062A\u0633\u062A", -1, 1], + ["\u0646\u0633\u062A", -1, 1], + ["\u064A\u0633\u062A", -1, 1] + ]; + + /** @const */ var a_10 = [ + ["\u0643\u0645\u0627", -1, 3], + ["\u0647\u0645\u0627", -1, 3], + ["\u0646\u0627", -1, 2], + ["\u0647\u0627", -1, 2], + ["\u0643", -1, 1], + ["\u0643\u0645", -1, 2], + ["\u0647\u0645", -1, 2], + ["\u0647\u0646", -1, 2], + ["\u0647", -1, 1], + ["\u064A", -1, 1] + ]; + + /** @const */ var a_11 = [ + ["\u0646", -1, 1] + ]; + + /** @const */ var a_12 = [ + ["\u0627", -1, 1], + ["\u0648", -1, 1], + ["\u064A", -1, 1] + ]; + + /** @const */ var a_13 = [ + ["\u0627\u062A", -1, 1] + ]; + + /** @const */ var a_14 = [ + 
["\u062A", -1, 1] + ]; + + /** @const */ var a_15 = [ + ["\u0629", -1, 1] + ]; + + /** @const */ var a_16 = [ + ["\u064A", -1, 1] + ]; + + /** @const */ var a_17 = [ + ["\u0643\u0645\u0627", -1, 3], + ["\u0647\u0645\u0627", -1, 3], + ["\u0646\u0627", -1, 2], + ["\u0647\u0627", -1, 2], + ["\u0643", -1, 1], + ["\u0643\u0645", -1, 2], + ["\u0647\u0645", -1, 2], + ["\u0643\u0646", -1, 2], + ["\u0647\u0646", -1, 2], + ["\u0647", -1, 1], + ["\u0643\u0645\u0648", -1, 3], + ["\u0646\u064A", -1, 2] + ]; + + /** @const */ var a_18 = [ + ["\u0627", -1, 1], + ["\u062A\u0627", 0, 2], + ["\u062A\u0645\u0627", 0, 4], + ["\u0646\u0627", 0, 2], + ["\u062A", -1, 1], + ["\u0646", -1, 1], + ["\u0627\u0646", 5, 3], + ["\u062A\u0646", 5, 2], + ["\u0648\u0646", 5, 3], + ["\u064A\u0646", 5, 3], + ["\u064A", -1, 1] + ]; + + /** @const */ var a_19 = [ + ["\u0648\u0627", -1, 1], + ["\u062A\u0645", -1, 1] + ]; + + /** @const */ var a_20 = [ + ["\u0648", -1, 1], + ["\u062A\u0645\u0648", 0, 2] + ]; + + /** @const */ var a_21 = [ + ["\u0649", -1, 1] + ]; + + var /** boolean */ B_is_defined = false; + var /** boolean */ B_is_verb = false; + var /** boolean */ B_is_noun = false; + + + /** @return {boolean} */ + function r_Normalize_pre() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + lab2: { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + if (among_var == 0) + { + break lab3; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("0")) + { + return false; + } + break; + case 3: + if (!base.slice_from("1")) + { + return false; + } + break; + case 4: + if (!base.slice_from("2")) + { + return false; + } + break; + case 5: + if (!base.slice_from("3")) + { + return false; + } + break; + case 6: + 
if (!base.slice_from("4")) + { + return false; + } + break; + case 7: + if (!base.slice_from("5")) + { + return false; + } + break; + case 8: + if (!base.slice_from("6")) + { + return false; + } + break; + case 9: + if (!base.slice_from("7")) + { + return false; + } + break; + case 10: + if (!base.slice_from("8")) + { + return false; + } + break; + case 11: + if (!base.slice_from("9")) + { + return false; + } + break; + case 12: + if (!base.slice_from("\u0621")) + { + return false; + } + break; + case 13: + if (!base.slice_from("\u0623")) + { + return false; + } + break; + case 14: + if (!base.slice_from("\u0625")) + { + return false; + } + break; + case 15: + if (!base.slice_from("\u0626")) + { + return false; + } + break; + case 16: + if (!base.slice_from("\u0622")) + { + return false; + } + break; + case 17: + if (!base.slice_from("\u0624")) + { + return false; + } + break; + case 18: + if (!base.slice_from("\u0627")) + { + return false; + } + break; + case 19: + if (!base.slice_from("\u0628")) + { + return false; + } + break; + case 20: + if (!base.slice_from("\u0629")) + { + return false; + } + break; + case 21: + if (!base.slice_from("\u062A")) + { + return false; + } + break; + case 22: + if (!base.slice_from("\u062B")) + { + return false; + } + break; + case 23: + if (!base.slice_from("\u062C")) + { + return false; + } + break; + case 24: + if (!base.slice_from("\u062D")) + { + return false; + } + break; + case 25: + if (!base.slice_from("\u062E")) + { + return false; + } + break; + case 26: + if (!base.slice_from("\u062F")) + { + return false; + } + break; + case 27: + if (!base.slice_from("\u0630")) + { + return false; + } + break; + case 28: + if (!base.slice_from("\u0631")) + { + return false; + } + break; + case 29: + if (!base.slice_from("\u0632")) + { + return false; + } + break; + case 30: + if (!base.slice_from("\u0633")) + { + return false; + } + break; + case 31: + if (!base.slice_from("\u0634")) + { + return false; + } + break; + case 32: + if 
(!base.slice_from("\u0635")) + { + return false; + } + break; + case 33: + if (!base.slice_from("\u0636")) + { + return false; + } + break; + case 34: + if (!base.slice_from("\u0637")) + { + return false; + } + break; + case 35: + if (!base.slice_from("\u0638")) + { + return false; + } + break; + case 36: + if (!base.slice_from("\u0639")) + { + return false; + } + break; + case 37: + if (!base.slice_from("\u063A")) + { + return false; + } + break; + case 38: + if (!base.slice_from("\u0641")) + { + return false; + } + break; + case 39: + if (!base.slice_from("\u0642")) + { + return false; + } + break; + case 40: + if (!base.slice_from("\u0643")) + { + return false; + } + break; + case 41: + if (!base.slice_from("\u0644")) + { + return false; + } + break; + case 42: + if (!base.slice_from("\u0645")) + { + return false; + } + break; + case 43: + if (!base.slice_from("\u0646")) + { + return false; + } + break; + case 44: + if (!base.slice_from("\u0647")) + { + return false; + } + break; + case 45: + if (!base.slice_from("\u0648")) + { + return false; + } + break; + case 46: + if (!base.slice_from("\u0649")) + { + return false; + } + break; + case 47: + if (!base.slice_from("\u064A")) + { + return false; + } + break; + case 48: + if (!base.slice_from("\u0644\u0627")) + { + return false; + } + break; + case 49: + if (!base.slice_from("\u0644\u0623")) + { + return false; + } + break; + case 50: + if (!base.slice_from("\u0644\u0625")) + { + return false; + } + break; + case 51: + if (!base.slice_from("\u0644\u0622")) + { + return false; + } + break; + } + break lab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + break lab1; + } + base.cursor++; + } + continue; + } + base.cursor = v_2; + break; + } + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_Normalize_post() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.limit_backward = base.cursor; base.cursor = base.limit; 
+ base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_from("\u0621")) + { + return false; + } + base.cursor = base.limit_backward; + } + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + while(true) + { + /** @const */ var /** number */ v_3 = base.cursor; + lab2: { + lab3: { + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { + base.bra = base.cursor; + among_var = base.find_among(a_2); + if (among_var == 0) + { + break lab4; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u0627")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u0648")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u064A")) + { + return false; + } + break; + } + break lab3; + } + base.cursor = v_4; + if (base.cursor >= base.limit) + { + break lab2; + } + base.cursor++; + } + continue; + } + base.cursor = v_3; + break; + } + } + base.cursor = v_2; + return true; + }; + + /** @return {boolean} */ + function r_Checks1() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_3); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (base.current.length <= 4) + { + return false; + } + B_is_noun = true; + B_is_verb = false; + B_is_defined = true; + break; + case 2: + if (base.current.length <= 3) + { + return false; + } + B_is_noun = true; + B_is_verb = false; + B_is_defined = true; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step1() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_4); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0623")) + { + return false; + } + break; + case 2: + if 
(base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0622")) + { + return false; + } + break; + case 3: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0627")) + { + return false; + } + break; + case 4: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0625")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step2() { + base.bra = base.cursor; + if (base.find_among(a_5) == 0) + { + return false; + } + base.ket = base.cursor; + if (base.current.length <= 3) + { + return false; + } + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!(base.eq_s("\u0627"))) + { + break lab0; + } + return false; + } + base.cursor = v_1; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step3a_Noun() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_6); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (base.current.length <= 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step3b_Noun() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_7); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0628")) + { + return false; + } + break; + case 3: + if (base.current.length <= 3) + { + return false; + } + if (!base.slice_from("\u0643")) 
+ { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step3_Verb() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_8); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_from("\u064A")) + { + return false; + } + break; + case 2: + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_from("\u062A")) + { + return false; + } + break; + case 3: + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_from("\u0646")) + { + return false; + } + break; + case 4: + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_from("\u0623")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Prefix_Step4_Verb() { + base.bra = base.cursor; + if (base.find_among(a_9) == 0) + { + return false; + } + base.ket = base.cursor; + if (base.current.length <= 4) + { + return false; + } + B_is_verb = true; + B_is_noun = false; + if (!base.slice_from("\u0627\u0633\u062A")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step1a() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_10); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (base.current.length < 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length < 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (base.current.length < 6) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step1b() { + base.ket = base.cursor; + if (base.find_among_b(a_11) == 
0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length <= 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step2a() { + base.ket = base.cursor; + if (base.find_among_b(a_12) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length <= 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step2b() { + base.ket = base.cursor; + if (base.find_among_b(a_13) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length < 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step2c1() { + base.ket = base.cursor; + if (base.find_among_b(a_14) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length < 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step2c2() { + base.ket = base.cursor; + if (base.find_among_b(a_15) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length < 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Noun_Step3() { + base.ket = base.cursor; + if (base.find_among_b(a_16) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length < 3) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Verb_Step1() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_17); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (base.current.length < 4) + { + return 
false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length < 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (base.current.length < 6) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Verb_Step2a() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_18); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (base.current.length < 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length < 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (base.current.length <= 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 4: + if (base.current.length < 6) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Verb_Step2b() { + base.ket = base.cursor; + if (base.find_among_b(a_19) == 0) + { + return false; + } + base.bra = base.cursor; + if (base.current.length < 5) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Suffix_Verb_Step2c() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_20); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (base.current.length < 4) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (base.current.length < 6) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function 
r_Suffix_All_alef_maqsura() { + base.ket = base.cursor; + if (base.find_among_b(a_21) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_from("\u064A")) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + B_is_noun = true; + B_is_verb = true; + B_is_defined = false; + /** @const */ var /** number */ v_1 = base.cursor; + r_Checks1(); + base.cursor = v_1; + r_Normalize_pre(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + lab1: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!B_is_verb) + { + break lab2; + } + lab3: { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab4: { + { + var v_5 = 1; + while(true) + { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab5: { + if (!r_Suffix_Verb_Step1()) + { + break lab5; + } + v_5--; + continue; + } + base.cursor = base.limit - v_6; + break; + } + if (v_5 > 0) + { + break lab4; + } + } + lab6: { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab7: { + if (!r_Suffix_Verb_Step2a()) + { + break lab7; + } + break lab6; + } + base.cursor = base.limit - v_7; + lab8: { + if (!r_Suffix_Verb_Step2c()) + { + break lab8; + } + break lab6; + } + base.cursor = base.limit - v_7; + if (base.cursor <= base.limit_backward) + { + break lab4; + } + base.cursor--; + } + break lab3; + } + base.cursor = base.limit - v_4; + lab9: { + if (!r_Suffix_Verb_Step2b()) + { + break lab9; + } + break lab3; + } + base.cursor = base.limit - v_4; + if (!r_Suffix_Verb_Step2a()) + { + break lab2; + } + } + break lab1; + } + base.cursor = base.limit - v_3; + lab10: { + if (!B_is_noun) + { + break lab10; + } + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab11: { + lab12: { + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + lab13: { + if (!r_Suffix_Noun_Step2c2()) + { + 
break lab13; + } + break lab12; + } + base.cursor = base.limit - v_9; + lab14: { + lab15: { + if (!B_is_defined) + { + break lab15; + } + break lab14; + } + if (!r_Suffix_Noun_Step1a()) + { + break lab14; + } + lab16: { + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + lab17: { + if (!r_Suffix_Noun_Step2a()) + { + break lab17; + } + break lab16; + } + base.cursor = base.limit - v_10; + lab18: { + if (!r_Suffix_Noun_Step2b()) + { + break lab18; + } + break lab16; + } + base.cursor = base.limit - v_10; + lab19: { + if (!r_Suffix_Noun_Step2c1()) + { + break lab19; + } + break lab16; + } + base.cursor = base.limit - v_10; + if (base.cursor <= base.limit_backward) + { + break lab14; + } + base.cursor--; + } + break lab12; + } + base.cursor = base.limit - v_9; + lab20: { + if (!r_Suffix_Noun_Step1b()) + { + break lab20; + } + lab21: { + /** @const */ var /** number */ v_11 = base.limit - base.cursor; + lab22: { + if (!r_Suffix_Noun_Step2a()) + { + break lab22; + } + break lab21; + } + base.cursor = base.limit - v_11; + lab23: { + if (!r_Suffix_Noun_Step2b()) + { + break lab23; + } + break lab21; + } + base.cursor = base.limit - v_11; + if (!r_Suffix_Noun_Step2c1()) + { + break lab20; + } + } + break lab12; + } + base.cursor = base.limit - v_9; + lab24: { + lab25: { + if (!B_is_defined) + { + break lab25; + } + break lab24; + } + if (!r_Suffix_Noun_Step2a()) + { + break lab24; + } + break lab12; + } + base.cursor = base.limit - v_9; + if (!r_Suffix_Noun_Step2b()) + { + base.cursor = base.limit - v_8; + break lab11; + } + } + } + if (!r_Suffix_Noun_Step3()) + { + break lab10; + } + break lab1; + } + base.cursor = base.limit - v_3; + if (!r_Suffix_All_alef_maqsura()) + { + break lab0; + } + } + } + base.cursor = base.limit - v_2; + base.cursor = base.limit_backward; + /** @const */ var /** number */ v_12 = base.cursor; + lab26: { + /** @const */ var /** number */ v_13 = base.cursor; + lab27: { + if (!r_Prefix_Step1()) + { + base.cursor = v_13; + break 
lab27; + } + } + /** @const */ var /** number */ v_14 = base.cursor; + lab28: { + if (!r_Prefix_Step2()) + { + base.cursor = v_14; + break lab28; + } + } + lab29: { + /** @const */ var /** number */ v_15 = base.cursor; + lab30: { + if (!r_Prefix_Step3a_Noun()) + { + break lab30; + } + break lab29; + } + base.cursor = v_15; + lab31: { + if (!B_is_noun) + { + break lab31; + } + if (!r_Prefix_Step3b_Noun()) + { + break lab31; + } + break lab29; + } + base.cursor = v_15; + if (!B_is_verb) + { + break lab26; + } + /** @const */ var /** number */ v_16 = base.cursor; + lab32: { + if (!r_Prefix_Step3_Verb()) + { + base.cursor = v_16; + break lab32; + } + } + if (!r_Prefix_Step4_Verb()) + { + break lab26; + } + } + } + base.cursor = v_12; + r_Normalize_post(); + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/armenian-stemmer.js b/sphinx/search/non-minified-js/armenian-stemmer.js new file mode 100644 index 00000000000..915146dbd0a --- /dev/null +++ b/sphinx/search/non-minified-js/armenian-stemmer.js @@ -0,0 +1,350 @@ +// Generated from armenian.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var ArmenianStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0580\u0578\u0580\u0564", -1, 1], + ["\u0565\u0580\u0578\u0580\u0564", 0, 1], + ["\u0561\u056C\u056B", -1, 1], + ["\u0561\u056F\u056B", -1, 1], + ["\u0578\u0580\u0561\u056F", -1, 1], + ["\u0565\u0572", -1, 1], + ["\u0561\u056F\u0561\u0576", -1, 1], + ["\u0561\u0580\u0561\u0576", -1, 1], + ["\u0565\u0576", -1, 1], + ["\u0565\u056F\u0565\u0576", 8, 1], + ["\u0565\u0580\u0565\u0576", 8, 1], + ["\u0578\u0580\u0567\u0576", -1, 1], + ["\u056B\u0576", -1, 1], + ["\u0563\u056B\u0576", 12, 1], + ["\u0578\u057E\u056B\u0576", 12, 1], + ["\u056C\u0561\u0575\u0576", -1, 1], + ["\u057E\u0578\u0582\u0576", -1, 1], + 
["\u057A\u0565\u057D", -1, 1], + ["\u056B\u057E", -1, 1], + ["\u0561\u057F", -1, 1], + ["\u0561\u057E\u0565\u057F", -1, 1], + ["\u056F\u0578\u057F", -1, 1], + ["\u0562\u0561\u0580", -1, 1] + ]; + + /** @const */ var a_1 = [ + ["\u0561", -1, 1], + ["\u0561\u0581\u0561", 0, 1], + ["\u0565\u0581\u0561", 0, 1], + ["\u057E\u0565", -1, 1], + ["\u0561\u0581\u0580\u056B", -1, 1], + ["\u0561\u0581\u056B", -1, 1], + ["\u0565\u0581\u056B", -1, 1], + ["\u057E\u0565\u0581\u056B", 6, 1], + ["\u0561\u056C", -1, 1], + ["\u0568\u0561\u056C", 8, 1], + ["\u0561\u0576\u0561\u056C", 8, 1], + ["\u0565\u0576\u0561\u056C", 8, 1], + ["\u0561\u0581\u0576\u0561\u056C", 8, 1], + ["\u0565\u056C", -1, 1], + ["\u0568\u0565\u056C", 13, 1], + ["\u0576\u0565\u056C", 13, 1], + ["\u0581\u0576\u0565\u056C", 15, 1], + ["\u0565\u0581\u0576\u0565\u056C", 16, 1], + ["\u0579\u0565\u056C", 13, 1], + ["\u057E\u0565\u056C", 13, 1], + ["\u0561\u0581\u057E\u0565\u056C", 19, 1], + ["\u0565\u0581\u057E\u0565\u056C", 19, 1], + ["\u057F\u0565\u056C", 13, 1], + ["\u0561\u057F\u0565\u056C", 22, 1], + ["\u0578\u057F\u0565\u056C", 22, 1], + ["\u056F\u0578\u057F\u0565\u056C", 24, 1], + ["\u057E\u0561\u056E", -1, 1], + ["\u0578\u0582\u0574", -1, 1], + ["\u057E\u0578\u0582\u0574", 27, 1], + ["\u0561\u0576", -1, 1], + ["\u0581\u0561\u0576", 29, 1], + ["\u0561\u0581\u0561\u0576", 30, 1], + ["\u0561\u0581\u0580\u056B\u0576", -1, 1], + ["\u0561\u0581\u056B\u0576", -1, 1], + ["\u0565\u0581\u056B\u0576", -1, 1], + ["\u057E\u0565\u0581\u056B\u0576", 34, 1], + ["\u0561\u056C\u056B\u057D", -1, 1], + ["\u0565\u056C\u056B\u057D", -1, 1], + ["\u0561\u057E", -1, 1], + ["\u0561\u0581\u0561\u057E", 38, 1], + ["\u0565\u0581\u0561\u057E", 38, 1], + ["\u0561\u056C\u0578\u057E", -1, 1], + ["\u0565\u056C\u0578\u057E", -1, 1], + ["\u0561\u0580", -1, 1], + ["\u0561\u0581\u0561\u0580", 43, 1], + ["\u0565\u0581\u0561\u0580", 43, 1], + ["\u0561\u0581\u0580\u056B\u0580", -1, 1], + ["\u0561\u0581\u056B\u0580", -1, 1], + ["\u0565\u0581\u056B\u0580", 
-1, 1], + ["\u057E\u0565\u0581\u056B\u0580", 48, 1], + ["\u0561\u0581", -1, 1], + ["\u0565\u0581", -1, 1], + ["\u0561\u0581\u0580\u0565\u0581", 51, 1], + ["\u0561\u056C\u0578\u0582\u0581", -1, 1], + ["\u0565\u056C\u0578\u0582\u0581", -1, 1], + ["\u0561\u056C\u0578\u0582", -1, 1], + ["\u0565\u056C\u0578\u0582", -1, 1], + ["\u0561\u0584", -1, 1], + ["\u0581\u0561\u0584", 57, 1], + ["\u0561\u0581\u0561\u0584", 58, 1], + ["\u0561\u0581\u0580\u056B\u0584", -1, 1], + ["\u0561\u0581\u056B\u0584", -1, 1], + ["\u0565\u0581\u056B\u0584", -1, 1], + ["\u057E\u0565\u0581\u056B\u0584", 62, 1], + ["\u0561\u0576\u0584", -1, 1], + ["\u0581\u0561\u0576\u0584", 64, 1], + ["\u0561\u0581\u0561\u0576\u0584", 65, 1], + ["\u0561\u0581\u0580\u056B\u0576\u0584", -1, 1], + ["\u0561\u0581\u056B\u0576\u0584", -1, 1], + ["\u0565\u0581\u056B\u0576\u0584", -1, 1], + ["\u057E\u0565\u0581\u056B\u0576\u0584", 69, 1] + ]; + + /** @const */ var a_2 = [ + ["\u0578\u0580\u0564", -1, 1], + ["\u0578\u0582\u0575\u0569", -1, 1], + ["\u0578\u0582\u0570\u056B", -1, 1], + ["\u0581\u056B", -1, 1], + ["\u056B\u056C", -1, 1], + ["\u0561\u056F", -1, 1], + ["\u0575\u0561\u056F", 5, 1], + ["\u0561\u0576\u0561\u056F", 5, 1], + ["\u056B\u056F", -1, 1], + ["\u0578\u0582\u056F", -1, 1], + ["\u0561\u0576", -1, 1], + ["\u057A\u0561\u0576", 10, 1], + ["\u057D\u057F\u0561\u0576", 10, 1], + ["\u0561\u0580\u0561\u0576", 10, 1], + ["\u0565\u0572\u0567\u0576", -1, 1], + ["\u0575\u0578\u0582\u0576", -1, 1], + ["\u0578\u0582\u0569\u0575\u0578\u0582\u0576", 15, 1], + ["\u0561\u056E\u0578", -1, 1], + ["\u056B\u0579", -1, 1], + ["\u0578\u0582\u057D", -1, 1], + ["\u0578\u0582\u057D\u057F", -1, 1], + ["\u0563\u0561\u0580", -1, 1], + ["\u057E\u0578\u0580", -1, 1], + ["\u0561\u057E\u0578\u0580", 22, 1], + ["\u0578\u0581", -1, 1], + ["\u0561\u0576\u0585\u0581", -1, 1], + ["\u0578\u0582", -1, 1], + ["\u0584", -1, 1], + ["\u0579\u0565\u0584", 27, 1], + ["\u056B\u0584", 27, 1], + ["\u0561\u056C\u056B\u0584", 29, 1], + 
["\u0561\u0576\u056B\u0584", 29, 1], + ["\u057E\u0561\u056E\u0584", 27, 1], + ["\u0578\u0582\u0575\u0584", 27, 1], + ["\u0565\u0576\u0584", 27, 1], + ["\u0578\u0576\u0584", 27, 1], + ["\u0578\u0582\u0576\u0584", 27, 1], + ["\u0574\u0578\u0582\u0576\u0584", 36, 1], + ["\u056B\u0579\u0584", 27, 1], + ["\u0561\u0580\u0584", 27, 1] + ]; + + /** @const */ var a_3 = [ + ["\u057D\u0561", -1, 1], + ["\u057E\u0561", -1, 1], + ["\u0561\u0574\u0562", -1, 1], + ["\u0564", -1, 1], + ["\u0561\u0576\u0564", 3, 1], + ["\u0578\u0582\u0569\u0575\u0561\u0576\u0564", 4, 1], + ["\u057E\u0561\u0576\u0564", 4, 1], + ["\u0578\u057B\u0564", 3, 1], + ["\u0565\u0580\u0564", 3, 1], + ["\u0576\u0565\u0580\u0564", 8, 1], + ["\u0578\u0582\u0564", 3, 1], + ["\u0568", -1, 1], + ["\u0561\u0576\u0568", 11, 1], + ["\u0578\u0582\u0569\u0575\u0561\u0576\u0568", 12, 1], + ["\u057E\u0561\u0576\u0568", 12, 1], + ["\u0578\u057B\u0568", 11, 1], + ["\u0565\u0580\u0568", 11, 1], + ["\u0576\u0565\u0580\u0568", 16, 1], + ["\u056B", -1, 1], + ["\u057E\u056B", 18, 1], + ["\u0565\u0580\u056B", 18, 1], + ["\u0576\u0565\u0580\u056B", 20, 1], + ["\u0561\u0576\u0578\u0582\u0574", -1, 1], + ["\u0565\u0580\u0578\u0582\u0574", -1, 1], + ["\u0576\u0565\u0580\u0578\u0582\u0574", 23, 1], + ["\u0576", -1, 1], + ["\u0561\u0576", 25, 1], + ["\u0578\u0582\u0569\u0575\u0561\u0576", 26, 1], + ["\u057E\u0561\u0576", 26, 1], + ["\u056B\u0576", 25, 1], + ["\u0565\u0580\u056B\u0576", 29, 1], + ["\u0576\u0565\u0580\u056B\u0576", 30, 1], + ["\u0578\u0582\u0569\u0575\u0561\u0576\u0576", 25, 1], + ["\u0565\u0580\u0576", 25, 1], + ["\u0576\u0565\u0580\u0576", 33, 1], + ["\u0578\u0582\u0576", 25, 1], + ["\u0578\u057B", -1, 1], + ["\u0578\u0582\u0569\u0575\u0561\u0576\u057D", -1, 1], + ["\u057E\u0561\u0576\u057D", -1, 1], + ["\u0578\u057B\u057D", -1, 1], + ["\u0578\u057E", -1, 1], + ["\u0561\u0576\u0578\u057E", 40, 1], + ["\u057E\u0578\u057E", 40, 1], + ["\u0565\u0580\u0578\u057E", 40, 1], + ["\u0576\u0565\u0580\u0578\u057E", 43, 1], + 
["\u0565\u0580", -1, 1], + ["\u0576\u0565\u0580", 45, 1], + ["\u0581", -1, 1], + ["\u056B\u0581", 47, 1], + ["\u057E\u0561\u0576\u056B\u0581", 48, 1], + ["\u0578\u057B\u056B\u0581", 48, 1], + ["\u057E\u056B\u0581", 48, 1], + ["\u0565\u0580\u056B\u0581", 48, 1], + ["\u0576\u0565\u0580\u056B\u0581", 52, 1], + ["\u0581\u056B\u0581", 48, 1], + ["\u0578\u0581", 47, 1], + ["\u0578\u0582\u0581", 47, 1] + ]; + + /** @const */ var /** Array */ g_v = [209, 4, 128, 0, 18]; + + var /** number */ I_p2 = 0; + var /** number */ I_pV = 0; + + + /** @return {boolean} */ + function r_mark_regions() { + I_pV = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!base.go_out_grouping(g_v, 1377, 1413)) + { + break lab0; + } + base.cursor++; + I_pV = base.cursor; + if (!base.go_in_grouping(g_v, 1377, 1413)) + { + break lab0; + } + base.cursor++; + if (!base.go_out_grouping(g_v, 1377, 1413)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 1377, 1413)) + { + break lab0; + } + base.cursor++; + I_p2 = base.cursor; + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_adjective() { + base.ket = base.cursor; + if (base.find_among_b(a_0) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_verb() { + base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_noun() { + base.ket = base.cursor; + if (base.find_among_b(a_2) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_ending() { + base.ket = base.cursor; + if 
(base.find_among_b(a_3) == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + if (base.cursor < I_pV) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_pV; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_ending(); + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_verb(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_adjective(); + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + r_noun(); + base.cursor = base.limit - v_5; + base.limit_backward = v_1; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/base-stemmer.js b/sphinx/search/non-minified-js/base-stemmer.js index ca6cca156a3..8cf2d585582 100644 --- a/sphinx/search/non-minified-js/base-stemmer.js +++ b/sphinx/search/non-minified-js/base-stemmer.js @@ -1,5 +1,18 @@ +// @ts-check + /**@constructor*/ -BaseStemmer = function() { +const BaseStemmer = function() { + /** @protected */ + this.current = ''; + this.cursor = 0; + this.limit = 0; + this.limit_backward = 0; + this.bra = 0; + this.ket = 0; + + /** + * @param {string} value + */ this.setCurrent = function(value) { this.current = value; this.cursor = 0; @@ -9,11 +22,18 @@ BaseStemmer = function() { this.ket = this.limit; }; + /** + * @return {string} + */ this.getCurrent = function() { return this.current; }; + /** + * @param {BaseStemmer} other + */ this.copy_from = 
function(other) { + /** @protected */ this.current = other.current; this.cursor = other.cursor; this.limit = other.limit; @@ -22,7 +42,14 @@ BaseStemmer = function() { this.ket = other.ket; }; + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ this.in_grouping = function(s, min, max) { + /** @protected */ if (this.cursor >= this.limit) return false; var ch = this.current.charCodeAt(this.cursor); if (ch > max || ch < min) return false; @@ -32,7 +59,34 @@ BaseStemmer = function() { return true; }; + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ + this.go_in_grouping = function(s, min, max) { + /** @protected */ + while (this.cursor < this.limit) { + var ch = this.current.charCodeAt(this.cursor); + if (ch > max || ch < min) + return true; + ch -= min; + if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0) + return true; + this.cursor++; + } + return false; + }; + + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ this.in_grouping_b = function(s, min, max) { + /** @protected */ if (this.cursor <= this.limit_backward) return false; var ch = this.current.charCodeAt(this.cursor - 1); if (ch > max || ch < min) return false; @@ -42,7 +96,32 @@ BaseStemmer = function() { return true; }; + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ + this.go_in_grouping_b = function(s, min, max) { + /** @protected */ + while (this.cursor > this.limit_backward) { + var ch = this.current.charCodeAt(this.cursor - 1); + if (ch > max || ch < min) return true; + ch -= min; + if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0) return true; + this.cursor--; + } + return false; + }; + + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ this.out_grouping = function(s, min, max) { + /** @protected */ if (this.cursor >= this.limit) return 
false; var ch = this.current.charCodeAt(this.cursor); if (ch > max || ch < min) { @@ -57,7 +136,35 @@ BaseStemmer = function() { return false; }; + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ + this.go_out_grouping = function(s, min, max) { + /** @protected */ + while (this.cursor < this.limit) { + var ch = this.current.charCodeAt(this.cursor); + if (ch <= max && ch >= min) { + ch -= min; + if ((s[ch >>> 3] & (0X1 << (ch & 0x7))) != 0) { + return true; + } + } + this.cursor++; + } + return false; + }; + + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ this.out_grouping_b = function(s, min, max) { + /** @protected */ if (this.cursor <= this.limit_backward) return false; var ch = this.current.charCodeAt(this.cursor - 1); if (ch > max || ch < min) { @@ -72,8 +179,34 @@ BaseStemmer = function() { return false; }; + /** + * @param {number[]} s + * @param {number} min + * @param {number} max + * @return {boolean} + */ + this.go_out_grouping_b = function(s, min, max) { + /** @protected */ + while (this.cursor > this.limit_backward) { + var ch = this.current.charCodeAt(this.cursor - 1); + if (ch <= max && ch >= min) { + ch -= min; + if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) != 0) { + return true; + } + } + this.cursor--; + } + return false; + }; + + /** + * @param {string} s + * @return {boolean} + */ this.eq_s = function(s) { + /** @protected */ if (this.limit - this.cursor < s.length) return false; if (this.current.slice(this.cursor, this.cursor + s.length) != s) { @@ -83,8 +216,13 @@ BaseStemmer = function() { return true; }; + /** + * @param {string} s + * @return {boolean} + */ this.eq_s_b = function(s) { + /** @protected */ if (this.cursor - this.limit_backward < s.length) return false; if (this.current.slice(this.cursor - s.length, this.cursor) != s) { @@ -94,8 +232,13 @@ BaseStemmer = function() { return true; }; - /** @return {number} */ this.find_among 
= function(v) + /** + * @param {Among[]} v + * @return {number} + */ + this.find_among = function(v) { + /** @protected */ var i = 0; var j = v.length; @@ -165,8 +308,13 @@ BaseStemmer = function() { }; // find_among_b is for backwards processing. Same comments apply + /** + * @param {Among[]} v + * @return {number} + */ this.find_among_b = function(v) { + /** @protected */ var i = 0; var j = v.length @@ -232,8 +380,15 @@ BaseStemmer = function() { /* to replace chars between c_bra and c_ket in this.current by the * chars in s. */ + /** + * @param {number} c_bra + * @param {number} c_ket + * @param {string} s + * @return {number} + */ this.replace_s = function(c_bra, c_ket, s) { + /** @protected */ var adjustment = s.length - (c_ket - c_bra); this.current = this.current.slice(0, c_bra) + s + this.current.slice(c_ket); this.limit += adjustment; @@ -242,8 +397,12 @@ BaseStemmer = function() { return adjustment; }; + /** + * @return {boolean} + */ this.slice_check = function() { + /** @protected */ if (this.bra < 0 || this.bra > this.ket || this.ket > this.limit || @@ -254,8 +413,13 @@ BaseStemmer = function() { return true; }; + /** + * @param {number} c_bra + * @return {boolean} + */ this.slice_from = function(s) { + /** @protected */ var result = false; if (this.slice_check()) { @@ -265,20 +429,34 @@ BaseStemmer = function() { return result; }; + /** + * @return {boolean} + */ this.slice_del = function() { + /** @protected */ return this.slice_from(""); }; + /** + * @param {number} c_bra + * @param {number} c_ket + * @param {string} s + */ this.insert = function(c_bra, c_ket, s) { + /** @protected */ var adjustment = this.replace_s(c_bra, c_ket, s); if (c_bra <= this.bra) this.bra += adjustment; if (c_bra <= this.ket) this.ket += adjustment; }; + /** + * @return {string} + */ this.slice_to = function() { + /** @protected */ var result = ''; if (this.slice_check()) { @@ -287,8 +465,12 @@ BaseStemmer = function() { return result; }; + /** + * @return {string} + */ 
this.assign_to = function() { + /** @protected */ return this.current.slice(0, this.limit); }; }; diff --git a/sphinx/search/non-minified-js/basque-stemmer.js b/sphinx/search/non-minified-js/basque-stemmer.js new file mode 100644 index 00000000000..5ed3a26af26 --- /dev/null +++ b/sphinx/search/non-minified-js/basque-stemmer.js @@ -0,0 +1,736 @@ +// Generated from basque.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var BasqueStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["idea", -1, 1], + ["bidea", 0, 1], + ["kidea", 0, 1], + ["pidea", 0, 1], + ["kundea", -1, 1], + ["galea", -1, 1], + ["tailea", -1, 1], + ["tzailea", -1, 1], + ["gunea", -1, 1], + ["kunea", -1, 1], + ["tzaga", -1, 1], + ["gaia", -1, 1], + ["aldia", -1, 1], + ["taldia", 12, 1], + ["karia", -1, 1], + ["garria", -1, 2], + ["karria", -1, 1], + ["ka", -1, 1], + ["tzaka", 17, 1], + ["la", -1, 1], + ["mena", -1, 1], + ["pena", -1, 1], + ["kina", -1, 1], + ["ezina", -1, 1], + ["tezina", 23, 1], + ["kuna", -1, 1], + ["tuna", -1, 1], + ["kizuna", -1, 1], + ["era", -1, 1], + ["bera", 28, 1], + ["arabera", 29, -1], + ["kera", 28, 1], + ["pera", 28, 1], + ["orra", -1, 1], + ["korra", 33, 1], + ["dura", -1, 1], + ["gura", -1, 1], + ["kura", -1, 1], + ["tura", -1, 1], + ["eta", -1, 1], + ["keta", 39, 1], + ["gailua", -1, 1], + ["eza", -1, 1], + ["erreza", 42, 1], + ["tza", -1, 2], + ["gaitza", 44, 1], + ["kaitza", 44, 1], + ["kuntza", 44, 1], + ["ide", -1, 1], + ["bide", 48, 1], + ["kide", 48, 1], + ["pide", 48, 1], + ["kunde", -1, 1], + ["tzake", -1, 1], + ["tzeke", -1, 1], + ["le", -1, 1], + ["gale", 55, 1], + ["taile", 55, 1], + ["tzaile", 55, 1], + ["gune", -1, 1], + ["kune", -1, 1], + ["tze", -1, 1], + ["atze", 61, 1], + ["gai", -1, 1], + ["aldi", -1, 1], + ["taldi", 64, 1], + ["ki", -1, 1], + ["ari", -1, 1], + ["kari", 67, 1], + ["lari", 67, 1], + ["tari", 67, 1], + ["etari", 70, 1], + ["garri", -1, 2], + ["karri", -1, 1], + ["arazi", -1, 1], 
+ ["tarazi", 74, 1], + ["an", -1, 1], + ["ean", 76, 1], + ["rean", 77, 1], + ["kan", 76, 1], + ["etan", 76, 1], + ["atseden", -1, -1], + ["men", -1, 1], + ["pen", -1, 1], + ["kin", -1, 1], + ["rekin", 84, 1], + ["ezin", -1, 1], + ["tezin", 86, 1], + ["tun", -1, 1], + ["kizun", -1, 1], + ["go", -1, 1], + ["ago", 90, 1], + ["tio", -1, 1], + ["dako", -1, 1], + ["or", -1, 1], + ["kor", 94, 1], + ["tzat", -1, 1], + ["du", -1, 1], + ["gailu", -1, 1], + ["tu", -1, 1], + ["atu", 99, 1], + ["aldatu", 100, 1], + ["tatu", 100, 1], + ["baditu", 99, -1], + ["ez", -1, 1], + ["errez", 104, 1], + ["tzez", 104, 1], + ["gaitz", -1, 1], + ["kaitz", -1, 1] + ]; + + /** @const */ var a_1 = [ + ["ada", -1, 1], + ["kada", 0, 1], + ["anda", -1, 1], + ["denda", -1, 1], + ["gabea", -1, 1], + ["kabea", -1, 1], + ["aldea", -1, 1], + ["kaldea", 6, 1], + ["taldea", 6, 1], + ["ordea", -1, 1], + ["zalea", -1, 1], + ["tzalea", 10, 1], + ["gilea", -1, 1], + ["emea", -1, 1], + ["kumea", -1, 1], + ["nea", -1, 1], + ["enea", 15, 1], + ["zionea", 15, 1], + ["unea", 15, 1], + ["gunea", 18, 1], + ["pea", -1, 1], + ["aurrea", -1, 1], + ["tea", -1, 1], + ["kotea", 22, 1], + ["artea", 22, 1], + ["ostea", 22, 1], + ["etxea", -1, 1], + ["ga", -1, 1], + ["anga", 27, 1], + ["gaia", -1, 1], + ["aldia", -1, 1], + ["taldia", 30, 1], + ["handia", -1, 1], + ["mendia", -1, 1], + ["geia", -1, 1], + ["egia", -1, 1], + ["degia", 35, 1], + ["tegia", 35, 1], + ["nahia", -1, 1], + ["ohia", -1, 1], + ["kia", -1, 1], + ["tokia", 40, 1], + ["oia", -1, 1], + ["koia", 42, 1], + ["aria", -1, 1], + ["karia", 44, 1], + ["laria", 44, 1], + ["taria", 44, 1], + ["eria", -1, 1], + ["keria", 48, 1], + ["teria", 48, 1], + ["garria", -1, 2], + ["larria", -1, 1], + ["kirria", -1, 1], + ["duria", -1, 1], + ["asia", -1, 1], + ["tia", -1, 1], + ["ezia", -1, 1], + ["bizia", -1, 1], + ["ontzia", -1, 1], + ["ka", -1, 1], + ["joka", 60, 3], + ["aurka", 60, -1], + ["ska", 60, 1], + ["xka", 60, 1], + ["zka", 60, 1], + ["gibela", -1, 1], + ["gela", 
-1, 1], + ["kaila", -1, 1], + ["skila", -1, 1], + ["tila", -1, 1], + ["ola", -1, 1], + ["na", -1, 1], + ["kana", 72, 1], + ["ena", 72, 1], + ["garrena", 74, 1], + ["gerrena", 74, 1], + ["urrena", 74, 1], + ["zaina", 72, 1], + ["tzaina", 78, 1], + ["kina", 72, 1], + ["mina", 72, 1], + ["garna", 72, 1], + ["una", 72, 1], + ["duna", 83, 1], + ["asuna", 83, 1], + ["tasuna", 85, 1], + ["ondoa", -1, 1], + ["kondoa", 87, 1], + ["ngoa", -1, 1], + ["zioa", -1, 1], + ["koa", -1, 1], + ["takoa", 91, 1], + ["zkoa", 91, 1], + ["noa", -1, 1], + ["zinoa", 94, 1], + ["aroa", -1, 1], + ["taroa", 96, 1], + ["zaroa", 96, 1], + ["eroa", -1, 1], + ["oroa", -1, 1], + ["osoa", -1, 1], + ["toa", -1, 1], + ["ttoa", 102, 1], + ["ztoa", 102, 1], + ["txoa", -1, 1], + ["tzoa", -1, 1], + ["\u00F1oa", -1, 1], + ["ra", -1, 1], + ["ara", 108, 1], + ["dara", 109, 1], + ["liara", 109, 1], + ["tiara", 109, 1], + ["tara", 109, 1], + ["etara", 113, 1], + ["tzara", 109, 1], + ["bera", 108, 1], + ["kera", 108, 1], + ["pera", 108, 1], + ["ora", 108, 2], + ["tzarra", 108, 1], + ["korra", 108, 1], + ["tra", 108, 1], + ["sa", -1, 1], + ["osa", 123, 1], + ["ta", -1, 1], + ["eta", 125, 1], + ["keta", 126, 1], + ["sta", 125, 1], + ["dua", -1, 1], + ["mendua", 129, 1], + ["ordua", 129, 1], + ["lekua", -1, 1], + ["burua", -1, 1], + ["durua", -1, 1], + ["tsua", -1, 1], + ["tua", -1, 1], + ["mentua", 136, 1], + ["estua", 136, 1], + ["txua", -1, 1], + ["zua", -1, 1], + ["tzua", 140, 1], + ["za", -1, 1], + ["eza", 142, 1], + ["eroza", 142, 1], + ["tza", 142, 2], + ["koitza", 145, 1], + ["antza", 145, 1], + ["gintza", 145, 1], + ["kintza", 145, 1], + ["kuntza", 145, 1], + ["gabe", -1, 1], + ["kabe", -1, 1], + ["kide", -1, 1], + ["alde", -1, 1], + ["kalde", 154, 1], + ["talde", 154, 1], + ["orde", -1, 1], + ["ge", -1, 1], + ["zale", -1, 1], + ["tzale", 159, 1], + ["gile", -1, 1], + ["eme", -1, 1], + ["kume", -1, 1], + ["ne", -1, 1], + ["zione", 164, 1], + ["une", 164, 1], + ["gune", 166, 1], + ["pe", -1, 1], + 
["aurre", -1, 1], + ["te", -1, 1], + ["kote", 170, 1], + ["arte", 170, 1], + ["oste", 170, 1], + ["etxe", -1, 1], + ["gai", -1, 1], + ["di", -1, 1], + ["aldi", 176, 1], + ["taldi", 177, 1], + ["geldi", 176, -1], + ["handi", 176, 1], + ["mendi", 176, 1], + ["gei", -1, 1], + ["egi", -1, 1], + ["degi", 183, 1], + ["tegi", 183, 1], + ["nahi", -1, 1], + ["ohi", -1, 1], + ["ki", -1, 1], + ["toki", 188, 1], + ["oi", -1, 1], + ["goi", 190, 1], + ["koi", 190, 1], + ["ari", -1, 1], + ["kari", 193, 1], + ["lari", 193, 1], + ["tari", 193, 1], + ["garri", -1, 2], + ["larri", -1, 1], + ["kirri", -1, 1], + ["duri", -1, 1], + ["asi", -1, 1], + ["ti", -1, 1], + ["ontzi", -1, 1], + ["\u00F1i", -1, 1], + ["ak", -1, 1], + ["ek", -1, 1], + ["tarik", -1, 1], + ["gibel", -1, 1], + ["ail", -1, 1], + ["kail", 209, 1], + ["kan", -1, 1], + ["tan", -1, 1], + ["etan", 212, 1], + ["en", -1, 4], + ["ren", 214, 2], + ["garren", 215, 1], + ["gerren", 215, 1], + ["urren", 215, 1], + ["ten", 214, 4], + ["tzen", 214, 4], + ["zain", -1, 1], + ["tzain", 221, 1], + ["kin", -1, 1], + ["min", -1, 1], + ["dun", -1, 1], + ["asun", -1, 1], + ["tasun", 226, 1], + ["aizun", -1, 1], + ["ondo", -1, 1], + ["kondo", 229, 1], + ["go", -1, 1], + ["ngo", 231, 1], + ["zio", -1, 1], + ["ko", -1, 1], + ["trako", 234, 5], + ["tako", 234, 1], + ["etako", 236, 1], + ["eko", 234, 1], + ["tariko", 234, 1], + ["sko", 234, 1], + ["tuko", 234, 1], + ["minutuko", 241, 6], + ["zko", 234, 1], + ["no", -1, 1], + ["zino", 244, 1], + ["ro", -1, 1], + ["aro", 246, 1], + ["igaro", 247, -1], + ["taro", 247, 1], + ["zaro", 247, 1], + ["ero", 246, 1], + ["giro", 246, 1], + ["oro", 246, 1], + ["oso", -1, 1], + ["to", -1, 1], + ["tto", 255, 1], + ["zto", 255, 1], + ["txo", -1, 1], + ["tzo", -1, 1], + ["gintzo", 259, 1], + ["\u00F1o", -1, 1], + ["zp", -1, 1], + ["ar", -1, 1], + ["dar", 263, 1], + ["behar", 263, 1], + ["zehar", 263, -1], + ["liar", 263, 1], + ["tiar", 263, 1], + ["tar", 263, 1], + ["tzar", 263, 1], + ["or", -1, 2], + ["kor", 
271, 1], + ["os", -1, 1], + ["ket", -1, 1], + ["du", -1, 1], + ["mendu", 275, 1], + ["ordu", 275, 1], + ["leku", -1, 1], + ["buru", -1, 2], + ["duru", -1, 1], + ["tsu", -1, 1], + ["tu", -1, 1], + ["tatu", 282, 4], + ["mentu", 282, 1], + ["estu", 282, 1], + ["txu", -1, 1], + ["zu", -1, 1], + ["tzu", 287, 1], + ["gintzu", 288, 1], + ["z", -1, 1], + ["ez", 290, 1], + ["eroz", 290, 1], + ["tz", 290, 1], + ["koitz", 293, 1] + ]; + + /** @const */ var a_2 = [ + ["zlea", -1, 2], + ["keria", -1, 1], + ["la", -1, 1], + ["era", -1, 1], + ["dade", -1, 1], + ["tade", -1, 1], + ["date", -1, 1], + ["tate", -1, 1], + ["gi", -1, 1], + ["ki", -1, 1], + ["ik", -1, 1], + ["lanik", 10, 1], + ["rik", 10, 1], + ["larik", 12, 1], + ["ztik", 10, 1], + ["go", -1, 1], + ["ro", -1, 1], + ["ero", 16, 1], + ["to", -1, 1] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16]; + + var /** number */ I_p2 = 0; + var /** number */ I_p1 = 0; + var /** number */ I_pV = 0; + + + /** @return {boolean} */ + function r_mark_regions() { + I_pV = base.limit; + I_p1 = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + lab1: { + /** @const */ var /** number */ v_2 = base.cursor; + lab2: { + if (!(base.in_grouping(g_v, 97, 117))) + { + break lab2; + } + lab3: { + /** @const */ var /** number */ v_3 = base.cursor; + lab4: { + if (!(base.out_grouping(g_v, 97, 117))) + { + break lab4; + } + if (!base.go_out_grouping(g_v, 97, 117)) + { + break lab4; + } + base.cursor++; + break lab3; + } + base.cursor = v_3; + if (!(base.in_grouping(g_v, 97, 117))) + { + break lab2; + } + if (!base.go_in_grouping(g_v, 97, 117)) + { + break lab2; + } + base.cursor++; + } + break lab1; + } + base.cursor = v_2; + if (!(base.out_grouping(g_v, 97, 117))) + { + break lab0; + } + lab5: { + /** @const */ var /** number */ v_4 = base.cursor; + lab6: { + if (!(base.out_grouping(g_v, 97, 117))) + { + break lab6; + } + if (!base.go_out_grouping(g_v, 97, 117)) + { + break lab6; + } + 
base.cursor++; + break lab5; + } + base.cursor = v_4; + if (!(base.in_grouping(g_v, 97, 117))) + { + break lab0; + } + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; + } + } + I_pV = base.cursor; + } + base.cursor = v_1; + /** @const */ var /** number */ v_5 = base.cursor; + lab7: { + if (!base.go_out_grouping(g_v, 97, 117)) + { + break lab7; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 117)) + { + break lab7; + } + base.cursor++; + I_p1 = base.cursor; + if (!base.go_out_grouping(g_v, 97, 117)) + { + break lab7; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 117)) + { + break lab7; + } + base.cursor++; + I_p2 = base.cursor; + } + base.cursor = v_5; + return true; + }; + + /** @return {boolean} */ + function r_RV() { + return I_pV <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_aditzak() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_0); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_RV()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_izenak() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_RV()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (!base.slice_from("jok")) + { + return false; + } + break; + case 4: + 
if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 5: + if (!base.slice_from("tra")) + { + return false; + } + break; + case 6: + if (!base.slice_from("minutu")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_adjetiboak() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_RV()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("z")) + { + return false; + } + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + while(true) + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!r_aditzak()) + { + break lab0; + } + continue; + } + base.cursor = base.limit - v_1; + break; + } + while(true) + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!r_izenak()) + { + break lab1; + } + continue; + } + base.cursor = base.limit - v_2; + break; + } + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_adjetiboak(); + base.cursor = base.limit - v_3; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/catalan-stemmer.js b/sphinx/search/non-minified-js/catalan-stemmer.js new file mode 100644 index 00000000000..441e655adda --- /dev/null +++ b/sphinx/search/non-minified-js/catalan-stemmer.js @@ -0,0 +1,886 @@ +// Generated from catalan.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var CatalanStemmer = function() { + var base = new BaseStemmer(); + 
+ /** @const */ var a_0 = [ + ["", -1, 7], + ["\u00B7", 0, 6], + ["\u00E0", 0, 1], + ["\u00E1", 0, 1], + ["\u00E8", 0, 2], + ["\u00E9", 0, 2], + ["\u00EC", 0, 3], + ["\u00ED", 0, 3], + ["\u00EF", 0, 3], + ["\u00F2", 0, 4], + ["\u00F3", 0, 4], + ["\u00FA", 0, 5], + ["\u00FC", 0, 5] + ]; + + /** @const */ var a_1 = [ + ["la", -1, 1], + ["-la", 0, 1], + ["sela", 0, 1], + ["le", -1, 1], + ["me", -1, 1], + ["-me", 4, 1], + ["se", -1, 1], + ["-te", -1, 1], + ["hi", -1, 1], + ["'hi", 8, 1], + ["li", -1, 1], + ["-li", 10, 1], + ["'l", -1, 1], + ["'m", -1, 1], + ["-m", -1, 1], + ["'n", -1, 1], + ["-n", -1, 1], + ["ho", -1, 1], + ["'ho", 17, 1], + ["lo", -1, 1], + ["selo", 19, 1], + ["'s", -1, 1], + ["las", -1, 1], + ["selas", 22, 1], + ["les", -1, 1], + ["-les", 24, 1], + ["'ls", -1, 1], + ["-ls", -1, 1], + ["'ns", -1, 1], + ["-ns", -1, 1], + ["ens", -1, 1], + ["los", -1, 1], + ["selos", 31, 1], + ["nos", -1, 1], + ["-nos", 33, 1], + ["vos", -1, 1], + ["us", -1, 1], + ["-us", 36, 1], + ["'t", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["ica", -1, 4], + ["l\u00F3gica", 0, 3], + ["enca", -1, 1], + ["ada", -1, 2], + ["ancia", -1, 1], + ["encia", -1, 1], + ["\u00E8ncia", -1, 1], + ["\u00EDcia", -1, 1], + ["logia", -1, 3], + ["inia", -1, 1], + ["\u00EDinia", 9, 1], + ["eria", -1, 1], + ["\u00E0ria", -1, 1], + ["at\u00F2ria", -1, 1], + ["alla", -1, 1], + ["ella", -1, 1], + ["\u00EDvola", -1, 1], + ["ima", -1, 1], + ["\u00EDssima", 17, 1], + ["qu\u00EDssima", 18, 5], + ["ana", -1, 1], + ["ina", -1, 1], + ["era", -1, 1], + ["sfera", 22, 1], + ["ora", -1, 1], + ["dora", 24, 1], + ["adora", 25, 1], + ["adura", -1, 1], + ["esa", -1, 1], + ["osa", -1, 1], + ["assa", -1, 1], + ["essa", -1, 1], + ["issa", -1, 1], + ["eta", -1, 1], + ["ita", -1, 1], + ["ota", -1, 1], + ["ista", -1, 1], + ["ialista", 36, 1], + ["ionista", 36, 1], + ["iva", -1, 1], + ["ativa", 39, 1], + ["n\u00E7a", -1, 1], + ["log\u00EDa", -1, 3], + ["ic", -1, 4], + ["\u00EDstic", 43, 1], + ["enc", -1, 1], + ["esc", -1, 
1], + ["ud", -1, 1], + ["atge", -1, 1], + ["ble", -1, 1], + ["able", 49, 1], + ["ible", 49, 1], + ["isme", -1, 1], + ["ialisme", 52, 1], + ["ionisme", 52, 1], + ["ivisme", 52, 1], + ["aire", -1, 1], + ["icte", -1, 1], + ["iste", -1, 1], + ["ici", -1, 1], + ["\u00EDci", -1, 1], + ["logi", -1, 3], + ["ari", -1, 1], + ["tori", -1, 1], + ["al", -1, 1], + ["il", -1, 1], + ["all", -1, 1], + ["ell", -1, 1], + ["\u00EDvol", -1, 1], + ["isam", -1, 1], + ["issem", -1, 1], + ["\u00ECssem", -1, 1], + ["\u00EDssem", -1, 1], + ["\u00EDssim", -1, 1], + ["qu\u00EDssim", 73, 5], + ["amen", -1, 1], + ["\u00ECssin", -1, 1], + ["ar", -1, 1], + ["ificar", 77, 1], + ["egar", 77, 1], + ["ejar", 77, 1], + ["itar", 77, 1], + ["itzar", 77, 1], + ["fer", -1, 1], + ["or", -1, 1], + ["dor", 84, 1], + ["dur", -1, 1], + ["doras", -1, 1], + ["ics", -1, 4], + ["l\u00F3gics", 88, 3], + ["uds", -1, 1], + ["nces", -1, 1], + ["ades", -1, 2], + ["ancies", -1, 1], + ["encies", -1, 1], + ["\u00E8ncies", -1, 1], + ["\u00EDcies", -1, 1], + ["logies", -1, 3], + ["inies", -1, 1], + ["\u00EDnies", -1, 1], + ["eries", -1, 1], + ["\u00E0ries", -1, 1], + ["at\u00F2ries", -1, 1], + ["bles", -1, 1], + ["ables", 103, 1], + ["ibles", 103, 1], + ["imes", -1, 1], + ["\u00EDssimes", 106, 1], + ["qu\u00EDssimes", 107, 5], + ["formes", -1, 1], + ["ismes", -1, 1], + ["ialismes", 110, 1], + ["ines", -1, 1], + ["eres", -1, 1], + ["ores", -1, 1], + ["dores", 114, 1], + ["idores", 115, 1], + ["dures", -1, 1], + ["eses", -1, 1], + ["oses", -1, 1], + ["asses", -1, 1], + ["ictes", -1, 1], + ["ites", -1, 1], + ["otes", -1, 1], + ["istes", -1, 1], + ["ialistes", 124, 1], + ["ionistes", 124, 1], + ["iques", -1, 4], + ["l\u00F3giques", 127, 3], + ["ives", -1, 1], + ["atives", 129, 1], + ["log\u00EDes", -1, 3], + ["alleng\u00FCes", -1, 1], + ["icis", -1, 1], + ["\u00EDcis", -1, 1], + ["logis", -1, 3], + ["aris", -1, 1], + ["toris", -1, 1], + ["ls", -1, 1], + ["als", 138, 1], + ["ells", 138, 1], + ["ims", -1, 1], + ["\u00EDssims", 
141, 1], + ["qu\u00EDssims", 142, 5], + ["ions", -1, 1], + ["cions", 144, 1], + ["acions", 145, 2], + ["esos", -1, 1], + ["osos", -1, 1], + ["assos", -1, 1], + ["issos", -1, 1], + ["ers", -1, 1], + ["ors", -1, 1], + ["dors", 152, 1], + ["adors", 153, 1], + ["idors", 153, 1], + ["ats", -1, 1], + ["itats", 156, 1], + ["bilitats", 157, 1], + ["ivitats", 157, 1], + ["ativitats", 159, 1], + ["\u00EFtats", 156, 1], + ["ets", -1, 1], + ["ants", -1, 1], + ["ents", -1, 1], + ["ments", 164, 1], + ["aments", 165, 1], + ["ots", -1, 1], + ["uts", -1, 1], + ["ius", -1, 1], + ["trius", 169, 1], + ["atius", 169, 1], + ["\u00E8s", -1, 1], + ["\u00E9s", -1, 1], + ["\u00EDs", -1, 1], + ["d\u00EDs", 174, 1], + ["\u00F3s", -1, 1], + ["itat", -1, 1], + ["bilitat", 177, 1], + ["ivitat", 177, 1], + ["ativitat", 179, 1], + ["\u00EFtat", -1, 1], + ["et", -1, 1], + ["ant", -1, 1], + ["ent", -1, 1], + ["ient", 184, 1], + ["ment", 184, 1], + ["ament", 186, 1], + ["isament", 187, 1], + ["ot", -1, 1], + ["isseu", -1, 1], + ["\u00ECsseu", -1, 1], + ["\u00EDsseu", -1, 1], + ["triu", -1, 1], + ["\u00EDssiu", -1, 1], + ["atiu", -1, 1], + ["\u00F3", -1, 1], + ["i\u00F3", 196, 1], + ["ci\u00F3", 197, 1], + ["aci\u00F3", 198, 1] + ]; + + /** @const */ var a_3 = [ + ["aba", -1, 1], + ["esca", -1, 1], + ["isca", -1, 1], + ["\u00EFsca", -1, 1], + ["ada", -1, 1], + ["ida", -1, 1], + ["uda", -1, 1], + ["\u00EFda", -1, 1], + ["ia", -1, 1], + ["aria", 8, 1], + ["iria", 8, 1], + ["ara", -1, 1], + ["iera", -1, 1], + ["ira", -1, 1], + ["adora", -1, 1], + ["\u00EFra", -1, 1], + ["ava", -1, 1], + ["ixa", -1, 1], + ["itza", -1, 1], + ["\u00EDa", -1, 1], + ["ar\u00EDa", 19, 1], + ["er\u00EDa", 19, 1], + ["ir\u00EDa", 19, 1], + ["\u00EFa", -1, 1], + ["isc", -1, 1], + ["\u00EFsc", -1, 1], + ["ad", -1, 1], + ["ed", -1, 1], + ["id", -1, 1], + ["ie", -1, 1], + ["re", -1, 1], + ["dre", 30, 1], + ["ase", -1, 1], + ["iese", -1, 1], + ["aste", -1, 1], + ["iste", -1, 1], + ["ii", -1, 1], + ["ini", -1, 1], + ["esqui", -1, 1], 
+ ["eixi", -1, 1], + ["itzi", -1, 1], + ["am", -1, 1], + ["em", -1, 1], + ["arem", 42, 1], + ["irem", 42, 1], + ["\u00E0rem", 42, 1], + ["\u00EDrem", 42, 1], + ["\u00E0ssem", 42, 1], + ["\u00E9ssem", 42, 1], + ["iguem", 42, 1], + ["\u00EFguem", 42, 1], + ["avem", 42, 1], + ["\u00E0vem", 42, 1], + ["\u00E1vem", 42, 1], + ["ir\u00ECem", 42, 1], + ["\u00EDem", 42, 1], + ["ar\u00EDem", 55, 1], + ["ir\u00EDem", 55, 1], + ["assim", -1, 1], + ["essim", -1, 1], + ["issim", -1, 1], + ["\u00E0ssim", -1, 1], + ["\u00E8ssim", -1, 1], + ["\u00E9ssim", -1, 1], + ["\u00EDssim", -1, 1], + ["\u00EFm", -1, 1], + ["an", -1, 1], + ["aban", 66, 1], + ["arian", 66, 1], + ["aran", 66, 1], + ["ieran", 66, 1], + ["iran", 66, 1], + ["\u00EDan", 66, 1], + ["ar\u00EDan", 72, 1], + ["er\u00EDan", 72, 1], + ["ir\u00EDan", 72, 1], + ["en", -1, 1], + ["ien", 76, 1], + ["arien", 77, 1], + ["irien", 77, 1], + ["aren", 76, 1], + ["eren", 76, 1], + ["iren", 76, 1], + ["\u00E0ren", 76, 1], + ["\u00EFren", 76, 1], + ["asen", 76, 1], + ["iesen", 76, 1], + ["assen", 76, 1], + ["essen", 76, 1], + ["issen", 76, 1], + ["\u00E9ssen", 76, 1], + ["\u00EFssen", 76, 1], + ["esquen", 76, 1], + ["isquen", 76, 1], + ["\u00EFsquen", 76, 1], + ["aven", 76, 1], + ["ixen", 76, 1], + ["eixen", 96, 1], + ["\u00EFxen", 76, 1], + ["\u00EFen", 76, 1], + ["in", -1, 1], + ["inin", 100, 1], + ["sin", 100, 1], + ["isin", 102, 1], + ["assin", 102, 1], + ["essin", 102, 1], + ["issin", 102, 1], + ["\u00EFssin", 102, 1], + ["esquin", 100, 1], + ["eixin", 100, 1], + ["aron", -1, 1], + ["ieron", -1, 1], + ["ar\u00E1n", -1, 1], + ["er\u00E1n", -1, 1], + ["ir\u00E1n", -1, 1], + ["i\u00EFn", -1, 1], + ["ado", -1, 1], + ["ido", -1, 1], + ["ando", -1, 2], + ["iendo", -1, 1], + ["io", -1, 1], + ["ixo", -1, 1], + ["eixo", 121, 1], + ["\u00EFxo", -1, 1], + ["itzo", -1, 1], + ["ar", -1, 1], + ["tzar", 125, 1], + ["er", -1, 1], + ["eixer", 127, 1], + ["ir", -1, 1], + ["ador", -1, 1], + ["as", -1, 1], + ["abas", 131, 1], + ["adas", 131, 1], + 
["idas", 131, 1], + ["aras", 131, 1], + ["ieras", 131, 1], + ["\u00EDas", 131, 1], + ["ar\u00EDas", 137, 1], + ["er\u00EDas", 137, 1], + ["ir\u00EDas", 137, 1], + ["ids", -1, 1], + ["es", -1, 1], + ["ades", 142, 1], + ["ides", 142, 1], + ["udes", 142, 1], + ["\u00EFdes", 142, 1], + ["atges", 142, 1], + ["ies", 142, 1], + ["aries", 148, 1], + ["iries", 148, 1], + ["ares", 142, 1], + ["ires", 142, 1], + ["adores", 142, 1], + ["\u00EFres", 142, 1], + ["ases", 142, 1], + ["ieses", 142, 1], + ["asses", 142, 1], + ["esses", 142, 1], + ["isses", 142, 1], + ["\u00EFsses", 142, 1], + ["ques", 142, 1], + ["esques", 161, 1], + ["\u00EFsques", 161, 1], + ["aves", 142, 1], + ["ixes", 142, 1], + ["eixes", 165, 1], + ["\u00EFxes", 142, 1], + ["\u00EFes", 142, 1], + ["abais", -1, 1], + ["arais", -1, 1], + ["ierais", -1, 1], + ["\u00EDais", -1, 1], + ["ar\u00EDais", 172, 1], + ["er\u00EDais", 172, 1], + ["ir\u00EDais", 172, 1], + ["aseis", -1, 1], + ["ieseis", -1, 1], + ["asteis", -1, 1], + ["isteis", -1, 1], + ["inis", -1, 1], + ["sis", -1, 1], + ["isis", 181, 1], + ["assis", 181, 1], + ["essis", 181, 1], + ["issis", 181, 1], + ["\u00EFssis", 181, 1], + ["esquis", -1, 1], + ["eixis", -1, 1], + ["itzis", -1, 1], + ["\u00E1is", -1, 1], + ["ar\u00E9is", -1, 1], + ["er\u00E9is", -1, 1], + ["ir\u00E9is", -1, 1], + ["ams", -1, 1], + ["ados", -1, 1], + ["idos", -1, 1], + ["amos", -1, 1], + ["\u00E1bamos", 197, 1], + ["\u00E1ramos", 197, 1], + ["i\u00E9ramos", 197, 1], + ["\u00EDamos", 197, 1], + ["ar\u00EDamos", 201, 1], + ["er\u00EDamos", 201, 1], + ["ir\u00EDamos", 201, 1], + ["aremos", -1, 1], + ["eremos", -1, 1], + ["iremos", -1, 1], + ["\u00E1semos", -1, 1], + ["i\u00E9semos", -1, 1], + ["imos", -1, 1], + ["adors", -1, 1], + ["ass", -1, 1], + ["erass", 212, 1], + ["ess", -1, 1], + ["ats", -1, 1], + ["its", -1, 1], + ["ents", -1, 1], + ["\u00E0s", -1, 1], + ["ar\u00E0s", 218, 1], + ["ir\u00E0s", 218, 1], + ["ar\u00E1s", -1, 1], + ["er\u00E1s", -1, 1], + ["ir\u00E1s", -1, 1], + 
["\u00E9s", -1, 1], + ["ar\u00E9s", 224, 1], + ["\u00EDs", -1, 1], + ["i\u00EFs", -1, 1], + ["at", -1, 1], + ["it", -1, 1], + ["ant", -1, 1], + ["ent", -1, 1], + ["int", -1, 1], + ["ut", -1, 1], + ["\u00EFt", -1, 1], + ["au", -1, 1], + ["erau", 235, 1], + ["ieu", -1, 1], + ["ineu", -1, 1], + ["areu", -1, 1], + ["ireu", -1, 1], + ["\u00E0reu", -1, 1], + ["\u00EDreu", -1, 1], + ["asseu", -1, 1], + ["esseu", -1, 1], + ["eresseu", 244, 1], + ["\u00E0sseu", -1, 1], + ["\u00E9sseu", -1, 1], + ["igueu", -1, 1], + ["\u00EFgueu", -1, 1], + ["\u00E0veu", -1, 1], + ["\u00E1veu", -1, 1], + ["itzeu", -1, 1], + ["\u00ECeu", -1, 1], + ["ir\u00ECeu", 253, 1], + ["\u00EDeu", -1, 1], + ["ar\u00EDeu", 255, 1], + ["ir\u00EDeu", 255, 1], + ["assiu", -1, 1], + ["issiu", -1, 1], + ["\u00E0ssiu", -1, 1], + ["\u00E8ssiu", -1, 1], + ["\u00E9ssiu", -1, 1], + ["\u00EDssiu", -1, 1], + ["\u00EFu", -1, 1], + ["ix", -1, 1], + ["eix", 265, 1], + ["\u00EFx", -1, 1], + ["itz", -1, 1], + ["i\u00E0", -1, 1], + ["ar\u00E0", -1, 1], + ["ir\u00E0", -1, 1], + ["itz\u00E0", -1, 1], + ["ar\u00E1", -1, 1], + ["er\u00E1", -1, 1], + ["ir\u00E1", -1, 1], + ["ir\u00E8", -1, 1], + ["ar\u00E9", -1, 1], + ["er\u00E9", -1, 1], + ["ir\u00E9", -1, 1], + ["\u00ED", -1, 1], + ["i\u00EF", -1, 1], + ["i\u00F3", -1, 1] + ]; + + /** @const */ var a_4 = [ + ["a", -1, 1], + ["e", -1, 1], + ["i", -1, 1], + ["\u00EFn", -1, 1], + ["o", -1, 1], + ["ir", -1, 1], + ["s", -1, 1], + ["is", 6, 1], + ["os", 6, 1], + ["\u00EFs", 6, 1], + ["it", -1, 1], + ["eu", -1, 1], + ["iu", -1, 1], + ["iqu", -1, 2], + ["itz", -1, 1], + ["\u00E0", -1, 1], + ["\u00E1", -1, 1], + ["\u00E9", -1, 1], + ["\u00EC", -1, 1], + ["\u00ED", -1, 1], + ["\u00EF", -1, 1], + ["\u00F3", -1, 1] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 129, 81, 6, 10]; + + var /** number */ I_p2 = 0; + var /** number */ I_p1 = 0; + + + /** @return {boolean} */ + function r_mark_regions() { + I_p1 = base.limit; + I_p2 = 
base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!base.go_out_grouping(g_v, 97, 252)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) + { + break lab0; + } + base.cursor++; + I_p1 = base.cursor; + if (!base.go_out_grouping(g_v, 97, 252)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) + { + break lab0; + } + base.cursor++; + I_p2 = base.cursor; + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_cleaning() { + var /** number */ among_var; + while(true) + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("a")) + { + return false; + } + break; + case 2: + if (!base.slice_from("e")) + { + return false; + } + break; + case 3: + if (!base.slice_from("i")) + { + return false; + } + break; + case 4: + if (!base.slice_from("o")) + { + return false; + } + break; + case 5: + if (!base.slice_from("u")) + { + return false; + } + break; + case 6: + if (!base.slice_from(".")) + { + return false; + } + break; + case 7: + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; + break; + } + continue; + } + base.cursor = v_1; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_attached_pronoun() { + base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_standard_suffix() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + 
return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (!r_R2()) + { + return false; + } + if (!base.slice_from("log")) + { + return false; + } + break; + case 4: + if (!r_R2()) + { + return false; + } + if (!base.slice_from("ic")) + { + return false; + } + break; + case 5: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("c")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_verb_suffix() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_residual_suffix() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("ic")) + { + return false; + } + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + r_attached_pronoun(); + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + lab1: { + /** @const */ var /** number 
*/ v_3 = base.limit - base.cursor; + lab2: { + if (!r_standard_suffix()) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_3; + if (!r_verb_suffix()) + { + break lab0; + } + } + } + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_residual_suffix(); + base.cursor = base.limit - v_4; + base.cursor = base.limit_backward; + /** @const */ var /** number */ v_5 = base.cursor; + r_cleaning(); + base.cursor = v_5; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/danish-stemmer.js b/sphinx/search/non-minified-js/danish-stemmer.js index 46b5d55b0e1..b0867495bbf 100644 --- a/sphinx/search/non-minified-js/danish-stemmer.js +++ b/sphinx/search/non-minified-js/danish-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from danish.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -DanishStemmer = function() { +var DanishStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["hed", -1, 1], ["ethed", 0, 1], @@ -67,9 +68,9 @@ DanishStemmer = function() { /** @return {boolean} */ function r_mark_regions() { I_p1 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; { - var /** number */ c1 = base.cursor + 3; + /** @const */ var /** number */ c1 = base.cursor + 3; if (c1 > base.limit) { return false; @@ -78,44 +79,21 @@ DanishStemmer = function() { } I_x = base.cursor; base.cursor = v_1; - golab0: while(true) + if (!base.go_out_grouping(g_v, 97, 248)) { - var /** number */ v_2 = base.cursor; - lab1: { - if (!(base.in_grouping(g_v, 97, 248))) - { - break lab1; - } - base.cursor = v_2; - break golab0; - } - base.cursor = v_2; - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return 
false; } - golab2: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 248)) { - lab3: { - if (!(base.out_grouping(g_v, 97, 248))) - { - break lab3; - } - break golab2; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p1 = base.cursor; - lab4: { - if (!(I_p1 < I_x)) + lab0: { + if (I_p1 >= I_x) { - break lab4; + break lab0; } I_p1 = I_x; } @@ -129,17 +107,17 @@ DanishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; among_var = base.find_among_b(a_0); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: if (!base.slice_del()) @@ -163,21 +141,21 @@ DanishStemmer = function() { /** @return {boolean} */ function r_consonant_pair() { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; if (base.cursor < I_p1) { return false; } - var /** number */ v_3 = base.limit_backward; + /** @const */ var /** number */ v_2 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; if (base.find_among_b(a_1) == 0) { - base.limit_backward = v_3; + base.limit_backward = v_2; return false; } base.bra = base.cursor; - base.limit_backward = v_3; + base.limit_backward = v_2; base.cursor = base.limit - v_1; if (base.cursor <= base.limit_backward) { @@ -195,7 +173,7 @@ DanishStemmer = function() { /** @return {boolean} */ function r_other_suffix() { var /** number */ among_var; - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("st"))) @@ -217,26 +195,26 @@ DanishStemmer = function() { { return false; } - var /** number 
*/ v_3 = base.limit_backward; + /** @const */ var /** number */ v_2 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; among_var = base.find_among_b(a_2); if (among_var == 0) { - base.limit_backward = v_3; + base.limit_backward = v_2; return false; } base.bra = base.cursor; - base.limit_backward = v_3; + base.limit_backward = v_2; switch (among_var) { case 1: if (!base.slice_del()) { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_consonant_pair(); - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; break; case 2: if (!base.slice_from("l\u00F8s")) @@ -254,12 +232,12 @@ DanishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; if (!(base.in_grouping_b(g_c, 98, 122))) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; @@ -268,7 +246,7 @@ DanishStemmer = function() { { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; if (!(base.eq_s_b(S_ch))) { return false; @@ -281,20 +259,20 @@ DanishStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_mark_regions(); base.cursor = v_1; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_main_suffix(); base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_consonant_pair(); base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_other_suffix(); base.cursor = 
base.limit - v_4; - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; r_undouble(); base.cursor = base.limit - v_5; base.cursor = base.limit_backward; diff --git a/sphinx/search/non-minified-js/dutch-stemmer.js b/sphinx/search/non-minified-js/dutch-stemmer.js index 0ad11e212cc..50e53e7b510 100644 --- a/sphinx/search/non-minified-js/dutch-stemmer.js +++ b/sphinx/search/non-minified-js/dutch-stemmer.js @@ -1,678 +1,1960 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from dutch.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -DutchStemmer = function() { +var DutchStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ - ["", -1, 6], - ["\u00E1", 0, 1], - ["\u00E4", 0, 1], - ["\u00E9", 0, 2], - ["\u00EB", 0, 2], - ["\u00ED", 0, 3], - ["\u00EF", 0, 3], - ["\u00F3", 0, 4], - ["\u00F6", 0, 4], - ["\u00FA", 0, 5], - ["\u00FC", 0, 5] + ["a", -1, 1], + ["e", -1, 2], + ["o", -1, 1], + ["u", -1, 1], + ["\u00E0", -1, 1], + ["\u00E1", -1, 1], + ["\u00E2", -1, 1], + ["\u00E4", -1, 1], + ["\u00E8", -1, 2], + ["\u00E9", -1, 2], + ["\u00EA", -1, 2], + ["e\u00EB", -1, 3], + ["i\u00EB", -1, 4], + ["\u00F2", -1, 1], + ["\u00F3", -1, 1], + ["\u00F4", -1, 1], + ["\u00F6", -1, 1], + ["\u00F9", -1, 1], + ["\u00FA", -1, 1], + ["\u00FB", -1, 1], + ["\u00FC", -1, 1] ]; /** @const */ var a_1 = [ - ["", -1, 3], - ["I", 0, 2], - ["Y", 0, 1] + ["nde", -1, 8], + ["en", -1, 7], + ["s", -1, 2], + ["'s", 2, 1], + ["es", 2, 4], + ["ies", 4, 3], + ["aus", 2, 6], + ["\u00E9s", 2, 5] ]; /** @const */ var a_2 = [ - ["dd", -1, -1], - ["kk", -1, -1], - ["tt", -1, -1] + ["de", -1, 5], + ["ge", -1, 2], + ["ische", -1, 4], + ["je", -1, 1], + ["lijke", -1, 3], + ["le", -1, 9], + ["ene", -1, 10], + ["re", -1, 8], + ["se", -1, 7], + ["te", -1, 6], + ["ieve", -1, 11] ]; /** @const */ var a_3 = [ - ["ene", -1, 2], - ["se", -1, 3], - ["en", -1, 2], - ["heden", 2, 1], - ["s", 
-1, 3] + ["heid", -1, 3], + ["fie", -1, 7], + ["gie", -1, 8], + ["atie", -1, 1], + ["isme", -1, 5], + ["ing", -1, 5], + ["arij", -1, 6], + ["erij", -1, 5], + ["sel", -1, 3], + ["rder", -1, 4], + ["ster", -1, 3], + ["iteit", -1, 2], + ["dst", -1, 10], + ["tst", -1, 9] ]; /** @const */ var a_4 = [ - ["end", -1, 1], - ["ig", -1, 2], - ["ing", -1, 1], - ["lijk", -1, 3], - ["baar", -1, 4], - ["bar", -1, 5] + ["end", -1, 9], + ["atief", -1, 2], + ["erig", -1, 9], + ["achtig", -1, 3], + ["ioneel", -1, 1], + ["baar", -1, 3], + ["laar", -1, 5], + ["naar", -1, 4], + ["raar", -1, 6], + ["eriger", -1, 9], + ["achtiger", -1, 3], + ["lijker", -1, 8], + ["tant", -1, 7], + ["erigst", -1, 9], + ["achtigst", -1, 3], + ["lijkst", -1, 8] ]; /** @const */ var a_5 = [ - ["aa", -1, -1], - ["ee", -1, -1], - ["oo", -1, -1], - ["uu", -1, -1] + ["ig", -1, 1], + ["iger", -1, 1], + ["igst", -1, 1] + ]; + + /** @const */ var a_6 = [ + ["ft", -1, 2], + ["kt", -1, 1], + ["pt", -1, 3] + ]; + + /** @const */ var a_7 = [ + ["bb", -1, 1], + ["cc", -1, 2], + ["dd", -1, 3], + ["ff", -1, 4], + ["gg", -1, 5], + ["hh", -1, 6], + ["jj", -1, 7], + ["kk", -1, 8], + ["ll", -1, 9], + ["mm", -1, 10], + ["nn", -1, 11], + ["pp", -1, 12], + ["qq", -1, 13], + ["rr", -1, 14], + ["ss", -1, 15], + ["tt", -1, 16], + ["v", -1, 4], + ["vv", 16, 17], + ["ww", -1, 18], + ["xx", -1, 19], + ["z", -1, 15], + ["zz", 20, 20] + ]; + + /** @const */ var a_8 = [ + ["d", -1, 1], + ["t", -1, 2] + ]; + + /** @const */ var a_9 = [ + ["", -1, -1], + ["eft", 0, 1], + ["vaa", 0, 1], + ["val", 0, 1], + ["vali", 3, -1], + ["vare", 0, 1] + ]; + + /** @const */ var a_10 = [ + ["\u00EB", -1, 1], + ["\u00EF", -1, 2] + ]; + + /** @const */ var a_11 = [ + ["\u00EB", -1, 1], + ["\u00EF", -1, 2] ]; - /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + /** @const */ var /** Array */ g_E = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 120]; + + /** @const */ var /** Array */ g_AIOU = [1, 65, 16, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 11, 120, 46, 15]; - /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + /** @const */ var /** Array */ g_AEIOU = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15]; - /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15]; + /** @const */ var /** Array */ g_v_WX = [17, 65, 208, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15]; + + var /** boolean */ B_GE_removed = false; + var /** boolean */ B_stemmed = false; var /** number */ I_p2 = 0; var /** number */ I_p1 = 0; - var /** boolean */ B_e_found = false; + var /** string */ S_ch = ''; /** @return {boolean} */ - function r_prelude() { - var /** number */ among_var; - var /** number */ v_1 = base.cursor; - while(true) + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_V() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.in_grouping_b(g_v, 97, 252))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_2; + if (!(base.eq_s_b("ij"))) + { + return false; + } + } + base.cursor = base.limit - v_1; + return true; + }; + + /** @return {boolean} */ + function r_VX() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (base.cursor <= base.limit_backward) + { + return false; + } + base.cursor--; + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.in_grouping_b(g_v, 97, 252))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_2; + if (!(base.eq_s_b("ij"))) + { + return false; + } + } + base.cursor = base.limit 
- v_1; + return true; + }; + + /** @return {boolean} */ + function r_C() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { - base.bra = base.cursor; - among_var = base.find_among(a_0); - if (among_var == 0) + if (!(base.eq_s_b("ij"))) { break lab0; } - base.ket = base.cursor; - switch (among_var) { - case 1: - if (!base.slice_from("a")) - { - return false; - } - break; - case 2: - if (!base.slice_from("e")) - { - return false; - } - break; - case 3: - if (!base.slice_from("i")) - { - return false; - } - break; - case 4: - if (!base.slice_from("o")) - { - return false; - } - break; - case 5: - if (!base.slice_from("u")) - { - return false; - } - break; - case 6: - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; - break; - } - continue; + return false; } - base.cursor = v_2; - break; + base.cursor = base.limit - v_2; } - base.cursor = v_1; - var /** number */ v_3 = base.cursor; - lab1: { - base.bra = base.cursor; - if (!(base.eq_s("y"))) + if (!(base.out_grouping_b(g_v, 97, 252))) + { + return false; + } + base.cursor = base.limit - v_1; + return true; + }; + + /** @return {boolean} */ + function r_lengthen_V() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.out_grouping_b(g_v_WX, 97, 252))) { - base.cursor = v_3; - break lab1; + break lab0; } base.ket = base.cursor; - if (!base.slice_from("Y")) + among_var = base.find_among_b(a_0); + if (among_var == 0) { - return false; + break lab0; } - } - while(true) - { - var /** number */ v_4 = base.cursor; - lab2: { - golab3: while(true) - { - var /** number */ v_5 = base.cursor; - lab4: { - if (!(base.in_grouping(g_v, 97, 232))) + base.bra = base.cursor; + switch (among_var) { + case 1: + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + /** @const */ var /** number */ v_3 = 
base.limit - base.cursor; + lab2: { + if (!(base.out_grouping_b(g_AEIOU, 97, 252))) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_3; + if (base.cursor > base.limit_backward) { - break lab4; + break lab0; } - base.bra = base.cursor; + } + base.cursor = base.limit - v_2; + S_ch = base.slice_to(); + if (S_ch == '') + { + return false; + } + { + /** @const */ var /** number */ c1 = base.cursor; + base.insert(base.cursor, base.cursor, S_ch); + base.cursor = c1; + } + break; + case 2: + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab3: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab4: { + if (!(base.out_grouping_b(g_AEIOU, 97, 252))) + { + break lab4; + } + break lab3; + } + base.cursor = base.limit - v_5; + if (base.cursor > base.limit_backward) + { + break lab0; + } + } + { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab5: { - var /** number */ v_6 = base.cursor; lab6: { - if (!(base.eq_s("i"))) - { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab7: { + if (!(base.in_grouping_b(g_AIOU, 97, 252))) + { + break lab7; + } break lab6; } - base.ket = base.cursor; - if (!(base.in_grouping(g_v, 97, 232))) + base.cursor = base.limit - v_7; + if (!(base.in_grouping_b(g_E, 101, 235))) { - break lab6; + break lab5; } - if (!base.slice_from("I")) + if (base.cursor > base.limit_backward) { - return false; + break lab5; } - break lab5; } - base.cursor = v_6; - if (!(base.eq_s("y"))) + break lab0; + } + base.cursor = base.limit - v_6; + } + { + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab8: { + if (base.cursor <= base.limit_backward) { - break lab4; + break lab8; + } + base.cursor--; + if (!(base.in_grouping_b(g_AIOU, 97, 252))) + { + break lab8; } - base.ket = base.cursor; - if (!base.slice_from("Y")) + if (!(base.out_grouping_b(g_AEIOU, 97, 252))) { - return false; + break lab8; } + break lab0; } - base.cursor = v_5; - break golab3; + 
base.cursor = base.limit - v_8; } - base.cursor = v_5; - if (base.cursor >= base.limit) + base.cursor = base.limit - v_4; + S_ch = base.slice_to(); + if (S_ch == '') { - break lab2; + return false; } - base.cursor++; - } - continue; + { + /** @const */ var /** number */ c2 = base.cursor; + base.insert(base.cursor, base.cursor, S_ch); + base.cursor = c2; + } + break; + case 3: + if (!base.slice_from("e\u00EBe")) + { + return false; + } + break; + case 4: + if (!base.slice_from("iee")) + { + return false; + } + break; } - base.cursor = v_4; - break; } + base.cursor = base.limit - v_1; return true; }; /** @return {boolean} */ - function r_mark_regions() { - I_p1 = base.limit; - I_p2 = base.limit; - golab0: while(true) + function r_Step_1() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) { - lab1: { - if (!(base.in_grouping(g_v, 97, 232))) + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) { - break lab1; + return false; } - break golab0; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; - } - golab2: while(true) - { - lab3: { - if (!(base.out_grouping(g_v, 97, 232))) + break; + case 2: + if (!r_R1()) { - break lab3; + return false; } - break golab2; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; - } - I_p1 = base.cursor; - lab4: { - if (!(I_p1 < 3)) - { - break lab4; - } - I_p1 = 3; - } - golab5: while(true) - { - lab6: { - if (!(base.in_grouping(g_v, 97, 232))) { - break lab6; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("t"))) + { + break lab0; + } + if (!r_R1()) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; } - break golab5; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; - } - golab7: while(true) - { - lab8: { - if (!(base.out_grouping(g_v, 97, 232))) + if (!r_C()) { - break 
lab8; + return false; } - break golab7; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; - } - I_p2 = base.cursor; - return true; - }; - - /** @return {boolean} */ - function r_postlude() { - var /** number */ among_var; - while(true) - { - var /** number */ v_1 = base.cursor; - lab0: { - base.bra = base.cursor; - among_var = base.find_among(a_1); - if (among_var == 0) + if (!base.slice_del()) { - break lab0; + return false; + } + break; + case 3: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("ie")) + { + return false; + } + break; + case 4: + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + if (!(base.eq_s_b("ar"))) + { + break lab2; + } + if (!r_R1()) + { + break lab2; + } + if (!r_C()) + { + break lab2; + } + base.cursor = base.limit - v_3; + if (!base.slice_del()) + { + return false; + } + r_lengthen_V(); + break lab1; + } + base.cursor = base.limit - v_2; + lab3: { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + if (!(base.eq_s_b("er"))) + { + break lab3; + } + if (!r_R1()) + { + break lab3; + } + if (!r_C()) + { + break lab3; + } + base.cursor = base.limit - v_4; + if (!base.slice_del()) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_from("e")) + { + return false; + } + } + break; + case 5: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("\u00E9")) + { + return false; + } + break; + case 6: + if (!r_R1()) + { + return false; + } + if (!r_V()) + { + return false; + } + if (!base.slice_from("au")) + { + return false; + } + break; + case 7: + lab4: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab5: { + if (!(base.eq_s_b("hed"))) + { + break lab5; + } + if (!r_R1()) + { + break lab5; + } + base.bra = base.cursor; + if 
(!base.slice_from("heid")) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_5; + lab6: { + if (!(base.eq_s_b("nd"))) + { + break lab6; + } + if (!base.slice_del()) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_5; + lab7: { + if (!(base.eq_s_b("d"))) + { + break lab7; + } + if (!r_R1()) + { + break lab7; + } + if (!r_C()) + { + break lab7; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_5; + lab8: { + lab9: { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab10: { + if (!(base.eq_s_b("i"))) + { + break lab10; + } + break lab9; + } + base.cursor = base.limit - v_6; + if (!(base.eq_s_b("j"))) + { + break lab8; + } + } + if (!r_V()) + { + break lab8; + } + if (!base.slice_del()) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_5; + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + r_lengthen_V(); + } + break; + case 8: + if (!base.slice_from("nd")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_2() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("'t"))) + { + break lab1; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + if (!(base.eq_s_b("et"))) + { + break lab2; + } + base.bra = base.cursor; + if (!r_R1()) + { + break lab2; + } + if (!r_C()) + { + break lab2; + } + if (!base.slice_del()) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab3: { + if (!(base.eq_s_b("rnt"))) + { + break lab3; 
+ } + base.bra = base.cursor; + if (!base.slice_from("rn")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab4: { + if (!(base.eq_s_b("t"))) + { + break lab4; + } + base.bra = base.cursor; + if (!r_R1()) + { + break lab4; + } + if (!r_VX()) + { + break lab4; + } + if (!base.slice_del()) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab5: { + if (!(base.eq_s_b("ink"))) + { + break lab5; + } + base.bra = base.cursor; + if (!base.slice_from("ing")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab6: { + if (!(base.eq_s_b("mp"))) + { + break lab6; + } + base.bra = base.cursor; + if (!base.slice_from("m")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab7: { + if (!(base.eq_s_b("'"))) + { + break lab7; + } + base.bra = base.cursor; + if (!r_R1()) + { + break lab7; + } + if (!base.slice_del()) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + } + break; + case 2: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("g")) + { + return false; + } + break; + case 3: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("lijk")) + { + return false; + } + break; + case 4: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("isch")) + { + return false; + } + break; + case 5: + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 6: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("t")) + { + return false; + } + break; + case 7: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("s")) + { + return false; + } + break; + case 8: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("r")) + { + return false; + } + break; + case 9: + 
if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + base.insert(base.cursor, base.cursor, "l"); + r_lengthen_V(); + break; + case 10: + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + base.insert(base.cursor, base.cursor, "en"); + r_lengthen_V(); + break; + case 11: + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_from("ief")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_3() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + return false; + } + if (!base.slice_from("eer")) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + r_lengthen_V(); + break; + case 3: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 4: + if (!base.slice_from("r")) + { + return false; + } + break; + case 5: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("ild"))) + { + break lab1; + } + if (!base.slice_from("er")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + r_lengthen_V(); + } + break; + case 6: + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_from("aar")) + { + return false; + } + break; + case 7: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + base.insert(base.cursor, base.cursor, "f"); + r_lengthen_V(); + break; + case 8: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + 
base.insert(base.cursor, base.cursor, "g"); + r_lengthen_V(); + break; + case 9: + if (!r_R1()) + { + return false; } + if (!r_C()) + { + return false; + } + if (!base.slice_from("t")) + { + return false; + } + break; + case 10: + if (!r_R1()) + { + return false; + } + if (!r_C()) + { + return false; + } + if (!base.slice_from("d")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_4() { + var /** number */ among_var; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { base.ket = base.cursor; + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + break lab1; + } + base.bra = base.cursor; switch (among_var) { case 1: - if (!base.slice_from("y")) + if (!r_R1()) + { + break lab1; + } + if (!base.slice_from("ie")) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + break lab1; + } + if (!base.slice_from("eer")) + { + return false; + } + break; + case 3: + if (!r_R1()) + { + break lab1; + } + if (!base.slice_del()) + { + return false; + } + break; + case 4: + if (!r_R1()) + { + break lab1; + } + if (!r_V()) + { + break lab1; + } + if (!base.slice_from("n")) + { + return false; + } + break; + case 5: + if (!r_R1()) + { + break lab1; + } + if (!r_V()) + { + break lab1; + } + if (!base.slice_from("l")) + { + return false; + } + break; + case 6: + if (!r_R1()) + { + break lab1; + } + if (!r_V()) + { + break lab1; + } + if (!base.slice_from("r")) + { + return false; + } + break; + case 7: + if (!r_R1()) + { + break lab1; + } + if (!base.slice_from("teer")) { return false; } break; - case 2: - if (!base.slice_from("i")) + case 8: + if (!r_R1()) + { + break lab1; + } + if (!base.slice_from("lijk")) { return false; } break; - case 3: - if (base.cursor >= base.limit) + case 9: + if (!r_R1()) { - break lab0; + break lab1; + } + if (!r_C()) + { + break lab1; + } + if (!base.slice_del()) + { + return false; } - base.cursor++; + r_lengthen_V(); break; } - continue; + break 
lab0; } - base.cursor = v_1; - break; + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (base.find_among_b(a_5) == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("inn"))) + { + break lab2; + } + if (base.cursor > base.limit_backward) + { + break lab2; + } + return false; + } + base.cursor = base.limit - v_2; + } + if (!r_C()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + r_lengthen_V(); } return true; }; /** @return {boolean} */ - function r_R1() { - if (!(I_p1 <= base.cursor)) + function r_Step_7() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_6); + if (among_var == 0) { return false; } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("k")) + { + return false; + } + break; + case 2: + if (!base.slice_from("f")) + { + return false; + } + break; + case 3: + if (!base.slice_from("p")) + { + return false; + } + break; + } return true; }; /** @return {boolean} */ - function r_R2() { - if (!(I_p2 <= base.cursor)) + function r_Step_6() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_7); + if (among_var == 0) { return false; } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("b")) + { + return false; + } + break; + case 2: + if (!base.slice_from("c")) + { + return false; + } + break; + case 3: + if (!base.slice_from("d")) + { + return false; + } + break; + case 4: + if (!base.slice_from("f")) + { + return false; + } + break; + case 5: + if (!base.slice_from("g")) + { + return false; + } + break; + case 6: + if (!base.slice_from("h")) + { + return false; + } + break; + case 7: + if (!base.slice_from("j")) + { + return false; + } + break; + case 8: + if (!base.slice_from("k")) + { + return false; + } + break; + case 9: + if 
(!base.slice_from("l")) + { + return false; + } + break; + case 10: + if (!base.slice_from("m")) + { + return false; + } + break; + case 11: + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("i"))) + { + break lab0; + } + if (base.cursor > base.limit_backward) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; + } + if (!base.slice_from("n")) + { + return false; + } + break; + case 12: + if (!base.slice_from("p")) + { + return false; + } + break; + case 13: + if (!base.slice_from("q")) + { + return false; + } + break; + case 14: + if (!base.slice_from("r")) + { + return false; + } + break; + case 15: + if (!base.slice_from("s")) + { + return false; + } + break; + case 16: + if (!base.slice_from("t")) + { + return false; + } + break; + case 17: + if (!base.slice_from("v")) + { + return false; + } + break; + case 18: + if (!base.slice_from("w")) + { + return false; + } + break; + case 19: + if (!base.slice_from("x")) + { + return false; + } + break; + case 20: + if (!base.slice_from("z")) + { + return false; + } + break; + } return true; }; /** @return {boolean} */ - function r_undouble() { - var /** number */ v_1 = base.limit - base.cursor; - if (base.find_among_b(a_2) == 0) + function r_Step_1c() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_8); + if (among_var == 0) { return false; } - base.cursor = base.limit - v_1; - base.ket = base.cursor; - if (base.cursor <= base.limit_backward) + base.bra = base.cursor; + if (!r_R1()) { return false; } - base.cursor--; - base.bra = base.cursor; - if (!base.slice_del()) + if (!r_C()) { return false; } + switch (among_var) { + case 1: + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("n"))) + { + break lab0; + } + if (!r_R1()) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; + } + lab1: { + /** @const */ var /** number */ v_2 = 
base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("in"))) + { + break lab2; + } + if (base.cursor > base.limit_backward) + { + break lab2; + } + if (!base.slice_from("n")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + if (!base.slice_del()) + { + return false; + } + } + break; + case 2: + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab3: { + if (!(base.eq_s_b("h"))) + { + break lab3; + } + if (!r_R1()) + { + break lab3; + } + return false; + } + base.cursor = base.limit - v_3; + } + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab4: { + if (!(base.eq_s_b("en"))) + { + break lab4; + } + if (base.cursor > base.limit_backward) + { + break lab4; + } + return false; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_del()) + { + return false; + } + break; + } return true; }; /** @return {boolean} */ - function r_e_ending() { - B_e_found = false; - base.ket = base.cursor; - if (!(base.eq_s_b("e"))) + function r_Lose_prefix() { + var /** number */ among_var; + base.bra = base.cursor; + if (!(base.eq_s("ge"))) { return false; } - base.bra = base.cursor; - if (!r_R1()) + base.ket = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; { - return false; + /** @const */ var /** number */ c1 = base.cursor + 3; + if (c1 > base.limit) + { + return false; + } + base.cursor = c1; } - var /** number */ v_1 = base.limit - base.cursor; - if (!(base.out_grouping_b(g_v, 97, 232))) + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + golab0: while(true) { - return false; + /** @const */ var /** number */ v_3 = base.cursor; + lab1: { + lab2: { + /** @const */ var /** number */ v_4 = base.cursor; + lab3: { + if (!(base.eq_s("ij"))) + { + break lab3; + } + break lab2; + } + base.cursor = v_4; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab1; + } + } + break golab0; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + return false; + } + 
base.cursor++; } - base.cursor = base.limit - v_1; - if (!base.slice_del()) + while(true) { + /** @const */ var /** number */ v_5 = base.cursor; + lab4: { + lab5: { + /** @const */ var /** number */ v_6 = base.cursor; + lab6: { + if (!(base.eq_s("ij"))) + { + break lab6; + } + break lab5; + } + base.cursor = v_6; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab4; + } + } + continue; + } + base.cursor = v_5; + break; + } + lab7: { + if (base.cursor < base.limit) + { + break lab7; + } return false; } - B_e_found = true; - if (!r_undouble()) + base.cursor = v_2; + among_var = base.find_among(a_9); + switch (among_var) { + case 1: + return false; + } + B_GE_removed = true; + if (!base.slice_del()) { return false; } + /** @const */ var /** number */ v_7 = base.cursor; + lab8: { + base.bra = base.cursor; + among_var = base.find_among(a_10); + if (among_var == 0) + { + break lab8; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("e")) + { + return false; + } + break; + case 2: + if (!base.slice_from("i")) + { + return false; + } + break; + } + } + base.cursor = v_7; return true; }; /** @return {boolean} */ - function r_en_ending() { - if (!r_R1()) + function r_Lose_infix() { + var /** number */ among_var; + if (base.cursor >= base.limit) { return false; } - var /** number */ v_1 = base.limit - base.cursor; - if (!(base.out_grouping_b(g_v, 97, 232))) + base.cursor++; + golab0: while(true) { - return false; + lab1: { + base.bra = base.cursor; + if (!(base.eq_s("ge"))) + { + break lab1; + } + base.ket = base.cursor; + break golab0; + } + if (base.cursor >= base.limit) + { + return false; + } + base.cursor++; } - base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_1 = base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor + 3; + if (c1 > base.limit) + { + return false; + } + base.cursor = c1; + } + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + golab2: while(true) { - var /** 
number */ v_2 = base.limit - base.cursor; - lab0: { - if (!(base.eq_s_b("gem"))) - { - break lab0; + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + lab4: { + /** @const */ var /** number */ v_4 = base.cursor; + lab5: { + if (!(base.eq_s("ij"))) + { + break lab5; + } + break lab4; + } + base.cursor = v_4; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab3; + } } + break golab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { return false; } - base.cursor = base.limit - v_2; + base.cursor++; } - if (!base.slice_del()) + while(true) { + /** @const */ var /** number */ v_5 = base.cursor; + lab6: { + lab7: { + /** @const */ var /** number */ v_6 = base.cursor; + lab8: { + if (!(base.eq_s("ij"))) + { + break lab8; + } + break lab7; + } + base.cursor = v_6; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab6; + } + } + continue; + } + base.cursor = v_5; + break; + } + lab9: { + if (base.cursor < base.limit) + { + break lab9; + } return false; } - if (!r_undouble()) + base.cursor = v_2; + B_GE_removed = true; + if (!base.slice_del()) { return false; } - return true; - }; - - /** @return {boolean} */ - function r_standard_suffix() { - var /** number */ among_var; - var /** number */ v_1 = base.limit - base.cursor; - lab0: { - base.ket = base.cursor; - among_var = base.find_among_b(a_3); + /** @const */ var /** number */ v_7 = base.cursor; + lab10: { + base.bra = base.cursor; + among_var = base.find_among(a_11); if (among_var == 0) { - break lab0; + break lab10; } - base.bra = base.cursor; + base.ket = base.cursor; switch (among_var) { case 1: - if (!r_R1()) - { - break lab0; - } - if (!base.slice_from("heid")) + if (!base.slice_from("e")) { return false; } break; case 2: - if (!r_en_ending()) + if (!base.slice_from("i")) { - break lab0; + return false; } break; - case 3: - if (!r_R1()) - { - break lab0; - } - if (!(base.out_grouping_b(g_v_j, 97, 232))) + } + } + base.cursor = v_7; + return true; + }; + + /** @return {boolean} */ + 
function r_measure() { + I_p1 = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + lab1: { + if (!(base.out_grouping(g_v, 97, 252))) { - break lab0; + break lab1; } - if (!base.slice_del()) - { - return false; + continue; + } + break; + } + { + var v_2 = 1; + while(true) + { + /** @const */ var /** number */ v_3 = base.cursor; + lab2: { + lab3: { + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { + if (!(base.eq_s("ij"))) + { + break lab4; + } + break lab3; + } + base.cursor = v_4; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab2; + } + } + v_2--; + continue; } + base.cursor = v_3; break; + } + if (v_2 > 0) + { + break lab0; + } } - } - base.cursor = base.limit - v_1; - var /** number */ v_2 = base.limit - base.cursor; - r_e_ending(); - base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; - lab1: { - base.ket = base.cursor; - if (!(base.eq_s_b("heid"))) + if (!(base.out_grouping(g_v, 97, 252))) { - break lab1; + break lab0; } - base.bra = base.cursor; - if (!r_R2()) + I_p1 = base.cursor; + while(true) { - break lab1; + lab5: { + if (!(base.out_grouping(g_v, 97, 252))) + { + break lab5; + } + continue; + } + break; } { - var /** number */ v_4 = base.limit - base.cursor; - lab2: { - if (!(base.eq_s_b("c"))) - { - break lab2; + var v_5 = 1; + while(true) + { + /** @const */ var /** number */ v_6 = base.cursor; + lab6: { + lab7: { + /** @const */ var /** number */ v_7 = base.cursor; + lab8: { + if (!(base.eq_s("ij"))) + { + break lab8; + } + break lab7; + } + base.cursor = v_7; + if (!(base.in_grouping(g_v, 97, 252))) + { + break lab6; + } + } + v_5--; + continue; } - break lab1; + base.cursor = v_6; + break; + } + if (v_5 > 0) + { + break lab0; } - base.cursor = base.limit - v_4; } - if (!base.slice_del()) + if (!(base.out_grouping(g_v, 97, 252))) { - return false; + break lab0; } - base.ket = base.cursor; - if (!(base.eq_s_b("en"))) + I_p2 = 
base.cursor; + } + base.cursor = v_1; + return true; + }; + + this.stem = /** @return {boolean} */ function() { + B_stemmed = false; + r_measure(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!r_Step_1()) { - break lab1; + break lab0; } - base.bra = base.cursor; - if (!r_en_ending()) + B_stemmed = true; + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!r_Step_2()) { break lab1; } + B_stemmed = true; + } + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!r_Step_3()) + { + break lab2; + } + B_stemmed = true; } base.cursor = base.limit - v_3; - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab3: { - base.ket = base.cursor; - among_var = base.find_among_b(a_4); - if (among_var == 0) + if (!r_Step_4()) { break lab3; } - base.bra = base.cursor; - switch (among_var) { - case 1: - if (!r_R2()) - { - break lab3; - } - if (!base.slice_del()) - { - return false; - } - lab4: { - var /** number */ v_6 = base.limit - base.cursor; - lab5: { - base.ket = base.cursor; - if (!(base.eq_s_b("ig"))) - { - break lab5; - } - base.bra = base.cursor; - if (!r_R2()) - { - break lab5; - } - { - var /** number */ v_7 = base.limit - base.cursor; - lab6: { - if (!(base.eq_s_b("e"))) - { - break lab6; - } - break lab5; - } - base.cursor = base.limit - v_7; - } - if (!base.slice_del()) - { - return false; - } - break lab4; - } - base.cursor = base.limit - v_6; - if (!r_undouble()) - { - break lab3; - } - } - break; - case 2: - if (!r_R2()) - { - break lab3; - } - { - var /** number */ v_8 = base.limit - base.cursor; - lab7: { - if (!(base.eq_s_b("e"))) - { - break lab7; - } - break lab3; - } - base.cursor = base.limit - v_8; - } - if (!base.slice_del()) - { - return false; - } - break; - case 
3: - if (!r_R2()) - { - break lab3; - } - if (!base.slice_del()) - { - return false; - } - if (!r_e_ending()) - { - break lab3; - } - break; - case 4: - if (!r_R2()) - { - break lab3; - } - if (!base.slice_del()) - { - return false; - } - break; - case 5: - if (!r_R2()) - { - break lab3; - } - if (!B_e_found) - { - break lab3; - } - if (!base.slice_del()) - { - return false; - } - break; + B_stemmed = true; + } + base.cursor = base.limit - v_4; + base.cursor = base.limit_backward; + B_GE_removed = false; + /** @const */ var /** number */ v_5 = base.cursor; + lab4: { + /** @const */ var /** number */ v_6 = base.cursor; + if (!r_Lose_prefix()) + { + break lab4; } + base.cursor = v_6; + r_measure(); } - base.cursor = base.limit - v_5; - var /** number */ v_9 = base.limit - base.cursor; - lab8: { - if (!(base.out_grouping_b(g_v_I, 73, 232))) + base.cursor = v_5; + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab5: { + if (!B_GE_removed) { - break lab8; + break lab5; } - var /** number */ v_10 = base.limit - base.cursor; - if (base.find_among_b(a_5) == 0) + B_stemmed = true; + if (!r_Step_1c()) { - break lab8; + break lab5; } - if (!(base.out_grouping_b(g_v, 97, 232))) + } + base.cursor = base.limit - v_7; + base.cursor = base.limit_backward; + B_GE_removed = false; + /** @const */ var /** number */ v_8 = base.cursor; + lab6: { + /** @const */ var /** number */ v_9 = base.cursor; + if (!r_Lose_infix()) { - break lab8; + break lab6; } - base.cursor = base.limit - v_10; - base.ket = base.cursor; - if (base.cursor <= base.limit_backward) + base.cursor = v_9; + r_measure(); + } + base.cursor = v_8; + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + lab7: { + if (!B_GE_removed) { - break lab8; + break lab7; } - base.cursor--; - base.bra = base.cursor; - if (!base.slice_del()) + B_stemmed = true; + if 
(!r_Step_1c()) { - return false; + break lab7; } } - base.cursor = base.limit - v_9; - return true; - }; - - this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; - r_prelude(); - base.cursor = v_1; - var /** number */ v_2 = base.cursor; - r_mark_regions(); - base.cursor = v_2; + base.cursor = base.limit - v_10; + base.cursor = base.limit_backward; base.limit_backward = base.cursor; base.cursor = base.limit; - r_standard_suffix(); + /** @const */ var /** number */ v_11 = base.limit - base.cursor; + lab8: { + if (!r_Step_7()) + { + break lab8; + } + B_stemmed = true; + } + base.cursor = base.limit - v_11; + /** @const */ var /** number */ v_12 = base.limit - base.cursor; + lab9: { + if (!B_stemmed) + { + break lab9; + } + if (!r_Step_6()) + { + break lab9; + } + } + base.cursor = base.limit - v_12; base.cursor = base.limit_backward; - var /** number */ v_4 = base.cursor; - r_postlude(); - base.cursor = v_4; return true; }; diff --git a/sphinx/search/non-minified-js/dutch_porter-stemmer.js b/sphinx/search/non-minified-js/dutch_porter-stemmer.js new file mode 100644 index 00000000000..6bbf2bf2e8e --- /dev/null +++ b/sphinx/search/non-minified-js/dutch_porter-stemmer.js @@ -0,0 +1,637 @@ +// Generated from dutch_porter.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var DutchPorterStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["", -1, 6], + ["\u00E1", 0, 1], + ["\u00E4", 0, 1], + ["\u00E9", 0, 2], + ["\u00EB", 0, 2], + ["\u00ED", 0, 3], + ["\u00EF", 0, 3], + ["\u00F3", 0, 4], + ["\u00F6", 0, 4], + ["\u00FA", 0, 5], + ["\u00FC", 0, 5] + ]; + + /** @const */ var a_1 = [ + ["", -1, 3], + ["I", 0, 2], + ["Y", 0, 1] + ]; + + /** @const */ var a_2 = [ + ["dd", -1, -1], + ["kk", -1, -1], + ["tt", -1, -1] + ]; + + /** @const */ var a_3 = [ + ["ene", -1, 2], + ["se", -1, 3], + ["en", -1, 2], + ["heden", 2, 1], + ["s", -1, 3] + ]; + + /** @const */ var a_4 = [ + ["end", -1, 1], + 
["ig", -1, 2], + ["ing", -1, 1], + ["lijk", -1, 3], + ["baar", -1, 4], + ["bar", -1, 5] + ]; + + /** @const */ var a_5 = [ + ["aa", -1, -1], + ["ee", -1, -1], + ["oo", -1, -1], + ["uu", -1, -1] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + + /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + + /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + + var /** number */ I_x = 0; + var /** number */ I_p2 = 0; + var /** number */ I_p1 = 0; + var /** boolean */ B_e_found = false; + + + /** @return {boolean} */ + function r_prelude() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab0: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("a")) + { + return false; + } + break; + case 2: + if (!base.slice_from("e")) + { + return false; + } + break; + case 3: + if (!base.slice_from("i")) + { + return false; + } + break; + case 4: + if (!base.slice_from("o")) + { + return false; + } + break; + case 5: + if (!base.slice_from("u")) + { + return false; + } + break; + case 6: + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; + break; + } + continue; + } + base.cursor = v_2; + break; + } + base.cursor = v_1; + /** @const */ var /** number */ v_3 = base.cursor; + lab1: { + base.bra = base.cursor; + if (!(base.eq_s("y"))) + { + base.cursor = v_3; + break lab1; + } + base.ket = base.cursor; + if (!base.slice_from("Y")) + { + return false; + } + } + while(true) + { + /** @const */ var /** number */ v_4 = base.cursor; + lab2: { + if (!base.go_out_grouping(g_v, 97, 232)) + { + break lab2; + } + base.cursor++; + /** @const */ var /** number */ v_5 = base.cursor; + lab3: { + base.bra = base.cursor; + lab4: { 
+ /** @const */ var /** number */ v_6 = base.cursor; + lab5: { + if (!(base.eq_s("i"))) + { + break lab5; + } + base.ket = base.cursor; + /** @const */ var /** number */ v_7 = base.cursor; + lab6: { + if (!(base.in_grouping(g_v, 97, 232))) + { + break lab6; + } + if (!base.slice_from("I")) + { + return false; + } + } + base.cursor = v_7; + break lab4; + } + base.cursor = v_6; + if (!(base.eq_s("y"))) + { + base.cursor = v_5; + break lab3; + } + base.ket = base.cursor; + if (!base.slice_from("Y")) + { + return false; + } + } + } + continue; + } + base.cursor = v_4; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_mark_regions() { + I_p1 = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor + 3; + if (c1 > base.limit) + { + return false; + } + base.cursor = c1; + } + I_x = base.cursor; + base.cursor = v_1; + if (!base.go_out_grouping(g_v, 97, 232)) + { + return false; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 232)) + { + return false; + } + base.cursor++; + I_p1 = base.cursor; + lab0: { + if (I_p1 >= I_x) + { + break lab0; + } + I_p1 = I_x; + } + if (!base.go_out_grouping(g_v, 97, 232)) + { + return false; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 232)) + { + return false; + } + base.cursor++; + I_p2 = base.cursor; + return true; + }; + + /** @return {boolean} */ + function r_postlude() { + var /** number */ among_var; + while(true) + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + among_var = base.find_among(a_1); + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("y")) + { + return false; + } + break; + case 2: + if (!base.slice_from("i")) + { + return false; + } + break; + case 3: + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; + break; + } + continue; + } + base.cursor = v_1; + break; + } + return true; + }; + + /** 
@return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_undouble() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (base.find_among_b(a_2) == 0) + { + return false; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (base.cursor <= base.limit_backward) + { + return false; + } + base.cursor--; + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_e_ending() { + B_e_found = false; + base.ket = base.cursor; + if (!(base.eq_s_b("e"))) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (!(base.out_grouping_b(g_v, 97, 232))) + { + return false; + } + base.cursor = base.limit - v_1; + if (!base.slice_del()) + { + return false; + } + B_e_found = true; + if (!r_undouble()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_en_ending() { + if (!r_R1()) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (!(base.out_grouping_b(g_v, 97, 232))) + { + return false; + } + base.cursor = base.limit - v_1; + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("gem"))) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_2; + } + if (!base.slice_del()) + { + return false; + } + if (!r_undouble()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_standard_suffix() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + break lab0; + } + base.bra = base.cursor; + switch (among_var) { + case 1: 
+ if (!r_R1()) + { + break lab0; + } + if (!base.slice_from("heid")) + { + return false; + } + break; + case 2: + if (!r_en_ending()) + { + break lab0; + } + break; + case 3: + if (!r_R1()) + { + break lab0; + } + if (!(base.out_grouping_b(g_v_j, 97, 232))) + { + break lab0; + } + if (!base.slice_del()) + { + return false; + } + break; + } + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_e_ending(); + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + if (!(base.eq_s_b("heid"))) + { + break lab1; + } + base.bra = base.cursor; + if (!r_R2()) + { + break lab1; + } + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("c"))) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_del()) + { + return false; + } + base.ket = base.cursor; + if (!(base.eq_s_b("en"))) + { + break lab1; + } + base.bra = base.cursor; + if (!r_en_ending()) + { + break lab1; + } + } + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab3: { + base.ket = base.cursor; + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + break lab3; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R2()) + { + break lab3; + } + if (!base.slice_del()) + { + return false; + } + lab4: { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab5: { + base.ket = base.cursor; + if (!(base.eq_s_b("ig"))) + { + break lab5; + } + base.bra = base.cursor; + if (!r_R2()) + { + break lab5; + } + { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab6: { + if (!(base.eq_s_b("e"))) + { + break lab6; + } + break lab5; + } + base.cursor = base.limit - v_7; + } + if (!base.slice_del()) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_6; + if (!r_undouble()) + { + 
break lab3; + } + } + break; + case 2: + if (!r_R2()) + { + break lab3; + } + { + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab7: { + if (!(base.eq_s_b("e"))) + { + break lab7; + } + break lab3; + } + base.cursor = base.limit - v_8; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (!r_R2()) + { + break lab3; + } + if (!base.slice_del()) + { + return false; + } + if (!r_e_ending()) + { + break lab3; + } + break; + case 4: + if (!r_R2()) + { + break lab3; + } + if (!base.slice_del()) + { + return false; + } + break; + case 5: + if (!r_R2()) + { + break lab3; + } + if (!B_e_found) + { + break lab3; + } + if (!base.slice_del()) + { + return false; + } + break; + } + } + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + lab8: { + if (!(base.out_grouping_b(g_v_I, 73, 232))) + { + break lab8; + } + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + if (base.find_among_b(a_5) == 0) + { + break lab8; + } + if (!(base.out_grouping_b(g_v, 97, 232))) + { + break lab8; + } + base.cursor = base.limit - v_10; + base.ket = base.cursor; + if (base.cursor <= base.limit_backward) + { + break lab8; + } + base.cursor--; + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + base.cursor = base.limit - v_9; + return true; + }; + + this.stem = /** @return {boolean} */ function() { + /** @const */ var /** number */ v_1 = base.cursor; + r_prelude(); + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + r_mark_regions(); + base.cursor = v_2; + base.limit_backward = base.cursor; base.cursor = base.limit; + r_standard_suffix(); + base.cursor = base.limit_backward; + /** @const */ var /** number */ v_3 = base.cursor; + r_postlude(); + base.cursor = v_3; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git 
a/sphinx/search/non-minified-js/english-stemmer.js b/sphinx/search/non-minified-js/english-stemmer.js new file mode 100644 index 00000000000..056760ee8aa --- /dev/null +++ b/sphinx/search/non-minified-js/english-stemmer.js @@ -0,0 +1,1066 @@ +// Generated from english.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var EnglishStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["arsen", -1, -1], + ["commun", -1, -1], + ["emerg", -1, -1], + ["gener", -1, -1], + ["later", -1, -1], + ["organ", -1, -1], + ["past", -1, -1], + ["univers", -1, -1] + ]; + + /** @const */ var a_1 = [ + ["'", -1, 1], + ["'s'", 0, 1], + ["'s", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["ied", -1, 2], + ["s", -1, 3], + ["ies", 1, 2], + ["sses", 1, 1], + ["ss", 1, -1], + ["us", 1, -1] + ]; + + /** @const */ var a_3 = [ + ["succ", -1, 1], + ["proc", -1, 1], + ["exc", -1, 1] + ]; + + /** @const */ var a_4 = [ + ["even", -1, 2], + ["cann", -1, 2], + ["inn", -1, 2], + ["earr", -1, 2], + ["herr", -1, 2], + ["out", -1, 2], + ["y", -1, 1] + ]; + + /** @const */ var a_5 = [ + ["", -1, -1], + ["ed", 0, 2], + ["eed", 1, 1], + ["ing", 0, 3], + ["edly", 0, 2], + ["eedly", 4, 1], + ["ingly", 0, 2] + ]; + + /** @const */ var a_6 = [ + ["", -1, 3], + ["bb", 0, 2], + ["dd", 0, 2], + ["ff", 0, 2], + ["gg", 0, 2], + ["bl", 0, 1], + ["mm", 0, 2], + ["nn", 0, 2], + ["pp", 0, 2], + ["rr", 0, 2], + ["at", 0, 1], + ["tt", 0, 2], + ["iz", 0, 1] + ]; + + /** @const */ var a_7 = [ + ["anci", -1, 3], + ["enci", -1, 2], + ["ogi", -1, 14], + ["li", -1, 16], + ["bli", 3, 12], + ["abli", 4, 4], + ["alli", 3, 8], + ["fulli", 3, 9], + ["lessli", 3, 15], + ["ousli", 3, 10], + ["entli", 3, 5], + ["aliti", -1, 8], + ["biliti", -1, 12], + ["iviti", -1, 11], + ["tional", -1, 1], + ["ational", 14, 7], + ["alism", -1, 8], + ["ation", -1, 7], + ["ization", 17, 6], + ["izer", -1, 6], + ["ator", -1, 7], + ["iveness", -1, 11], + ["fulness", -1, 9], + ["ousness", -1, 10], + 
["ogist", -1, 13] + ]; + + /** @const */ var a_8 = [ + ["icate", -1, 4], + ["ative", -1, 6], + ["alize", -1, 3], + ["iciti", -1, 4], + ["ical", -1, 4], + ["tional", -1, 1], + ["ational", 5, 2], + ["ful", -1, 5], + ["ness", -1, 5] + ]; + + /** @const */ var a_9 = [ + ["ic", -1, 1], + ["ance", -1, 1], + ["ence", -1, 1], + ["able", -1, 1], + ["ible", -1, 1], + ["ate", -1, 1], + ["ive", -1, 1], + ["ize", -1, 1], + ["iti", -1, 1], + ["al", -1, 1], + ["ism", -1, 1], + ["ion", -1, 2], + ["er", -1, 1], + ["ous", -1, 1], + ["ant", -1, 1], + ["ent", -1, 1], + ["ment", 15, 1], + ["ement", 16, 1] + ]; + + /** @const */ var a_10 = [ + ["e", -1, 1], + ["l", -1, 2] + ]; + + /** @const */ var a_11 = [ + ["andes", -1, -1], + ["atlas", -1, -1], + ["bias", -1, -1], + ["cosmos", -1, -1], + ["early", -1, 5], + ["gently", -1, 3], + ["howe", -1, -1], + ["idly", -1, 2], + ["news", -1, -1], + ["only", -1, 6], + ["singly", -1, 7], + ["skies", -1, 1], + ["sky", -1, -1], + ["ugly", -1, 4] + ]; + + /** @const */ var /** Array */ g_aeo = [17, 64]; + + /** @const */ var /** Array */ g_v = [17, 65, 16, 1]; + + /** @const */ var /** Array */ g_v_WXY = [1, 17, 65, 208, 1]; + + /** @const */ var /** Array */ g_valid_LI = [55, 141, 2]; + + var /** boolean */ B_Y_found = false; + var /** number */ I_p2 = 0; + var /** number */ I_p1 = 0; + + + /** @return {boolean} */ + function r_prelude() { + B_Y_found = false; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + if (!(base.eq_s("'"))) + { + break lab0; + } + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + base.bra = base.cursor; + if (!(base.eq_s("y"))) + { + break lab1; + } + base.ket = base.cursor; + if (!base.slice_from("Y")) + { + return false; + } + B_Y_found = true; + } + base.cursor = v_2; + /** @const */ var /** number */ v_3 = base.cursor; + lab2: { + while(true) + { + /** @const */ var /** 
number */ v_4 = base.cursor; + lab3: { + golab4: while(true) + { + /** @const */ var /** number */ v_5 = base.cursor; + lab5: { + if (!(base.in_grouping(g_v, 97, 121))) + { + break lab5; + } + base.bra = base.cursor; + if (!(base.eq_s("y"))) + { + break lab5; + } + base.ket = base.cursor; + base.cursor = v_5; + break golab4; + } + base.cursor = v_5; + if (base.cursor >= base.limit) + { + break lab3; + } + base.cursor++; + } + if (!base.slice_from("Y")) + { + return false; + } + B_Y_found = true; + continue; + } + base.cursor = v_4; + break; + } + } + base.cursor = v_3; + return true; + }; + + /** @return {boolean} */ + function r_mark_regions() { + I_p1 = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + lab1: { + /** @const */ var /** number */ v_2 = base.cursor; + lab2: { + if (base.find_among(a_0) == 0) + { + break lab2; + } + break lab1; + } + base.cursor = v_2; + if (!base.go_out_grouping(g_v, 97, 121)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 121)) + { + break lab0; + } + base.cursor++; + } + I_p1 = base.cursor; + if (!base.go_out_grouping(g_v, 97, 121)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 121)) + { + break lab0; + } + base.cursor++; + I_p2 = base.cursor; + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_shortv() { + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.out_grouping_b(g_v_WXY, 89, 121))) + { + break lab1; + } + if (!(base.in_grouping_b(g_v, 97, 121))) + { + break lab1; + } + if (!(base.out_grouping_b(g_v, 97, 121))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + if (!(base.out_grouping_b(g_v, 97, 121))) + { + break lab2; + } + if (!(base.in_grouping_b(g_v, 97, 121))) + { + break lab2; + } + if (base.cursor > base.limit_backward) + { + break lab2; + } + break lab0; + } + base.cursor = base.limit - v_1; + if 
(!(base.eq_s_b("past"))) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_Step_1a() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + base.cursor = base.limit - v_1; + break lab0; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("ss")) + { + return false; + } + break; + case 2: + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + { + /** @const */ var /** number */ c1 = base.cursor - 2; + if (c1 < base.limit_backward) + { + break lab2; + } + base.cursor = c1; + } + if (!base.slice_from("i")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + if (!base.slice_from("ie")) + { + return false; + } + } + break; + case 3: + if (base.cursor <= base.limit_backward) + { + return false; + } + base.cursor--; + if (!base.go_out_grouping_b(g_v, 97, 121)) + { + return false; + } + base.cursor--; + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_1b() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_5); + base.bra = base.cursor; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + switch (among_var) { + case 1: + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + lab3: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab4: { + if (base.find_among_b(a_3) == 0) + { + break 
lab4; + } + if (base.cursor > base.limit_backward) + { + break lab4; + } + break lab3; + } + base.cursor = base.limit - v_3; + if (!r_R1()) + { + break lab2; + } + if (!base.slice_from("ee")) + { + return false; + } + } + } + base.cursor = base.limit - v_2; + break; + case 2: + break lab1; + case 3: + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + break lab1; + } + switch (among_var) { + case 1: + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + if (!(base.out_grouping_b(g_v, 97, 121))) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + base.cursor = base.limit - v_4; + base.bra = base.cursor; + if (!base.slice_from("ie")) + { + return false; + } + break; + case 2: + if (base.cursor > base.limit_backward) + { + break lab1; + } + break; + } + break; + } + break lab0; + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + if (!base.go_out_grouping_b(g_v, 97, 121)) + { + return false; + } + base.cursor--; + base.cursor = base.limit - v_5; + if (!base.slice_del()) + { + return false; + } + base.ket = base.cursor; + base.bra = base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + among_var = base.find_among_b(a_6); + switch (among_var) { + case 1: + if (!base.slice_from("e")) + { + return false; + } + return false; + case 2: + { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab5: { + if (!(base.in_grouping_b(g_aeo, 97, 111))) + { + break lab5; + } + if (base.cursor > base.limit_backward) + { + break lab5; + } + return false; + } + base.cursor = base.limit - v_7; + } + break; + case 3: + if (base.cursor != I_p1) + { + return false; + } + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + if (!r_shortv()) + { + return false; + } + base.cursor = base.limit - v_8; + if (!base.slice_from("e")) + { + return false; + } + return false; + } + base.cursor = base.limit - v_6; + base.ket = 
base.cursor; + if (base.cursor <= base.limit_backward) + { + return false; + } + base.cursor--; + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_Step_1c() { + base.ket = base.cursor; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("y"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!(base.eq_s_b("Y"))) + { + return false; + } + } + base.bra = base.cursor; + if (!(base.out_grouping_b(g_v, 97, 121))) + { + return false; + } + lab2: { + if (base.cursor > base.limit_backward) + { + break lab2; + } + return false; + } + if (!base.slice_from("i")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_2() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_7); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("tion")) + { + return false; + } + break; + case 2: + if (!base.slice_from("ence")) + { + return false; + } + break; + case 3: + if (!base.slice_from("ance")) + { + return false; + } + break; + case 4: + if (!base.slice_from("able")) + { + return false; + } + break; + case 5: + if (!base.slice_from("ent")) + { + return false; + } + break; + case 6: + if (!base.slice_from("ize")) + { + return false; + } + break; + case 7: + if (!base.slice_from("ate")) + { + return false; + } + break; + case 8: + if (!base.slice_from("al")) + { + return false; + } + break; + case 9: + if (!base.slice_from("ful")) + { + return false; + } + break; + case 10: + if (!base.slice_from("ous")) + { + return false; + } + break; + case 11: + if (!base.slice_from("ive")) + { + return false; + } + break; + case 12: + if (!base.slice_from("ble")) + { + return false; + } + break; + case 13: + if (!base.slice_from("og")) + { + 
return false; + } + break; + case 14: + if (!(base.eq_s_b("l"))) + { + return false; + } + if (!base.slice_from("og")) + { + return false; + } + break; + case 15: + if (!base.slice_from("less")) + { + return false; + } + break; + case 16: + if (!(base.in_grouping_b(g_valid_LI, 99, 116))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_3() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_8); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("tion")) + { + return false; + } + break; + case 2: + if (!base.slice_from("ate")) + { + return false; + } + break; + case 3: + if (!base.slice_from("al")) + { + return false; + } + break; + case 4: + if (!base.slice_from("ic")) + { + return false; + } + break; + case 5: + if (!base.slice_del()) + { + return false; + } + break; + case 6: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_4() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_9); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R2()) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("s"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!(base.eq_s_b("t"))) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_5() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = 
base.find_among_b(a_10); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + lab0: { + lab1: { + if (!r_R2()) + { + break lab1; + } + break lab0; + } + if (!r_R1()) + { + return false; + } + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab2: { + if (!r_shortv()) + { + break lab2; + } + return false; + } + base.cursor = base.limit - v_1; + } + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + return false; + } + if (!(base.eq_s_b("l"))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_exception1() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_11); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + if (base.cursor < base.limit) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("sky")) + { + return false; + } + break; + case 2: + if (!base.slice_from("idl")) + { + return false; + } + break; + case 3: + if (!base.slice_from("gentl")) + { + return false; + } + break; + case 4: + if (!base.slice_from("ugli")) + { + return false; + } + break; + case 5: + if (!base.slice_from("earli")) + { + return false; + } + break; + case 6: + if (!base.slice_from("onli")) + { + return false; + } + break; + case 7: + if (!base.slice_from("singl")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_postlude() { + if (!B_Y_found) + { + return false; + } + while(true) + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + golab1: while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab2: { + base.bra = base.cursor; + if (!(base.eq_s("Y"))) + { + break lab2; + } + base.ket = base.cursor; + base.cursor = v_2; + break golab1; + } + base.cursor = v_2; + if (base.cursor >= base.limit) + { + break lab0; + 
} + base.cursor++; + } + if (!base.slice_from("y")) + { + return false; + } + continue; + } + base.cursor = v_1; + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + lab0: { + /** @const */ var /** number */ v_1 = base.cursor; + lab1: { + if (!r_exception1()) + { + break lab1; + } + break lab0; + } + base.cursor = v_1; + lab2: { + { + /** @const */ var /** number */ v_2 = base.cursor; + lab3: { + { + /** @const */ var /** number */ c1 = base.cursor + 3; + if (c1 > base.limit) + { + break lab3; + } + base.cursor = c1; + } + break lab2; + } + base.cursor = v_2; + } + break lab0; + } + base.cursor = v_1; + r_prelude(); + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_Step_1a(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_Step_1b(); + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + r_Step_1c(); + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + r_Step_2(); + base.cursor = base.limit - v_6; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + r_Step_3(); + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + r_Step_4(); + base.cursor = base.limit - v_8; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + r_Step_5(); + base.cursor = base.limit - v_9; + base.cursor = base.limit_backward; + /** @const */ var /** number */ v_10 = base.cursor; + r_postlude(); + base.cursor = v_10; + } + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/esperanto-stemmer.js b/sphinx/search/non-minified-js/esperanto-stemmer.js new file mode 100644 index 
00000000000..8fc6af00f1a --- /dev/null +++ b/sphinx/search/non-minified-js/esperanto-stemmer.js @@ -0,0 +1,762 @@ +// Generated from esperanto.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var EsperantoStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["", -1, 14], + ["-", 0, 13], + ["cx", 0, 1], + ["gx", 0, 2], + ["hx", 0, 3], + ["jx", 0, 4], + ["q", 0, 12], + ["sx", 0, 5], + ["ux", 0, 6], + ["w", 0, 12], + ["x", 0, 12], + ["y", 0, 12], + ["\u00E1", 0, 7], + ["\u00E9", 0, 8], + ["\u00ED", 0, 9], + ["\u00F3", 0, 10], + ["\u00FA", 0, 11] + ]; + + /** @const */ var a_1 = [ + ["as", -1, -1], + ["i", -1, -1], + ["is", 1, -1], + ["os", -1, -1], + ["u", -1, -1], + ["us", 4, -1] + ]; + + /** @const */ var a_2 = [ + ["ci", -1, -1], + ["gi", -1, -1], + ["hi", -1, -1], + ["li", -1, -1], + ["ili", 3, -1], + ["\u015Dli", 3, -1], + ["mi", -1, -1], + ["ni", -1, -1], + ["oni", 7, -1], + ["ri", -1, -1], + ["si", -1, -1], + ["vi", -1, -1], + ["ivi", 11, -1], + ["\u011Di", -1, -1], + ["\u015Di", -1, -1], + ["i\u015Di", 14, -1], + ["mal\u015Di", 14, -1] + ]; + + /** @const */ var a_3 = [ + ["amb", -1, -1], + ["bald", -1, -1], + ["malbald", 1, -1], + ["morg", -1, -1], + ["postmorg", 3, -1], + ["adi", -1, -1], + ["hodi", -1, -1], + ["ank", -1, -1], + ["\u0109irk", -1, -1], + ["tut\u0109irk", 8, -1], + ["presk", -1, -1], + ["almen", -1, -1], + ["apen", -1, -1], + ["hier", -1, -1], + ["anta\u016Dhier", 13, -1], + ["malgr", -1, -1], + ["ankor", -1, -1], + ["kontr", -1, -1], + ["anstat", -1, -1], + ["kvaz", -1, -1] + ]; + + /** @const */ var a_4 = [ + ["aliu", -1, -1], + ["unu", -1, -1] + ]; + + /** @const */ var a_5 = [ + ["aha", -1, -1], + ["haha", 0, -1], + ["haleluja", -1, -1], + ["hola", -1, -1], + ["hosana", -1, -1], + ["maltra", -1, -1], + ["hura", -1, -1], + ["\u0125a\u0125a", -1, -1], + ["ekde", -1, -1], + ["elde", -1, -1], + ["disde", -1, -1], + ["ehe", -1, -1], + ["maltre", -1, -1], + ["dirlididi", -1, -1], + 
["malpli", -1, -1], + ["mal\u0109i", -1, -1], + ["malkaj", -1, -1], + ["amen", -1, -1], + ["tamen", 17, -1], + ["oho", -1, -1], + ["maltro", -1, -1], + ["minus", -1, -1], + ["uhu", -1, -1], + ["muu", -1, -1] + ]; + + /** @const */ var a_6 = [ + ["tri", -1, -1], + ["du", -1, -1], + ["unu", -1, -1] + ]; + + /** @const */ var a_7 = [ + ["dek", -1, -1], + ["cent", -1, -1] + ]; + + /** @const */ var a_8 = [ + ["k", -1, -1], + ["kelk", 0, -1], + ["nen", -1, -1], + ["t", -1, -1], + ["mult", 3, -1], + ["samt", 3, -1], + ["\u0109", -1, -1] + ]; + + /** @const */ var a_9 = [ + ["a", -1, -1], + ["e", -1, -1], + ["i", -1, -1], + ["j", -1, -1, r_not_after_letter], + ["aj", 3, -1], + ["oj", 3, -1], + ["n", -1, -1, r_not_after_letter], + ["an", 6, -1], + ["en", 6, -1], + ["jn", 6, -1, r_not_after_letter], + ["ajn", 9, -1], + ["ojn", 9, -1], + ["on", 6, -1], + ["o", -1, -1], + ["as", -1, -1], + ["is", -1, -1], + ["os", -1, -1], + ["us", -1, -1], + ["u", -1, -1] + ]; + + /** @const */ var /** Array */ g_vowel = [17, 65, 16]; + + /** @const */ var /** Array */ g_aou = [1, 64, 16]; + + /** @const */ var /** Array */ g_digit = [255, 3]; + + var /** boolean */ B_foreign = false; + + + /** @return {boolean} */ + function r_canonical_form() { + var /** number */ among_var; + B_foreign = false; + while(true) + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u0109")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u011D")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u0125")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u0135")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u015D")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u016D")) + { + return false; + } + break; + case 7: + if (!base.slice_from("a")) + { + return 
false; + } + B_foreign = true; + break; + case 8: + if (!base.slice_from("e")) + { + return false; + } + B_foreign = true; + break; + case 9: + if (!base.slice_from("i")) + { + return false; + } + B_foreign = true; + break; + case 10: + if (!base.slice_from("o")) + { + return false; + } + B_foreign = true; + break; + case 11: + if (!base.slice_from("u")) + { + return false; + } + B_foreign = true; + break; + case 12: + B_foreign = true; + break; + case 13: + B_foreign = false; + break; + case 14: + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; + break; + } + continue; + } + base.cursor = v_1; + break; + } + lab1: { + if (!B_foreign) + { + break lab1; + } + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_initial_apostrophe() { + base.bra = base.cursor; + if (!(base.eq_s("'"))) + { + return false; + } + base.ket = base.cursor; + if (!(base.eq_s("st"))) + { + return false; + } + if (base.find_among(a_1) == 0) + { + return false; + } + if (base.cursor < base.limit) + { + return false; + } + if (!base.slice_from("e")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_pronoun() { + base.ket = base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("n"))) + { + base.cursor = base.limit - v_1; + break lab0; + } + } + base.bra = base.cursor; + if (base.find_among_b(a_2) == 0) + { + return false; + } + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + if (base.cursor > base.limit_backward) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_2; + if (!(base.eq_s_b("-"))) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_final_apostrophe() { + base.ket = base.cursor; + if (!(base.eq_s_b("'"))) + { + return false; + } + base.bra = base.cursor; + lab0: { + /** @const */ var /** number */ v_1 = 
base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("l"))) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + if (!base.slice_from("a")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + if (!(base.eq_s_b("un"))) + { + break lab2; + } + if (base.cursor > base.limit_backward) + { + break lab2; + } + if (!base.slice_from("u")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab3: { + if (base.find_among_b(a_3) == 0) + { + break lab3; + } + lab4: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab5: { + if (base.cursor > base.limit_backward) + { + break lab5; + } + break lab4; + } + base.cursor = base.limit - v_2; + if (!(base.eq_s_b("-"))) + { + break lab3; + } + } + if (!base.slice_from("a\u016D")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!base.slice_from("o")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_ujn_suffix() { + base.ket = base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("n"))) + { + base.cursor = base.limit - v_1; + break lab0; + } + } + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("j"))) + { + base.cursor = base.limit - v_2; + break lab1; + } + } + base.bra = base.cursor; + if (base.find_among_b(a_4) == 0) + { + return false; + } + lab2: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab3: { + if (base.cursor > base.limit_backward) + { + break lab3; + } + break lab2; + } + base.cursor = base.limit - v_3; + if (!(base.eq_s_b("-"))) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_uninflected() { + if (base.find_among_b(a_5) == 0) + { + return false; + } + lab0: { + /** @const */ var /** number */ v_1 = base.limit - 
base.cursor; + lab1: { + if (base.cursor > base.limit_backward) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!(base.eq_s_b("-"))) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_merged_numeral() { + if (base.find_among_b(a_6) == 0) + { + return false; + } + if (base.find_among_b(a_7) == 0) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_correlative() { + base.ket = base.cursor; + base.bra = base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("n"))) + { + base.cursor = base.limit - v_3; + break lab2; + } + } + base.bra = base.cursor; + if (!(base.eq_s_b("e"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab3: { + if (!(base.eq_s_b("n"))) + { + base.cursor = base.limit - v_4; + break lab3; + } + } + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab4: { + if (!(base.eq_s_b("j"))) + { + base.cursor = base.limit - v_5; + break lab4; + } + } + base.bra = base.cursor; + if (!(base.in_grouping_b(g_aou, 97, 117))) + { + return false; + } + } + if (!(base.eq_s_b("i"))) + { + return false; + } + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab5: { + if (base.find_among_b(a_8) == 0) + { + base.cursor = base.limit - v_6; + break lab5; + } + } + lab6: { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab7: { + if (base.cursor > base.limit_backward) + { + break lab7; + } + break lab6; + } + base.cursor = base.limit - v_7; + if (!(base.eq_s_b("-"))) + { + return false; + } + } + base.cursor = base.limit - v_1; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ 
+ function r_long_word() { + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + for (var /** number */ v_2 = 2; v_2 > 0; v_2--) + { + if (!base.go_out_grouping_b(g_vowel, 97, 117)) + { + break lab1; + } + base.cursor--; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + golab3: while(true) + { + lab4: { + if (!(base.eq_s_b("-"))) + { + break lab4; + } + break golab3; + } + if (base.cursor <= base.limit_backward) + { + break lab2; + } + base.cursor--; + } + if (base.cursor <= base.limit_backward) + { + break lab2; + } + base.cursor--; + break lab0; + } + base.cursor = base.limit - v_1; + if (!base.go_out_grouping_b(g_digit, 48, 57)) + { + return false; + } + base.cursor--; + } + return true; + }; + + /** @return {boolean} */ + function r_not_after_letter() { + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("-"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!(base.in_grouping_b(g_digit, 48, 57))) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_standard_suffix() { + base.ket = base.cursor; + if (base.find_among_b(a_9) == 0) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("-"))) + { + base.cursor = base.limit - v_1; + break lab0; + } + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + /** @const */ var /** number */ v_1 = base.cursor; + if (!r_canonical_form()) + { + return false; + } + base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + r_initial_apostrophe(); + base.cursor = v_2; + base.limit_backward = base.cursor; base.cursor = base.limit; + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab0: { + if (!r_pronoun()) + { + break lab0; + } + return false; + } + 
base.cursor = base.limit - v_3; + } + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_final_apostrophe(); + base.cursor = base.limit - v_4; + { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab1: { + if (!r_correlative()) + { + break lab1; + } + return false; + } + base.cursor = base.limit - v_5; + } + { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab2: { + if (!r_uninflected()) + { + break lab2; + } + return false; + } + base.cursor = base.limit - v_6; + } + { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab3: { + if (!r_merged_numeral()) + { + break lab3; + } + return false; + } + base.cursor = base.limit - v_7; + } + { + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab4: { + if (!r_ujn_suffix()) + { + break lab4; + } + return false; + } + base.cursor = base.limit - v_8; + } + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + if (!r_long_word()) + { + return false; + } + base.cursor = base.limit - v_9; + if (!r_standard_suffix()) + { + return false; + } + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/estonian-stemmer.js b/sphinx/search/non-minified-js/estonian-stemmer.js new file mode 100644 index 00000000000..2700c0b3379 --- /dev/null +++ b/sphinx/search/non-minified-js/estonian-stemmer.js @@ -0,0 +1,1088 @@ +// Generated from estonian.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var EstonianStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["gi", -1, 1], + ["ki", -1, 2] + ]; + + /** @const */ var a_1 = [ + ["da", -1, 3], + ["mata", -1, 1], + ["b", -1, 3], + ["ksid", -1, 1], + ["nuksid", 3, 1], + ["me", -1, 3], + ["sime", 5, 1], + ["ksime", 6, 1], + ["nuksime", 7, 1], + 
["akse", -1, 2], + ["dakse", 9, 1], + ["takse", 9, 1], + ["site", -1, 1], + ["ksite", 12, 1], + ["nuksite", 13, 1], + ["n", -1, 3], + ["sin", 15, 1], + ["ksin", 16, 1], + ["nuksin", 17, 1], + ["daks", -1, 1], + ["taks", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["aa", -1, -1], + ["ee", -1, -1], + ["ii", -1, -1], + ["oo", -1, -1], + ["uu", -1, -1], + ["\u00E4\u00E4", -1, -1], + ["\u00F5\u00F5", -1, -1], + ["\u00F6\u00F6", -1, -1], + ["\u00FC\u00FC", -1, -1] + ]; + + /** @const */ var a_3 = [ + ["i", -1, 1] + ]; + + /** @const */ var a_4 = [ + ["lane", -1, 1], + ["line", -1, 3], + ["mine", -1, 2], + ["lasse", -1, 1], + ["lisse", -1, 3], + ["misse", -1, 2], + ["lasi", -1, 1], + ["lisi", -1, 3], + ["misi", -1, 2], + ["last", -1, 1], + ["list", -1, 3], + ["mist", -1, 2] + ]; + + /** @const */ var a_5 = [ + ["ga", -1, 1], + ["ta", -1, 1], + ["le", -1, 1], + ["sse", -1, 1], + ["l", -1, 1], + ["s", -1, 1], + ["ks", 5, 1], + ["t", -1, 2], + ["lt", 7, 1], + ["st", 7, 1] + ]; + + /** @const */ var a_6 = [ + ["", -1, 2], + ["las", 0, 1], + ["lis", 0, 1], + ["mis", 0, 1], + ["t", 0, -1] + ]; + + /** @const */ var a_7 = [ + ["d", -1, 4], + ["sid", 0, 2], + ["de", -1, 4], + ["ikkude", 2, 1], + ["ike", -1, 1], + ["ikke", -1, 1], + ["te", -1, 3] + ]; + + /** @const */ var a_8 = [ + ["va", -1, -1], + ["du", -1, -1], + ["nu", -1, -1], + ["tu", -1, -1] + ]; + + /** @const */ var a_9 = [ + ["kk", -1, 1], + ["pp", -1, 2], + ["tt", -1, 3] + ]; + + /** @const */ var a_10 = [ + ["ma", -1, 2], + ["mai", -1, 1], + ["m", -1, 1] + ]; + + /** @const */ var a_11 = [ + ["joob", -1, 1], + ["jood", -1, 1], + ["joodakse", 1, 1], + ["jooma", -1, 1], + ["joomata", 3, 1], + ["joome", -1, 1], + ["joon", -1, 1], + ["joote", -1, 1], + ["joovad", -1, 1], + ["juua", -1, 1], + ["juuakse", 9, 1], + ["j\u00E4i", -1, 12], + ["j\u00E4id", 11, 12], + ["j\u00E4ime", 11, 12], + ["j\u00E4in", 11, 12], + ["j\u00E4ite", 11, 12], + ["j\u00E4\u00E4b", -1, 12], + ["j\u00E4\u00E4d", -1, 12], + ["j\u00E4\u00E4da", 17, 
12], + ["j\u00E4\u00E4dakse", 18, 12], + ["j\u00E4\u00E4di", 17, 12], + ["j\u00E4\u00E4ks", -1, 12], + ["j\u00E4\u00E4ksid", 21, 12], + ["j\u00E4\u00E4ksime", 21, 12], + ["j\u00E4\u00E4ksin", 21, 12], + ["j\u00E4\u00E4ksite", 21, 12], + ["j\u00E4\u00E4ma", -1, 12], + ["j\u00E4\u00E4mata", 26, 12], + ["j\u00E4\u00E4me", -1, 12], + ["j\u00E4\u00E4n", -1, 12], + ["j\u00E4\u00E4te", -1, 12], + ["j\u00E4\u00E4vad", -1, 12], + ["j\u00F5i", -1, 1], + ["j\u00F5id", 32, 1], + ["j\u00F5ime", 32, 1], + ["j\u00F5in", 32, 1], + ["j\u00F5ite", 32, 1], + ["keeb", -1, 4], + ["keed", -1, 4], + ["keedakse", 38, 4], + ["keeks", -1, 4], + ["keeksid", 40, 4], + ["keeksime", 40, 4], + ["keeksin", 40, 4], + ["keeksite", 40, 4], + ["keema", -1, 4], + ["keemata", 45, 4], + ["keeme", -1, 4], + ["keen", -1, 4], + ["kees", -1, 4], + ["keeta", -1, 4], + ["keete", -1, 4], + ["keevad", -1, 4], + ["k\u00E4ia", -1, 8], + ["k\u00E4iakse", 53, 8], + ["k\u00E4ib", -1, 8], + ["k\u00E4id", -1, 8], + ["k\u00E4idi", 56, 8], + ["k\u00E4iks", -1, 8], + ["k\u00E4iksid", 58, 8], + ["k\u00E4iksime", 58, 8], + ["k\u00E4iksin", 58, 8], + ["k\u00E4iksite", 58, 8], + ["k\u00E4ima", -1, 8], + ["k\u00E4imata", 63, 8], + ["k\u00E4ime", -1, 8], + ["k\u00E4in", -1, 8], + ["k\u00E4is", -1, 8], + ["k\u00E4ite", -1, 8], + ["k\u00E4ivad", -1, 8], + ["laob", -1, 16], + ["laod", -1, 16], + ["laoks", -1, 16], + ["laoksid", 72, 16], + ["laoksime", 72, 16], + ["laoksin", 72, 16], + ["laoksite", 72, 16], + ["laome", -1, 16], + ["laon", -1, 16], + ["laote", -1, 16], + ["laovad", -1, 16], + ["loeb", -1, 14], + ["loed", -1, 14], + ["loeks", -1, 14], + ["loeksid", 83, 14], + ["loeksime", 83, 14], + ["loeksin", 83, 14], + ["loeksite", 83, 14], + ["loeme", -1, 14], + ["loen", -1, 14], + ["loete", -1, 14], + ["loevad", -1, 14], + ["loob", -1, 7], + ["lood", -1, 7], + ["loodi", 93, 7], + ["looks", -1, 7], + ["looksid", 95, 7], + ["looksime", 95, 7], + ["looksin", 95, 7], + ["looksite", 95, 7], + ["looma", -1, 7], + ["loomata", 100, 7], 
+ ["loome", -1, 7], + ["loon", -1, 7], + ["loote", -1, 7], + ["loovad", -1, 7], + ["luua", -1, 7], + ["luuakse", 106, 7], + ["l\u00F5i", -1, 6], + ["l\u00F5id", 108, 6], + ["l\u00F5ime", 108, 6], + ["l\u00F5in", 108, 6], + ["l\u00F5ite", 108, 6], + ["l\u00F6\u00F6b", -1, 5], + ["l\u00F6\u00F6d", -1, 5], + ["l\u00F6\u00F6dakse", 114, 5], + ["l\u00F6\u00F6di", 114, 5], + ["l\u00F6\u00F6ks", -1, 5], + ["l\u00F6\u00F6ksid", 117, 5], + ["l\u00F6\u00F6ksime", 117, 5], + ["l\u00F6\u00F6ksin", 117, 5], + ["l\u00F6\u00F6ksite", 117, 5], + ["l\u00F6\u00F6ma", -1, 5], + ["l\u00F6\u00F6mata", 122, 5], + ["l\u00F6\u00F6me", -1, 5], + ["l\u00F6\u00F6n", -1, 5], + ["l\u00F6\u00F6te", -1, 5], + ["l\u00F6\u00F6vad", -1, 5], + ["l\u00FC\u00FCa", -1, 5], + ["l\u00FC\u00FCakse", 128, 5], + ["m\u00FC\u00FCa", -1, 13], + ["m\u00FC\u00FCakse", 130, 13], + ["m\u00FC\u00FCb", -1, 13], + ["m\u00FC\u00FCd", -1, 13], + ["m\u00FC\u00FCdi", 133, 13], + ["m\u00FC\u00FCks", -1, 13], + ["m\u00FC\u00FCksid", 135, 13], + ["m\u00FC\u00FCksime", 135, 13], + ["m\u00FC\u00FCksin", 135, 13], + ["m\u00FC\u00FCksite", 135, 13], + ["m\u00FC\u00FCma", -1, 13], + ["m\u00FC\u00FCmata", 140, 13], + ["m\u00FC\u00FCme", -1, 13], + ["m\u00FC\u00FCn", -1, 13], + ["m\u00FC\u00FCs", -1, 13], + ["m\u00FC\u00FCte", -1, 13], + ["m\u00FC\u00FCvad", -1, 13], + ["n\u00E4eb", -1, 18], + ["n\u00E4ed", -1, 18], + ["n\u00E4eks", -1, 18], + ["n\u00E4eksid", 149, 18], + ["n\u00E4eksime", 149, 18], + ["n\u00E4eksin", 149, 18], + ["n\u00E4eksite", 149, 18], + ["n\u00E4eme", -1, 18], + ["n\u00E4en", -1, 18], + ["n\u00E4ete", -1, 18], + ["n\u00E4evad", -1, 18], + ["n\u00E4gema", -1, 18], + ["n\u00E4gemata", 158, 18], + ["n\u00E4ha", -1, 18], + ["n\u00E4hakse", 160, 18], + ["n\u00E4hti", -1, 18], + ["p\u00F5eb", -1, 15], + ["p\u00F5ed", -1, 15], + ["p\u00F5eks", -1, 15], + ["p\u00F5eksid", 165, 15], + ["p\u00F5eksime", 165, 15], + ["p\u00F5eksin", 165, 15], + ["p\u00F5eksite", 165, 15], + ["p\u00F5eme", -1, 15], + ["p\u00F5en", -1, 
15], + ["p\u00F5ete", -1, 15], + ["p\u00F5evad", -1, 15], + ["saab", -1, 2], + ["saad", -1, 2], + ["saada", 175, 2], + ["saadakse", 176, 2], + ["saadi", 175, 2], + ["saaks", -1, 2], + ["saaksid", 179, 2], + ["saaksime", 179, 2], + ["saaksin", 179, 2], + ["saaksite", 179, 2], + ["saama", -1, 2], + ["saamata", 184, 2], + ["saame", -1, 2], + ["saan", -1, 2], + ["saate", -1, 2], + ["saavad", -1, 2], + ["sai", -1, 2], + ["said", 190, 2], + ["saime", 190, 2], + ["sain", 190, 2], + ["saite", 190, 2], + ["s\u00F5i", -1, 9], + ["s\u00F5id", 195, 9], + ["s\u00F5ime", 195, 9], + ["s\u00F5in", 195, 9], + ["s\u00F5ite", 195, 9], + ["s\u00F6\u00F6b", -1, 9], + ["s\u00F6\u00F6d", -1, 9], + ["s\u00F6\u00F6dakse", 201, 9], + ["s\u00F6\u00F6di", 201, 9], + ["s\u00F6\u00F6ks", -1, 9], + ["s\u00F6\u00F6ksid", 204, 9], + ["s\u00F6\u00F6ksime", 204, 9], + ["s\u00F6\u00F6ksin", 204, 9], + ["s\u00F6\u00F6ksite", 204, 9], + ["s\u00F6\u00F6ma", -1, 9], + ["s\u00F6\u00F6mata", 209, 9], + ["s\u00F6\u00F6me", -1, 9], + ["s\u00F6\u00F6n", -1, 9], + ["s\u00F6\u00F6te", -1, 9], + ["s\u00F6\u00F6vad", -1, 9], + ["s\u00FC\u00FCa", -1, 9], + ["s\u00FC\u00FCakse", 215, 9], + ["teeb", -1, 17], + ["teed", -1, 17], + ["teeks", -1, 17], + ["teeksid", 219, 17], + ["teeksime", 219, 17], + ["teeksin", 219, 17], + ["teeksite", 219, 17], + ["teeme", -1, 17], + ["teen", -1, 17], + ["teete", -1, 17], + ["teevad", -1, 17], + ["tegema", -1, 17], + ["tegemata", 228, 17], + ["teha", -1, 17], + ["tehakse", 230, 17], + ["tehti", -1, 17], + ["toob", -1, 10], + ["tood", -1, 10], + ["toodi", 234, 10], + ["tooks", -1, 10], + ["tooksid", 236, 10], + ["tooksime", 236, 10], + ["tooksin", 236, 10], + ["tooksite", 236, 10], + ["tooma", -1, 10], + ["toomata", 241, 10], + ["toome", -1, 10], + ["toon", -1, 10], + ["toote", -1, 10], + ["toovad", -1, 10], + ["tuua", -1, 10], + ["tuuakse", 247, 10], + ["t\u00F5i", -1, 10], + ["t\u00F5id", 249, 10], + ["t\u00F5ime", 249, 10], + ["t\u00F5in", 249, 10], + ["t\u00F5ite", 249, 10], + 
["viia", -1, 3], + ["viiakse", 254, 3], + ["viib", -1, 3], + ["viid", -1, 3], + ["viidi", 257, 3], + ["viiks", -1, 3], + ["viiksid", 259, 3], + ["viiksime", 259, 3], + ["viiksin", 259, 3], + ["viiksite", 259, 3], + ["viima", -1, 3], + ["viimata", 264, 3], + ["viime", -1, 3], + ["viin", -1, 3], + ["viisime", -1, 3], + ["viisin", -1, 3], + ["viisite", -1, 3], + ["viite", -1, 3], + ["viivad", -1, 3], + ["v\u00F5ib", -1, 11], + ["v\u00F5id", -1, 11], + ["v\u00F5ida", 274, 11], + ["v\u00F5idakse", 275, 11], + ["v\u00F5idi", 274, 11], + ["v\u00F5iks", -1, 11], + ["v\u00F5iksid", 278, 11], + ["v\u00F5iksime", 278, 11], + ["v\u00F5iksin", 278, 11], + ["v\u00F5iksite", 278, 11], + ["v\u00F5ima", -1, 11], + ["v\u00F5imata", 283, 11], + ["v\u00F5ime", -1, 11], + ["v\u00F5in", -1, 11], + ["v\u00F5is", -1, 11], + ["v\u00F5ite", -1, 11], + ["v\u00F5ivad", -1, 11] + ]; + + /** @const */ var /** Array */ g_V1 = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8]; + + /** @const */ var /** Array */ g_RV = [17, 65, 16]; + + /** @const */ var /** Array */ g_KI = [117, 66, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16]; + + /** @const */ var /** Array */ g_GI = [21, 123, 243, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8]; + + var /** number */ I_p1 = 0; + + + /** @return {boolean} */ + function r_mark_regions() { + I_p1 = base.limit; + if (!base.go_out_grouping(g_V1, 97, 252)) + { + return false; + } + base.cursor++; + if (!base.go_in_grouping(g_V1, 97, 252)) + { + return false; + } + base.cursor++; + I_p1 = base.cursor; + return true; + }; + + /** @return {boolean} */ + function r_emphasis() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_0); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + 
base.bra = base.cursor; + base.limit_backward = v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor - 4; + if (c1 < base.limit_backward) + { + return false; + } + base.cursor = c1; + } + base.cursor = base.limit - v_2; + switch (among_var) { + case 1: + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + if (!(base.in_grouping_b(g_GI, 97, 252))) + { + return false; + } + base.cursor = base.limit - v_3; + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab0: { + if (!r_LONGV()) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!(base.in_grouping_b(g_KI, 98, 382))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_verb() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("a")) + { + return false; + } + break; + case 3: + if (!(base.in_grouping_b(g_V1, 97, 252))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_LONGV() { + if (base.find_among_b(a_2) == 0) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_i_plural() { + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + if 
(base.find_among_b(a_3) == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + if (!(base.in_grouping_b(g_RV, 97, 117))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_special_noun_endings() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + switch (among_var) { + case 1: + if (!base.slice_from("lase")) + { + return false; + } + break; + case 2: + if (!base.slice_from("mise")) + { + return false; + } + break; + case 3: + if (!base.slice_from("lise")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_case_ending() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_5); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + switch (among_var) { + case 1: + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.in_grouping_b(g_RV, 97, 117))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_2; + if (!r_LONGV()) + { + return false; + } + } + break; + case 2: + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor - 4; + if (c1 < base.limit_backward) + { + return false; + } + base.cursor = c1; + } + base.cursor = base.limit - v_3; + break; + } + if (!base.slice_del()) + { + return 
false; + } + return true; + }; + + /** @return {boolean} */ + function r_plural_three_first_cases() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_7); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + switch (among_var) { + case 1: + if (!base.slice_from("iku")) + { + return false; + } + break; + case 2: + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + if (!r_LONGV()) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_2; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + lab1: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor - 4; + if (c1 < base.limit_backward) + { + break lab2; + } + base.cursor = c1; + } + base.cursor = base.limit - v_4; + among_var = base.find_among_b(a_6); + switch (among_var) { + case 1: + if (!base.slice_from("e")) + { + return false; + } + break; + case 2: + if (!base.slice_del()) + { + return false; + } + break; + } + break lab1; + } + base.cursor = base.limit - v_3; + if (!base.slice_from("t")) + { + return false; + } + } + break; + case 4: + lab3: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab4: { + if (!(base.in_grouping_b(g_RV, 97, 117))) + { + break lab4; + } + break lab3; + } + base.cursor = base.limit - v_5; + if (!r_LONGV()) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_nu() { + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; 
+ base.ket = base.cursor; + if (base.find_among_b(a_8) == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_undouble_kpt() { + var /** number */ among_var; + if (!(base.in_grouping_b(g_V1, 97, 252))) + { + return false; + } + if (I_p1 > base.cursor) + { + return false; + } + base.ket = base.cursor; + among_var = base.find_among_b(a_9); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("k")) + { + return false; + } + break; + case 2: + if (!base.slice_from("p")) + { + return false; + } + break; + case 3: + if (!base.slice_from("t")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_degrees() { + var /** number */ among_var; + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + among_var = base.find_among_b(a_10); + if (among_var == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + switch (among_var) { + case 1: + if (!(base.in_grouping_b(g_RV, 97, 117))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_substantive() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + r_special_noun_endings(); + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_case_ending(); + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_plural_three_first_cases(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = 
base.limit - base.cursor; + r_degrees(); + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + r_i_plural(); + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + r_nu(); + base.cursor = base.limit - v_6; + return true; + }; + + /** @return {boolean} */ + function r_verb_exceptions() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_11); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + if (base.cursor < base.limit) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("joo")) + { + return false; + } + break; + case 2: + if (!base.slice_from("saa")) + { + return false; + } + break; + case 3: + if (!base.slice_from("viima")) + { + return false; + } + break; + case 4: + if (!base.slice_from("keesi")) + { + return false; + } + break; + case 5: + if (!base.slice_from("l\u00F6\u00F6")) + { + return false; + } + break; + case 6: + if (!base.slice_from("l\u00F5i")) + { + return false; + } + break; + case 7: + if (!base.slice_from("loo")) + { + return false; + } + break; + case 8: + if (!base.slice_from("k\u00E4isi")) + { + return false; + } + break; + case 9: + if (!base.slice_from("s\u00F6\u00F6")) + { + return false; + } + break; + case 10: + if (!base.slice_from("too")) + { + return false; + } + break; + case 11: + if (!base.slice_from("v\u00F5isi")) + { + return false; + } + break; + case 12: + if (!base.slice_from("j\u00E4\u00E4ma")) + { + return false; + } + break; + case 13: + if (!base.slice_from("m\u00FC\u00FCsi")) + { + return false; + } + break; + case 14: + if (!base.slice_from("luge")) + { + return false; + } + break; + case 15: + if (!base.slice_from("p\u00F5de")) + { + return false; + } + break; + case 16: + if (!base.slice_from("ladu")) + { + return false; + } + break; + case 17: + if (!base.slice_from("tegi")) + { + return false; + } + break; + case 18: + if 
(!base.slice_from("n\u00E4gi")) + { + return false; + } + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!r_verb_exceptions()) + { + break lab0; + } + return false; + } + base.cursor = v_1; + } + /** @const */ var /** number */ v_2 = base.cursor; + r_mark_regions(); + base.cursor = v_2; + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_emphasis(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab1: { + lab2: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab3: { + if (!r_verb()) + { + break lab3; + } + break lab2; + } + base.cursor = base.limit - v_5; + r_substantive(); + } + } + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + r_undouble_kpt(); + base.cursor = base.limit - v_6; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/finnish-stemmer.js b/sphinx/search/non-minified-js/finnish-stemmer.js index c907c4a70b4..07fd78516f6 100644 --- a/sphinx/search/non-minified-js/finnish-stemmer.js +++ b/sphinx/search/non-minified-js/finnish-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from finnish.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -FinnishStemmer = function() { +var FinnishStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["pa", -1, 1], ["sti", -1, 2], @@ -141,84 +142,34 @@ FinnishStemmer = function() { function r_mark_regions() { I_p1 = base.limit; I_p2 = base.limit; - golab0: while(true) + if (!base.go_out_grouping(g_V1, 97, 246)) { - var 
/** number */ v_1 = base.cursor; - lab1: { - if (!(base.in_grouping(g_V1, 97, 246))) - { - break lab1; - } - base.cursor = v_1; - break golab0; - } - base.cursor = v_1; - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } - golab2: while(true) + base.cursor++; + if (!base.go_in_grouping(g_V1, 97, 246)) { - lab3: { - if (!(base.out_grouping(g_V1, 97, 246))) - { - break lab3; - } - break golab2; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p1 = base.cursor; - golab4: while(true) + if (!base.go_out_grouping(g_V1, 97, 246)) { - var /** number */ v_3 = base.cursor; - lab5: { - if (!(base.in_grouping(g_V1, 97, 246))) - { - break lab5; - } - base.cursor = v_3; - break golab4; - } - base.cursor = v_3; - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } - golab6: while(true) + base.cursor++; + if (!base.go_in_grouping(g_V1, 97, 246)) { - lab7: { - if (!(base.out_grouping(g_V1, 97, 246))) - { - break lab7; - } - break golab6; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p2 = base.cursor; return true; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -228,17 +179,17 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; among_var = base.find_among_b(a_0); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: if (!(base.in_grouping_b(g_particle_end, 97, 246))) @@ -267,21 +218,21 @@ FinnishStemmer = function() { { return false; } - var 
/** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; among_var = base.find_among_b(a_4); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("k"))) { @@ -289,7 +240,7 @@ FinnishStemmer = function() { } return false; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; } if (!base.slice_del()) { @@ -381,17 +332,17 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; among_var = base.find_among_b(a_6); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: if (!(base.eq_s_b("a"))) @@ -430,11 +381,11 @@ FinnishStemmer = function() { } break; case 7: - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab1: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab2: { if (!r_LONG()) { @@ -442,17 +393,17 @@ FinnishStemmer = function() { } break lab1; } - base.cursor = base.limit - v_5; + base.cursor = base.limit - v_4; if (!(base.eq_s_b("ie"))) { - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; break lab0; } } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; if 
(base.cursor <= base.limit_backward) { - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; break lab0; } base.cursor--; @@ -485,21 +436,21 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p2; base.ket = base.cursor; among_var = base.find_among_b(a_7); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("po"))) { @@ -507,7 +458,7 @@ FinnishStemmer = function() { } return false; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; } break; } @@ -524,16 +475,16 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; if (base.find_among_b(a_8) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; if (!base.slice_del()) { return false; @@ -548,46 +499,46 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; if (!(base.eq_s_b("t"))) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.in_grouping_b(g_V1, 97, 246))) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } - base.cursor = base.limit - v_3; + 
base.cursor = base.limit - v_2; if (!base.slice_del()) { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; if (base.cursor < I_p2) { return false; } - var /** number */ v_5 = base.limit_backward; + /** @const */ var /** number */ v_3 = base.limit_backward; base.limit_backward = I_p2; base.ket = base.cursor; among_var = base.find_among_b(a_9); if (among_var == 0) { - base.limit_backward = v_5; + base.limit_backward = v_3; return false; } base.bra = base.cursor; - base.limit_backward = v_5; + base.limit_backward = v_3; switch (among_var) { case 1: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("po"))) { @@ -595,7 +546,7 @@ FinnishStemmer = function() { } return false; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_4; } break; } @@ -612,16 +563,16 @@ FinnishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; if (!r_LONG()) { break lab0; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; base.ket = base.cursor; if (base.cursor <= base.limit_backward) { @@ -634,8 +585,8 @@ FinnishStemmer = function() { return false; } } - base.cursor = base.limit - v_3; - var /** number */ v_5 = base.limit - base.cursor; + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab1: { base.ket = base.cursor; if (!(base.in_grouping_b(g_AEI, 97, 228))) @@ -652,8 +603,8 @@ FinnishStemmer = function() { return false; } } - base.cursor = base.limit - v_5; - var /** number */ v_6 = base.limit - base.cursor; + base.cursor = 
base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab2: { base.ket = base.cursor; if (!(base.eq_s_b("j"))) @@ -662,7 +613,7 @@ FinnishStemmer = function() { } base.bra = base.cursor; lab3: { - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab4: { if (!(base.eq_s_b("o"))) { @@ -670,7 +621,7 @@ FinnishStemmer = function() { } break lab3; } - base.cursor = base.limit - v_7; + base.cursor = base.limit - v_6; if (!(base.eq_s_b("u"))) { break lab2; @@ -681,8 +632,8 @@ FinnishStemmer = function() { return false; } } - base.cursor = base.limit - v_6; - var /** number */ v_8 = base.limit - base.cursor; + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab5: { base.ket = base.cursor; if (!(base.eq_s_b("o"))) @@ -699,25 +650,11 @@ FinnishStemmer = function() { return false; } } - base.cursor = base.limit - v_8; - base.limit_backward = v_2; - golab6: while(true) + base.cursor = base.limit - v_7; + base.limit_backward = v_1; + if (!base.go_in_grouping_b(g_V1, 97, 246)) { - var /** number */ v_9 = base.limit - base.cursor; - lab7: { - if (!(base.out_grouping_b(g_V1, 97, 246))) - { - break lab7; - } - base.cursor = base.limit - v_9; - break golab6; - } - base.cursor = base.limit - v_9; - if (base.cursor <= base.limit_backward) - { - return false; - } - base.cursor--; + return false; } base.ket = base.cursor; if (!(base.in_grouping_b(g_C, 98, 122))) @@ -742,21 +679,21 @@ FinnishStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_mark_regions(); base.cursor = v_1; B_ending_removed = false; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_particle_etc(); base.cursor = base.limit - v_2; 
- var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_possessive(); base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_case_ending(); base.cursor = base.limit - v_4; - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; r_other_endings(); base.cursor = base.limit - v_5; lab0: { @@ -765,18 +702,18 @@ FinnishStemmer = function() { { break lab1; } - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; r_i_plural(); - base.cursor = base.limit - v_7; + base.cursor = base.limit - v_6; break lab0; } - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; r_t_plural(); - base.cursor = base.limit - v_8; + base.cursor = base.limit - v_7; } - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; r_tidy(); - base.cursor = base.limit - v_9; + base.cursor = base.limit - v_8; base.cursor = base.limit_backward; return true; }; diff --git a/sphinx/search/non-minified-js/french-stemmer.js b/sphinx/search/non-minified-js/french-stemmer.js index c9708312a8f..0e7b0655494 100644 --- a/sphinx/search/non-minified-js/french-stemmer.js +++ b/sphinx/search/non-minified-js/french-stemmer.js @@ -1,10 +1,12 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from french.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -FrenchStemmer = function() { +var FrenchStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["col", -1, -1], + ["ni", -1, 1], ["par", -1, -1], ["tap", -1, -1] ]; @@ -42,7 +44,7 @@ FrenchStemmer = function() { ["logie", -1, 3], ["able", -1, 1], ["isme", -1, 1], - ["euse", -1, 11], + ["euse", -1, 
12], ["iste", -1, 1], ["ive", -1, 8], ["if", -1, 8], @@ -57,7 +59,7 @@ FrenchStemmer = function() { ["logies", -1, 3], ["ables", -1, 1], ["ismes", -1, 1], - ["euses", -1, 11], + ["euses", -1, 12], ["istes", -1, 1], ["ives", -1, 8], ["ifs", -1, 8], @@ -65,18 +67,19 @@ FrenchStemmer = function() { ["ations", -1, 2], ["utions", -1, 4], ["ateurs", -1, 2], - ["ments", -1, 15], + ["ments", -1, 16], ["ements", 30, 6], - ["issements", 31, 12], + ["issements", 31, 13], ["it\u00E9s", -1, 7], - ["ment", -1, 15], + ["ment", -1, 16], ["ement", 34, 6], - ["issement", 35, 12], - ["amment", 34, 13], - ["emment", 34, 14], + ["issement", 35, 13], + ["amment", 34, 14], + ["emment", 34, 15], ["aux", -1, 10], ["eaux", 39, 9], ["eux", -1, 1], + ["oux", -1, 11], ["it\u00E9", -1, 7] ]; @@ -119,47 +122,56 @@ FrenchStemmer = function() { ]; /** @const */ var a_6 = [ + ["al", -1, 1], + ["\u00E9pl", -1, -1], + ["auv", -1, -1] + ]; + + /** @const */ var a_7 = [ ["a", -1, 3], ["era", 0, 2], + ["aise", -1, 4], ["asse", -1, 3], ["ante", -1, 3], ["\u00E9e", -1, 2], ["ai", -1, 3], - ["erai", 5, 2], + ["erai", 6, 2], ["er", -1, 2], ["as", -1, 3], - ["eras", 8, 2], + ["eras", 9, 2], ["\u00E2mes", -1, 3], + ["aises", -1, 4], ["asses", -1, 3], ["antes", -1, 3], ["\u00E2tes", -1, 3], ["\u00E9es", -1, 2], - ["ais", -1, 3], - ["erais", 15, 2], + ["ais", -1, 4], + ["eais", 17, 2], + ["erais", 17, 2], ["ions", -1, 1], - ["erions", 17, 2], - ["assions", 17, 3], + ["erions", 20, 2], + ["assions", 20, 3], ["erons", -1, 2], ["ants", -1, 3], ["\u00E9s", -1, 2], ["ait", -1, 3], - ["erait", 23, 2], + ["erait", 26, 2], ["ant", -1, 3], ["aIent", -1, 3], - ["eraIent", 26, 2], + ["eraIent", 29, 2], ["\u00E8rent", -1, 2], ["assent", -1, 3], ["eront", -1, 2], ["\u00E2t", -1, 3], ["ez", -1, 2], - ["iez", 32, 2], - ["eriez", 33, 2], - ["assiez", 33, 3], - ["erez", 32, 2], + ["iez", 35, 2], + ["eriez", 36, 2], + ["assiez", 36, 3], + ["erez", 35, 2], ["\u00E9", -1, 2] ]; - /** @const */ var a_7 = [ + /** @const */ var a_8 = 
[ ["e", -1, 3], ["I\u00E8re", 0, 2], ["i\u00E8re", 0, 2], @@ -168,7 +180,7 @@ FrenchStemmer = function() { ["ier", -1, 2] ]; - /** @const */ var a_8 = [ + /** @const */ var a_9 = [ ["ell", -1, -1], ["eill", -1, -1], ["enn", -1, -1], @@ -178,6 +190,10 @@ FrenchStemmer = function() { /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 130, 103, 8, 5]; + /** @const */ var /** Array */ g_oux_ending = [65, 85]; + + /** @const */ var /** Array */ g_elision_char = [131, 14, 3]; + /** @const */ var /** Array */ g_keep_with_s = [1, 65, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; var /** number */ I_p2 = 0; @@ -185,18 +201,55 @@ FrenchStemmer = function() { var /** number */ I_pV = 0; + /** @return {boolean} */ + function r_elisions() { + base.bra = base.cursor; + lab0: { + /** @const */ var /** number */ v_1 = base.cursor; + lab1: { + if (!(base.in_grouping(g_elision_char, 99, 116))) + { + break lab1; + } + break lab0; + } + base.cursor = v_1; + if (!(base.eq_s("qu"))) + { + return false; + } + } + if (!(base.eq_s("'"))) + { + return false; + } + base.ket = base.cursor; + lab2: { + if (base.cursor < base.limit) + { + break lab2; + } + return false; + } + if (!base.slice_del()) + { + return false; + } + return true; + }; + /** @return {boolean} */ function r_prelude() { while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { golab1: while(true) { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.in_grouping(g_v, 97, 251))) { @@ -204,7 +257,7 @@ FrenchStemmer = function() { } base.bra = base.cursor; lab5: { - var /** number */ v_4 = base.cursor; + /** @const */ var /** number */ v_4 = base.cursor; lab6: { if (!(base.eq_s("u"))) { @@ -333,13 +386,14 @@ FrenchStemmer = function() { /** @return {boolean} */ 
function r_mark_regions() { + var /** number */ among_var; I_pV = base.limit; I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { lab1: { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 251))) { @@ -358,10 +412,19 @@ FrenchStemmer = function() { } base.cursor = v_2; lab3: { - if (base.find_among(a_0) == 0) + among_var = base.find_among(a_0); + if (among_var == 0) { break lab3; } + switch (among_var) { + case 1: + if (!(base.in_grouping(g_v, 97, 251))) + { + break lab3; + } + break; + } break lab1; } base.cursor = v_2; @@ -370,91 +433,41 @@ FrenchStemmer = function() { break lab0; } base.cursor++; - golab4: while(true) + if (!base.go_out_grouping(g_v, 97, 251)) { - lab5: { - if (!(base.in_grouping(g_v, 97, 251))) - { - break lab5; - } - break golab4; - } - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; + break lab0; } + base.cursor++; } I_pV = base.cursor; } base.cursor = v_1; - var /** number */ v_4 = base.cursor; - lab6: { - golab7: while(true) + /** @const */ var /** number */ v_3 = base.cursor; + lab4: { + if (!base.go_out_grouping(g_v, 97, 251)) { - lab8: { - if (!(base.in_grouping(g_v, 97, 251))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab6; - } - base.cursor++; + break lab4; } - golab9: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 251)) { - lab10: { - if (!(base.out_grouping(g_v, 97, 251))) - { - break lab10; - } - break golab9; - } - if (base.cursor >= base.limit) - { - break lab6; - } - base.cursor++; + break lab4; } + base.cursor++; I_p1 = base.cursor; - golab11: while(true) + if (!base.go_out_grouping(g_v, 97, 251)) { - lab12: { - if (!(base.in_grouping(g_v, 97, 251))) - { - break lab12; - } - break golab11; - } - if (base.cursor >= base.limit) - { - break lab6; - } - base.cursor++; + break lab4; } - 
golab13: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 251)) { - lab14: { - if (!(base.out_grouping(g_v, 97, 251))) - { - break lab14; - } - break golab13; - } - if (base.cursor >= base.limit) - { - break lab6; - } - base.cursor++; + break lab4; } + base.cursor++; I_p2 = base.cursor; } - base.cursor = v_4; + base.cursor = v_3; return true; }; @@ -463,14 +476,10 @@ FrenchStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_1); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -527,29 +536,17 @@ FrenchStemmer = function() { /** @return {boolean} */ function r_RV() { - if (!(I_pV <= base.cursor)) - { - return false; - } - return true; + return I_pV <= base.cursor; }; /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -582,7 +579,7 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("ic"))) @@ -592,7 +589,7 @@ FrenchStemmer = function() { } base.bra = base.cursor; lab1: { - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab2: { if (!r_R2()) { @@ -651,7 +648,7 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab3: { base.ket = base.cursor; among_var = base.find_among_b(a_2); @@ -691,7 +688,7 @@ FrenchStemmer = function() { break; 
case 2: lab4: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab5: { if (!r_R2()) { @@ -749,7 +746,7 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab6: { base.ket = base.cursor; among_var = base.find_among_b(a_3); @@ -762,7 +759,7 @@ FrenchStemmer = function() { switch (among_var) { case 1: lab7: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab8: { if (!r_R2()) { @@ -783,7 +780,7 @@ FrenchStemmer = function() { break; case 2: lab9: { - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab10: { if (!r_R2()) { @@ -825,7 +822,7 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab11: { base.ket = base.cursor; if (!(base.eq_s_b("at"))) @@ -851,7 +848,7 @@ FrenchStemmer = function() { } base.bra = base.cursor; lab12: { - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; lab13: { if (!r_R2()) { @@ -888,8 +885,18 @@ FrenchStemmer = function() { } break; case 11: + if (!(base.in_grouping_b(g_oux_ending, 98, 112))) + { + return false; + } + if (!base.slice_from("ou")) + { + return false; + } + break; + case 12: lab14: { - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; lab15: { if (!r_R2()) { @@ -912,7 +919,7 @@ FrenchStemmer = function() { } } break; - case 12: + case 13: if (!r_R1()) { return false; @@ -926,7 +933,7 @@ FrenchStemmer = function() { return false; } break; - case 13: + case 14: if (!r_RV()) { return false; @@ -936,7 +943,7 @@ FrenchStemmer = function() { return false; } return 
false; - case 14: + case 15: if (!r_RV()) { return false; @@ -946,8 +953,8 @@ FrenchStemmer = function() { return false; } return false; - case 15: - var /** number */ v_11 = base.limit - base.cursor; + case 16: + /** @const */ var /** number */ v_11 = base.limit - base.cursor; if (!(base.in_grouping_b(g_v, 97, 251))) { return false; @@ -972,37 +979,37 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; if (base.find_among_b(a_5) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("H"))) { break lab0; } - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; } if (!(base.out_grouping_b(g_v, 97, 251))) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } if (!base.slice_del()) { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; return true; }; @@ -1013,21 +1020,21 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; - among_var = base.find_among_b(a_6); + among_var = base.find_among_b(a_7); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; + base.limit_backward = v_1; switch (among_var) { case 1: if (!r_R2()) { - base.limit_backward = v_2; return false; } if (!base.slice_del()) @@ -1042,34 +1049,64 @@ FrenchStemmer = function() { } break; case 3: - if (!base.slice_del()) - { - return false; - } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ 
var /** number */ v_2 = base.limit - base.cursor; lab0: { - base.ket = base.cursor; if (!(base.eq_s_b("e"))) { - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; break lab0; } - base.bra = base.cursor; - if (!base.slice_del()) + if (!r_RV()) { + base.cursor = base.limit - v_2; + break lab0; + } + base.bra = base.cursor; + } + if (!base.slice_del()) + { + return false; + } + break; + case 4: + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab1: { + among_var = base.find_among_b(a_6); + if (among_var == 0) + { + break lab1; + } + switch (among_var) { + case 1: + if (base.cursor <= base.limit_backward) + { + break lab1; + } + base.cursor--; + if (base.cursor > base.limit_backward) + { + break lab1; + } + break; + } return false; } + base.cursor = base.limit - v_3; + } + if (!base.slice_del()) + { + return false; } break; } - base.limit_backward = v_2; return true; }; /** @return {boolean} */ function r_residual_suffix() { var /** number */ among_var; - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("s"))) @@ -1078,9 +1115,9 @@ FrenchStemmer = function() { break lab0; } base.bra = base.cursor; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { if (!(base.eq_s_b("Hi"))) { @@ -1105,13 +1142,13 @@ FrenchStemmer = function() { { return false; } - var /** number */ v_5 = base.limit_backward; + /** @const */ var /** number */ v_4 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; - among_var = base.find_among_b(a_7); + among_var = base.find_among_b(a_8); if (among_var == 0) { - base.limit_backward = v_5; + base.limit_backward = v_4; return false; } base.bra = base.cursor; @@ -1119,11 +1156,11 @@ 
FrenchStemmer = function() { case 1: if (!r_R2()) { - base.limit_backward = v_5; + base.limit_backward = v_4; return false; } lab3: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab4: { if (!(base.eq_s_b("s"))) { @@ -1131,10 +1168,10 @@ FrenchStemmer = function() { } break lab3; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_5; if (!(base.eq_s_b("t"))) { - base.limit_backward = v_5; + base.limit_backward = v_4; return false; } } @@ -1156,14 +1193,14 @@ FrenchStemmer = function() { } break; } - base.limit_backward = v_5; + base.limit_backward = v_4; return true; }; /** @return {boolean} */ function r_un_double() { - var /** number */ v_1 = base.limit - base.cursor; - if (base.find_among_b(a_8) == 0) + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (base.find_among_b(a_9) == 0) { return false; } @@ -1205,7 +1242,7 @@ FrenchStemmer = function() { } base.ket = base.cursor; lab1: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab2: { if (!(base.eq_s_b("\u00E9"))) { @@ -1213,7 +1250,7 @@ FrenchStemmer = function() { } break lab1; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; if (!(base.eq_s_b("\u00E8"))) { return false; @@ -1228,19 +1265,22 @@ FrenchStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; - r_prelude(); + /** @const */ var /** number */ v_1 = base.cursor; + r_elisions(); base.cursor = v_1; + /** @const */ var /** number */ v_2 = base.cursor; + r_prelude(); + base.cursor = v_2; r_mark_regions(); base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab0: { lab1: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - 
base.cursor; lab2: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab3: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab4: { if (!r_standard_suffix()) { @@ -1263,11 +1303,11 @@ FrenchStemmer = function() { } } base.cursor = base.limit - v_5; - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab6: { base.ket = base.cursor; lab7: { - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab8: { if (!(base.eq_s_b("Y"))) { @@ -1303,14 +1343,14 @@ FrenchStemmer = function() { } } base.cursor = base.limit - v_3; - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; r_un_double(); base.cursor = base.limit - v_9; - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; r_un_accent(); base.cursor = base.limit - v_10; base.cursor = base.limit_backward; - var /** number */ v_11 = base.cursor; + /** @const */ var /** number */ v_11 = base.cursor; r_postlude(); base.cursor = v_11; return true; diff --git a/sphinx/search/non-minified-js/german-stemmer.js b/sphinx/search/non-minified-js/german-stemmer.js index f5ff81bc9d7..007a8668575 100644 --- a/sphinx/search/non-minified-js/german-stemmer.js +++ b/sphinx/search/non-minified-js/german-stemmer.js @@ -1,9 +1,19 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from german.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -GermanStemmer = function() { +var GermanStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ + ["", -1, 5], + ["ae", 0, 2], + ["oe", 0, 3], + ["qu", 0, -1], + ["ue", 0, 4], + ["\u00DF", 0, 1] + ]; + + /** @const */ var a_1 = [ ["", -1, 5], 
["U", 0, 2], ["Y", 0, 1], @@ -12,29 +22,42 @@ GermanStemmer = function() { ["\u00FC", 0, 2] ]; - /** @const */ var a_1 = [ - ["e", -1, 2], + /** @const */ var a_2 = [ + ["e", -1, 3], ["em", -1, 1], - ["en", -1, 2], - ["ern", -1, 1], - ["er", -1, 1], - ["s", -1, 3], - ["es", 5, 2] + ["en", -1, 3], + ["erinnen", 2, 2], + ["erin", -1, 2], + ["ln", -1, 5], + ["ern", -1, 2], + ["er", -1, 2], + ["s", -1, 4], + ["es", 8, 3], + ["lns", 8, 5] ]; - /** @const */ var a_2 = [ + /** @const */ var a_3 = [ + ["tick", -1, -1], + ["plan", -1, -1], + ["geordn", -1, -1], + ["intern", -1, -1], + ["tr", -1, -1] + ]; + + /** @const */ var a_4 = [ ["en", -1, 1], ["er", -1, 1], + ["et", -1, 3], ["st", -1, 2], - ["est", 2, 1] + ["est", 3, 1] ]; - /** @const */ var a_3 = [ + /** @const */ var a_5 = [ ["ig", -1, 1], ["lich", -1, 1] ]; - /** @const */ var a_4 = [ + /** @const */ var a_6 = [ ["end", -1, 1], ["ig", -1, 2], ["ung", -1, 1], @@ -47,6 +70,8 @@ GermanStemmer = function() { /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 32, 8]; + /** @const */ var /** Array */ g_et_ending = [1, 128, 198, 227, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128]; + /** @const */ var /** Array */ g_s_ending = [117, 30, 5]; /** @const */ var /** Array */ g_st_ending = [117, 30, 4]; @@ -58,98 +83,113 @@ GermanStemmer = function() { /** @return {boolean} */ function r_prelude() { - var /** number */ v_1 = base.cursor; + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; while(true) { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab0: { - lab1: { - var /** number */ v_3 = base.cursor; - lab2: { - base.bra = base.cursor; - if (!(base.eq_s("\u00DF"))) - { - break lab2; - } - base.ket = base.cursor; - if (!base.slice_from("ss")) - { - return false; - } - break lab1; - } - base.cursor = v_3; - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; - } - continue; - } - 
base.cursor = v_2; - break; - } - base.cursor = v_1; - while(true) - { - var /** number */ v_4 = base.cursor; - lab3: { - golab4: while(true) + golab1: while(true) { - var /** number */ v_5 = base.cursor; - lab5: { + /** @const */ var /** number */ v_3 = base.cursor; + lab2: { if (!(base.in_grouping(g_v, 97, 252))) { - break lab5; + break lab2; } base.bra = base.cursor; - lab6: { - var /** number */ v_6 = base.cursor; - lab7: { + lab3: { + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { if (!(base.eq_s("u"))) { - break lab7; + break lab4; } base.ket = base.cursor; if (!(base.in_grouping(g_v, 97, 252))) { - break lab7; + break lab4; } if (!base.slice_from("U")) { return false; } - break lab6; + break lab3; } - base.cursor = v_6; + base.cursor = v_4; if (!(base.eq_s("y"))) { - break lab5; + break lab2; } base.ket = base.cursor; if (!(base.in_grouping(g_v, 97, 252))) { - break lab5; + break lab2; } if (!base.slice_from("Y")) { return false; } } - base.cursor = v_5; - break golab4; + base.cursor = v_3; + break golab1; } - base.cursor = v_5; + base.cursor = v_3; if (base.cursor >= base.limit) { - break lab3; + break lab0; } base.cursor++; } continue; } - base.cursor = v_4; + base.cursor = v_2; + break; + } + base.cursor = v_1; + while(true) + { + /** @const */ var /** number */ v_5 = base.cursor; + lab5: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("ss")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u00E4")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u00F6")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u00FC")) + { + return false; + } + break; + case 5: + if (base.cursor >= base.limit) + { + break lab5; + } + base.cursor++; + break; + } + continue; + } + base.cursor = v_5; break; } return true; @@ -159,9 +199,9 @@ GermanStemmer = function() { function r_mark_regions() { I_p1 = 
base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; { - var /** number */ c1 = base.cursor + 3; + /** @const */ var /** number */ c1 = base.cursor + 3; if (c1 > base.limit) { return false; @@ -170,74 +210,34 @@ GermanStemmer = function() { } I_x = base.cursor; base.cursor = v_1; - golab0: while(true) + if (!base.go_out_grouping(g_v, 97, 252)) { - lab1: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab1; - } - break golab0; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } - golab2: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) { - lab3: { - if (!(base.out_grouping(g_v, 97, 252))) - { - break lab3; - } - break golab2; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p1 = base.cursor; - lab4: { - if (!(I_p1 < I_x)) + lab0: { + if (I_p1 >= I_x) { - break lab4; + break lab0; } I_p1 = I_x; } - golab5: while(true) + if (!base.go_out_grouping(g_v, 97, 252)) { - lab6: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } - golab7: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) { - lab8: { - if (!(base.out_grouping(g_v, 97, 252))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p2 = base.cursor; return true; }; @@ -247,14 +247,10 @@ GermanStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; - among_var = base.find_among(a_0); - if (among_var == 0) - { - break lab0; - } + among_var = base.find_among(a_1); base.ket = base.cursor; switch (among_var) { case 1: @@ -299,29 +295,21 @@ 
GermanStemmer = function() { /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ function r_standard_suffix() { var /** number */ among_var; - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; - among_var = base.find_among_b(a_1); + among_var = base.find_among_b(a_2); if (among_var == 0) { break lab0; @@ -333,6 +321,17 @@ GermanStemmer = function() { } switch (among_var) { case 1: + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("syst"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_2; + } if (!base.slice_del()) { return false; @@ -343,19 +342,25 @@ GermanStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; - lab1: { + break; + case 3: + if (!base.slice_del()) + { + return false; + } + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { base.ket = base.cursor; if (!(base.eq_s_b("s"))) { - base.cursor = base.limit - v_2; - break lab1; + base.cursor = base.limit - v_3; + break lab2; } base.bra = base.cursor; if (!(base.eq_s_b("nis"))) { - base.cursor = base.limit - v_2; - break lab1; + base.cursor = base.limit - v_3; + break lab2; } if (!base.slice_del()) { @@ -363,7 +368,7 @@ GermanStemmer = function() { } } break; - case 3: + case 4: if (!(base.in_grouping_b(g_s_ending, 98, 116))) { break lab0; @@ -373,21 +378,27 @@ GermanStemmer = function() { return false; } break; + case 5: + if (!base.slice_from("l")) + { + return false; + } + break; } } base.cursor = base.limit - v_1; - var /** number */ v_3 = base.limit - base.cursor; - lab2: { + /** @const */ var /** number */ v_4 = 
base.limit - base.cursor; + lab3: { base.ket = base.cursor; - among_var = base.find_among_b(a_2); + among_var = base.find_among_b(a_4); if (among_var == 0) { - break lab2; + break lab3; } base.bra = base.cursor; if (!r_R1()) { - break lab2; + break lab3; } switch (among_var) { case 1: @@ -399,13 +410,13 @@ GermanStemmer = function() { case 2: if (!(base.in_grouping_b(g_st_ending, 98, 116))) { - break lab2; + break lab3; } { - var /** number */ c1 = base.cursor - 3; + /** @const */ var /** number */ c1 = base.cursor - 3; if (c1 < base.limit_backward) { - break lab2; + break lab3; } base.cursor = c1; } @@ -414,21 +425,44 @@ GermanStemmer = function() { return false; } break; + case 3: + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + if (!(base.in_grouping_b(g_et_ending, 85, 228))) + { + break lab3; + } + base.cursor = base.limit - v_5; + { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab4: { + if (base.find_among_b(a_3) == 0) + { + break lab4; + } + break lab3; + } + base.cursor = base.limit - v_6; + } + if (!base.slice_del()) + { + return false; + } + break; } } - base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; - lab3: { + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab5: { base.ket = base.cursor; - among_var = base.find_among_b(a_4); + among_var = base.find_among_b(a_6); if (among_var == 0) { - break lab3; + break lab5; } base.bra = base.cursor; if (!r_R2()) { - break lab3; + break lab5; } switch (among_var) { case 1: @@ -436,31 +470,31 @@ GermanStemmer = function() { { return false; } - var /** number */ v_5 = base.limit - base.cursor; - lab4: { + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab6: { base.ket = base.cursor; if (!(base.eq_s_b("ig"))) { - base.cursor = base.limit - v_5; - break lab4; + base.cursor = base.limit - v_8; + break lab6; } base.bra = base.cursor; { - var /** number */ v_6 = base.limit - 
base.cursor; - lab5: { + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + lab7: { if (!(base.eq_s_b("e"))) { - break lab5; + break lab7; } - base.cursor = base.limit - v_5; - break lab4; + base.cursor = base.limit - v_8; + break lab6; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_9; } if (!r_R2()) { - base.cursor = base.limit - v_5; - break lab4; + base.cursor = base.limit - v_8; + break lab6; } if (!base.slice_del()) { @@ -470,15 +504,15 @@ GermanStemmer = function() { break; case 2: { - var /** number */ v_7 = base.limit - base.cursor; - lab6: { + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + lab8: { if (!(base.eq_s_b("e"))) { - break lab6; + break lab8; } - break lab3; + break lab5; } - base.cursor = base.limit - v_7; + base.cursor = base.limit - v_10; } if (!base.slice_del()) { @@ -490,30 +524,30 @@ GermanStemmer = function() { { return false; } - var /** number */ v_8 = base.limit - base.cursor; - lab7: { + /** @const */ var /** number */ v_11 = base.limit - base.cursor; + lab9: { base.ket = base.cursor; - lab8: { - var /** number */ v_9 = base.limit - base.cursor; - lab9: { + lab10: { + /** @const */ var /** number */ v_12 = base.limit - base.cursor; + lab11: { if (!(base.eq_s_b("er"))) { - break lab9; + break lab11; } - break lab8; + break lab10; } - base.cursor = base.limit - v_9; + base.cursor = base.limit - v_12; if (!(base.eq_s_b("en"))) { - base.cursor = base.limit - v_8; - break lab7; + base.cursor = base.limit - v_11; + break lab9; } } base.bra = base.cursor; if (!r_R1()) { - base.cursor = base.limit - v_8; - break lab7; + base.cursor = base.limit - v_11; + break lab9; } if (!base.slice_del()) { @@ -526,19 +560,19 @@ GermanStemmer = function() { { return false; } - var /** number */ v_10 = base.limit - base.cursor; - lab10: { + /** @const */ var /** number */ v_13 = base.limit - base.cursor; + lab12: { base.ket = base.cursor; - if (base.find_among_b(a_3) == 0) + if (base.find_among_b(a_5) == 
0) { - base.cursor = base.limit - v_10; - break lab10; + base.cursor = base.limit - v_13; + break lab12; } base.bra = base.cursor; if (!r_R2()) { - base.cursor = base.limit - v_10; - break lab10; + base.cursor = base.limit - v_13; + break lab12; } if (!base.slice_del()) { @@ -548,23 +582,23 @@ GermanStemmer = function() { break; } } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_7; return true; }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_prelude(); base.cursor = v_1; - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; r_mark_regions(); base.cursor = v_2; base.limit_backward = base.cursor; base.cursor = base.limit; r_standard_suffix(); base.cursor = base.limit_backward; - var /** number */ v_4 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; r_postlude(); - base.cursor = v_4; + base.cursor = v_3; return true; }; diff --git a/sphinx/search/non-minified-js/greek-stemmer.js b/sphinx/search/non-minified-js/greek-stemmer.js new file mode 100644 index 00000000000..06ad1692fb1 --- /dev/null +++ b/sphinx/search/non-minified-js/greek-stemmer.js @@ -0,0 +1,2873 @@ +// Generated from greek.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var GreekStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["", -1, 25], + ["\u0386", 0, 1], + ["\u0388", 0, 5], + ["\u0389", 0, 7], + ["\u038A", 0, 9], + ["\u038C", 0, 15], + ["\u038E", 0, 20], + ["\u038F", 0, 24], + ["\u0390", 0, 7], + ["\u0391", 0, 1], + ["\u0392", 0, 2], + ["\u0393", 0, 3], + ["\u0394", 0, 4], + ["\u0395", 0, 5], + ["\u0396", 0, 6], + ["\u0397", 0, 7], + ["\u0398", 0, 8], + ["\u0399", 0, 9], + ["\u039A", 0, 10], + ["\u039B", 0, 11], + ["\u039C", 0, 12], + ["\u039D", 0, 13], + ["\u039E", 0, 14], + ["\u039F", 0, 15], + ["\u03A0", 0, 16], + ["\u03A1", 0, 17], + ["\u03A3", 0, 18], + 
["\u03A4", 0, 19], + ["\u03A5", 0, 20], + ["\u03A6", 0, 21], + ["\u03A7", 0, 22], + ["\u03A8", 0, 23], + ["\u03A9", 0, 24], + ["\u03AA", 0, 9], + ["\u03AB", 0, 20], + ["\u03AC", 0, 1], + ["\u03AD", 0, 5], + ["\u03AE", 0, 7], + ["\u03AF", 0, 9], + ["\u03B0", 0, 20], + ["\u03C2", 0, 18], + ["\u03CA", 0, 7], + ["\u03CB", 0, 20], + ["\u03CC", 0, 15], + ["\u03CD", 0, 20], + ["\u03CE", 0, 24] + ]; + + /** @const */ var a_1 = [ + ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03B1", -1, 2], + ["\u03C6\u03B1\u03B3\u03B9\u03B1", -1, 1], + ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03B1", -1, 3], + ["\u03C3\u03BF\u03B3\u03B9\u03B1", -1, 4], + ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03B1", -1, 5], + ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03B1", -1, 6], + ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 7], + ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 8], + ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03B1", -1, 11], + ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03B1", -1, 10], + ["\u03C6\u03C9\u03C4\u03B1", -1, 9], + ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B7", -1, 7], + ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 2], + ["\u03C6\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 1], + ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 3], + ["\u03C3\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 4], + ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 5], + ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03C9\u03BD", -1, 6], + ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 7], + ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 8], + ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03C9\u03BD", -1, 11], + ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03C9\u03BD", -1, 10], + ["\u03C6\u03C9\u03C4\u03C9\u03BD", -1, 9], + ["\u03BA\u03C1\u03B5\u03B1\u03C3", -1, 6], + ["\u03C0\u03B5\u03C1\u03B1\u03C3", -1, 7], + ["\u03C4\u03B5\u03C1\u03B1\u03C3", -1, 8], + ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C3", -1, 11], + ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03BF\u03C3", -1, 6], + ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 7], 
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 8], + ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03BF\u03C3", -1, 11], + ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03BF\u03C3", -1, 10], + ["\u03C6\u03C9\u03C4\u03BF\u03C3", -1, 9], + ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C3", -1, 10], + ["\u03C6\u03C9\u03C3", -1, 9], + ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 2], + ["\u03C6\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 1], + ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 3], + ["\u03C3\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 4], + ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 5] + ]; + + /** @const */ var a_2 = [ + ["\u03C0\u03B1", -1, 1], + ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", 0, 1], + ["\u03B5\u03C0\u03B1", 0, 1], + ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", 0, 1], + ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", 0, 1], + ["\u03B5\u03BC\u03C0\u03B1", 0, 1], + ["\u03B2", -1, 2], + ["\u03B4\u03B1\u03BD\u03B5", -1, 1], + ["\u03B2\u03B1\u03B8\u03C5\u03C1\u03B9", -1, 2], + ["\u03B2\u03B1\u03C1\u03BA", -1, 2], + ["\u03BC\u03B1\u03C1\u03BA", -1, 2], + ["\u03BB", -1, 2], + ["\u03BC", -1, 2], + ["\u03BA\u03BF\u03C1\u03BD", -1, 2], + ["\u03B1\u03B8\u03C1\u03BF", -1, 1], + ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 14, 1], + ["\u03C0", -1, 2], + ["\u03B9\u03BC\u03C0", 16, 2], + ["\u03C1", -1, 2], + ["\u03BC\u03B1\u03C1", 18, 2], + ["\u03B1\u03BC\u03C0\u03B1\u03C1", 18, 2], + ["\u03B3\u03BA\u03C1", 18, 2], + ["\u03B2\u03BF\u03BB\u03B2\u03BF\u03C1", 18, 2], + ["\u03B3\u03BB\u03C5\u03BA\u03BF\u03C1", 18, 2], + ["\u03C0\u03B9\u03C0\u03B5\u03C1\u03BF\u03C1", 18, 2], + ["\u03C0\u03C1", 18, 2], + ["\u03BC\u03C0\u03C1", 25, 2], + ["\u03B1\u03C1\u03C1", 18, 2], + ["\u03B3\u03BB\u03C5\u03BA\u03C5\u03C1", 18, 2], + ["\u03C0\u03BF\u03BB\u03C5\u03C1", 18, 2], + ["\u03BB\u03BF\u03C5", -1, 2] + ]; + + /** @const */ var a_3 = [ + ["\u03B9\u03B6\u03B1", -1, 1], + ["\u03B9\u03B6\u03B5", -1, 1], + ["\u03B9\u03B6\u03B1\u03BC\u03B5", -1, 1], + 
["\u03B9\u03B6\u03BF\u03C5\u03BC\u03B5", -1, 1], + ["\u03B9\u03B6\u03B1\u03BD\u03B5", -1, 1], + ["\u03B9\u03B6\u03BF\u03C5\u03BD\u03B5", -1, 1], + ["\u03B9\u03B6\u03B1\u03C4\u03B5", -1, 1], + ["\u03B9\u03B6\u03B5\u03C4\u03B5", -1, 1], + ["\u03B9\u03B6\u03B5\u03B9", -1, 1], + ["\u03B9\u03B6\u03B1\u03BD", -1, 1], + ["\u03B9\u03B6\u03BF\u03C5\u03BD", -1, 1], + ["\u03B9\u03B6\u03B5\u03C3", -1, 1], + ["\u03B9\u03B6\u03B5\u03B9\u03C3", -1, 1], + ["\u03B9\u03B6\u03C9", -1, 1] + ]; + + /** @const */ var a_4 = [ + ["\u03B2\u03B9", -1, 1], + ["\u03BB\u03B9", -1, 1], + ["\u03B1\u03BB", -1, 1], + ["\u03B5\u03BD", -1, 1], + ["\u03C3", -1, 1], + ["\u03C7", -1, 1], + ["\u03C5\u03C8", -1, 1], + ["\u03B6\u03C9", -1, 1] + ]; + + /** @const */ var a_5 = [ + ["\u03C9\u03B8\u03B7\u03BA\u03B1", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B5", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD", -1, 1], + ["\u03C9\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_6 = [ + ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1], + ["\u03B5\u03C0\u03B1", -1, 1], + ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1], + ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1], + ["\u03B5\u03BC\u03C0\u03B1", -1, 1], + ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1], + ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1], + ["\u03B3\u03B5", -1, 2], + ["\u03B3\u03BA\u03B5", -1, 2], + ["\u03BA\u03BB\u03B5", -1, 1], + ["\u03B5\u03BA\u03BB\u03B5", 9, 1], + ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 10, 1], + ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 9, 1], + ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 9, 1], + ["\u03B4\u03B1\u03BD\u03B5", -1, 1], + ["\u03C0\u03B5", -1, 1], + ["\u03B5\u03C0\u03B5", 15, 1], + ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 16, 1], + ["\u03B5\u03C3\u03B5", -1, 1], + ["\u03B3\u03BA", -1, 2], + ["\u03BC", -1, 2], + 
["\u03C0\u03BF\u03C5\u03BA\u03B1\u03BC", 20, 2], + ["\u03BA\u03BF\u03BC", 20, 2], + ["\u03B1\u03BD", -1, 2], + ["\u03BF\u03BB\u03BF", -1, 2], + ["\u03B1\u03B8\u03C1\u03BF", -1, 1], + ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 25, 1], + ["\u03C0", -1, 2], + ["\u03BB\u03B1\u03C1", -1, 2], + ["\u03B4\u03B7\u03BC\u03BF\u03BA\u03C1\u03B1\u03C4", -1, 2], + ["\u03B1\u03C6", -1, 2], + ["\u03B3\u03B9\u03B3\u03B1\u03BD\u03C4\u03BF\u03B1\u03C6", 30, 2] + ]; + + /** @const */ var a_7 = [ + ["\u03B9\u03C3\u03B1", -1, 1], + ["\u03B9\u03C3\u03B1\u03BC\u03B5", -1, 1], + ["\u03B9\u03C3\u03B1\u03BD\u03B5", -1, 1], + ["\u03B9\u03C3\u03B5", -1, 1], + ["\u03B9\u03C3\u03B1\u03C4\u03B5", -1, 1], + ["\u03B9\u03C3\u03B1\u03BD", -1, 1], + ["\u03B9\u03C3\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_8 = [ + ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1], + ["\u03B5\u03C0\u03B1", -1, 1], + ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1], + ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1], + ["\u03B5\u03BC\u03C0\u03B1", -1, 1], + ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1], + ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1], + ["\u03BA\u03BB\u03B5", -1, 1], + ["\u03B5\u03BA\u03BB\u03B5", 7, 1], + ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 8, 1], + ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 7, 1], + ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 7, 1], + ["\u03B4\u03B1\u03BD\u03B5", -1, 1], + ["\u03C0\u03B5", -1, 1], + ["\u03B5\u03C0\u03B5", 13, 1], + ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 14, 1], + ["\u03B5\u03C3\u03B5", -1, 1], + ["\u03B1\u03B8\u03C1\u03BF", -1, 1], + ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 17, 1] + ]; + + /** @const */ var a_9 = [ + ["\u03B9\u03C3\u03BF\u03C5\u03BC\u03B5", -1, 1], + ["\u03B9\u03C3\u03BF\u03C5\u03BD\u03B5", -1, 1], + ["\u03B9\u03C3\u03B5\u03C4\u03B5", -1, 1], + ["\u03B9\u03C3\u03B5\u03B9", -1, 1], + ["\u03B9\u03C3\u03BF\u03C5\u03BD", -1, 1], + ["\u03B9\u03C3\u03B5\u03B9\u03C3", -1, 1], + ["\u03B9\u03C3\u03C9", -1, 1] + ]; + + /** @const */ var 
a_10 = [ + ["\u03B1\u03C4\u03B1", -1, 2], + ["\u03C6\u03B1", -1, 2], + ["\u03B7\u03C6\u03B1", 1, 2], + ["\u03BC\u03B5\u03B3", -1, 2], + ["\u03BB\u03C5\u03B3", -1, 2], + ["\u03B7\u03B4", -1, 2], + ["\u03BA\u03BB\u03B5", -1, 1], + ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 6, 1], + ["\u03C0\u03BB\u03B5", -1, 1], + ["\u03B4\u03B1\u03BD\u03B5", -1, 1], + ["\u03C3\u03B5", -1, 1], + ["\u03B1\u03C3\u03B5", 10, 1], + ["\u03BA\u03B1\u03B8", -1, 2], + ["\u03B5\u03C7\u03B8", -1, 2], + ["\u03BA\u03B1\u03BA", -1, 2], + ["\u03BC\u03B1\u03BA", -1, 2], + ["\u03C3\u03BA", -1, 2], + ["\u03C6\u03B9\u03BB", -1, 2], + ["\u03BA\u03C5\u03BB", -1, 2], + ["\u03BC", -1, 2], + ["\u03B3\u03B5\u03BC", 19, 2], + ["\u03B1\u03C7\u03BD", -1, 2], + ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", -1, 1], + ["\u03C0", -1, 2], + ["\u03B1\u03C0", 23, 2], + ["\u03B5\u03BC\u03C0", 23, 2], + ["\u03B5\u03C5\u03C0", 23, 2], + ["\u03B1\u03C1", -1, 2], + ["\u03B1\u03BF\u03C1", -1, 2], + ["\u03B3\u03C5\u03C1", -1, 2], + ["\u03C7\u03C1", -1, 2], + ["\u03C7\u03C9\u03C1", -1, 2], + ["\u03BA\u03C4", -1, 2], + ["\u03B1\u03BA\u03C4", 32, 2], + ["\u03C7\u03C4", -1, 2], + ["\u03B1\u03C7\u03C4", 34, 2], + ["\u03C4\u03B1\u03C7", -1, 2], + ["\u03C3\u03C7", -1, 2], + ["\u03B1\u03C3\u03C7", 37, 2], + ["\u03C5\u03C8", -1, 2] + ]; + + /** @const */ var a_11 = [ + ["\u03B9\u03C3\u03C4\u03B1", -1, 1], + ["\u03B9\u03C3\u03C4\u03B5", -1, 1], + ["\u03B9\u03C3\u03C4\u03B7", -1, 1], + ["\u03B9\u03C3\u03C4\u03BF\u03B9", -1, 1], + ["\u03B9\u03C3\u03C4\u03C9\u03BD", -1, 1], + ["\u03B9\u03C3\u03C4\u03BF", -1, 1], + ["\u03B9\u03C3\u03C4\u03B5\u03C3", -1, 1], + ["\u03B9\u03C3\u03C4\u03B7\u03C3", -1, 1], + ["\u03B9\u03C3\u03C4\u03BF\u03C3", -1, 1], + ["\u03B9\u03C3\u03C4\u03BF\u03C5\u03C3", -1, 1], + ["\u03B9\u03C3\u03C4\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_12 = [ + ["\u03B5\u03B3\u03BA\u03BB\u03B5", -1, 1], + ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", -1, 1], + ["\u03B4\u03B1\u03BD\u03B5", -1, 2], + 
["\u03B1\u03BD\u03C4\u03B9\u03B4\u03B1\u03BD\u03B5", 2, 2], + ["\u03C3\u03B5", -1, 1], + ["\u03BC\u03B5\u03C4\u03B1\u03C3\u03B5", 4, 1], + ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03C3\u03B5", 4, 1] + ]; + + /** @const */ var a_13 = [ + ["\u03B1\u03C4\u03BF\u03BC\u03B9\u03BA", -1, 2], + ["\u03B5\u03B8\u03BD\u03B9\u03BA", -1, 4], + ["\u03C4\u03BF\u03C0\u03B9\u03BA", -1, 7], + ["\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4\u03B9\u03BA", -1, 5], + ["\u03C3\u03BA\u03B5\u03C0\u03C4\u03B9\u03BA", -1, 6], + ["\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", -1, 3], + ["\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", 5, 1], + ["\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1\u03B9\u03BD", -1, 8], + ["\u03B8\u03B5\u03B1\u03C4\u03C1\u03B9\u03BD", -1, 10], + ["\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4\u03B9\u03BD", -1, 9] + ]; + + /** @const */ var a_14 = [ + ["\u03B9\u03C3\u03BC\u03BF\u03B9", -1, 1], + ["\u03B9\u03C3\u03BC\u03C9\u03BD", -1, 1], + ["\u03B9\u03C3\u03BC\u03BF", -1, 1], + ["\u03B9\u03C3\u03BC\u03BF\u03C3", -1, 1], + ["\u03B9\u03C3\u03BC\u03BF\u03C5\u03C3", -1, 1], + ["\u03B9\u03C3\u03BC\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_15 = [ + ["\u03C3", -1, 1], + ["\u03C7", -1, 1] + ]; + + /** @const */ var a_16 = [ + ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9\u03B1", -1, 1], + ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", -1, 1], + ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9", -1, 1], + ["\u03B1\u03C1\u03B1\u03BA\u03B9", -1, 1] + ]; + + /** @const */ var a_17 = [ + ["\u03B2", -1, 2], + ["\u03B2\u03B1\u03BC\u03B2", 0, 1], + ["\u03C3\u03BB\u03BF\u03B2", 0, 1], + ["\u03C4\u03C3\u03B5\u03C7\u03BF\u03C3\u03BB\u03BF\u03B2", 2, 1], + ["\u03BA\u03B1\u03C1\u03B4", -1, 2], + ["\u03B6", -1, 2], + ["\u03C4\u03B6", 5, 1], + ["\u03BA", -1, 1], + ["\u03BA\u03B1\u03C0\u03B1\u03BA", 7, 1], + ["\u03C3\u03BF\u03BA", 7, 1], + ["\u03C3\u03BA", 7, 1], + ["\u03B2\u03B1\u03BB", -1, 2], + ["\u03BC\u03B1\u03BB", -1, 1], + ["\u03B3\u03BB", -1, 2], + ["\u03C4\u03C1\u03B9\u03C0\u03BF\u03BB", -1, 2], + ["\u03C0\u03BB", 
-1, 1], + ["\u03BB\u03BF\u03C5\u03BB", -1, 1], + ["\u03C6\u03C5\u03BB", -1, 1], + ["\u03BA\u03B1\u03B9\u03BC", -1, 1], + ["\u03BA\u03BB\u03B9\u03BC", -1, 1], + ["\u03C6\u03B1\u03C1\u03BC", -1, 1], + ["\u03B3\u03B9\u03B1\u03BD", -1, 2], + ["\u03C3\u03C0\u03B1\u03BD", -1, 1], + ["\u03B7\u03B3\u03BF\u03C5\u03BC\u03B5\u03BD", -1, 2], + ["\u03BA\u03BF\u03BD", -1, 1], + ["\u03BC\u03B1\u03BA\u03C1\u03C5\u03BD", -1, 2], + ["\u03C0", -1, 2], + ["\u03BA\u03B1\u03C4\u03C1\u03B1\u03C0", 26, 1], + ["\u03C1", -1, 1], + ["\u03B2\u03C1", 28, 1], + ["\u03BB\u03B1\u03B2\u03C1", 29, 1], + ["\u03B1\u03BC\u03B2\u03C1", 29, 1], + ["\u03BC\u03B5\u03C1", 28, 1], + ["\u03C0\u03B1\u03C4\u03B5\u03C1", 28, 2], + ["\u03B1\u03BD\u03B8\u03C1", 28, 1], + ["\u03BA\u03BF\u03C1", 28, 1], + ["\u03C3", -1, 1], + ["\u03BD\u03B1\u03B3\u03BA\u03B1\u03C3", 36, 1], + ["\u03C4\u03BF\u03C3", 36, 2], + ["\u03BC\u03BF\u03C5\u03C3\u03C4", -1, 1], + ["\u03C1\u03C5", -1, 1], + ["\u03C6", -1, 1], + ["\u03C3\u03C6", 41, 1], + ["\u03B1\u03BB\u03B9\u03C3\u03C6", 42, 1], + ["\u03BD\u03C5\u03C6", 41, 2], + ["\u03C7", -1, 1] + ]; + + /** @const */ var a_18 = [ + ["\u03B1\u03BA\u03B9\u03B1", -1, 1], + ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", 0, 1], + ["\u03B9\u03C4\u03C3\u03B1", -1, 1], + ["\u03B1\u03BA\u03B9", -1, 1], + ["\u03B1\u03C1\u03B1\u03BA\u03B9", 3, 1], + ["\u03B9\u03C4\u03C3\u03C9\u03BD", -1, 1], + ["\u03B9\u03C4\u03C3\u03B1\u03C3", -1, 1], + ["\u03B9\u03C4\u03C3\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_19 = [ + ["\u03C8\u03B1\u03BB", -1, 1], + ["\u03B1\u03B9\u03C6\u03BD", -1, 1], + ["\u03BF\u03BB\u03BF", -1, 1], + ["\u03B9\u03C1", -1, 1] + ]; + + /** @const */ var a_20 = [ + ["\u03B5", -1, 1], + ["\u03C0\u03B1\u03B9\u03C7\u03BD", -1, 1] + ]; + + /** @const */ var a_21 = [ + ["\u03B9\u03B4\u03B9\u03B1", -1, 1], + ["\u03B9\u03B4\u03B9\u03C9\u03BD", -1, 1], + ["\u03B9\u03B4\u03B9\u03BF", -1, 1] + ]; + + /** @const */ var a_22 = [ + ["\u03B9\u03B2", -1, 1], + ["\u03B4", -1, 1], + 
["\u03C6\u03C1\u03B1\u03B3\u03BA", -1, 1], + ["\u03BB\u03C5\u03BA", -1, 1], + ["\u03BF\u03B2\u03B5\u03BB", -1, 1], + ["\u03BC\u03B7\u03BD", -1, 1], + ["\u03C1", -1, 1] + ]; + + /** @const */ var a_23 = [ + ["\u03B9\u03C3\u03BA\u03B5", -1, 1], + ["\u03B9\u03C3\u03BA\u03BF", -1, 1], + ["\u03B9\u03C3\u03BA\u03BF\u03C3", -1, 1], + ["\u03B9\u03C3\u03BA\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_24 = [ + ["\u03B1\u03B4\u03C9\u03BD", -1, 1], + ["\u03B1\u03B4\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_25 = [ + ["\u03B3\u03B9\u03B1\u03B3\u03B9", -1, -1], + ["\u03B8\u03B5\u03B9", -1, -1], + ["\u03BF\u03BA", -1, -1], + ["\u03BC\u03B1\u03BC", -1, -1], + ["\u03BC\u03B1\u03BD", -1, -1], + ["\u03BC\u03C0\u03B1\u03BC\u03C0", -1, -1], + ["\u03C0\u03B5\u03B8\u03B5\u03C1", -1, -1], + ["\u03C0\u03B1\u03C4\u03B5\u03C1", -1, -1], + ["\u03BA\u03C5\u03C1", -1, -1], + ["\u03BD\u03C4\u03B1\u03BD\u03C4", -1, -1] + ]; + + /** @const */ var a_26 = [ + ["\u03B5\u03B4\u03C9\u03BD", -1, 1], + ["\u03B5\u03B4\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_27 = [ + ["\u03BC\u03B9\u03BB", -1, 1], + ["\u03B4\u03B1\u03C0", -1, 1], + ["\u03B3\u03B7\u03C0", -1, 1], + ["\u03B9\u03C0", -1, 1], + ["\u03B5\u03BC\u03C0", -1, 1], + ["\u03BF\u03C0", -1, 1], + ["\u03BA\u03C1\u03B1\u03C3\u03C0", -1, 1], + ["\u03C5\u03C0", -1, 1] + ]; + + /** @const */ var a_28 = [ + ["\u03BF\u03C5\u03B4\u03C9\u03BD", -1, 1], + ["\u03BF\u03C5\u03B4\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_29 = [ + ["\u03C4\u03C1\u03B1\u03B3", -1, 1], + ["\u03C6\u03B5", -1, 1], + ["\u03BA\u03B1\u03BB\u03B9\u03B1\u03BA", -1, 1], + ["\u03B1\u03C1\u03BA", -1, 1], + ["\u03C3\u03BA", -1, 1], + ["\u03C0\u03B5\u03C4\u03B1\u03BB", -1, 1], + ["\u03B2\u03B5\u03BB", -1, 1], + ["\u03BB\u03BF\u03C5\u03BB", -1, 1], + ["\u03C6\u03BB", -1, 1], + ["\u03C7\u03BD", -1, 1], + ["\u03C0\u03BB\u03B5\u03BE", -1, 1], + ["\u03C3\u03C0", -1, 1], + ["\u03C6\u03C1", -1, 1], + ["\u03C3", -1, 1], + ["\u03BB\u03B9\u03C7", -1, 1] + ]; + + /** @const */ 
var a_30 = [ + ["\u03B5\u03C9\u03BD", -1, 1], + ["\u03B5\u03C9\u03C3", -1, 1] + ]; + + /** @const */ var a_31 = [ + ["\u03B4", -1, 1], + ["\u03B9\u03B4", 0, 1], + ["\u03B8", -1, 1], + ["\u03B3\u03B1\u03BB", -1, 1], + ["\u03B5\u03BB", -1, 1], + ["\u03BD", -1, 1], + ["\u03C0", -1, 1], + ["\u03C0\u03B1\u03C1", -1, 1] + ]; + + /** @const */ var a_32 = [ + ["\u03B9\u03B1", -1, 1], + ["\u03B9\u03C9\u03BD", -1, 1], + ["\u03B9\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_33 = [ + ["\u03B9\u03BA\u03B1", -1, 1], + ["\u03B9\u03BA\u03C9\u03BD", -1, 1], + ["\u03B9\u03BA\u03BF", -1, 1], + ["\u03B9\u03BA\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_34 = [ + ["\u03B1\u03B4", -1, 1], + ["\u03C3\u03C5\u03BD\u03B1\u03B4", 0, 1], + ["\u03BA\u03B1\u03C4\u03B1\u03B4", 0, 1], + ["\u03B1\u03BD\u03C4\u03B9\u03B4", -1, 1], + ["\u03B5\u03BD\u03B4", -1, 1], + ["\u03C6\u03C5\u03BB\u03BF\u03B4", -1, 1], + ["\u03C5\u03C0\u03BF\u03B4", -1, 1], + ["\u03C0\u03C1\u03C9\u03C4\u03BF\u03B4", -1, 1], + ["\u03B5\u03BE\u03C9\u03B4", -1, 1], + ["\u03B7\u03B8", -1, 1], + ["\u03B1\u03BD\u03B7\u03B8", 9, 1], + ["\u03BE\u03B9\u03BA", -1, 1], + ["\u03B1\u03BB", -1, 1], + ["\u03B1\u03BC\u03BC\u03BF\u03C7\u03B1\u03BB", 12, 1], + ["\u03C3\u03C5\u03BD\u03BF\u03BC\u03B7\u03BB", -1, 1], + ["\u03BC\u03C0\u03BF\u03BB", -1, 1], + ["\u03BC\u03BF\u03C5\u03BB", -1, 1], + ["\u03C4\u03C3\u03B1\u03BC", -1, 1], + ["\u03B2\u03C1\u03C9\u03BC", -1, 1], + ["\u03B1\u03BC\u03B1\u03BD", -1, 1], + ["\u03BC\u03C0\u03B1\u03BD", -1, 1], + ["\u03BA\u03B1\u03BB\u03BB\u03B9\u03BD", -1, 1], + ["\u03C0\u03BF\u03C3\u03C4\u03B5\u03BB\u03BD", -1, 1], + ["\u03C6\u03B9\u03BB\u03BF\u03BD", -1, 1], + ["\u03BA\u03B1\u03BB\u03C0", -1, 1], + ["\u03B3\u03B5\u03C1", -1, 1], + ["\u03C7\u03B1\u03C3", -1, 1], + ["\u03BC\u03C0\u03BF\u03C3", -1, 1], + ["\u03C0\u03BB\u03B9\u03B1\u03C4\u03C3", -1, 1], + ["\u03C0\u03B5\u03C4\u03C3", -1, 1], + ["\u03C0\u03B9\u03C4\u03C3", -1, 1], + ["\u03C6\u03C5\u03C3", -1, 1], + 
["\u03BC\u03C0\u03B1\u03B3\u03B9\u03B1\u03C4", -1, 1], + ["\u03BD\u03B9\u03C4", -1, 1], + ["\u03C0\u03B9\u03BA\u03B1\u03BD\u03C4", -1, 1], + ["\u03C3\u03B5\u03C1\u03C4", -1, 1] + ]; + + /** @const */ var a_35 = [ + ["\u03B1\u03B3\u03B1\u03BC\u03B5", -1, 1], + ["\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", 1, 1], + ["\u03B7\u03C3\u03B1\u03BC\u03B5", -1, 1], + ["\u03BF\u03C5\u03C3\u03B1\u03BC\u03B5", -1, 1] + ]; + + /** @const */ var a_36 = [ + ["\u03B2\u03BF\u03C5\u03B2", -1, 1], + ["\u03BE\u03B5\u03B8", -1, 1], + ["\u03C0\u03B5\u03B8", -1, 1], + ["\u03B1\u03C0\u03BF\u03B8", -1, 1], + ["\u03B1\u03C0\u03BF\u03BA", -1, 1], + ["\u03BF\u03C5\u03BB", -1, 1], + ["\u03B1\u03BD\u03B1\u03C0", -1, 1], + ["\u03C0\u03B9\u03BA\u03C1", -1, 1], + ["\u03C0\u03BF\u03C4", -1, 1], + ["\u03B1\u03C0\u03BF\u03C3\u03C4", -1, 1], + ["\u03C7", -1, 1], + ["\u03C3\u03B9\u03C7", 10, 1] + ]; + + /** @const */ var a_37 = [ + ["\u03C4\u03C1", -1, 1], + ["\u03C4\u03C3", -1, 1] + ]; + + /** @const */ var a_38 = [ + ["\u03B1\u03B3\u03B1\u03BD\u03B5", -1, 1], + ["\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", 1, 1], + ["\u03B7\u03C3\u03B1\u03BD\u03B5", -1, 1], + ["\u03BF\u03C5\u03C3\u03B1\u03BD\u03B5", -1, 1], + ["\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1], + ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", 5, 1], + ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1], + ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", 7, 1], + ["\u03BF\u03C4\u03B1\u03BD\u03B5", -1, 1], + ["\u03B9\u03BF\u03C4\u03B1\u03BD\u03B5", 9, 1] + ]; + + /** @const */ var a_39 = [ + ["\u03C4\u03B1\u03B2", -1, 1], + ["\u03BD\u03C4\u03B1\u03B2", 0, 1], + ["\u03C8\u03B7\u03BB\u03BF\u03C4\u03B1\u03B2", 0, 1], + ["\u03BB\u03B9\u03B2", -1, 1], + ["\u03BA\u03BB\u03B9\u03B2", 3, 1], + ["\u03BE\u03B7\u03C1\u03BF\u03BA\u03BB\u03B9\u03B2", 4, 1], + ["\u03B3", -1, 1], + ["\u03B1\u03B3", 6, 1], + ["\u03C4\u03C1\u03B1\u03B3", 7, 1], + 
["\u03C4\u03C3\u03B1\u03B3", 7, 1], + ["\u03B1\u03B8\u03B9\u03B3\u03B3", 6, 1], + ["\u03C4\u03C3\u03B9\u03B3\u03B3", 6, 1], + ["\u03B1\u03C4\u03C3\u03B9\u03B3\u03B3", 11, 1], + ["\u03C3\u03C4\u03B5\u03B3", 6, 1], + ["\u03B1\u03C0\u03B7\u03B3", 6, 1], + ["\u03C3\u03B9\u03B3", 6, 1], + ["\u03B1\u03BD\u03BF\u03C1\u03B3", 6, 1], + ["\u03B5\u03BD\u03BF\u03C1\u03B3", 6, 1], + ["\u03BA\u03B1\u03BB\u03C0\u03BF\u03C5\u03B6", -1, 1], + ["\u03B8", -1, 1], + ["\u03BC\u03C9\u03B1\u03BC\u03B5\u03B8", 19, 1], + ["\u03C0\u03B9\u03B8", 19, 1], + ["\u03B1\u03C0\u03B9\u03B8", 21, 1], + ["\u03B4\u03B5\u03BA", -1, 1], + ["\u03C0\u03B5\u03BB\u03B5\u03BA", -1, 1], + ["\u03B9\u03BA", -1, 1], + ["\u03B1\u03BD\u03B9\u03BA", 25, 1], + ["\u03B2\u03BF\u03C5\u03BB\u03BA", -1, 1], + ["\u03B2\u03B1\u03C3\u03BA", -1, 1], + ["\u03B2\u03C1\u03B1\u03C7\u03C5\u03BA", -1, 1], + ["\u03B3\u03B1\u03BB", -1, 1], + ["\u03BA\u03B1\u03C4\u03B1\u03B3\u03B1\u03BB", 30, 1], + ["\u03BF\u03BB\u03BF\u03B3\u03B1\u03BB", 30, 1], + ["\u03B2\u03B1\u03B8\u03C5\u03B3\u03B1\u03BB", 30, 1], + ["\u03BC\u03B5\u03BB", -1, 1], + ["\u03BA\u03B1\u03C3\u03C4\u03B5\u03BB", -1, 1], + ["\u03C0\u03BF\u03C1\u03C4\u03BF\u03BB", -1, 1], + ["\u03C0\u03BB", -1, 1], + ["\u03B4\u03B9\u03C0\u03BB", 37, 1], + ["\u03BB\u03B1\u03BF\u03C0\u03BB", 37, 1], + ["\u03C8\u03C5\u03C7\u03BF\u03C0\u03BB", 37, 1], + ["\u03BF\u03C5\u03BB", -1, 1], + ["\u03BC", -1, 1], + ["\u03BF\u03BB\u03B9\u03B3\u03BF\u03B4\u03B1\u03BC", 42, 1], + ["\u03BC\u03BF\u03C5\u03C3\u03BF\u03C5\u03BB\u03BC", 42, 1], + ["\u03B4\u03C1\u03B1\u03B4\u03BF\u03C5\u03BC", 42, 1], + ["\u03B2\u03C1\u03B1\u03C7\u03BC", 42, 1], + ["\u03BD", -1, 1], + ["\u03B1\u03BC\u03B5\u03C1\u03B9\u03BA\u03B1\u03BD", 47, 1], + ["\u03C0", -1, 1], + ["\u03B1\u03B4\u03B1\u03C0", 49, 1], + ["\u03C7\u03B1\u03BC\u03B7\u03BB\u03BF\u03B4\u03B1\u03C0", 49, 1], + ["\u03C0\u03BF\u03BB\u03C5\u03B4\u03B1\u03C0", 49, 1], + ["\u03BA\u03BF\u03C0", 49, 1], + ["\u03C5\u03C0\u03BF\u03BA\u03BF\u03C0", 53, 1], + 
["\u03C4\u03C3\u03BF\u03C0", 49, 1], + ["\u03C3\u03C0", 49, 1], + ["\u03B5\u03C1", -1, 1], + ["\u03B3\u03B5\u03C1", 57, 1], + ["\u03B2\u03B5\u03C4\u03B5\u03C1", 57, 1], + ["\u03BB\u03BF\u03C5\u03B8\u03B7\u03C1", -1, 1], + ["\u03BA\u03BF\u03C1\u03BC\u03BF\u03C1", -1, 1], + ["\u03C0\u03B5\u03C1\u03B9\u03C4\u03C1", -1, 1], + ["\u03BF\u03C5\u03C1", -1, 1], + ["\u03C3", -1, 1], + ["\u03B2\u03B1\u03C3", 64, 1], + ["\u03C0\u03BF\u03BB\u03B9\u03C3", 64, 1], + ["\u03C3\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03C3", 64, 1], + ["\u03B8\u03C5\u03C3", 64, 1], + ["\u03B4\u03B9\u03B1\u03C4", -1, 1], + ["\u03C0\u03BB\u03B1\u03C4", -1, 1], + ["\u03C4\u03C3\u03B1\u03C1\u03BB\u03B1\u03C4", -1, 1], + ["\u03C4\u03B5\u03C4", -1, 1], + ["\u03C0\u03BF\u03C5\u03C1\u03B9\u03C4", -1, 1], + ["\u03C3\u03BF\u03C5\u03BB\u03C4", -1, 1], + ["\u03BC\u03B1\u03B9\u03BD\u03C4", -1, 1], + ["\u03B6\u03C9\u03BD\u03C4", -1, 1], + ["\u03BA\u03B1\u03C3\u03C4", -1, 1], + ["\u03C6", -1, 1], + ["\u03B4\u03B9\u03B1\u03C6", 78, 1], + ["\u03C3\u03C4\u03B5\u03C6", 78, 1], + ["\u03C6\u03C9\u03C4\u03BF\u03C3\u03C4\u03B5\u03C6", 80, 1], + ["\u03C0\u03B5\u03C1\u03B7\u03C6", 78, 1], + ["\u03C5\u03C0\u03B5\u03C1\u03B7\u03C6", 82, 1], + ["\u03BA\u03BF\u03B9\u03BB\u03B1\u03C1\u03C6", 78, 1], + ["\u03C0\u03B5\u03BD\u03C4\u03B1\u03C1\u03C6", 78, 1], + ["\u03BF\u03C1\u03C6", 78, 1], + ["\u03C7", -1, 1], + ["\u03B1\u03BC\u03B7\u03C7", 87, 1], + ["\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 87, 1], + ["\u03BC\u03B5\u03B3\u03BB\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1], + ["\u03BA\u03B1\u03C0\u03BD\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1], + ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1], + ["\u03C0\u03BF\u03BB\u03C5\u03BC\u03B7\u03C7", 87, 1], + ["\u03BB\u03B9\u03C7", 87, 1] + ]; + + /** @const */ var a_40 = [ + ["\u03B7\u03C3\u03B5\u03C4\u03B5", -1, 1] + ]; + + /** @const */ var a_41 = [ + ["\u03B5\u03BD\u03B4", -1, 1], + ["\u03C3\u03C5\u03BD\u03B4", -1, 1], + ["\u03BF\u03B4", -1, 1], 
+ ["\u03B4\u03B9\u03B1\u03B8", -1, 1], + ["\u03BA\u03B1\u03B8", -1, 1], + ["\u03C1\u03B1\u03B8", -1, 1], + ["\u03C4\u03B1\u03B8", -1, 1], + ["\u03C4\u03B9\u03B8", -1, 1], + ["\u03B5\u03BA\u03B8", -1, 1], + ["\u03B5\u03BD\u03B8", -1, 1], + ["\u03C3\u03C5\u03BD\u03B8", -1, 1], + ["\u03C1\u03BF\u03B8", -1, 1], + ["\u03C5\u03C0\u03B5\u03C1\u03B8", -1, 1], + ["\u03C3\u03B8", -1, 1], + ["\u03B5\u03C5\u03B8", -1, 1], + ["\u03B1\u03C1\u03BA", -1, 1], + ["\u03C9\u03C6\u03B5\u03BB", -1, 1], + ["\u03B2\u03BF\u03BB", -1, 1], + ["\u03B1\u03B9\u03BD", -1, 1], + ["\u03C0\u03BF\u03BD", -1, 1], + ["\u03C1\u03BF\u03BD", -1, 1], + ["\u03C3\u03C5\u03BD", -1, 1], + ["\u03B2\u03B1\u03C1", -1, 1], + ["\u03B2\u03C1", -1, 1], + ["\u03B1\u03B9\u03C1", -1, 1], + ["\u03C6\u03BF\u03C1", -1, 1], + ["\u03B5\u03C5\u03C1", -1, 1], + ["\u03C0\u03C5\u03C1", -1, 1], + ["\u03C7\u03C9\u03C1", -1, 1], + ["\u03BD\u03B5\u03C4", -1, 1], + ["\u03C3\u03C7", -1, 1] + ]; + + /** @const */ var a_42 = [ + ["\u03C0\u03B1\u03B3", -1, 1], + ["\u03B4", -1, 1], + ["\u03B1\u03B4", 1, 1], + ["\u03B8", -1, 1], + ["\u03B1\u03B8", 3, 1], + ["\u03C4\u03BF\u03BA", -1, 1], + ["\u03C3\u03BA", -1, 1], + ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", -1, 1], + ["\u03C3\u03BA\u03B5\u03BB", -1, 1], + ["\u03B1\u03C0\u03BB", -1, 1], + ["\u03B5\u03BC", -1, 1], + ["\u03B1\u03BD", -1, 1], + ["\u03B2\u03B5\u03BD", -1, 1], + ["\u03B2\u03B1\u03C1\u03BF\u03BD", -1, 1], + ["\u03BA\u03BF\u03C0", -1, 1], + ["\u03C3\u03B5\u03C1\u03C0", -1, 1], + ["\u03B1\u03B2\u03B1\u03C1", -1, 1], + ["\u03B5\u03BD\u03B1\u03C1", -1, 1], + ["\u03B1\u03B2\u03C1", -1, 1], + ["\u03BC\u03C0\u03BF\u03C1", -1, 1], + ["\u03B8\u03B1\u03C1\u03C1", -1, 1], + ["\u03BD\u03C4\u03C1", -1, 1], + ["\u03C5", -1, 1], + ["\u03BD\u03B9\u03C6", -1, 1], + ["\u03C3\u03C5\u03C1\u03C6", -1, 1] + ]; + + /** @const */ var a_43 = [ + ["\u03BF\u03BD\u03C4\u03B1\u03C3", -1, 1], + ["\u03C9\u03BD\u03C4\u03B1\u03C3", -1, 1] + ]; + + /** @const */ var a_44 = [ + 
["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", -1, 1], + ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", 0, 1] + ]; + + /** @const */ var a_45 = [ + ["\u03C0", -1, 1], + ["\u03B1\u03C0", 0, 1], + ["\u03B1\u03BA\u03B1\u03C4\u03B1\u03C0", 1, 1], + ["\u03C3\u03C5\u03BC\u03C0", 0, 1], + ["\u03B1\u03C3\u03C5\u03BC\u03C0", 3, 1], + ["\u03B1\u03BC\u03B5\u03C4\u03B1\u03BC\u03C6", -1, 1] + ]; + + /** @const */ var a_46 = [ + ["\u03B6", -1, 1], + ["\u03B1\u03BB", -1, 1], + ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", 1, 1], + ["\u03B5\u03BA\u03C4\u03B5\u03BB", -1, 1], + ["\u03BC", -1, 1], + ["\u03BE", -1, 1], + ["\u03C0\u03C1\u03BF", -1, 1], + ["\u03B1\u03C1", -1, 1], + ["\u03BD\u03B9\u03C3", -1, 1] + ]; + + /** @const */ var a_47 = [ + ["\u03B7\u03B8\u03B7\u03BA\u03B1", -1, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B5", -1, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_48 = [ + ["\u03C0\u03B9\u03B8", -1, 1], + ["\u03BF\u03B8", -1, 1], + ["\u03BD\u03B1\u03C1\u03B8", -1, 1], + ["\u03C3\u03BA\u03BF\u03C5\u03BB", -1, 1], + ["\u03C3\u03BA\u03C9\u03BB", -1, 1], + ["\u03C3\u03C6", -1, 1] + ]; + + /** @const */ var a_49 = [ + ["\u03B8", -1, 1], + ["\u03B4\u03B9\u03B1\u03B8", 0, 1], + ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03B1\u03B8", 0, 1], + ["\u03C3\u03C5\u03BD\u03B8", 0, 1], + ["\u03C0\u03C1\u03BF\u03C3\u03B8", 0, 1] + ]; + + /** @const */ var a_50 = [ + ["\u03B7\u03BA\u03B1", -1, 1], + ["\u03B7\u03BA\u03B5", -1, 1], + ["\u03B7\u03BA\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_51 = [ + ["\u03C6\u03B1\u03B3", -1, 1], + ["\u03BB\u03B7\u03B3", -1, 1], + ["\u03C6\u03C1\u03C5\u03B4", -1, 1], + ["\u03BC\u03B1\u03BD\u03C4\u03B9\u03BB", -1, 1], + ["\u03BC\u03B1\u03BB\u03BB", -1, 1], + ["\u03BF\u03BC", -1, 1], + ["\u03B2\u03BB\u03B5\u03C0", -1, 1], + ["\u03C0\u03BF\u03B4\u03B1\u03C1", -1, 1], + ["\u03BA\u03C5\u03BC\u03B1\u03C4", -1, 1], + ["\u03C0\u03C1\u03C9\u03C4", -1, 1], + ["\u03BB\u03B1\u03C7", -1, 1], + 
["\u03C0\u03B1\u03BD\u03C4\u03B1\u03C7", -1, 1] + ]; + + /** @const */ var a_52 = [ + ["\u03C4\u03C3\u03B1", -1, 1], + ["\u03C7\u03B1\u03B4", -1, 1], + ["\u03BC\u03B5\u03B4", -1, 1], + ["\u03BB\u03B1\u03BC\u03C0\u03B9\u03B4", -1, 1], + ["\u03B4\u03B5", -1, 1], + ["\u03C0\u03BB\u03B5", -1, 1], + ["\u03BC\u03B5\u03C3\u03B1\u03B6", -1, 1], + ["\u03B4\u03B5\u03C3\u03C0\u03BF\u03B6", -1, 1], + ["\u03B1\u03B9\u03B8", -1, 1], + ["\u03C6\u03B1\u03C1\u03BC\u03B1\u03BA", -1, 1], + ["\u03B1\u03B3\u03BA", -1, 1], + ["\u03B1\u03BD\u03B7\u03BA", -1, 1], + ["\u03BB", -1, 1], + ["\u03BC", -1, 1], + ["\u03B1\u03BC", 13, 1], + ["\u03B2\u03C1\u03BF\u03BC", 13, 1], + ["\u03C5\u03C0\u03BF\u03C4\u03B5\u03B9\u03BD", -1, 1], + ["\u03B5\u03BA\u03BB\u03B9\u03C0", -1, 1], + ["\u03C1", -1, 1], + ["\u03B5\u03BD\u03B4\u03B9\u03B1\u03C6\u03B5\u03C1", 18, 1], + ["\u03B1\u03BD\u03B1\u03C1\u03C1", 18, 1], + ["\u03C0\u03B1\u03C4", -1, 1], + ["\u03BA\u03B1\u03B8\u03B1\u03C1\u03B5\u03C5", -1, 1], + ["\u03B4\u03B5\u03C5\u03C4\u03B5\u03C1\u03B5\u03C5", -1, 1], + ["\u03BB\u03B5\u03C7", -1, 1] + ]; + + /** @const */ var a_53 = [ + ["\u03BF\u03C5\u03C3\u03B1", -1, 1], + ["\u03BF\u03C5\u03C3\u03B5", -1, 1], + ["\u03BF\u03C5\u03C3\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_54 = [ + ["\u03C0\u03B5\u03BB", -1, 1], + ["\u03BB\u03BB", -1, 1], + ["\u03C3\u03BC\u03B7\u03BD", -1, 1], + ["\u03C1\u03C0", -1, 1], + ["\u03C0\u03C1", -1, 1], + ["\u03C6\u03C1", -1, 1], + ["\u03C7\u03BF\u03C1\u03C4", -1, 1], + ["\u03BF\u03C6", -1, 1], + ["\u03C8\u03BF\u03C6", 7, -1], + ["\u03C3\u03C6", -1, 1], + ["\u03BB\u03BF\u03C7", -1, 1], + ["\u03BD\u03B1\u03C5\u03BB\u03BF\u03C7", 10, -1] + ]; + + /** @const */ var a_55 = [ + ["\u03B1\u03BC\u03B1\u03BB\u03BB\u03B9", -1, 1], + ["\u03BB", -1, 1], + ["\u03B1\u03BC\u03B1\u03BB", 1, 1], + ["\u03BC", -1, 1], + ["\u03BF\u03C5\u03BB\u03B1\u03BC", 3, 1], + ["\u03B5\u03BD", -1, 1], + ["\u03B4\u03B5\u03C1\u03B2\u03B5\u03BD", 5, 1], + ["\u03C0", -1, 1], + ["\u03B1\u03B5\u03B9\u03C0", 7, 
1], + ["\u03B1\u03C1\u03C4\u03B9\u03C0", 7, 1], + ["\u03C3\u03C5\u03BC\u03C0", 7, 1], + ["\u03BD\u03B5\u03BF\u03C0", 7, 1], + ["\u03BA\u03C1\u03BF\u03BA\u03B1\u03BB\u03BF\u03C0", 7, 1], + ["\u03BF\u03BB\u03BF\u03C0", 7, 1], + ["\u03C0\u03C1\u03BF\u03C3\u03C9\u03C0\u03BF\u03C0", 7, 1], + ["\u03C3\u03B9\u03B4\u03B7\u03C1\u03BF\u03C0", 7, 1], + ["\u03B4\u03C1\u03BF\u03C3\u03BF\u03C0", 7, 1], + ["\u03B1\u03C3\u03C0", 7, 1], + ["\u03B1\u03BD\u03C5\u03C0", 7, 1], + ["\u03C1", -1, 1], + ["\u03B1\u03C3\u03C0\u03B1\u03C1", 19, 1], + ["\u03C7\u03B1\u03C1", 19, 1], + ["\u03B1\u03C7\u03B1\u03C1", 21, 1], + ["\u03B1\u03C0\u03B5\u03C1", 19, 1], + ["\u03C4\u03C1", 19, 1], + ["\u03BF\u03C5\u03C1", 19, 1], + ["\u03C4", -1, 1], + ["\u03B4\u03B9\u03B1\u03C4", 26, 1], + ["\u03B5\u03C0\u03B9\u03C4", 26, 1], + ["\u03C3\u03C5\u03BD\u03C4", 26, 1], + ["\u03BF\u03BC\u03BF\u03C4", 26, 1], + ["\u03BD\u03BF\u03BC\u03BF\u03C4", 30, 1], + ["\u03B1\u03C0\u03BF\u03C4", 26, 1], + ["\u03C5\u03C0\u03BF\u03C4", 26, 1], + ["\u03B1\u03B2\u03B1\u03C3\u03C4", 26, 1], + ["\u03B1\u03B9\u03BC\u03BF\u03C3\u03C4", 26, 1], + ["\u03C0\u03C1\u03BF\u03C3\u03C4", 26, 1], + ["\u03B1\u03BD\u03C5\u03C3\u03C4", 26, 1], + ["\u03BD\u03B1\u03C5", -1, 1], + ["\u03B1\u03C6", -1, 1], + ["\u03BE\u03B5\u03C6", -1, 1], + ["\u03B1\u03B4\u03B7\u03C6", -1, 1], + ["\u03C0\u03B1\u03BC\u03C6", -1, 1], + ["\u03C0\u03BF\u03BB\u03C5\u03C6", -1, 1] + ]; + + /** @const */ var a_56 = [ + ["\u03B1\u03B3\u03B1", -1, 1], + ["\u03B1\u03B3\u03B5", -1, 1], + ["\u03B1\u03B3\u03B5\u03C3", -1, 1] + ]; + + /** @const */ var a_57 = [ + ["\u03B7\u03C3\u03B1", -1, 1], + ["\u03B7\u03C3\u03B5", -1, 1], + ["\u03B7\u03C3\u03BF\u03C5", -1, 1] + ]; + + /** @const */ var a_58 = [ + ["\u03BD", -1, 1], + ["\u03B4\u03C9\u03B4\u03B5\u03BA\u03B1\u03BD", 0, 1], + ["\u03B5\u03C0\u03C4\u03B1\u03BD", 0, 1], + ["\u03BC\u03B5\u03B3\u03B1\u03BB\u03BF\u03BD", 0, 1], + ["\u03B5\u03C1\u03B7\u03BC\u03BF\u03BD", 0, 1], + ["\u03C7\u03B5\u03C1\u03C3\u03BF\u03BD", 0, 1] + ]; + 
+ /** @const */ var a_59 = [ + ["\u03B7\u03C3\u03C4\u03B5", -1, 1] + ]; + + /** @const */ var a_60 = [ + ["\u03C3\u03B2", -1, 1], + ["\u03B1\u03C3\u03B2", 0, 1], + ["\u03B1\u03C0\u03BB", -1, 1], + ["\u03B1\u03B5\u03B9\u03BC\u03BD", -1, 1], + ["\u03C7\u03C1", -1, 1], + ["\u03B1\u03C7\u03C1", 4, 1], + ["\u03BA\u03BF\u03B9\u03BD\u03BF\u03C7\u03C1", 4, 1], + ["\u03B4\u03C5\u03C3\u03C7\u03C1", 4, 1], + ["\u03B5\u03C5\u03C7\u03C1", 4, 1], + ["\u03C0\u03B1\u03BB\u03B9\u03BC\u03C8", -1, 1] + ]; + + /** @const */ var a_61 = [ + ["\u03BF\u03C5\u03BD\u03B5", -1, 1], + ["\u03B7\u03B8\u03BF\u03C5\u03BD\u03B5", 0, 1], + ["\u03B7\u03C3\u03BF\u03C5\u03BD\u03B5", 0, 1] + ]; + + /** @const */ var a_62 = [ + ["\u03C3\u03C0\u03B9", -1, 1], + ["\u03BD", -1, 1], + ["\u03B5\u03BE\u03C9\u03BD", 1, 1], + ["\u03C1", -1, 1], + ["\u03C3\u03C4\u03C1\u03B1\u03B2\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1], + ["\u03BA\u03B1\u03BA\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1] + ]; + + /** @const */ var a_63 = [ + ["\u03BF\u03C5\u03BC\u03B5", -1, 1], + ["\u03B7\u03B8\u03BF\u03C5\u03BC\u03B5", 0, 1], + ["\u03B7\u03C3\u03BF\u03C5\u03BC\u03B5", 0, 1] + ]; + + /** @const */ var a_64 = [ + ["\u03B1\u03B6", -1, 1], + ["\u03C9\u03C1\u03B9\u03BF\u03C0\u03BB", -1, 1], + ["\u03B1\u03C3\u03BF\u03C5\u03C3", -1, 1], + ["\u03C0\u03B1\u03C1\u03B1\u03C3\u03BF\u03C5\u03C3", 2, 1], + ["\u03B1\u03BB\u03BB\u03BF\u03C3\u03BF\u03C5\u03C3", -1, 1], + ["\u03C6", -1, 1], + ["\u03C7", -1, 1] + ]; + + /** @const */ var a_65 = [ + ["\u03BC\u03B1\u03C4\u03B1", -1, 1], + ["\u03BC\u03B1\u03C4\u03C9\u03BD", -1, 1], + ["\u03BC\u03B1\u03C4\u03BF\u03C3", -1, 1] + ]; + + /** @const */ var a_66 = [ + ["\u03B1", -1, 1], + ["\u03B9\u03BF\u03C5\u03BC\u03B1", 0, 1], + ["\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 0, 1], + ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 2, 1], + ["\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 0, 1], + ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 4, 1], + ["\u03B5", -1, 1], + ["\u03B1\u03B3\u03B1\u03C4\u03B5", 6, 1], 
+ ["\u03B7\u03BA\u03B1\u03C4\u03B5", 6, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", 8, 1], + ["\u03B7\u03C3\u03B1\u03C4\u03B5", 6, 1], + ["\u03BF\u03C5\u03C3\u03B1\u03C4\u03B5", 6, 1], + ["\u03B5\u03B9\u03C4\u03B5", 6, 1], + ["\u03B7\u03B8\u03B5\u03B9\u03C4\u03B5", 12, 1], + ["\u03B9\u03B5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1], + ["\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1], + ["\u03B9\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 15, 1], + ["\u03B9\u03B5\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1], + ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1], + ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 18, 1], + ["\u03B7", -1, 1], + ["\u03B9", -1, 1], + ["\u03B1\u03BC\u03B1\u03B9", 21, 1], + ["\u03B9\u03B5\u03BC\u03B1\u03B9", 21, 1], + ["\u03BF\u03BC\u03B1\u03B9", 21, 1], + ["\u03BF\u03C5\u03BC\u03B1\u03B9", 21, 1], + ["\u03B1\u03C3\u03B1\u03B9", 21, 1], + ["\u03B5\u03C3\u03B1\u03B9", 21, 1], + ["\u03B9\u03B5\u03C3\u03B1\u03B9", 27, 1], + ["\u03B1\u03C4\u03B1\u03B9", 21, 1], + ["\u03B5\u03C4\u03B1\u03B9", 21, 1], + ["\u03B9\u03B5\u03C4\u03B1\u03B9", 30, 1], + ["\u03BF\u03BD\u03C4\u03B1\u03B9", 21, 1], + ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 21, 1], + ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 33, 1], + ["\u03B5\u03B9", 21, 1], + ["\u03B1\u03B5\u03B9", 35, 1], + ["\u03B7\u03B8\u03B5\u03B9", 35, 1], + ["\u03B7\u03C3\u03B5\u03B9", 35, 1], + ["\u03BF\u03B9", 21, 1], + ["\u03B1\u03BD", -1, 1], + ["\u03B1\u03B3\u03B1\u03BD", 40, 1], + ["\u03B7\u03BA\u03B1\u03BD", 40, 1], + ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD", 42, 1], + ["\u03B7\u03C3\u03B1\u03BD", 40, 1], + ["\u03BF\u03C5\u03C3\u03B1\u03BD", 40, 1], + ["\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 45, 1], + ["\u03B9\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 46, 1], + ["\u03BF\u03BD\u03C4\u03B1\u03BD", 40, 1], + ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD", 48, 1], + ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 40, 1], + ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 50, 1], + 
["\u03BF\u03C4\u03B1\u03BD", 40, 1], + ["\u03B9\u03BF\u03C4\u03B1\u03BD", 52, 1], + ["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1], + ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 54, 1], + ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1], + ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 56, 1], + ["\u03BF\u03C5\u03BD", -1, 1], + ["\u03B7\u03B8\u03BF\u03C5\u03BD", 58, 1], + ["\u03BF\u03BC\u03BF\u03C5\u03BD", 58, 1], + ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD", 60, 1], + ["\u03B7\u03C3\u03BF\u03C5\u03BD", 58, 1], + ["\u03BF\u03C3\u03BF\u03C5\u03BD", 58, 1], + ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD", 63, 1], + ["\u03C9\u03BD", -1, 1], + ["\u03B7\u03B4\u03C9\u03BD", 65, 1], + ["\u03BF", -1, 1], + ["\u03B1\u03C3", -1, 1], + ["\u03B5\u03C3", -1, 1], + ["\u03B7\u03B4\u03B5\u03C3", 69, 1], + ["\u03B7\u03C3\u03B5\u03C3", 69, 1], + ["\u03B7\u03C3", -1, 1], + ["\u03B5\u03B9\u03C3", -1, 1], + ["\u03B7\u03B8\u03B5\u03B9\u03C3", 73, 1], + ["\u03BF\u03C3", -1, 1], + ["\u03C5\u03C3", -1, 1], + ["\u03BF\u03C5\u03C3", 76, 1], + ["\u03C5", -1, 1], + ["\u03BF\u03C5", 78, 1], + ["\u03C9", -1, 1], + ["\u03B1\u03C9", 80, 1], + ["\u03B7\u03B8\u03C9", 80, 1], + ["\u03B7\u03C3\u03C9", 80, 1] + ]; + + /** @const */ var a_67 = [ + ["\u03BF\u03C4\u03B5\u03C1", -1, 1], + ["\u03B5\u03C3\u03C4\u03B5\u03C1", -1, 1], + ["\u03C5\u03C4\u03B5\u03C1", -1, 1], + ["\u03C9\u03C4\u03B5\u03C1", -1, 1], + ["\u03BF\u03C4\u03B1\u03C4", -1, 1], + ["\u03B5\u03C3\u03C4\u03B1\u03C4", -1, 1], + ["\u03C5\u03C4\u03B1\u03C4", -1, 1], + ["\u03C9\u03C4\u03B1\u03C4", -1, 1] + ]; + + /** @const */ var /** Array */ g_v = [81, 65, 16, 1]; + + /** @const */ var /** Array */ g_v2 = [81, 65, 0, 1]; + + var /** boolean */ B_test1 = false; + + + /** @return {boolean} */ + function r_has_min_length() { + return base.current.length >= 3; + }; + + /** @return {boolean} */ + function r_tolower() { + var /** number */ among_var; + while(true) + { + /** @const */ var /** number */ v_1 = base.limit - 
base.cursor; + lab0: { + base.ket = base.cursor; + among_var = base.find_among_b(a_0); + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u03B1")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B2")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u03B3")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u03B4")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u03B5")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u03B6")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u03B7")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u03B8")) + { + return false; + } + break; + case 9: + if (!base.slice_from("\u03B9")) + { + return false; + } + break; + case 10: + if (!base.slice_from("\u03BA")) + { + return false; + } + break; + case 11: + if (!base.slice_from("\u03BB")) + { + return false; + } + break; + case 12: + if (!base.slice_from("\u03BC")) + { + return false; + } + break; + case 13: + if (!base.slice_from("\u03BD")) + { + return false; + } + break; + case 14: + if (!base.slice_from("\u03BE")) + { + return false; + } + break; + case 15: + if (!base.slice_from("\u03BF")) + { + return false; + } + break; + case 16: + if (!base.slice_from("\u03C0")) + { + return false; + } + break; + case 17: + if (!base.slice_from("\u03C1")) + { + return false; + } + break; + case 18: + if (!base.slice_from("\u03C3")) + { + return false; + } + break; + case 19: + if (!base.slice_from("\u03C4")) + { + return false; + } + break; + case 20: + if (!base.slice_from("\u03C5")) + { + return false; + } + break; + case 21: + if (!base.slice_from("\u03C6")) + { + return false; + } + break; + case 22: + if (!base.slice_from("\u03C7")) + { + return false; + } + break; + case 23: + if (!base.slice_from("\u03C8")) + { + return false; + } + break; + case 24: + if (!base.slice_from("\u03C9")) + { + return false; + } + break; 
+ case 25: + if (base.cursor <= base.limit_backward) + { + break lab0; + } + base.cursor--; + break; + } + continue; + } + base.cursor = base.limit - v_1; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_step_1() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u03C6\u03B1")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03C3\u03BA\u03B1")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u03BF\u03BB\u03BF")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u03C3\u03BF")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u03C4\u03B1\u03C4\u03BF")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u03BA\u03C1\u03B5")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u03C0\u03B5\u03C1")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u03C4\u03B5\u03C1")) + { + return false; + } + break; + case 9: + if (!base.slice_from("\u03C6\u03C9")) + { + return false; + } + break; + case 10: + if (!base.slice_from("\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4")) + { + return false; + } + break; + case 11: + if (!base.slice_from("\u03B3\u03B5\u03B3\u03BF\u03BD")) + { + return false; + } + break; + } + B_test1 = false; + return true; + }; + + /** @return {boolean} */ + function r_step_s1() { + var /** number */ among_var; + base.ket = base.cursor; + if (base.find_among_b(a_3) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + switch (among_var) { + case 1: + if 
(!base.slice_from("\u03B9")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B9\u03B6")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s2() { + base.ket = base.cursor; + if (base.find_among_b(a_5) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_4) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03C9\u03BD")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s3() { + var /** number */ among_var; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B9\u03C3\u03B1"))) + { + break lab1; + } + base.bra = base.cursor; + if (base.cursor > base.limit_backward) + { + break lab1; + } + if (!base.slice_from("\u03B9\u03C3")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + } + if (base.find_among_b(a_7) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_6); + if (among_var == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("\u03B9")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B9\u03C3")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s4() { + base.ket = base.cursor; + if (base.find_among_b(a_9) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = 
base.cursor; + if (base.find_among_b(a_8) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B9")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s5() { + var /** number */ among_var; + base.ket = base.cursor; + if (base.find_among_b(a_11) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_10); + if (among_var == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("\u03B9")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B9\u03C3\u03C4")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s6() { + var /** number */ among_var; + base.ket = base.cursor; + if (base.find_among_b(a_14) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_12); + if (among_var == 0) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + switch (among_var) { + case 1: + if (!base.slice_from("\u03B9\u03C3\u03BC")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B9")) + { + return false; + } + break; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + among_var = base.find_among_b(a_13); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4")) + { + return false; + } + break; + case 2: + if 
(!base.slice_from("\u03B1\u03C4\u03BF\u03BC")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u03B3\u03BD\u03C9\u03C3\u03C4")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u03B5\u03B8\u03BD")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u03C3\u03BA\u03B5\u03C0\u03C4")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u03C4\u03BF\u03C0")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1")) + { + return false; + } + break; + case 9: + if (!base.slice_from("\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4")) + { + return false; + } + break; + case 10: + if (!base.slice_from("\u03B8\u03B5\u03B1\u03C4\u03C1")) + { + return false; + } + break; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_s7() { + base.ket = base.cursor; + if (base.find_among_b(a_16) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_15) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B1\u03C1\u03B1\u03BA")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_s8() { + var /** number */ among_var; + base.ket = base.cursor; + if (base.find_among_b(a_18) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_17); + if (among_var == 0) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + switch (among_var) 
{ + case 1: + if (!base.slice_from("\u03B1\u03BA")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u03B9\u03C4\u03C3")) + { + return false; + } + break; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.eq_s_b("\u03BA\u03BF\u03C1"))) + { + return false; + } + if (!base.slice_from("\u03B9\u03C4\u03C3")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_s9() { + base.ket = base.cursor; + if (base.find_among_b(a_21) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_19) == 0) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + if (!base.slice_from("\u03B9\u03B4")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_20) == 0) + { + return false; + } + if (!base.slice_from("\u03B9\u03B4")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_s10() { + base.ket = base.cursor; + if (base.find_among_b(a_23) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_22) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B9\u03C3\u03BA")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_2a() { + base.ket = base.cursor; + if (base.find_among_b(a_24) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + { + /** @const */ var /** 
number */ v_1 = base.limit - base.cursor; + lab0: { + if (base.find_among_b(a_25) == 0) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; + } + { + /** @const */ var /** number */ c1 = base.cursor; + base.insert(base.cursor, base.cursor, "\u03B1\u03B4"); + base.cursor = c1; + } + return true; + }; + + /** @return {boolean} */ + function r_step_2b() { + base.ket = base.cursor; + if (base.find_among_b(a_26) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_27) == 0) + { + return false; + } + if (!base.slice_from("\u03B5\u03B4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_2c() { + base.ket = base.cursor; + if (base.find_among_b(a_28) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_29) == 0) + { + return false; + } + if (!base.slice_from("\u03BF\u03C5\u03B4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_2d() { + base.ket = base.cursor; + if (base.find_among_b(a_30) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_31) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B5")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_3() { + base.ket = base.cursor; + if (base.find_among_b(a_32) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.in_grouping_b(g_v, 945, 969))) + { + return false; + } + if 
(!base.slice_from("\u03B9")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_4() { + base.ket = base.cursor; + if (base.find_among_b(a_33) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.in_grouping_b(g_v, 945, 969))) + { + break lab1; + } + if (!base.slice_from("\u03B9\u03BA")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + } + base.bra = base.cursor; + if (base.find_among_b(a_34) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B9\u03BA")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5a() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B1\u03B3\u03B1\u03BC\u03B5"))) + { + break lab0; + } + base.bra = base.cursor; + if (base.cursor > base.limit_backward) + { + break lab0; + } + if (!base.slice_from("\u03B1\u03B3\u03B1\u03BC")) + { + return false; + } + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + if (base.find_among_b(a_35) == 0) + { + break lab1; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + } + base.cursor = base.limit - v_2; + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B1\u03BC\u03B5"))) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_36) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if 
(!base.slice_from("\u03B1\u03BC")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5b() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_38) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_37) == 0) + { + break lab0; + } + if (base.cursor > base.limit_backward) + { + break lab0; + } + if (!base.slice_from("\u03B1\u03B3\u03B1\u03BD")) + { + return false; + } + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B1\u03BD\u03B5"))) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.in_grouping_b(g_v2, 945, 969))) + { + break lab2; + } + if (!base.slice_from("\u03B1\u03BD")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + base.ket = base.cursor; + } + base.bra = base.cursor; + if (base.find_among_b(a_39) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B1\u03BD")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5c() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_40) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B5\u03C4\u03B5"))) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab1: { + /** 
@const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.in_grouping_b(g_v2, 945, 969))) + { + break lab2; + } + if (!base.slice_from("\u03B5\u03C4")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + lab3: { + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_41) == 0) + { + break lab3; + } + if (!base.slice_from("\u03B5\u03C4")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + base.ket = base.cursor; + } + base.bra = base.cursor; + if (base.find_among_b(a_42) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B5\u03C4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5d() { + base.ket = base.cursor; + if (base.find_among_b(a_43) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.eq_s_b("\u03B1\u03C1\u03C7"))) + { + break lab1; + } + if (base.cursor > base.limit_backward) + { + break lab1; + } + if (!base.slice_from("\u03BF\u03BD\u03C4")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.eq_s_b("\u03BA\u03C1\u03B5"))) + { + return false; + } + if (!base.slice_from("\u03C9\u03BD\u03C4")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_5e() { + base.ket = base.cursor; + if (base.find_among_b(a_44) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.eq_s_b("\u03BF\u03BD"))) + { + return false; + } + if 
(base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03BF\u03BC\u03B1\u03C3\u03C4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5f() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B9\u03B5\u03C3\u03C4\u03B5"))) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_45) == 0) + { + break lab0; + } + if (base.cursor > base.limit_backward) + { + break lab0; + } + if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4")) + { + return false; + } + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (!(base.eq_s_b("\u03B5\u03C3\u03C4\u03B5"))) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_46) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5g() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_47) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (base.find_among_b(a_50) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_48) == 0) + { + break lab2; + } + if 
(!base.slice_from("\u03B7\u03BA")) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_49) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B7\u03BA")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_5h() { + base.ket = base.cursor; + if (base.find_among_b(a_53) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_51) == 0) + { + break lab1; + } + if (!base.slice_from("\u03BF\u03C5\u03C3")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_52) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03BF\u03C5\u03C3")) + { + return false; + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_5i() { + var /** number */ among_var; + base.ket = base.cursor; + if (base.find_among_b(a_56) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + base.bra = base.cursor; + if (!(base.eq_s_b("\u03BA\u03BF\u03BB\u03BB"))) + { + break lab1; + } + if (!base.slice_from("\u03B1\u03B3")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab3: { + base.ket = base.cursor; + base.bra = base.cursor; + among_var = base.find_among_b(a_54); + if (among_var == 0) + { + 
break lab3; + } + switch (among_var) { + case 1: + if (!base.slice_from("\u03B1\u03B3")) + { + return false; + } + break; + } + break lab2; + } + base.cursor = base.limit - v_2; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_55) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B1\u03B3")) + { + return false; + } + } + } + return true; + }; + + /** @return {boolean} */ + function r_step_5j() { + base.ket = base.cursor; + if (base.find_among_b(a_57) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_58) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B7\u03C3")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5k() { + base.ket = base.cursor; + if (base.find_among_b(a_59) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_60) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03B7\u03C3\u03C4")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5l() { + base.ket = base.cursor; + if (base.find_among_b(a_61) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_62) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03BF\u03C5\u03BD")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_5m() { + 
base.ket = base.cursor; + if (base.find_among_b(a_63) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_test1 = false; + base.ket = base.cursor; + base.bra = base.cursor; + if (base.find_among_b(a_64) == 0) + { + return false; + } + if (base.cursor > base.limit_backward) + { + return false; + } + if (!base.slice_from("\u03BF\u03C5\u03BC")) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_6() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_65) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_from("\u03BC\u03B1")) + { + return false; + } + } + base.cursor = base.limit - v_1; + if (!B_test1) + { + return false; + } + base.ket = base.cursor; + if (base.find_among_b(a_66) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step_7() { + base.ket = base.cursor; + if (base.find_among_b(a_67) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + r_tolower(); + base.cursor = base.limit - v_1; + if (!r_has_min_length()) + { + return false; + } + B_test1 = true; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_step_1(); + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_step_s1(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_step_s2(); + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + r_step_s3(); + base.cursor = base.limit - 
v_5; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + r_step_s4(); + base.cursor = base.limit - v_6; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + r_step_s5(); + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + r_step_s6(); + base.cursor = base.limit - v_8; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + r_step_s7(); + base.cursor = base.limit - v_9; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + r_step_s8(); + base.cursor = base.limit - v_10; + /** @const */ var /** number */ v_11 = base.limit - base.cursor; + r_step_s9(); + base.cursor = base.limit - v_11; + /** @const */ var /** number */ v_12 = base.limit - base.cursor; + r_step_s10(); + base.cursor = base.limit - v_12; + /** @const */ var /** number */ v_13 = base.limit - base.cursor; + r_step_2a(); + base.cursor = base.limit - v_13; + /** @const */ var /** number */ v_14 = base.limit - base.cursor; + r_step_2b(); + base.cursor = base.limit - v_14; + /** @const */ var /** number */ v_15 = base.limit - base.cursor; + r_step_2c(); + base.cursor = base.limit - v_15; + /** @const */ var /** number */ v_16 = base.limit - base.cursor; + r_step_2d(); + base.cursor = base.limit - v_16; + /** @const */ var /** number */ v_17 = base.limit - base.cursor; + r_step_3(); + base.cursor = base.limit - v_17; + /** @const */ var /** number */ v_18 = base.limit - base.cursor; + r_step_4(); + base.cursor = base.limit - v_18; + /** @const */ var /** number */ v_19 = base.limit - base.cursor; + r_step_5a(); + base.cursor = base.limit - v_19; + /** @const */ var /** number */ v_20 = base.limit - base.cursor; + r_step_5b(); + base.cursor = base.limit - v_20; + /** @const */ var /** number */ v_21 = base.limit - base.cursor; + r_step_5c(); + base.cursor = base.limit - v_21; + /** @const */ var /** number */ v_22 = base.limit - base.cursor; + r_step_5d(); + base.cursor = base.limit - v_22; + /** @const 
*/ var /** number */ v_23 = base.limit - base.cursor; + r_step_5e(); + base.cursor = base.limit - v_23; + /** @const */ var /** number */ v_24 = base.limit - base.cursor; + r_step_5f(); + base.cursor = base.limit - v_24; + /** @const */ var /** number */ v_25 = base.limit - base.cursor; + r_step_5g(); + base.cursor = base.limit - v_25; + /** @const */ var /** number */ v_26 = base.limit - base.cursor; + r_step_5h(); + base.cursor = base.limit - v_26; + /** @const */ var /** number */ v_27 = base.limit - base.cursor; + r_step_5j(); + base.cursor = base.limit - v_27; + /** @const */ var /** number */ v_28 = base.limit - base.cursor; + r_step_5i(); + base.cursor = base.limit - v_28; + /** @const */ var /** number */ v_29 = base.limit - base.cursor; + r_step_5k(); + base.cursor = base.limit - v_29; + /** @const */ var /** number */ v_30 = base.limit - base.cursor; + r_step_5l(); + base.cursor = base.limit - v_30; + /** @const */ var /** number */ v_31 = base.limit - base.cursor; + r_step_5m(); + base.cursor = base.limit - v_31; + /** @const */ var /** number */ v_32 = base.limit - base.cursor; + r_step_6(); + base.cursor = base.limit - v_32; + /** @const */ var /** number */ v_33 = base.limit - base.cursor; + r_step_7(); + base.cursor = base.limit - v_33; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/hindi-stemmer.js b/sphinx/search/non-minified-js/hindi-stemmer.js new file mode 100644 index 00000000000..26a715e7e77 --- /dev/null +++ b/sphinx/search/non-minified-js/hindi-stemmer.js @@ -0,0 +1,181 @@ +// Generated from hindi.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var HindiStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0906\u0901", -1, -1], + ["\u093E\u0901", -1, -1], + 
["\u0907\u092F\u093E\u0901", 1, -1], + ["\u0906\u0907\u092F\u093E\u0901", 2, -1], + ["\u093E\u0907\u092F\u093E\u0901", 2, -1], + ["\u093F\u092F\u093E\u0901", 1, -1], + ["\u0906\u0902", -1, -1], + ["\u0909\u0906\u0902", 6, -1], + ["\u0941\u0906\u0902", 6, -1], + ["\u0908\u0902", -1, -1], + ["\u0906\u0908\u0902", 9, -1], + ["\u093E\u0908\u0902", 9, -1], + ["\u090F\u0902", -1, -1], + ["\u0906\u090F\u0902", 12, -1], + ["\u0909\u090F\u0902", 12, -1], + ["\u093E\u090F\u0902", 12, -1], + ["\u0924\u093E\u090F\u0902", 15, -1, r_CONSONANT], + ["\u0905\u0924\u093E\u090F\u0902", 16, -1], + ["\u0928\u093E\u090F\u0902", 15, -1, r_CONSONANT], + ["\u0905\u0928\u093E\u090F\u0902", 18, -1], + ["\u0941\u090F\u0902", 12, -1], + ["\u0913\u0902", -1, -1], + ["\u0906\u0913\u0902", 21, -1], + ["\u0909\u0913\u0902", 21, -1], + ["\u093E\u0913\u0902", 21, -1], + ["\u0924\u093E\u0913\u0902", 24, -1, r_CONSONANT], + ["\u0905\u0924\u093E\u0913\u0902", 25, -1], + ["\u0928\u093E\u0913\u0902", 24, -1, r_CONSONANT], + ["\u0905\u0928\u093E\u0913\u0902", 27, -1], + ["\u0941\u0913\u0902", 21, -1], + ["\u093E\u0902", -1, -1], + ["\u0907\u092F\u093E\u0902", 30, -1], + ["\u0906\u0907\u092F\u093E\u0902", 31, -1], + ["\u093E\u0907\u092F\u093E\u0902", 31, -1], + ["\u093F\u092F\u093E\u0902", 30, -1], + ["\u0940\u0902", -1, -1], + ["\u0924\u0940\u0902", 35, -1, r_CONSONANT], + ["\u0905\u0924\u0940\u0902", 36, -1], + ["\u0906\u0924\u0940\u0902", 36, -1], + ["\u093E\u0924\u0940\u0902", 36, -1], + ["\u0947\u0902", -1, -1], + ["\u094B\u0902", -1, -1], + ["\u0907\u092F\u094B\u0902", 41, -1], + ["\u0906\u0907\u092F\u094B\u0902", 42, -1], + ["\u093E\u0907\u092F\u094B\u0902", 42, -1], + ["\u093F\u092F\u094B\u0902", 41, -1], + ["\u0905", -1, -1], + ["\u0906", -1, -1], + ["\u0907", -1, -1], + ["\u0908", -1, -1], + ["\u0906\u0908", 49, -1], + ["\u093E\u0908", 49, -1], + ["\u0909", -1, -1], + ["\u090A", -1, -1], + ["\u090F", -1, -1], + ["\u0906\u090F", 54, -1], + ["\u0907\u090F", 54, -1], + ["\u0906\u0907\u090F", 56, 
-1], + ["\u093E\u0907\u090F", 56, -1], + ["\u093E\u090F", 54, -1], + ["\u093F\u090F", 54, -1], + ["\u0913", -1, -1], + ["\u0906\u0913", 61, -1], + ["\u093E\u0913", 61, -1], + ["\u0915\u0930", -1, -1, r_CONSONANT], + ["\u0905\u0915\u0930", 64, -1], + ["\u0906\u0915\u0930", 64, -1], + ["\u093E\u0915\u0930", 64, -1], + ["\u093E", -1, -1], + ["\u090A\u0902\u0917\u093E", 68, -1], + ["\u0906\u090A\u0902\u0917\u093E", 69, -1], + ["\u093E\u090A\u0902\u0917\u093E", 69, -1], + ["\u0942\u0902\u0917\u093E", 68, -1], + ["\u090F\u0917\u093E", 68, -1], + ["\u0906\u090F\u0917\u093E", 73, -1], + ["\u093E\u090F\u0917\u093E", 73, -1], + ["\u0947\u0917\u093E", 68, -1], + ["\u0924\u093E", 68, -1, r_CONSONANT], + ["\u0905\u0924\u093E", 77, -1], + ["\u0906\u0924\u093E", 77, -1], + ["\u093E\u0924\u093E", 77, -1], + ["\u0928\u093E", 68, -1, r_CONSONANT], + ["\u0905\u0928\u093E", 81, -1], + ["\u0906\u0928\u093E", 81, -1], + ["\u093E\u0928\u093E", 81, -1], + ["\u0906\u092F\u093E", 68, -1], + ["\u093E\u092F\u093E", 68, -1], + ["\u093F", -1, -1], + ["\u0940", -1, -1], + ["\u090A\u0902\u0917\u0940", 88, -1], + ["\u0906\u090A\u0902\u0917\u0940", 89, -1], + ["\u093E\u090A\u0902\u0917\u0940", 89, -1], + ["\u090F\u0902\u0917\u0940", 88, -1], + ["\u0906\u090F\u0902\u0917\u0940", 92, -1], + ["\u093E\u090F\u0902\u0917\u0940", 92, -1], + ["\u0942\u0902\u0917\u0940", 88, -1], + ["\u0947\u0902\u0917\u0940", 88, -1], + ["\u090F\u0917\u0940", 88, -1], + ["\u0906\u090F\u0917\u0940", 97, -1], + ["\u093E\u090F\u0917\u0940", 97, -1], + ["\u0913\u0917\u0940", 88, -1], + ["\u0906\u0913\u0917\u0940", 100, -1], + ["\u093E\u0913\u0917\u0940", 100, -1], + ["\u0947\u0917\u0940", 88, -1], + ["\u094B\u0917\u0940", 88, -1], + ["\u0924\u0940", 88, -1, r_CONSONANT], + ["\u0905\u0924\u0940", 105, -1], + ["\u0906\u0924\u0940", 105, -1], + ["\u093E\u0924\u0940", 105, -1], + ["\u0928\u0940", 88, -1, r_CONSONANT], + ["\u0905\u0928\u0940", 109, -1], + ["\u0941", -1, -1], + ["\u0942", -1, -1], + ["\u0947", -1, -1], + 
["\u090F\u0902\u0917\u0947", 113, -1], + ["\u0906\u090F\u0902\u0917\u0947", 114, -1], + ["\u093E\u090F\u0902\u0917\u0947", 114, -1], + ["\u0947\u0902\u0917\u0947", 113, -1], + ["\u0913\u0917\u0947", 113, -1], + ["\u0906\u0913\u0917\u0947", 118, -1], + ["\u093E\u0913\u0917\u0947", 118, -1], + ["\u094B\u0917\u0947", 113, -1], + ["\u0924\u0947", 113, -1, r_CONSONANT], + ["\u0905\u0924\u0947", 122, -1], + ["\u0906\u0924\u0947", 122, -1], + ["\u093E\u0924\u0947", 122, -1], + ["\u0928\u0947", 113, -1, r_CONSONANT], + ["\u0905\u0928\u0947", 126, -1], + ["\u0906\u0928\u0947", 126, -1], + ["\u093E\u0928\u0947", 126, -1], + ["\u094B", -1, -1], + ["\u094D", -1, -1] + ]; + + /** @const */ var /** Array */ g_consonant = [255, 255, 255, 255, 159, 0, 0, 0, 248, 7]; + + + /** @return {boolean} */ + function r_CONSONANT() { + if (!(base.in_grouping_b(g_consonant, 2325, 2399))) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + if (base.cursor >= base.limit) + { + return false; + } + base.cursor++; + base.limit_backward = base.cursor; base.cursor = base.limit; + base.ket = base.cursor; + if (base.find_among_b(a_0) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/hungarian-stemmer.js b/sphinx/search/non-minified-js/hungarian-stemmer.js index 2c550ac0d0e..886e1cf39b2 100644 --- a/sphinx/search/non-minified-js/hungarian-stemmer.js +++ b/sphinx/search/non-minified-js/hungarian-stemmer.js @@ -1,25 +1,15 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from hungarian.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -HungarianStemmer = function() { +var HungarianStemmer = function() { var base 
= new BaseStemmer(); - /** @const */ var a_0 = [ - ["cs", -1, -1], - ["dzs", -1, -1], - ["gy", -1, -1], - ["ly", -1, -1], - ["ny", -1, -1], - ["sz", -1, -1], - ["ty", -1, -1], - ["zs", -1, -1] - ]; - /** @const */ var a_1 = [ + /** @const */ var a_0 = [ ["\u00E1", -1, 1], ["\u00E9", -1, 2] ]; - /** @const */ var a_2 = [ + /** @const */ var a_1 = [ ["bb", -1, -1], ["cc", -1, -1], ["dd", -1, -1], @@ -45,12 +35,12 @@ HungarianStemmer = function() { ["zz", -1, -1] ]; - /** @const */ var a_3 = [ + /** @const */ var a_2 = [ ["al", -1, 1], ["el", -1, 1] ]; - /** @const */ var a_4 = [ + /** @const */ var a_3 = [ ["ba", -1, -1], ["ra", -1, -1], ["be", -1, -1], @@ -97,13 +87,13 @@ HungarianStemmer = function() { ["v\u00E9", -1, -1] ]; - /** @const */ var a_5 = [ + /** @const */ var a_4 = [ ["\u00E1n", -1, 2], ["\u00E9n", -1, 1], ["\u00E1nk\u00E9nt", -1, 2] ]; - /** @const */ var a_6 = [ + /** @const */ var a_5 = [ ["stul", -1, 1], ["astul", 0, 1], ["\u00E1stul", 0, 2], @@ -112,12 +102,12 @@ HungarianStemmer = function() { ["\u00E9st\u00FCl", 3, 3] ]; - /** @const */ var a_7 = [ + /** @const */ var a_6 = [ ["\u00E1", -1, 1], ["\u00E9", -1, 1] ]; - /** @const */ var a_8 = [ + /** @const */ var a_7 = [ ["k", -1, 3], ["ak", 0, 3], ["ek", 0, 3], @@ -127,7 +117,7 @@ HungarianStemmer = function() { ["\u00F6k", 0, 3] ]; - /** @const */ var a_9 = [ + /** @const */ var a_8 = [ ["\u00E9i", -1, 1], ["\u00E1\u00E9i", 0, 3], ["\u00E9\u00E9i", 0, 2], @@ -142,7 +132,7 @@ HungarianStemmer = function() { ["\u00E9\u00E9", 3, 2] ]; - /** @const */ var a_10 = [ + /** @const */ var a_9 = [ ["a", -1, 1], ["ja", 0, 1], ["d", -1, 1], @@ -176,7 +166,7 @@ HungarianStemmer = function() { ["\u00E9", -1, 3] ]; - /** @const */ var a_11 = [ + /** @const */ var a_10 = [ ["id", -1, 1], ["aid", 0, 1], ["jaid", 1, 1], @@ -230,69 +220,30 @@ HungarianStemmer = function() { function r_mark_regions() { I_p1 = base.limit; lab0: { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = 
base.cursor; lab1: { if (!(base.in_grouping(g_v, 97, 369))) { break lab1; } - golab2: while(true) - { - var /** number */ v_2 = base.cursor; - lab3: { - if (!(base.out_grouping(g_v, 97, 369))) - { - break lab3; - } - base.cursor = v_2; - break golab2; - } - base.cursor = v_2; - if (base.cursor >= base.limit) - { - break lab1; - } - base.cursor++; - } - lab4: { - var /** number */ v_3 = base.cursor; - lab5: { - if (base.find_among(a_0) == 0) - { - break lab5; - } - break lab4; - } - base.cursor = v_3; - if (base.cursor >= base.limit) + /** @const */ var /** number */ v_2 = base.cursor; + lab2: { + if (!base.go_in_grouping(g_v, 97, 369)) { - break lab1; + break lab2; } base.cursor++; + I_p1 = base.cursor; } - I_p1 = base.cursor; + base.cursor = v_2; break lab0; } base.cursor = v_1; - if (!(base.out_grouping(g_v, 97, 369))) + if (!base.go_out_grouping(g_v, 97, 369)) { return false; } - golab6: while(true) - { - lab7: { - if (!(base.in_grouping(g_v, 97, 369))) - { - break lab7; - } - break golab6; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; - } + base.cursor++; I_p1 = base.cursor; } return true; @@ -300,18 +251,14 @@ HungarianStemmer = function() { /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_v_ending() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_1); + among_var = base.find_among_b(a_0); if (among_var == 0) { return false; @@ -340,8 +287,8 @@ HungarianStemmer = function() { /** @return {boolean} */ function r_double() { - var /** number */ v_1 = base.limit - base.cursor; - if (base.find_among_b(a_2) == 0) + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (base.find_among_b(a_1) == 0) { return false; } @@ -357,14 +304,11 @@ HungarianStemmer = function() { } base.cursor--; base.ket = base.cursor; + if (base.cursor <= base.limit_backward) { 
- var /** number */ c1 = base.cursor - 1; - if (c1 < base.limit_backward) - { - return false; - } - base.cursor = c1; + return false; } + base.cursor--; base.bra = base.cursor; if (!base.slice_del()) { @@ -376,7 +320,7 @@ HungarianStemmer = function() { /** @return {boolean} */ function r_instrum() { base.ket = base.cursor; - if (base.find_among_b(a_3) == 0) + if (base.find_among_b(a_2) == 0) { return false; } @@ -403,7 +347,7 @@ HungarianStemmer = function() { /** @return {boolean} */ function r_case() { base.ket = base.cursor; - if (base.find_among_b(a_4) == 0) + if (base.find_among_b(a_3) == 0) { return false; } @@ -427,7 +371,7 @@ HungarianStemmer = function() { function r_case_special() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_5); + among_var = base.find_among_b(a_4); if (among_var == 0) { return false; @@ -458,7 +402,7 @@ HungarianStemmer = function() { function r_case_other() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_6); + among_var = base.find_among_b(a_5); if (among_var == 0) { return false; @@ -494,7 +438,7 @@ HungarianStemmer = function() { /** @return {boolean} */ function r_factive() { base.ket = base.cursor; - if (base.find_among_b(a_7) == 0) + if (base.find_among_b(a_6) == 0) { return false; } @@ -522,7 +466,7 @@ HungarianStemmer = function() { function r_plural() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_8); + among_var = base.find_among_b(a_7); if (among_var == 0) { return false; @@ -559,7 +503,7 @@ HungarianStemmer = function() { function r_owned() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_9); + among_var = base.find_among_b(a_8); if (among_var == 0) { return false; @@ -596,7 +540,7 @@ HungarianStemmer = function() { function r_sing_owner() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_10); + among_var = 
base.find_among_b(a_9); if (among_var == 0) { return false; @@ -633,7 +577,7 @@ HungarianStemmer = function() { function r_plur_owner() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_11); + among_var = base.find_among_b(a_10); if (among_var == 0) { return false; @@ -667,35 +611,35 @@ HungarianStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_mark_regions(); base.cursor = v_1; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_instrum(); base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_case(); base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_case_special(); base.cursor = base.limit - v_4; - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; r_case_other(); base.cursor = base.limit - v_5; - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; r_factive(); base.cursor = base.limit - v_6; - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; r_owned(); base.cursor = base.limit - v_7; - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; r_sing_owner(); base.cursor = base.limit - v_8; - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; r_plur_owner(); base.cursor = base.limit - v_9; - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number 
*/ v_10 = base.limit - base.cursor; r_plural(); base.cursor = base.limit - v_10; base.cursor = base.limit_backward; diff --git a/sphinx/search/non-minified-js/indonesian-stemmer.js b/sphinx/search/non-minified-js/indonesian-stemmer.js new file mode 100644 index 00000000000..714c410e738 --- /dev/null +++ b/sphinx/search/non-minified-js/indonesian-stemmer.js @@ -0,0 +1,409 @@ +// Generated from indonesian.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var IndonesianStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["kah", -1, 1], + ["lah", -1, 1], + ["pun", -1, 1] + ]; + + /** @const */ var a_1 = [ + ["nya", -1, 1], + ["ku", -1, 1], + ["mu", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["i", -1, 1, r_SUFFIX_I_OK], + ["an", -1, 1, r_SUFFIX_AN_OK], + ["kan", 1, 1, r_SUFFIX_KAN_OK] + ]; + + /** @const */ var a_3 = [ + ["di", -1, 1], + ["ke", -1, 2], + ["me", -1, 1], + ["mem", 2, 5], + ["men", 2, 1], + ["meng", 4, 1], + ["meny", 4, 3, r_VOWEL], + ["pem", -1, 6], + ["pen", -1, 2], + ["peng", 8, 2], + ["peny", 8, 4, r_VOWEL], + ["ter", -1, 1] + ]; + + /** @const */ var a_4 = [ + ["be", -1, 3, r_KER], + ["belajar", 0, 4], + ["ber", 0, 3], + ["pe", -1, 1], + ["pelajar", 3, 2], + ["per", 3, 1] + ]; + + /** @const */ var /** Array */ g_vowel = [17, 65, 16]; + + var /** number */ I_prefix = 0; + var /** number */ I_measure = 0; + + + /** @return {boolean} */ + function r_remove_particle() { + base.ket = base.cursor; + if (base.find_among_b(a_0) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + I_measure -= 1; + return true; + }; + + /** @return {boolean} */ + function r_remove_possessive_pronoun() { + base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + I_measure -= 1; + return true; + }; + + /** @return {boolean} */ + function r_SUFFIX_KAN_OK() { + if 
(I_prefix == 3) + { + return false; + } + if (I_prefix == 2) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_SUFFIX_AN_OK() { + return I_prefix != 1; + }; + + /** @return {boolean} */ + function r_SUFFIX_I_OK() { + if (I_prefix > 2) + { + return false; + } + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (!(base.eq_s_b("s"))) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_suffix() { + base.ket = base.cursor; + if (base.find_among_b(a_2) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + I_measure -= 1; + return true; + }; + + /** @return {boolean} */ + function r_VOWEL() { + if (!(base.in_grouping(g_vowel, 97, 117))) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_KER() { + if (!(base.out_grouping(g_vowel, 97, 117))) + { + return false; + } + if (!(base.eq_s("er"))) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_first_order_prefix() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_3); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + I_prefix = 1; + I_measure -= 1; + break; + case 2: + if (!base.slice_del()) + { + return false; + } + I_prefix = 3; + I_measure -= 1; + break; + case 3: + I_prefix = 1; + if (!base.slice_from("s")) + { + return false; + } + I_measure -= 1; + break; + case 4: + I_prefix = 3; + if (!base.slice_from("s")) + { + return false; + } + I_measure -= 1; + break; + case 5: + I_prefix = 1; + I_measure -= 1; + lab0: { + /** @const */ var /** number */ v_1 = base.cursor; + lab1: { + /** @const */ var /** number */ v_2 = base.cursor; + if (!(base.in_grouping(g_vowel, 97, 117))) + { + break lab1; 
+ } + base.cursor = v_2; + if (!base.slice_from("p")) + { + return false; + } + break lab0; + } + base.cursor = v_1; + if (!base.slice_del()) + { + return false; + } + } + break; + case 6: + I_prefix = 3; + I_measure -= 1; + lab2: { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + /** @const */ var /** number */ v_4 = base.cursor; + if (!(base.in_grouping(g_vowel, 97, 117))) + { + break lab3; + } + base.cursor = v_4; + if (!base.slice_from("p")) + { + return false; + } + break lab2; + } + base.cursor = v_3; + if (!base.slice_del()) + { + return false; + } + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_second_order_prefix() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_4); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + I_prefix = 2; + I_measure -= 1; + break; + case 2: + if (!base.slice_from("ajar")) + { + return false; + } + I_measure -= 1; + break; + case 3: + if (!base.slice_del()) + { + return false; + } + I_prefix = 4; + I_measure -= 1; + break; + case 4: + if (!base.slice_from("ajar")) + { + return false; + } + I_prefix = 4; + I_measure -= 1; + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + I_measure = 0; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + lab1: { + if (!base.go_out_grouping(g_vowel, 97, 117)) + { + break lab1; + } + base.cursor++; + I_measure += 1; + continue; + } + break; + } + } + base.cursor = v_1; + if (I_measure <= 2) + { + return false; + } + I_prefix = 0; + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_remove_particle(); + base.cursor = base.limit - v_2; + if (I_measure <= 2) + { + return false; + } + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + 
r_remove_possessive_pronoun(); + base.cursor = base.limit - v_3; + base.cursor = base.limit_backward; + if (I_measure <= 2) + { + return false; + } + lab2: { + /** @const */ var /** number */ v_4 = base.cursor; + lab3: { + /** @const */ var /** number */ v_5 = base.cursor; + if (!r_remove_first_order_prefix()) + { + break lab3; + } + /** @const */ var /** number */ v_6 = base.cursor; + lab4: { + /** @const */ var /** number */ v_7 = base.cursor; + if (I_measure <= 2) + { + break lab4; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + if (!r_remove_suffix()) + { + break lab4; + } + base.cursor = base.limit_backward; + base.cursor = v_7; + if (I_measure <= 2) + { + break lab4; + } + if (!r_remove_second_order_prefix()) + { + break lab4; + } + } + base.cursor = v_6; + base.cursor = v_5; + break lab2; + } + base.cursor = v_4; + /** @const */ var /** number */ v_8 = base.cursor; + r_remove_second_order_prefix(); + base.cursor = v_8; + /** @const */ var /** number */ v_9 = base.cursor; + lab5: { + if (I_measure <= 2) + { + break lab5; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + if (!r_remove_suffix()) + { + break lab5; + } + base.cursor = base.limit_backward; + } + base.cursor = v_9; + } + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/irish-stemmer.js b/sphinx/search/non-minified-js/irish-stemmer.js new file mode 100644 index 00000000000..ec6601efb04 --- /dev/null +++ b/sphinx/search/non-minified-js/irish-stemmer.js @@ -0,0 +1,378 @@ +// Generated from irish.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var IrishStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["b'", -1, 1], + ["bh", -1, 4], + ["bhf", 1, 2], + ["bp", -1, 8], + ["ch", -1, 5], + ["d'", -1, 1], + ["d'fh", 5, 2], + ["dh", -1, 6], + ["dt", 
-1, 9], + ["fh", -1, 2], + ["gc", -1, 5], + ["gh", -1, 7], + ["h-", -1, 1], + ["m'", -1, 1], + ["mb", -1, 4], + ["mh", -1, 10], + ["n-", -1, 1], + ["nd", -1, 6], + ["ng", -1, 7], + ["ph", -1, 8], + ["sh", -1, 3], + ["t-", -1, 1], + ["th", -1, 9], + ["ts", -1, 3] + ]; + + /** @const */ var a_1 = [ + ["\u00EDochta", -1, 1], + ["a\u00EDochta", 0, 1], + ["ire", -1, 2], + ["aire", 2, 2], + ["abh", -1, 1], + ["eabh", 4, 1], + ["ibh", -1, 1], + ["aibh", 6, 1], + ["amh", -1, 1], + ["eamh", 8, 1], + ["imh", -1, 1], + ["aimh", 10, 1], + ["\u00EDocht", -1, 1], + ["a\u00EDocht", 12, 1], + ["ir\u00ED", -1, 2], + ["air\u00ED", 14, 2] + ]; + + /** @const */ var a_2 = [ + ["\u00F3ideacha", -1, 6], + ["patacha", -1, 5], + ["achta", -1, 1], + ["arcachta", 2, 2], + ["eachta", 2, 1], + ["grafa\u00EDochta", -1, 4], + ["paite", -1, 5], + ["ach", -1, 1], + ["each", 7, 1], + ["\u00F3ideach", 8, 6], + ["gineach", 8, 3], + ["patach", 7, 5], + ["grafa\u00EDoch", -1, 4], + ["pataigh", -1, 5], + ["\u00F3idigh", -1, 6], + ["acht\u00FAil", -1, 1], + ["eacht\u00FAil", 15, 1], + ["gineas", -1, 3], + ["ginis", -1, 3], + ["acht", -1, 1], + ["arcacht", 19, 2], + ["eacht", 19, 1], + ["grafa\u00EDocht", -1, 4], + ["arcachta\u00ED", -1, 2], + ["grafa\u00EDochta\u00ED", -1, 4] + ]; + + /** @const */ var a_3 = [ + ["imid", -1, 1], + ["aimid", 0, 1], + ["\u00EDmid", -1, 1], + ["a\u00EDmid", 2, 1], + ["adh", -1, 2], + ["eadh", 4, 2], + ["faidh", -1, 1], + ["fidh", -1, 1], + ["\u00E1il", -1, 2], + ["ain", -1, 2], + ["tear", -1, 2], + ["tar", -1, 2] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 17, 4, 2]; + + var /** number */ I_p2 = 0; + var /** number */ I_p1 = 0; + var /** number */ I_pV = 0; + + + /** @return {boolean} */ + function r_mark_regions() { + I_pV = base.limit; + I_p1 = base.limit; + I_p2 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!base.go_out_grouping(g_v, 97, 250)) + { + break lab0; + } + 
base.cursor++; + I_pV = base.cursor; + if (!base.go_in_grouping(g_v, 97, 250)) + { + break lab0; + } + base.cursor++; + I_p1 = base.cursor; + if (!base.go_out_grouping(g_v, 97, 250)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 250)) + { + break lab0; + } + base.cursor++; + I_p2 = base.cursor; + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_initial_morph() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_0); + if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("f")) + { + return false; + } + break; + case 3: + if (!base.slice_from("s")) + { + return false; + } + break; + case 4: + if (!base.slice_from("b")) + { + return false; + } + break; + case 5: + if (!base.slice_from("c")) + { + return false; + } + break; + case 6: + if (!base.slice_from("d")) + { + return false; + } + break; + case 7: + if (!base.slice_from("g")) + { + return false; + } + break; + case 8: + if (!base.slice_from("p")) + { + return false; + } + break; + case 9: + if (!base.slice_from("t")) + { + return false; + } + break; + case 10: + if (!base.slice_from("m")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_RV() { + return I_pV <= base.cursor; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R2() { + return I_p2 <= base.cursor; + }; + + /** @return {boolean} */ + function r_noun_sfx() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R2()) + { + 
return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_deriv() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R2()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("arc")) + { + return false; + } + break; + case 3: + if (!base.slice_from("gin")) + { + return false; + } + break; + case 4: + if (!base.slice_from("graf")) + { + return false; + } + break; + case 5: + if (!base.slice_from("paite")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u00F3id")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_verb_sfx() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_RV()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + /** @const */ var /** number */ v_1 = base.cursor; + r_initial_morph(); + base.cursor = v_1; + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + r_noun_sfx(); + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_deriv(); + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_verb_sfx(); + base.cursor = base.limit - v_4; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + 
this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/italian-stemmer.js b/sphinx/search/non-minified-js/italian-stemmer.js index df6ddfd332a..f5ab55ecc8d 100644 --- a/sphinx/search/non-minified-js/italian-stemmer.js +++ b/sphinx/search/non-minified-js/italian-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from italian.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -ItalianStemmer = function() { +var ItalianStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["", -1, 7], ["qu", 0, 6], @@ -238,17 +239,13 @@ ItalianStemmer = function() { /** @return {boolean} */ function r_prelude() { var /** number */ among_var; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; while(true) { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_0); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -303,11 +300,11 @@ ItalianStemmer = function() { base.cursor = v_1; while(true) { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab1: { golab2: while(true) { - var /** number */ v_4 = base.cursor; + /** @const */ var /** number */ v_4 = base.cursor; lab3: { if (!(base.in_grouping(g_v, 97, 249))) { @@ -315,7 +312,7 @@ ItalianStemmer = function() { } base.bra = base.cursor; lab4: { - var /** number */ v_5 = base.cursor; + /** @const */ var /** number */ v_5 = base.cursor; lab5: { if (!(base.eq_s("u"))) { @@ -370,37 +367,27 @@ ItalianStemmer = function() { I_pV = base.limit; I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { lab1: { - var /** number */ v_2 = 
base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 249))) { break lab2; } lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.out_grouping(g_v, 97, 249))) { break lab4; } - golab5: while(true) + if (!base.go_out_grouping(g_v, 97, 249)) { - lab6: { - if (!(base.in_grouping(g_v, 97, 249))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - break lab4; - } - base.cursor++; + break lab4; } + base.cursor++; break lab3; } base.cursor = v_3; @@ -408,21 +395,19 @@ ItalianStemmer = function() { { break lab2; } - golab7: while(true) + if (!base.go_in_grouping(g_v, 97, 249)) { - lab8: { - if (!(base.out_grouping(g_v, 97, 249))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab2; - } - base.cursor++; + break lab2; } + base.cursor++; + } + break lab1; + } + base.cursor = v_2; + lab5: { + if (!(base.eq_s("divan"))) + { + break lab5; } break lab1; } @@ -431,31 +416,21 @@ ItalianStemmer = function() { { break lab0; } - lab9: { - var /** number */ v_6 = base.cursor; - lab10: { + lab6: { + /** @const */ var /** number */ v_4 = base.cursor; + lab7: { if (!(base.out_grouping(g_v, 97, 249))) { - break lab10; + break lab7; } - golab11: while(true) + if (!base.go_out_grouping(g_v, 97, 249)) { - lab12: { - if (!(base.in_grouping(g_v, 97, 249))) - { - break lab12; - } - break golab11; - } - if (base.cursor >= base.limit) - { - break lab10; - } - base.cursor++; + break lab7; } - break lab9; + base.cursor++; + break lab6; } - base.cursor = v_6; + base.cursor = v_4; if (!(base.in_grouping(g_v, 97, 249))) { break lab0; @@ -470,72 +445,32 @@ ItalianStemmer = function() { I_pV = base.cursor; } base.cursor = v_1; - var /** number */ v_8 = base.cursor; - lab13: { - golab14: while(true) + /** @const */ var /** number */ v_5 = base.cursor; + lab8: { + if (!base.go_out_grouping(g_v, 97, 249)) { - lab15: { - if 
(!(base.in_grouping(g_v, 97, 249))) - { - break lab15; - } - break golab14; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab8; } - golab16: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 249)) { - lab17: { - if (!(base.out_grouping(g_v, 97, 249))) - { - break lab17; - } - break golab16; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab8; } + base.cursor++; I_p1 = base.cursor; - golab18: while(true) + if (!base.go_out_grouping(g_v, 97, 249)) { - lab19: { - if (!(base.in_grouping(g_v, 97, 249))) - { - break lab19; - } - break golab18; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab8; } - golab20: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 249)) { - lab21: { - if (!(base.out_grouping(g_v, 97, 249))) - { - break lab21; - } - break golab20; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab8; } + base.cursor++; I_p2 = base.cursor; } - base.cursor = v_8; + base.cursor = v_5; return true; }; @@ -544,14 +479,10 @@ ItalianStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_1); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -584,29 +515,17 @@ ItalianStemmer = function() { /** @return {boolean} */ function r_RV() { - if (!(I_pV <= base.cursor)) - { - return false; - } - return true; + return I_pV <= base.cursor; }; /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -674,7 +593,7 @@ ItalianStemmer = 
function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("ic"))) @@ -743,7 +662,7 @@ ItalianStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { base.ket = base.cursor; among_var = base.find_among_b(a_4); @@ -793,7 +712,7 @@ ItalianStemmer = function() { { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { base.ket = base.cursor; if (base.find_among_b(a_5) == 0) @@ -822,7 +741,7 @@ ItalianStemmer = function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab3: { base.ket = base.cursor; if (!(base.eq_s_b("at"))) @@ -868,12 +787,12 @@ ItalianStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; if (base.find_among_b(a_7) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; @@ -881,13 +800,13 @@ ItalianStemmer = function() { { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; return true; }; /** @return {boolean} */ function r_vowel_suffix() { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.in_grouping_b(g_AEIO, 97, 242))) @@ -922,7 +841,7 @@ ItalianStemmer = function() { return false; } } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { base.ket = base.cursor; if (!(base.eq_s_b("h"))) @@ -950,18 +869,18 @@ ItalianStemmer = 
function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_prelude(); base.cursor = v_1; r_mark_regions(); base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_attached_pronoun(); - base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab0: { lab1: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab2: { if (!r_standard_suffix()) { @@ -969,21 +888,21 @@ ItalianStemmer = function() { } break lab1; } - base.cursor = base.limit - v_5; + base.cursor = base.limit - v_4; if (!r_verb_suffix()) { break lab0; } } } - base.cursor = base.limit - v_4; - var /** number */ v_6 = base.limit - base.cursor; + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; r_vowel_suffix(); - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_5; base.cursor = base.limit_backward; - var /** number */ v_7 = base.cursor; + /** @const */ var /** number */ v_6 = base.cursor; r_postlude(); - base.cursor = v_7; + base.cursor = v_6; return true; }; diff --git a/sphinx/search/non-minified-js/lithuanian-stemmer.js b/sphinx/search/non-minified-js/lithuanian-stemmer.js new file mode 100644 index 00000000000..213ff5979b2 --- /dev/null +++ b/sphinx/search/non-minified-js/lithuanian-stemmer.js @@ -0,0 +1,534 @@ +// Generated from lithuanian.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var LithuanianStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["a", -1, -1], + ["ia", 0, -1], + ["eria", 1, -1], + ["osna", 0, -1], + ["iosna", 3, -1], + ["uosna", 3, -1], + 
["iuosna", 5, -1], + ["ysna", 0, -1], + ["\u0117sna", 0, -1], + ["e", -1, -1], + ["ie", 9, -1], + ["enie", 10, -1], + ["erie", 10, -1], + ["oje", 9, -1], + ["ioje", 13, -1], + ["uje", 9, -1], + ["iuje", 15, -1], + ["yje", 9, -1], + ["enyje", 17, -1], + ["eryje", 17, -1], + ["\u0117je", 9, -1], + ["ame", 9, -1], + ["iame", 21, -1], + ["sime", 9, -1], + ["ome", 9, -1], + ["\u0117me", 9, -1], + ["tum\u0117me", 25, -1], + ["ose", 9, -1], + ["iose", 27, -1], + ["uose", 27, -1], + ["iuose", 29, -1], + ["yse", 9, -1], + ["enyse", 31, -1], + ["eryse", 31, -1], + ["\u0117se", 9, -1], + ["ate", 9, -1], + ["iate", 35, -1], + ["ite", 9, -1], + ["kite", 37, -1], + ["site", 37, -1], + ["ote", 9, -1], + ["tute", 9, -1], + ["\u0117te", 9, -1], + ["tum\u0117te", 42, -1], + ["i", -1, -1], + ["ai", 44, -1], + ["iai", 45, -1], + ["eriai", 46, -1], + ["ei", 44, -1], + ["tumei", 48, -1], + ["ki", 44, -1], + ["imi", 44, -1], + ["erimi", 51, -1], + ["umi", 44, -1], + ["iumi", 53, -1], + ["si", 44, -1], + ["asi", 55, -1], + ["iasi", 56, -1], + ["esi", 55, -1], + ["iesi", 58, -1], + ["siesi", 59, -1], + ["isi", 55, -1], + ["aisi", 61, -1], + ["eisi", 61, -1], + ["tumeisi", 63, -1], + ["uisi", 61, -1], + ["osi", 55, -1], + ["\u0117josi", 66, -1], + ["uosi", 66, -1], + ["iuosi", 68, -1], + ["siuosi", 69, -1], + ["usi", 55, -1], + ["ausi", 71, -1], + ["\u010Diausi", 72, -1], + ["\u0105si", 55, -1], + ["\u0117si", 55, -1], + ["\u0173si", 55, -1], + ["t\u0173si", 76, -1], + ["ti", 44, -1], + ["enti", 78, -1], + ["inti", 78, -1], + ["oti", 78, -1], + ["ioti", 81, -1], + ["uoti", 81, -1], + ["iuoti", 83, -1], + ["auti", 78, -1], + ["iauti", 85, -1], + ["yti", 78, -1], + ["\u0117ti", 78, -1], + ["tel\u0117ti", 88, -1], + ["in\u0117ti", 88, -1], + ["ter\u0117ti", 88, -1], + ["ui", 44, -1], + ["iui", 92, -1], + ["eniui", 93, -1], + ["oj", -1, -1], + ["\u0117j", -1, -1], + ["k", -1, -1], + ["am", -1, -1], + ["iam", 98, -1], + ["iem", -1, -1], + ["im", -1, -1], + ["sim", 101, -1], + ["om", -1, -1], + 
["tum", -1, -1], + ["\u0117m", -1, -1], + ["tum\u0117m", 105, -1], + ["an", -1, -1], + ["on", -1, -1], + ["ion", 108, -1], + ["un", -1, -1], + ["iun", 110, -1], + ["\u0117n", -1, -1], + ["o", -1, -1], + ["io", 113, -1], + ["enio", 114, -1], + ["\u0117jo", 113, -1], + ["uo", 113, -1], + ["s", -1, -1], + ["as", 118, -1], + ["ias", 119, -1], + ["es", 118, -1], + ["ies", 121, -1], + ["is", 118, -1], + ["ais", 123, -1], + ["iais", 124, -1], + ["tumeis", 123, -1], + ["imis", 123, -1], + ["enimis", 127, -1], + ["omis", 123, -1], + ["iomis", 129, -1], + ["umis", 123, -1], + ["\u0117mis", 123, -1], + ["enis", 123, -1], + ["asis", 123, -1], + ["ysis", 123, -1], + ["ams", 118, -1], + ["iams", 136, -1], + ["iems", 118, -1], + ["ims", 118, -1], + ["enims", 139, -1], + ["erims", 139, -1], + ["oms", 118, -1], + ["ioms", 142, -1], + ["ums", 118, -1], + ["\u0117ms", 118, -1], + ["ens", 118, -1], + ["os", 118, -1], + ["ios", 147, -1], + ["uos", 147, -1], + ["iuos", 149, -1], + ["ers", 118, -1], + ["us", 118, -1], + ["aus", 152, -1], + ["iaus", 153, -1], + ["ius", 152, -1], + ["ys", 118, -1], + ["enys", 156, -1], + ["erys", 156, -1], + ["\u0105s", 118, -1], + ["i\u0105s", 159, -1], + ["\u0117s", 118, -1], + ["am\u0117s", 161, -1], + ["iam\u0117s", 162, -1], + ["im\u0117s", 161, -1], + ["kim\u0117s", 164, -1], + ["sim\u0117s", 164, -1], + ["om\u0117s", 161, -1], + ["\u0117m\u0117s", 161, -1], + ["tum\u0117m\u0117s", 168, -1], + ["at\u0117s", 161, -1], + ["iat\u0117s", 170, -1], + ["sit\u0117s", 161, -1], + ["ot\u0117s", 161, -1], + ["\u0117t\u0117s", 161, -1], + ["tum\u0117t\u0117s", 174, -1], + ["\u012Fs", 118, -1], + ["\u016Bs", 118, -1], + ["t\u0173s", 118, -1], + ["at", -1, -1], + ["iat", 179, -1], + ["it", -1, -1], + ["sit", 181, -1], + ["ot", -1, -1], + ["\u0117t", -1, -1], + ["tum\u0117t", 184, -1], + ["u", -1, -1], + ["au", 186, -1], + ["iau", 187, -1], + ["\u010Diau", 188, -1], + ["iu", 186, -1], + ["eniu", 190, -1], + ["siu", 190, -1], + ["y", -1, -1], + ["\u0105", -1, -1], 
+ ["i\u0105", 194, -1], + ["\u0117", -1, -1], + ["\u0119", -1, -1], + ["\u012F", -1, -1], + ["en\u012F", 198, -1], + ["er\u012F", 198, -1], + ["\u0173", -1, -1], + ["i\u0173", 201, -1], + ["er\u0173", 201, -1] + ]; + + /** @const */ var a_1 = [ + ["ing", -1, -1], + ["aj", -1, -1], + ["iaj", 1, -1], + ["iej", -1, -1], + ["oj", -1, -1], + ["ioj", 4, -1], + ["uoj", 4, -1], + ["iuoj", 6, -1], + ["auj", -1, -1], + ["\u0105j", -1, -1], + ["i\u0105j", 9, -1], + ["\u0117j", -1, -1], + ["\u0173j", -1, -1], + ["i\u0173j", 12, -1], + ["ok", -1, -1], + ["iok", 14, -1], + ["iuk", -1, -1], + ["uliuk", 16, -1], + ["u\u010Diuk", 16, -1], + ["i\u0161k", -1, -1], + ["iul", -1, -1], + ["yl", -1, -1], + ["\u0117l", -1, -1], + ["am", -1, -1], + ["dam", 23, -1], + ["jam", 23, -1], + ["zgan", -1, -1], + ["ain", -1, -1], + ["esn", -1, -1], + ["op", -1, -1], + ["iop", 29, -1], + ["ias", -1, -1], + ["ies", -1, -1], + ["ais", -1, -1], + ["iais", 33, -1], + ["os", -1, -1], + ["ios", 35, -1], + ["uos", 35, -1], + ["iuos", 37, -1], + ["aus", -1, -1], + ["iaus", 39, -1], + ["\u0105s", -1, -1], + ["i\u0105s", 41, -1], + ["\u0119s", -1, -1], + ["ut\u0117ait", -1, -1], + ["ant", -1, -1], + ["iant", 45, -1], + ["siant", 46, -1], + ["int", -1, -1], + ["ot", -1, -1], + ["uot", 49, -1], + ["iuot", 50, -1], + ["yt", -1, -1], + ["\u0117t", -1, -1], + ["yk\u0161t", -1, -1], + ["iau", -1, -1], + ["dav", -1, -1], + ["sv", -1, -1], + ["\u0161v", -1, -1], + ["yk\u0161\u010D", -1, -1], + ["\u0119", -1, -1], + ["\u0117j\u0119", 60, -1] + ]; + + /** @const */ var a_2 = [ + ["ojime", -1, 7], + ["\u0117jime", -1, 3], + ["avime", -1, 6], + ["okate", -1, 8], + ["aite", -1, 1], + ["uote", -1, 2], + ["asius", -1, 5], + ["okat\u0117s", -1, 8], + ["ait\u0117s", -1, 1], + ["uot\u0117s", -1, 2], + ["esiu", -1, 4] + ]; + + /** @const */ var a_3 = [ + ["\u010D", -1, 1], + ["d\u017E", -1, 2] + ]; + + /** @const */ var a_4 = [ + ["gd", -1, 1] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 64, 1, 0, 64, 0, 0, 0, 0, 0, 0, 0, 4, 4]; + + var /** number */ I_p1 = 0; + + + /** @return {boolean} */ + function r_step1() { + if (base.cursor < I_p1) + { + return false; + } + /** @const */ var /** number */ v_1 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + if (base.find_among_b(a_0) == 0) + { + base.limit_backward = v_1; + return false; + } + base.bra = base.cursor; + base.limit_backward = v_1; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + /** @return {boolean} */ + function r_step2() { + while(true) + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (base.cursor < I_p1) + { + break lab0; + } + /** @const */ var /** number */ v_2 = base.limit_backward; + base.limit_backward = I_p1; + base.ket = base.cursor; + if (base.find_among_b(a_1) == 0) + { + base.limit_backward = v_2; + break lab0; + } + base.bra = base.cursor; + base.limit_backward = v_2; + if (!base.slice_del()) + { + return false; + } + continue; + } + base.cursor = base.limit - v_1; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_fix_conflicts() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("ait\u0117")) + { + return false; + } + break; + case 2: + if (!base.slice_from("uot\u0117")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u0117jimas")) + { + return false; + } + break; + case 4: + if (!base.slice_from("esys")) + { + return false; + } + break; + case 5: + if (!base.slice_from("asys")) + { + return false; + } + break; + case 6: + if (!base.slice_from("avimas")) + { + return false; + } + break; + case 7: + if (!base.slice_from("ojimas")) + { + return false; + } + break; + case 8: + if (!base.slice_from("okat\u0117")) + { + return false; + } + 
break; + } + return true; + }; + + /** @return {boolean} */ + function r_fix_chdz() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("t")) + { + return false; + } + break; + case 2: + if (!base.slice_from("d")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_fix_gd() { + base.ket = base.cursor; + if (base.find_among_b(a_4) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_from("g")) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + I_p1 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + /** @const */ var /** number */ v_3 = base.cursor; + if (!(base.eq_s("a"))) + { + base.cursor = v_2; + break lab1; + } + base.cursor = v_3; + if (base.current.length <= 6) + { + base.cursor = v_2; + break lab1; + } + if (base.cursor >= base.limit) + { + base.cursor = v_2; + break lab1; + } + base.cursor++; + } + if (!base.go_out_grouping(g_v, 97, 371)) + { + break lab0; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 371)) + { + break lab0; + } + base.cursor++; + I_p1 = base.cursor; + } + base.cursor = v_1; + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + r_fix_conflicts(); + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + r_step1(); + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + r_fix_chdz(); + base.cursor = base.limit - v_6; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + r_step2(); + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + 
r_fix_chdz(); + base.cursor = base.limit - v_8; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + r_fix_gd(); + base.cursor = base.limit - v_9; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/nepali-stemmer.js b/sphinx/search/non-minified-js/nepali-stemmer.js new file mode 100644 index 00000000000..d6352d00a4d --- /dev/null +++ b/sphinx/search/non-minified-js/nepali-stemmer.js @@ -0,0 +1,282 @@ +// Generated from nepali.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var NepaliStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0932\u093E\u0907", -1, 1], + ["\u0932\u093E\u0908", -1, 1], + ["\u0938\u0901\u0917", -1, 1], + ["\u0938\u0902\u0917", -1, 1], + ["\u092E\u093E\u0930\u094D\u092B\u0924", -1, 1], + ["\u0930\u0924", -1, 1], + ["\u0915\u093E", -1, 2], + ["\u092E\u093E", -1, 1], + ["\u0926\u094D\u0935\u093E\u0930\u093E", -1, 1], + ["\u0915\u093F", -1, 2], + ["\u092A\u091B\u093F", -1, 1], + ["\u0915\u0940", -1, 2], + ["\u0932\u0947", -1, 1], + ["\u0915\u0948", -1, 2], + ["\u0938\u0901\u0917\u0948", -1, 1], + ["\u092E\u0948", -1, 1], + ["\u0915\u094B", -1, 2] + ]; + + /** @const */ var a_1 = [ + ["\u0901", -1, 1], + ["\u0902", -1, 1], + ["\u0948", -1, 2] + ]; + + /** @const */ var a_2 = [ + ["\u0925\u093F\u090F", -1, 1], + ["\u091B", -1, 1], + ["\u0907\u091B", 1, 1], + ["\u090F\u091B", 1, 1], + ["\u093F\u091B", 1, 1], + ["\u0947\u091B", 1, 1], + ["\u0928\u0947\u091B", 5, 1], + ["\u0939\u0941\u0928\u0947\u091B", 6, 1], + ["\u0907\u0928\u094D\u091B", 1, 1], + ["\u093F\u0928\u094D\u091B", 1, 1], + ["\u0939\u0941\u0928\u094D\u091B", 1, 1], + ["\u090F\u0915\u093E", -1, 1], + ["\u0907\u090F\u0915\u093E", 11, 1], + ["\u093F\u090F\u0915\u093E", 11, 1], + ["\u0947\u0915\u093E", -1, 1], + 
["\u0928\u0947\u0915\u093E", 14, 1], + ["\u0926\u093E", -1, 1], + ["\u0907\u0926\u093E", 16, 1], + ["\u093F\u0926\u093E", 16, 1], + ["\u0926\u0947\u0916\u093F", -1, 1], + ["\u092E\u093E\u0925\u093F", -1, 1], + ["\u090F\u0915\u0940", -1, 1], + ["\u0907\u090F\u0915\u0940", 21, 1], + ["\u093F\u090F\u0915\u0940", 21, 1], + ["\u0947\u0915\u0940", -1, 1], + ["\u0926\u0947\u0916\u0940", -1, 1], + ["\u0925\u0940", -1, 1], + ["\u0926\u0940", -1, 1], + ["\u091B\u0941", -1, 1], + ["\u090F\u091B\u0941", 28, 1], + ["\u0947\u091B\u0941", 28, 1], + ["\u0928\u0947\u091B\u0941", 30, 1], + ["\u0928\u0941", -1, 1], + ["\u0939\u0930\u0941", -1, 1], + ["\u0939\u0930\u0942", -1, 1], + ["\u091B\u0947", -1, 1], + ["\u0925\u0947", -1, 1], + ["\u0928\u0947", -1, 1], + ["\u090F\u0915\u0948", -1, 1], + ["\u0947\u0915\u0948", -1, 1], + ["\u0928\u0947\u0915\u0948", 39, 1], + ["\u0926\u0948", -1, 1], + ["\u0907\u0926\u0948", 41, 1], + ["\u093F\u0926\u0948", 41, 1], + ["\u090F\u0915\u094B", -1, 1], + ["\u0907\u090F\u0915\u094B", 44, 1], + ["\u093F\u090F\u0915\u094B", 44, 1], + ["\u0947\u0915\u094B", -1, 1], + ["\u0928\u0947\u0915\u094B", 47, 1], + ["\u0926\u094B", -1, 1], + ["\u0907\u0926\u094B", 49, 1], + ["\u093F\u0926\u094B", 49, 1], + ["\u092F\u094B", -1, 1], + ["\u0907\u092F\u094B", 52, 1], + ["\u092D\u092F\u094B", 52, 1], + ["\u093F\u092F\u094B", 52, 1], + ["\u0925\u093F\u092F\u094B", 55, 1], + ["\u0926\u093F\u092F\u094B", 55, 1], + ["\u0925\u094D\u092F\u094B", 52, 1], + ["\u091B\u094C", -1, 1], + ["\u0907\u091B\u094C", 59, 1], + ["\u090F\u091B\u094C", 59, 1], + ["\u093F\u091B\u094C", 59, 1], + ["\u0947\u091B\u094C", 59, 1], + ["\u0928\u0947\u091B\u094C", 63, 1], + ["\u092F\u094C", -1, 1], + ["\u0925\u093F\u092F\u094C", 65, 1], + ["\u091B\u094D\u092F\u094C", 65, 1], + ["\u0925\u094D\u092F\u094C", 65, 1], + ["\u091B\u0928\u094D", -1, 1], + ["\u0907\u091B\u0928\u094D", 69, 1], + ["\u090F\u091B\u0928\u094D", 69, 1], + ["\u093F\u091B\u0928\u094D", 69, 1], + ["\u0947\u091B\u0928\u094D", 69, 1], 
+ ["\u0928\u0947\u091B\u0928\u094D", 73, 1], + ["\u0932\u093E\u0928\u094D", -1, 1], + ["\u091B\u093F\u0928\u094D", -1, 1], + ["\u0925\u093F\u0928\u094D", -1, 1], + ["\u092A\u0930\u094D", -1, 1], + ["\u0907\u0938\u094D", -1, 1], + ["\u0925\u093F\u0907\u0938\u094D", 79, 1], + ["\u091B\u0938\u094D", -1, 1], + ["\u0907\u091B\u0938\u094D", 81, 1], + ["\u090F\u091B\u0938\u094D", 81, 1], + ["\u093F\u091B\u0938\u094D", 81, 1], + ["\u0947\u091B\u0938\u094D", 81, 1], + ["\u0928\u0947\u091B\u0938\u094D", 85, 1], + ["\u093F\u0938\u094D", -1, 1], + ["\u0925\u093F\u0938\u094D", 87, 1], + ["\u091B\u0947\u0938\u094D", -1, 1], + ["\u0939\u094B\u0938\u094D", -1, 1] + ]; + + + /** @return {boolean} */ + function r_remove_category_1() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_0); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("\u090F"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + if (!(base.eq_s_b("\u0947"))) + { + break lab2; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!base.slice_del()) + { + return false; + } + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_category_2() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("\u092F\u094C"))) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab2: { + if (!(base.eq_s_b("\u091B\u094C"))) + { + break lab2; + } + break lab0; + } + base.cursor = base.limit - v_1; + lab3: { + if 
(!(base.eq_s_b("\u0928\u094C"))) + { + break lab3; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!(base.eq_s_b("\u0925\u0947"))) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!(base.eq_s_b("\u0924\u094D\u0930"))) + { + return false; + } + if (!base.slice_del()) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_category_3() { + base.ket = base.cursor; + if (base.find_among_b(a_2) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + r_remove_category_1(); + base.cursor = base.limit - v_1; + while(true) + { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + r_remove_category_2(); + base.cursor = base.limit - v_3; + if (!r_remove_category_3()) + { + break lab0; + } + continue; + } + base.cursor = base.limit - v_2; + break; + } + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/norwegian-stemmer.js b/sphinx/search/non-minified-js/norwegian-stemmer.js index e1760631ab9..149e63c1a32 100644 --- a/sphinx/search/non-minified-js/norwegian-stemmer.js +++ b/sphinx/search/non-minified-js/norwegian-stemmer.js @@ -1,9 +1,28 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from norwegian.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -NorwegianStemmer = function() { +var NorwegianStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ + ["", 
-1, 1], + ["ind", 0, -1], + ["kk", 0, -1], + ["nk", 0, -1], + ["amm", 0, -1], + ["omm", 0, -1], + ["kap", 0, -1], + ["skap", 6, 1], + ["pp", 0, -1], + ["lt", 0, -1], + ["ast", 0, -1], + ["\u00F8st", 0, -1], + ["v", 0, -1], + ["hav", 12, 1], + ["giv", 12, 1] + ]; + + /** @const */ var a_1 = [ ["a", -1, 1], ["e", -1, 1], ["ede", 1, 1], @@ -12,13 +31,13 @@ NorwegianStemmer = function() { ["ane", 1, 1], ["ene", 1, 1], ["hetene", 6, 1], - ["erte", 1, 3], + ["erte", 1, 4], ["en", -1, 1], ["heten", 9, 1], ["ar", -1, 1], ["er", -1, 1], ["heter", 12, 1], - ["s", -1, 2], + ["s", -1, 3], ["as", 14, 1], ["es", 14, 1], ["edes", 16, 1], @@ -27,20 +46,20 @@ NorwegianStemmer = function() { ["hetenes", 19, 1], ["ens", 14, 1], ["hetens", 21, 1], - ["ers", 14, 1], + ["ers", 14, 2], ["ets", 14, 1], ["et", -1, 1], ["het", 25, 1], - ["ert", -1, 3], + ["ert", -1, 4], ["ast", -1, 1] ]; - /** @const */ var a_1 = [ + /** @const */ var a_2 = [ ["dt", -1, -1], ["vt", -1, -1] ]; - /** @const */ var a_2 = [ + /** @const */ var a_3 = [ ["leg", -1, 1], ["eleg", 0, 1], ["ig", -1, 1], @@ -54,9 +73,9 @@ NorwegianStemmer = function() { ["hetslov", 9, 1] ]; - /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 128]; + /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 2, 142]; - /** @const */ var /** Array */ g_s_ending = [119, 125, 149, 1]; + /** @const */ var /** Array */ g_s_ending = [119, 125, 148, 1]; var /** number */ I_x = 0; var /** number */ I_p1 = 0; @@ -65,9 +84,9 @@ NorwegianStemmer = function() { /** @return {boolean} */ function r_mark_regions() { I_p1 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; { - var /** number */ c1 = base.cursor + 3; + /** @const */ var /** number */ c1 = base.cursor + 3; if (c1 > base.limit) { return false; @@ -76,44 +95,21 @@ NorwegianStemmer = function() { } I_x = base.cursor; base.cursor = v_1; - golab0: while(true) 
+ if (!base.go_out_grouping(g_v, 97, 248)) { - var /** number */ v_2 = base.cursor; - lab1: { - if (!(base.in_grouping(g_v, 97, 248))) - { - break lab1; - } - base.cursor = v_2; - break golab0; - } - base.cursor = v_2; - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } - golab2: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 248)) { - lab3: { - if (!(base.out_grouping(g_v, 97, 248))) - { - break lab3; - } - break golab2; - } - if (base.cursor >= base.limit) - { - return false; - } - base.cursor++; + return false; } + base.cursor++; I_p1 = base.cursor; - lab4: { - if (!(I_p1 < I_x)) + lab0: { + if (I_p1 >= I_x) { - break lab4; + break lab0; } I_p1 = I_x; } @@ -127,17 +123,17 @@ NorwegianStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; - among_var = base.find_among_b(a_0); + among_var = base.find_among_b(a_1); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: if (!base.slice_del()) @@ -146,8 +142,19 @@ NorwegianStemmer = function() { } break; case 2: + among_var = base.find_among_b(a_0); + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + } + break; + case 3: lab0: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { if (!(base.in_grouping_b(g_s_ending, 98, 122))) { @@ -155,7 +162,26 @@ NorwegianStemmer = function() { } break lab0; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; + lab2: { + if (!(base.eq_s_b("r"))) + { + break lab2; + } + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab3: { + if (!(base.eq_s_b("e"))) + { + break lab3; + } + break 
lab2; + } + base.cursor = base.limit - v_3; + } + break lab0; + } + base.cursor = base.limit - v_2; if (!(base.eq_s_b("k"))) { return false; @@ -170,7 +196,7 @@ NorwegianStemmer = function() { return false; } break; - case 3: + case 4: if (!base.slice_from("er")) { return false; @@ -182,21 +208,21 @@ NorwegianStemmer = function() { /** @return {boolean} */ function r_consonant_pair() { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; if (base.cursor < I_p1) { return false; } - var /** number */ v_3 = base.limit_backward; + /** @const */ var /** number */ v_2 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; - if (base.find_among_b(a_1) == 0) + if (base.find_among_b(a_2) == 0) { - base.limit_backward = v_3; + base.limit_backward = v_2; return false; } base.bra = base.cursor; - base.limit_backward = v_3; + base.limit_backward = v_2; base.cursor = base.limit - v_1; if (base.cursor <= base.limit_backward) { @@ -217,16 +243,16 @@ NorwegianStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; - if (base.find_among_b(a_2) == 0) + if (base.find_among_b(a_3) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; if (!base.slice_del()) { return false; @@ -235,17 +261,17 @@ NorwegianStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_mark_regions(); base.cursor = v_1; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_main_suffix(); base.cursor = base.limit - v_2; - var /** number */ v_3 = 
base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_consonant_pair(); base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_other_suffix(); base.cursor = base.limit - v_4; base.cursor = base.limit_backward; diff --git a/sphinx/search/non-minified-js/porter-stemmer.js b/sphinx/search/non-minified-js/porter-stemmer.js index 0747d2cf3ea..182b9d5edfb 100644 --- a/sphinx/search/non-minified-js/porter-stemmer.js +++ b/sphinx/search/non-minified-js/porter-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from porter.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -PorterStemmer = function() { +var PorterStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["s", -1, 3], ["ies", 0, 2], @@ -115,20 +116,12 @@ PorterStemmer = function() { /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -186,38 +179,24 @@ PorterStemmer = function() { } break; case 2: - var /** number */ v_1 = base.limit - base.cursor; - golab0: while(true) + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (!base.go_out_grouping_b(g_v, 97, 121)) { - lab1: { - if (!(base.in_grouping_b(g_v, 97, 121))) - { - break lab1; - } - break golab0; - } - if (base.cursor <= base.limit_backward) - { - return false; - } - base.cursor--; + return false; } + base.cursor--; base.cursor = base.limit - v_1; if (!base.slice_del()) { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; among_var = base.find_among_b(a_1); - if 
(among_var == 0) - { - return false; - } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; switch (among_var) { case 1: { - var /** number */ c1 = base.cursor; + /** @const */ var /** number */ c1 = base.cursor; base.insert(base.cursor, base.cursor, "e"); base.cursor = c1; } @@ -240,14 +219,14 @@ PorterStemmer = function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; if (!r_shortv()) { return false; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; { - var /** number */ c2 = base.cursor; + /** @const */ var /** number */ c2 = base.cursor; base.insert(base.cursor, base.cursor, "e"); base.cursor = c2; } @@ -262,7 +241,7 @@ PorterStemmer = function() { function r_Step_1c() { base.ket = base.cursor; lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("y"))) { @@ -277,21 +256,11 @@ PorterStemmer = function() { } } base.bra = base.cursor; - golab2: while(true) + if (!base.go_out_grouping_b(g_v, 97, 121)) { - lab3: { - if (!(base.in_grouping_b(g_v, 97, 121))) - { - break lab3; - } - break golab2; - } - if (base.cursor <= base.limit_backward) - { - return false; - } - base.cursor--; + return false; } + base.cursor--; if (!base.slice_from("i")) { return false; @@ -456,7 +425,7 @@ PorterStemmer = function() { break; case 2: lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("s"))) { @@ -488,7 +457,6 @@ PorterStemmer = function() { } base.bra = base.cursor; lab0: { - var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!r_R2()) { @@ -496,13 +464,12 @@ PorterStemmer = function() { } break lab0; } - base.cursor = base.limit - v_1; if (!r_R1()) { return false; } { - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** 
number */ v_1 = base.limit - base.cursor; lab2: { if (!r_shortv()) { @@ -510,7 +477,7 @@ PorterStemmer = function() { } return false; } - base.cursor = base.limit - v_2; + base.cursor = base.limit - v_1; } } if (!base.slice_del()) @@ -545,7 +512,7 @@ PorterStemmer = function() { this.stem = /** @return {boolean} */ function() { B_Y_found = false; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; if (!(base.eq_s("y"))) @@ -560,15 +527,15 @@ PorterStemmer = function() { B_Y_found = true; } base.cursor = v_1; - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab1: { while(true) { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab2: { golab3: while(true) { - var /** number */ v_4 = base.cursor; + /** @const */ var /** number */ v_4 = base.cursor; lab4: { if (!(base.in_grouping(g_v, 97, 121))) { @@ -604,125 +571,85 @@ PorterStemmer = function() { base.cursor = v_2; I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_5 = base.cursor; + /** @const */ var /** number */ v_5 = base.cursor; lab5: { - golab6: while(true) + if (!base.go_out_grouping(g_v, 97, 121)) { - lab7: { - if (!(base.in_grouping(g_v, 97, 121))) - { - break lab7; - } - break golab6; - } - if (base.cursor >= base.limit) - { - break lab5; - } - base.cursor++; + break lab5; } - golab8: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 121)) { - lab9: { - if (!(base.out_grouping(g_v, 97, 121))) - { - break lab9; - } - break golab8; - } - if (base.cursor >= base.limit) - { - break lab5; - } - base.cursor++; + break lab5; } + base.cursor++; I_p1 = base.cursor; - golab10: while(true) + if (!base.go_out_grouping(g_v, 97, 121)) { - lab11: { - if (!(base.in_grouping(g_v, 97, 121))) - { - break lab11; - } - break golab10; - } - if (base.cursor >= base.limit) - { - break lab5; - } - base.cursor++; + break lab5; } - golab12: while(true) + 
base.cursor++; + if (!base.go_in_grouping(g_v, 97, 121)) { - lab13: { - if (!(base.out_grouping(g_v, 97, 121))) - { - break lab13; - } - break golab12; - } - if (base.cursor >= base.limit) - { - break lab5; - } - base.cursor++; + break lab5; } + base.cursor++; I_p2 = base.cursor; } base.cursor = v_5; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; r_Step_1a(); - base.cursor = base.limit - v_10; - var /** number */ v_11 = base.limit - base.cursor; + base.cursor = base.limit - v_6; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; r_Step_1b(); - base.cursor = base.limit - v_11; - var /** number */ v_12 = base.limit - base.cursor; + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; r_Step_1c(); - base.cursor = base.limit - v_12; - var /** number */ v_13 = base.limit - base.cursor; + base.cursor = base.limit - v_8; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; r_Step_2(); - base.cursor = base.limit - v_13; - var /** number */ v_14 = base.limit - base.cursor; + base.cursor = base.limit - v_9; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; r_Step_3(); - base.cursor = base.limit - v_14; - var /** number */ v_15 = base.limit - base.cursor; + base.cursor = base.limit - v_10; + /** @const */ var /** number */ v_11 = base.limit - base.cursor; r_Step_4(); - base.cursor = base.limit - v_15; - var /** number */ v_16 = base.limit - base.cursor; + base.cursor = base.limit - v_11; + /** @const */ var /** number */ v_12 = base.limit - base.cursor; r_Step_5a(); - base.cursor = base.limit - v_16; - var /** number */ v_17 = base.limit - base.cursor; + base.cursor = base.limit - v_12; + /** @const */ var /** number */ v_13 = base.limit - base.cursor; r_Step_5b(); - base.cursor = base.limit - v_17; + base.cursor = base.limit - v_13; base.cursor = 
base.limit_backward; - var /** number */ v_18 = base.cursor; - lab14: { + /** @const */ var /** number */ v_14 = base.cursor; + lab6: { if (!B_Y_found) { - break lab14; + break lab6; } while(true) { - var /** number */ v_19 = base.cursor; - lab15: { - golab16: while(true) + /** @const */ var /** number */ v_15 = base.cursor; + lab7: { + golab8: while(true) { - var /** number */ v_20 = base.cursor; - lab17: { + /** @const */ var /** number */ v_16 = base.cursor; + lab9: { base.bra = base.cursor; if (!(base.eq_s("Y"))) { - break lab17; + break lab9; } base.ket = base.cursor; - base.cursor = v_20; - break golab16; + base.cursor = v_16; + break golab8; } - base.cursor = v_20; + base.cursor = v_16; if (base.cursor >= base.limit) { - break lab15; + break lab7; } base.cursor++; } @@ -732,11 +659,11 @@ PorterStemmer = function() { } continue; } - base.cursor = v_19; + base.cursor = v_15; break; } } - base.cursor = v_18; + base.cursor = v_14; return true; }; diff --git a/sphinx/search/non-minified-js/portuguese-stemmer.js b/sphinx/search/non-minified-js/portuguese-stemmer.js index 662b976565a..2b4a63fafe6 100644 --- a/sphinx/search/non-minified-js/portuguese-stemmer.js +++ b/sphinx/search/non-minified-js/portuguese-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from portuguese.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -PortugueseStemmer = function() { +var PortugueseStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["", -1, 3], ["\u00E3", 0, 1], @@ -234,14 +235,10 @@ PortugueseStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_0); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -277,37 +274,27 @@ PortugueseStemmer = function() { I_pV = base.limit; 
I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { lab1: { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 250))) { break lab2; } lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.out_grouping(g_v, 97, 250))) { break lab4; } - golab5: while(true) + if (!base.go_out_grouping(g_v, 97, 250)) { - lab6: { - if (!(base.in_grouping(g_v, 97, 250))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - break lab4; - } - base.cursor++; + break lab4; } + base.cursor++; break lab3; } base.cursor = v_3; @@ -315,21 +302,11 @@ PortugueseStemmer = function() { { break lab2; } - golab7: while(true) + if (!base.go_in_grouping(g_v, 97, 250)) { - lab8: { - if (!(base.out_grouping(g_v, 97, 250))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab2; - } - base.cursor++; + break lab2; } + base.cursor++; } break lab1; } @@ -338,31 +315,21 @@ PortugueseStemmer = function() { { break lab0; } - lab9: { - var /** number */ v_6 = base.cursor; - lab10: { + lab5: { + /** @const */ var /** number */ v_4 = base.cursor; + lab6: { if (!(base.out_grouping(g_v, 97, 250))) { - break lab10; + break lab6; } - golab11: while(true) + if (!base.go_out_grouping(g_v, 97, 250)) { - lab12: { - if (!(base.in_grouping(g_v, 97, 250))) - { - break lab12; - } - break golab11; - } - if (base.cursor >= base.limit) - { - break lab10; - } - base.cursor++; + break lab6; } - break lab9; + base.cursor++; + break lab5; } - base.cursor = v_6; + base.cursor = v_4; if (!(base.in_grouping(g_v, 97, 250))) { break lab0; @@ -377,72 +344,32 @@ PortugueseStemmer = function() { I_pV = base.cursor; } base.cursor = v_1; - var /** number */ v_8 = base.cursor; - lab13: { - golab14: while(true) + /** @const */ var /** number */ v_5 = 
base.cursor; + lab7: { + if (!base.go_out_grouping(g_v, 97, 250)) { - lab15: { - if (!(base.in_grouping(g_v, 97, 250))) - { - break lab15; - } - break golab14; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab16: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 250)) { - lab17: { - if (!(base.out_grouping(g_v, 97, 250))) - { - break lab17; - } - break golab16; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p1 = base.cursor; - golab18: while(true) + if (!base.go_out_grouping(g_v, 97, 250)) { - lab19: { - if (!(base.in_grouping(g_v, 97, 250))) - { - break lab19; - } - break golab18; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab20: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 250)) { - lab21: { - if (!(base.out_grouping(g_v, 97, 250))) - { - break lab21; - } - break golab20; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p2 = base.cursor; } - base.cursor = v_8; + base.cursor = v_5; return true; }; @@ -451,14 +378,10 @@ PortugueseStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_1); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -491,29 +414,17 @@ PortugueseStemmer = function() { /** @return {boolean} */ function r_RV() { - if (!(I_pV <= base.cursor)) - { - return false; - } - return true; + return I_pV <= base.cursor; }; /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return 
I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -576,7 +487,7 @@ PortugueseStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; among_var = base.find_among_b(a_2); @@ -626,7 +537,7 @@ PortugueseStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { base.ket = base.cursor; if (base.find_among_b(a_3) == 0) @@ -655,7 +566,7 @@ PortugueseStemmer = function() { { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { base.ket = base.cursor; if (base.find_among_b(a_4) == 0) @@ -684,7 +595,7 @@ PortugueseStemmer = function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab3: { base.ket = base.cursor; if (!(base.eq_s_b("at"))) @@ -728,12 +639,12 @@ PortugueseStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; if (base.find_among_b(a_6) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; @@ -741,7 +652,7 @@ PortugueseStemmer = function() { { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; return true; }; @@ -786,14 +697,14 @@ PortugueseStemmer = function() { } base.ket = base.cursor; lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("u"))) { break lab1; } base.bra = base.cursor; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.eq_s_b("g"))) { 
break lab1; @@ -807,7 +718,7 @@ PortugueseStemmer = function() { return false; } base.bra = base.cursor; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; if (!(base.eq_s_b("c"))) { return false; @@ -834,19 +745,19 @@ PortugueseStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_prelude(); base.cursor = v_1; r_mark_regions(); base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { lab1: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab3: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab4: { if (!r_standard_suffix()) { @@ -854,14 +765,14 @@ PortugueseStemmer = function() { } break lab3; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_5; if (!r_verb_suffix()) { break lab2; } } - base.cursor = base.limit - v_5; - var /** number */ v_7 = base.limit - base.cursor; + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab5: { base.ket = base.cursor; if (!(base.eq_s_b("i"))) @@ -869,12 +780,12 @@ PortugueseStemmer = function() { break lab5; } base.bra = base.cursor; - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; if (!(base.eq_s_b("c"))) { break lab5; } - base.cursor = base.limit - v_8; + base.cursor = base.limit - v_7; if (!r_RV()) { break lab5; @@ -884,24 +795,24 @@ PortugueseStemmer = function() { return false; } } - base.cursor = base.limit - v_7; + 
base.cursor = base.limit - v_6; break lab1; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; if (!r_residual_suffix()) { break lab0; } } } - base.cursor = base.limit - v_3; - var /** number */ v_9 = base.limit - base.cursor; + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; r_residual_form(); - base.cursor = base.limit - v_9; + base.cursor = base.limit - v_8; base.cursor = base.limit_backward; - var /** number */ v_10 = base.cursor; + /** @const */ var /** number */ v_9 = base.cursor; r_postlude(); - base.cursor = v_10; + base.cursor = v_9; return true; }; diff --git a/sphinx/search/non-minified-js/romanian-stemmer.js b/sphinx/search/non-minified-js/romanian-stemmer.js index 67538f1c008..4c58e819ec2 100644 --- a/sphinx/search/non-minified-js/romanian-stemmer.js +++ b/sphinx/search/non-minified-js/romanian-stemmer.js @@ -1,20 +1,26 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from romanian.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -RomanianStemmer = function() { +var RomanianStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ + ["\u015F", -1, 1], + ["\u0163", -1, 2] + ]; + + /** @const */ var a_1 = [ ["", -1, 3], ["I", 0, 1], ["U", 0, 2] ]; - /** @const */ var a_1 = [ + /** @const */ var a_2 = [ ["ea", -1, 3], - ["a\u0163ia", -1, 7], + ["a\u021Bia", -1, 7], ["aua", -1, 2], ["iua", -1, 4], - ["a\u0163ie", -1, 7], + ["a\u021Bie", -1, 7], ["ele", -1, 3], ["ile", -1, 5], ["iile", 6, 4], @@ -28,14 +34,14 @@ RomanianStemmer = function() { ["iilor", 14, 4] ]; - /** @const */ var a_2 = [ + /** @const */ var a_3 = [ ["icala", -1, 4], ["iciva", -1, 4], ["ativa", -1, 5], ["itiva", -1, 6], ["icale", -1, 4], - ["a\u0163iune", -1, 5], - ["i\u0163iune", -1, 6], + ["a\u021Biune", -1, 5], + ["i\u021Biune", -1, 6], ["atoare", -1, 5], ["itoare", -1, 6], ["\u0103toare", -1, 5], @@ -60,9 +66,9 @@ RomanianStemmer = function() { 
["icit\u0103i", -1, 4], ["abilit\u0103i", -1, 1], ["ivit\u0103i", -1, 3], - ["icit\u0103\u0163i", -1, 4], - ["abilit\u0103\u0163i", -1, 1], - ["ivit\u0103\u0163i", -1, 3], + ["icit\u0103\u021Bi", -1, 4], + ["abilit\u0103\u021Bi", -1, 1], + ["ivit\u0103\u021Bi", -1, 3], ["ical", -1, 4], ["ator", -1, 5], ["icator", 35, 4], @@ -77,7 +83,7 @@ RomanianStemmer = function() { ["itiv\u0103", -1, 6] ]; - /** @const */ var a_3 = [ + /** @const */ var a_4 = [ ["ica", -1, 1], ["abila", -1, 1], ["ibila", -1, 1], @@ -114,11 +120,11 @@ RomanianStemmer = function() { ["anti", -1, 1], ["isti", -1, 3], ["uti", -1, 1], - ["i\u015Fti", -1, 3], + ["i\u0219ti", -1, 3], ["ivi", -1, 1], ["it\u0103i", -1, 1], - ["o\u015Fi", -1, 1], - ["it\u0103\u0163i", -1, 1], + ["o\u0219i", -1, 1], + ["it\u0103\u021Bi", -1, 1], ["abil", -1, 1], ["ibil", -1, 1], ["ism", -1, 3], @@ -142,7 +148,7 @@ RomanianStemmer = function() { ["iv\u0103", -1, 1] ]; - /** @const */ var a_4 = [ + /** @const */ var a_5 = [ ["ea", -1, 1], ["ia", -1, 1], ["esc", -1, 1], @@ -159,44 +165,44 @@ RomanianStemmer = function() { ["ise", 10, 1], ["use", 10, 1], ["\u00E2se", 10, 1], - ["e\u015Fte", -1, 1], - ["\u0103\u015Fte", -1, 1], + ["e\u0219te", -1, 1], + ["\u0103\u0219te", -1, 1], ["eze", -1, 1], ["ai", -1, 1], ["eai", 19, 1], ["iai", 19, 1], ["sei", -1, 2], - ["e\u015Fti", -1, 1], - ["\u0103\u015Fti", -1, 1], + ["e\u0219ti", -1, 1], + ["\u0103\u0219ti", -1, 1], ["ui", -1, 1], ["ezi", -1, 1], ["\u00E2i", -1, 1], - ["a\u015Fi", -1, 1], - ["se\u015Fi", -1, 2], - ["ase\u015Fi", 29, 1], - ["sese\u015Fi", 29, 2], - ["ise\u015Fi", 29, 1], - ["use\u015Fi", 29, 1], - ["\u00E2se\u015Fi", 29, 1], - ["i\u015Fi", -1, 1], - ["u\u015Fi", -1, 1], - ["\u00E2\u015Fi", -1, 1], - ["a\u0163i", -1, 2], - ["ea\u0163i", 38, 1], - ["ia\u0163i", 38, 1], - ["e\u0163i", -1, 2], - ["i\u0163i", -1, 2], - ["\u00E2\u0163i", -1, 2], - ["ar\u0103\u0163i", -1, 1], - ["ser\u0103\u0163i", -1, 2], - ["aser\u0103\u0163i", 45, 1], - ["seser\u0103\u0163i", 45, 2], - 
["iser\u0103\u0163i", 45, 1], - ["user\u0103\u0163i", 45, 1], - ["\u00E2ser\u0103\u0163i", 45, 1], - ["ir\u0103\u0163i", -1, 1], - ["ur\u0103\u0163i", -1, 1], - ["\u00E2r\u0103\u0163i", -1, 1], + ["a\u0219i", -1, 1], + ["se\u0219i", -1, 2], + ["ase\u0219i", 29, 1], + ["sese\u0219i", 29, 2], + ["ise\u0219i", 29, 1], + ["use\u0219i", 29, 1], + ["\u00E2se\u0219i", 29, 1], + ["i\u0219i", -1, 1], + ["u\u0219i", -1, 1], + ["\u00E2\u0219i", -1, 1], + ["a\u021Bi", -1, 2], + ["ea\u021Bi", 38, 1], + ["ia\u021Bi", 38, 1], + ["e\u021Bi", -1, 2], + ["i\u021Bi", -1, 2], + ["\u00E2\u021Bi", -1, 2], + ["ar\u0103\u021Bi", -1, 1], + ["ser\u0103\u021Bi", -1, 2], + ["aser\u0103\u021Bi", 45, 1], + ["seser\u0103\u021Bi", 45, 2], + ["iser\u0103\u021Bi", 45, 1], + ["user\u0103\u021Bi", 45, 1], + ["\u00E2ser\u0103\u021Bi", 45, 1], + ["ir\u0103\u021Bi", -1, 1], + ["ur\u0103\u021Bi", -1, 1], + ["\u00E2r\u0103\u021Bi", -1, 1], ["am", -1, 1], ["eam", 54, 1], ["iam", 54, 1], @@ -239,7 +245,7 @@ RomanianStemmer = function() { ["eaz\u0103", -1, 1] ]; - /** @const */ var a_5 = [ + /** @const */ var a_6 = [ ["a", -1, 1], ["e", -1, 1], ["ie", 1, 1], @@ -255,15 +261,69 @@ RomanianStemmer = function() { var /** number */ I_pV = 0; + /** @return {boolean} */ + function r_norm() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + golab2: while(true) + { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + if (among_var == 0) + { + break lab3; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u0219")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u021B")) + { + return false; + } + break; + } + base.cursor = v_3; + break golab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + break lab1; + } + base.cursor++; + } + continue; + } + 
base.cursor = v_2; + break; + } + } + base.cursor = v_1; + return true; + }; + /** @return {boolean} */ function r_prelude() { while(true) { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { golab1: while(true) { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 259))) { @@ -271,7 +331,7 @@ RomanianStemmer = function() { } base.bra = base.cursor; lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.eq_s("u"))) { @@ -326,37 +386,27 @@ RomanianStemmer = function() { I_pV = base.limit; I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { lab1: { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 259))) { break lab2; } lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.out_grouping(g_v, 97, 259))) { break lab4; } - golab5: while(true) + if (!base.go_out_grouping(g_v, 97, 259)) { - lab6: { - if (!(base.in_grouping(g_v, 97, 259))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - break lab4; - } - base.cursor++; + break lab4; } + base.cursor++; break lab3; } base.cursor = v_3; @@ -364,21 +414,11 @@ RomanianStemmer = function() { { break lab2; } - golab7: while(true) + if (!base.go_in_grouping(g_v, 97, 259)) { - lab8: { - if (!(base.out_grouping(g_v, 97, 259))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab2; - } - base.cursor++; + break lab2; } + base.cursor++; } break lab1; } @@ -387,31 +427,21 @@ RomanianStemmer = function() { { break lab0; } - lab9: { - var /** number */ v_6 = base.cursor; - lab10: { + lab5: { + /** @const */ var /** number */ v_4 = base.cursor; + lab6: 
{ if (!(base.out_grouping(g_v, 97, 259))) { - break lab10; + break lab6; } - golab11: while(true) + if (!base.go_out_grouping(g_v, 97, 259)) { - lab12: { - if (!(base.in_grouping(g_v, 97, 259))) - { - break lab12; - } - break golab11; - } - if (base.cursor >= base.limit) - { - break lab10; - } - base.cursor++; + break lab6; } - break lab9; + base.cursor++; + break lab5; } - base.cursor = v_6; + base.cursor = v_4; if (!(base.in_grouping(g_v, 97, 259))) { break lab0; @@ -426,72 +456,32 @@ RomanianStemmer = function() { I_pV = base.cursor; } base.cursor = v_1; - var /** number */ v_8 = base.cursor; - lab13: { - golab14: while(true) + /** @const */ var /** number */ v_5 = base.cursor; + lab7: { + if (!base.go_out_grouping(g_v, 97, 259)) { - lab15: { - if (!(base.in_grouping(g_v, 97, 259))) - { - break lab15; - } - break golab14; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab16: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 259)) { - lab17: { - if (!(base.out_grouping(g_v, 97, 259))) - { - break lab17; - } - break golab16; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p1 = base.cursor; - golab18: while(true) + if (!base.go_out_grouping(g_v, 97, 259)) { - lab19: { - if (!(base.in_grouping(g_v, 97, 259))) - { - break lab19; - } - break golab18; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab20: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 259)) { - lab21: { - if (!(base.out_grouping(g_v, 97, 259))) - { - break lab21; - } - break golab20; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p2 = base.cursor; } - base.cursor = v_8; + base.cursor = v_5; return true; }; @@ -500,14 +490,10 @@ RomanianStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = base.cursor; + /** 
@const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; - among_var = base.find_among(a_0); - if (among_var == 0) - { - break lab0; - } + among_var = base.find_among(a_1); base.ket = base.cursor; switch (among_var) { case 1: @@ -540,36 +526,24 @@ RomanianStemmer = function() { /** @return {boolean} */ function r_RV() { - if (!(I_pV <= base.cursor)) - { - return false; - } - return true; + return I_pV <= base.cursor; }; /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ function r_step_0() { var /** number */ among_var; base.ket = base.cursor; - among_var = base.find_among_b(a_1); + among_var = base.find_among_b(a_2); if (among_var == 0) { return false; @@ -606,7 +580,7 @@ RomanianStemmer = function() { break; case 5: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("ab"))) { @@ -628,7 +602,7 @@ RomanianStemmer = function() { } break; case 7: - if (!base.slice_from("a\u0163i")) + if (!base.slice_from("a\u021Bi")) { return false; } @@ -640,9 +614,9 @@ RomanianStemmer = function() { /** @return {boolean} */ function r_combo_suffix() { var /** number */ among_var; - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; base.ket = base.cursor; - among_var = base.find_among_b(a_2); + among_var = base.find_among_b(a_3); if (among_var == 0) { return false; @@ -701,7 +675,7 @@ RomanianStemmer = function() { B_standard_suffix_removed = false; while(true) { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { if (!r_combo_suffix()) { @@ -713,7 +687,7 @@ 
RomanianStemmer = function() { break; } base.ket = base.cursor; - among_var = base.find_among_b(a_3); + among_var = base.find_among_b(a_4); if (among_var == 0) { return false; @@ -731,7 +705,7 @@ RomanianStemmer = function() { } break; case 2: - if (!(base.eq_s_b("\u0163"))) + if (!(base.eq_s_b("\u021B"))) { return false; } @@ -759,20 +733,20 @@ RomanianStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; - among_var = base.find_among_b(a_4); + among_var = base.find_among_b(a_5); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; switch (among_var) { case 1: lab0: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { if (!(base.out_grouping_b(g_v, 97, 259))) { @@ -780,10 +754,10 @@ RomanianStemmer = function() { } break lab0; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; if (!(base.eq_s_b("u"))) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } } @@ -799,14 +773,14 @@ RomanianStemmer = function() { } break; } - base.limit_backward = v_2; + base.limit_backward = v_1; return true; }; /** @return {boolean} */ function r_vowel_suffix() { base.ket = base.cursor; - if (base.find_among_b(a_5) == 0) + if (base.find_among_b(a_6) == 0) { return false; } @@ -823,21 +797,22 @@ RomanianStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + r_norm(); + /** @const */ var /** number */ v_1 = base.cursor; r_prelude(); base.cursor = v_1; r_mark_regions(); base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_step_0(); - base.cursor = base.limit - v_3; - var /** 
number */ v_4 = base.limit - base.cursor; + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_standard_suffix(); - base.cursor = base.limit - v_4; - var /** number */ v_5 = base.limit - base.cursor; + base.cursor = base.limit - v_3; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab0: { lab1: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab2: { if (!B_standard_suffix_removed) { @@ -845,21 +820,21 @@ RomanianStemmer = function() { } break lab1; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_5; if (!r_verb_suffix()) { break lab0; } } } - base.cursor = base.limit - v_5; - var /** number */ v_7 = base.limit - base.cursor; + base.cursor = base.limit - v_4; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; r_vowel_suffix(); - base.cursor = base.limit - v_7; + base.cursor = base.limit - v_6; base.cursor = base.limit_backward; - var /** number */ v_8 = base.cursor; + /** @const */ var /** number */ v_7 = base.cursor; r_postlude(); - base.cursor = v_8; + base.cursor = v_7; return true; }; diff --git a/sphinx/search/non-minified-js/russian-stemmer.js b/sphinx/search/non-minified-js/russian-stemmer.js index 28ded5fc816..36c655d6bd7 100644 --- a/sphinx/search/non-minified-js/russian-stemmer.js +++ b/sphinx/search/non-minified-js/russian-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from russian.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -RussianStemmer = function() { +var RussianStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["\u0432", -1, 1], ["\u0438\u0432", 0, 2], @@ -170,69 +171,29 @@ RussianStemmer = function() { function r_mark_regions() { I_pV = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { - 
golab1: while(true) + if (!base.go_out_grouping(g_v, 1072, 1103)) { - lab2: { - if (!(base.in_grouping(g_v, 1072, 1103))) - { - break lab2; - } - break golab1; - } - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; + break lab0; } + base.cursor++; I_pV = base.cursor; - golab3: while(true) + if (!base.go_in_grouping(g_v, 1072, 1103)) { - lab4: { - if (!(base.out_grouping(g_v, 1072, 1103))) - { - break lab4; - } - break golab3; - } - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; + break lab0; } - golab5: while(true) + base.cursor++; + if (!base.go_out_grouping(g_v, 1072, 1103)) { - lab6: { - if (!(base.in_grouping(g_v, 1072, 1103))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; + break lab0; } - golab7: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 1072, 1103)) { - lab8: { - if (!(base.out_grouping(g_v, 1072, 1103))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab0; - } - base.cursor++; + break lab0; } + base.cursor++; I_p2 = base.cursor; } base.cursor = v_1; @@ -241,11 +202,7 @@ RussianStemmer = function() { /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -261,7 +218,7 @@ RussianStemmer = function() { switch (among_var) { case 1: lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("\u0430"))) { @@ -312,7 +269,7 @@ RussianStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; among_var = base.find_among_b(a_2); @@ -325,7 +282,7 @@ RussianStemmer = function() { switch (among_var) { case 1: lab1: { - var /** number */ v_2 = base.limit - 
base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab2: { if (!(base.eq_s_b("\u0430"))) { @@ -384,7 +341,7 @@ RussianStemmer = function() { switch (among_var) { case 1: lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("\u0430"))) { @@ -499,15 +456,15 @@ RussianStemmer = function() { }; this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { while(true) { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab1: { golab2: while(true) { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab3: { base.bra = base.cursor; if (!(base.eq_s("\u0451"))) @@ -542,12 +499,12 @@ RussianStemmer = function() { { return false; } - var /** number */ v_6 = base.limit_backward; + /** @const */ var /** number */ v_4 = base.limit_backward; base.limit_backward = I_pV; - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab4: { lab5: { - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab6: { if (!r_perfective_gerund()) { @@ -555,17 +512,17 @@ RussianStemmer = function() { } break lab5; } - base.cursor = base.limit - v_8; - var /** number */ v_9 = base.limit - base.cursor; + base.cursor = base.limit - v_6; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab7: { if (!r_reflexive()) { - base.cursor = base.limit - v_9; + base.cursor = base.limit - v_7; break lab7; } } lab8: { - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab9: { if (!r_adjectival()) { @@ -573,7 +530,7 @@ RussianStemmer = function() { } break lab8; } - base.cursor = base.limit - v_10; + base.cursor = 
base.limit - v_8; lab10: { if (!r_verb()) { @@ -581,7 +538,7 @@ RussianStemmer = function() { } break lab8; } - base.cursor = base.limit - v_10; + base.cursor = base.limit - v_8; if (!r_noun()) { break lab4; @@ -589,13 +546,13 @@ RussianStemmer = function() { } } } - base.cursor = base.limit - v_7; - var /** number */ v_11 = base.limit - base.cursor; + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; lab11: { base.ket = base.cursor; if (!(base.eq_s_b("\u0438"))) { - base.cursor = base.limit - v_11; + base.cursor = base.limit - v_9; break lab11; } base.bra = base.cursor; @@ -604,13 +561,13 @@ RussianStemmer = function() { return false; } } - var /** number */ v_12 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; r_derivational(); - base.cursor = base.limit - v_12; - var /** number */ v_13 = base.limit - base.cursor; + base.cursor = base.limit - v_10; + /** @const */ var /** number */ v_11 = base.limit - base.cursor; r_tidy_up(); - base.cursor = base.limit - v_13; - base.limit_backward = v_6; + base.cursor = base.limit - v_11; + base.limit_backward = v_4; base.cursor = base.limit_backward; return true; }; diff --git a/sphinx/search/non-minified-js/serbian-stemmer.js b/sphinx/search/non-minified-js/serbian-stemmer.js new file mode 100644 index 00000000000..7d6d0ce32e9 --- /dev/null +++ b/sphinx/search/non-minified-js/serbian-stemmer.js @@ -0,0 +1,4516 @@ +// Generated from serbian.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var SerbianStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0430", -1, 1], + ["\u0431", -1, 2], + ["\u0432", -1, 3], + ["\u0433", -1, 4], + ["\u0434", -1, 5], + ["\u0435", -1, 7], + ["\u0436", -1, 8], + ["\u0437", -1, 9], + ["\u0438", -1, 10], + ["\u043A", -1, 12], + ["\u043B", -1, 13], + ["\u043C", -1, 15], + ["\u043D", -1, 16], + ["\u043E", -1, 18], + ["\u043F", -1, 19], + ["\u0440", 
-1, 20], + ["\u0441", -1, 21], + ["\u0442", -1, 22], + ["\u0443", -1, 24], + ["\u0444", -1, 25], + ["\u0445", -1, 26], + ["\u0446", -1, 27], + ["\u0447", -1, 28], + ["\u0448", -1, 30], + ["\u0452", -1, 6], + ["\u0458", -1, 11], + ["\u0459", -1, 14], + ["\u045A", -1, 17], + ["\u045B", -1, 23], + ["\u045F", -1, 29] + ]; + + /** @const */ var a_1 = [ + ["daba", -1, 73], + ["ajaca", -1, 12], + ["ejaca", -1, 14], + ["ljaca", -1, 13], + ["njaca", -1, 85], + ["ojaca", -1, 15], + ["alaca", -1, 82], + ["elaca", -1, 83], + ["olaca", -1, 84], + ["maca", -1, 75], + ["naca", -1, 76], + ["raca", -1, 81], + ["saca", -1, 80], + ["vaca", -1, 79], + ["\u0161aca", -1, 18], + ["aoca", -1, 82], + ["acaka", -1, 55], + ["ajaka", -1, 16], + ["ojaka", -1, 17], + ["anaka", -1, 78], + ["ataka", -1, 58], + ["etaka", -1, 59], + ["itaka", -1, 60], + ["otaka", -1, 61], + ["utaka", -1, 62], + ["a\u010Daka", -1, 54], + ["esama", -1, 67], + ["izama", -1, 87], + ["jacima", -1, 5], + ["nicima", -1, 23], + ["ticima", -1, 24], + ["teticima", 30, 21], + ["zicima", -1, 25], + ["atcima", -1, 58], + ["utcima", -1, 62], + ["\u010Dcima", -1, 74], + ["pesima", -1, 2], + ["inzima", -1, 19], + ["lozima", -1, 1], + ["metara", -1, 68], + ["centara", -1, 69], + ["istara", -1, 70], + ["ekata", -1, 86], + ["anata", -1, 53], + ["nstava", -1, 22], + ["kustava", -1, 29], + ["ajac", -1, 12], + ["ejac", -1, 14], + ["ljac", -1, 13], + ["njac", -1, 85], + ["anjac", 49, 11], + ["ojac", -1, 15], + ["alac", -1, 82], + ["elac", -1, 83], + ["olac", -1, 84], + ["mac", -1, 75], + ["nac", -1, 76], + ["rac", -1, 81], + ["sac", -1, 80], + ["vac", -1, 79], + ["\u0161ac", -1, 18], + ["jebe", -1, 88], + ["olce", -1, 84], + ["kuse", -1, 27], + ["rave", -1, 42], + ["save", -1, 52], + ["\u0161ave", -1, 51], + ["baci", -1, 89], + ["jaci", -1, 5], + ["tvenici", -1, 20], + ["snici", -1, 26], + ["tetici", -1, 21], + ["bojci", -1, 4], + ["vojci", -1, 3], + ["ojsci", -1, 66], + ["atci", -1, 58], + ["itci", -1, 60], + ["utci", -1, 62], + 
["\u010Dci", -1, 74], + ["pesi", -1, 2], + ["inzi", -1, 19], + ["lozi", -1, 1], + ["acak", -1, 55], + ["usak", -1, 57], + ["atak", -1, 58], + ["etak", -1, 59], + ["itak", -1, 60], + ["otak", -1, 61], + ["utak", -1, 62], + ["a\u010Dak", -1, 54], + ["u\u0161ak", -1, 56], + ["izam", -1, 87], + ["tican", -1, 65], + ["cajan", -1, 7], + ["\u010Dajan", -1, 6], + ["voljan", -1, 77], + ["eskan", -1, 63], + ["alan", -1, 40], + ["bilan", -1, 33], + ["gilan", -1, 37], + ["nilan", -1, 39], + ["rilan", -1, 38], + ["silan", -1, 36], + ["tilan", -1, 34], + ["avilan", -1, 35], + ["laran", -1, 9], + ["eran", -1, 8], + ["asan", -1, 91], + ["esan", -1, 10], + ["dusan", -1, 31], + ["kusan", -1, 28], + ["atan", -1, 47], + ["pletan", -1, 50], + ["tetan", -1, 49], + ["antan", -1, 32], + ["pravan", -1, 44], + ["stavan", -1, 43], + ["sivan", -1, 46], + ["tivan", -1, 45], + ["ozan", -1, 41], + ["ti\u010Dan", -1, 64], + ["a\u0161an", -1, 90], + ["du\u0161an", -1, 30], + ["metar", -1, 68], + ["centar", -1, 69], + ["istar", -1, 70], + ["ekat", -1, 86], + ["enat", -1, 48], + ["oscu", -1, 72], + ["o\u0161\u0107u", -1, 71] + ]; + + /** @const */ var a_2 = [ + ["aca", -1, 124], + ["eca", -1, 125], + ["uca", -1, 126], + ["ga", -1, 20], + ["acega", 3, 124], + ["ecega", 3, 125], + ["ucega", 3, 126], + ["anjijega", 3, 84], + ["enjijega", 3, 85], + ["snjijega", 3, 122], + ["\u0161njijega", 3, 86], + ["kijega", 3, 95], + ["skijega", 11, 1], + ["\u0161kijega", 11, 2], + ["elijega", 3, 83], + ["nijega", 3, 13], + ["osijega", 3, 123], + ["atijega", 3, 120], + ["evitijega", 3, 92], + ["ovitijega", 3, 93], + ["astijega", 3, 94], + ["avijega", 3, 77], + ["evijega", 3, 78], + ["ivijega", 3, 79], + ["ovijega", 3, 80], + ["o\u0161ijega", 3, 91], + ["anjega", 3, 84], + ["enjega", 3, 85], + ["snjega", 3, 122], + ["\u0161njega", 3, 86], + ["kega", 3, 95], + ["skega", 30, 1], + ["\u0161kega", 30, 2], + ["elega", 3, 83], + ["nega", 3, 13], + ["anega", 34, 10], + ["enega", 34, 87], + ["snega", 34, 159], + 
["\u0161nega", 34, 88], + ["osega", 3, 123], + ["atega", 3, 120], + ["evitega", 3, 92], + ["ovitega", 3, 93], + ["astega", 3, 94], + ["avega", 3, 77], + ["evega", 3, 78], + ["ivega", 3, 79], + ["ovega", 3, 80], + ["a\u0107ega", 3, 14], + ["e\u0107ega", 3, 15], + ["u\u0107ega", 3, 16], + ["o\u0161ega", 3, 91], + ["acoga", 3, 124], + ["ecoga", 3, 125], + ["ucoga", 3, 126], + ["anjoga", 3, 84], + ["enjoga", 3, 85], + ["snjoga", 3, 122], + ["\u0161njoga", 3, 86], + ["koga", 3, 95], + ["skoga", 59, 1], + ["\u0161koga", 59, 2], + ["loga", 3, 19], + ["eloga", 62, 83], + ["noga", 3, 13], + ["cinoga", 64, 137], + ["\u010Dinoga", 64, 89], + ["osoga", 3, 123], + ["atoga", 3, 120], + ["evitoga", 3, 92], + ["ovitoga", 3, 93], + ["astoga", 3, 94], + ["avoga", 3, 77], + ["evoga", 3, 78], + ["ivoga", 3, 79], + ["ovoga", 3, 80], + ["a\u0107oga", 3, 14], + ["e\u0107oga", 3, 15], + ["u\u0107oga", 3, 16], + ["o\u0161oga", 3, 91], + ["uga", 3, 18], + ["aja", -1, 109], + ["caja", 81, 26], + ["laja", 81, 30], + ["raja", 81, 31], + ["\u0107aja", 81, 28], + ["\u010Daja", 81, 27], + ["\u0111aja", 81, 29], + ["bija", -1, 32], + ["cija", -1, 33], + ["dija", -1, 34], + ["fija", -1, 40], + ["gija", -1, 39], + ["anjija", -1, 84], + ["enjija", -1, 85], + ["snjija", -1, 122], + ["\u0161njija", -1, 86], + ["kija", -1, 95], + ["skija", 97, 1], + ["\u0161kija", 97, 2], + ["lija", -1, 24], + ["elija", 100, 83], + ["mija", -1, 37], + ["nija", -1, 13], + ["ganija", 103, 9], + ["manija", 103, 6], + ["panija", 103, 7], + ["ranija", 103, 8], + ["tanija", 103, 5], + ["pija", -1, 41], + ["rija", -1, 42], + ["rarija", 110, 21], + ["sija", -1, 23], + ["osija", 112, 123], + ["tija", -1, 44], + ["atija", 114, 120], + ["evitija", 114, 92], + ["ovitija", 114, 93], + ["otija", 114, 22], + ["astija", 114, 94], + ["avija", -1, 77], + ["evija", -1, 78], + ["ivija", -1, 79], + ["ovija", -1, 80], + ["zija", -1, 45], + ["o\u0161ija", -1, 91], + ["\u017Eija", -1, 38], + ["anja", -1, 84], + ["enja", -1, 85], + ["snja", -1, 
122], + ["\u0161nja", -1, 86], + ["ka", -1, 95], + ["ska", 131, 1], + ["\u0161ka", 131, 2], + ["ala", -1, 104], + ["acala", 134, 128], + ["astajala", 134, 106], + ["istajala", 134, 107], + ["ostajala", 134, 108], + ["ijala", 134, 47], + ["injala", 134, 114], + ["nala", 134, 46], + ["irala", 134, 100], + ["urala", 134, 105], + ["tala", 134, 113], + ["astala", 144, 110], + ["istala", 144, 111], + ["ostala", 144, 112], + ["avala", 134, 97], + ["evala", 134, 96], + ["ivala", 134, 98], + ["ovala", 134, 76], + ["uvala", 134, 99], + ["a\u010Dala", 134, 102], + ["ela", -1, 83], + ["ila", -1, 116], + ["acila", 155, 124], + ["lucila", 155, 121], + ["nila", 155, 103], + ["astanila", 158, 110], + ["istanila", 158, 111], + ["ostanila", 158, 112], + ["rosila", 155, 127], + ["jetila", 155, 118], + ["ozila", 155, 48], + ["a\u010Dila", 155, 101], + ["lu\u010Dila", 155, 117], + ["ro\u0161ila", 155, 90], + ["ola", -1, 50], + ["asla", -1, 115], + ["nula", -1, 13], + ["gama", -1, 20], + ["logama", 171, 19], + ["ugama", 171, 18], + ["ajama", -1, 109], + ["cajama", 174, 26], + ["lajama", 174, 30], + ["rajama", 174, 31], + ["\u0107ajama", 174, 28], + ["\u010Dajama", 174, 27], + ["\u0111ajama", 174, 29], + ["bijama", -1, 32], + ["cijama", -1, 33], + ["dijama", -1, 34], + ["fijama", -1, 40], + ["gijama", -1, 39], + ["lijama", -1, 35], + ["mijama", -1, 37], + ["nijama", -1, 36], + ["ganijama", 188, 9], + ["manijama", 188, 6], + ["panijama", 188, 7], + ["ranijama", 188, 8], + ["tanijama", 188, 5], + ["pijama", -1, 41], + ["rijama", -1, 42], + ["sijama", -1, 43], + ["tijama", -1, 44], + ["zijama", -1, 45], + ["\u017Eijama", -1, 38], + ["alama", -1, 104], + ["ijalama", 200, 47], + ["nalama", 200, 46], + ["elama", -1, 119], + ["ilama", -1, 116], + ["ramama", -1, 52], + ["lemama", -1, 51], + ["inama", -1, 11], + ["cinama", 207, 137], + ["\u010Dinama", 207, 89], + ["rama", -1, 52], + ["arama", 210, 53], + ["drama", 210, 54], + ["erama", 210, 55], + ["orama", 210, 56], + ["basama", -1, 135], + 
["gasama", -1, 131], + ["jasama", -1, 129], + ["kasama", -1, 133], + ["nasama", -1, 132], + ["tasama", -1, 130], + ["vasama", -1, 134], + ["esama", -1, 152], + ["isama", -1, 154], + ["etama", -1, 70], + ["estama", -1, 71], + ["istama", -1, 72], + ["kstama", -1, 73], + ["ostama", -1, 74], + ["avama", -1, 77], + ["evama", -1, 78], + ["ivama", -1, 79], + ["ba\u0161ama", -1, 63], + ["ga\u0161ama", -1, 64], + ["ja\u0161ama", -1, 61], + ["ka\u0161ama", -1, 62], + ["na\u0161ama", -1, 60], + ["ta\u0161ama", -1, 59], + ["va\u0161ama", -1, 65], + ["e\u0161ama", -1, 66], + ["i\u0161ama", -1, 67], + ["lema", -1, 51], + ["acima", -1, 124], + ["ecima", -1, 125], + ["ucima", -1, 126], + ["ajima", -1, 109], + ["cajima", 245, 26], + ["lajima", 245, 30], + ["rajima", 245, 31], + ["\u0107ajima", 245, 28], + ["\u010Dajima", 245, 27], + ["\u0111ajima", 245, 29], + ["bijima", -1, 32], + ["cijima", -1, 33], + ["dijima", -1, 34], + ["fijima", -1, 40], + ["gijima", -1, 39], + ["anjijima", -1, 84], + ["enjijima", -1, 85], + ["snjijima", -1, 122], + ["\u0161njijima", -1, 86], + ["kijima", -1, 95], + ["skijima", 261, 1], + ["\u0161kijima", 261, 2], + ["lijima", -1, 35], + ["elijima", 264, 83], + ["mijima", -1, 37], + ["nijima", -1, 13], + ["ganijima", 267, 9], + ["manijima", 267, 6], + ["panijima", 267, 7], + ["ranijima", 267, 8], + ["tanijima", 267, 5], + ["pijima", -1, 41], + ["rijima", -1, 42], + ["sijima", -1, 43], + ["osijima", 275, 123], + ["tijima", -1, 44], + ["atijima", 277, 120], + ["evitijima", 277, 92], + ["ovitijima", 277, 93], + ["astijima", 277, 94], + ["avijima", -1, 77], + ["evijima", -1, 78], + ["ivijima", -1, 79], + ["ovijima", -1, 80], + ["zijima", -1, 45], + ["o\u0161ijima", -1, 91], + ["\u017Eijima", -1, 38], + ["anjima", -1, 84], + ["enjima", -1, 85], + ["snjima", -1, 122], + ["\u0161njima", -1, 86], + ["kima", -1, 95], + ["skima", 293, 1], + ["\u0161kima", 293, 2], + ["alima", -1, 104], + ["ijalima", 296, 47], + ["nalima", 296, 46], + ["elima", -1, 83], + ["ilima", -1, 
116], + ["ozilima", 300, 48], + ["olima", -1, 50], + ["lemima", -1, 51], + ["nima", -1, 13], + ["anima", 304, 10], + ["inima", 304, 11], + ["cinima", 306, 137], + ["\u010Dinima", 306, 89], + ["onima", 304, 12], + ["arima", -1, 53], + ["drima", -1, 54], + ["erima", -1, 55], + ["orima", -1, 56], + ["basima", -1, 135], + ["gasima", -1, 131], + ["jasima", -1, 129], + ["kasima", -1, 133], + ["nasima", -1, 132], + ["tasima", -1, 130], + ["vasima", -1, 134], + ["esima", -1, 57], + ["isima", -1, 58], + ["osima", -1, 123], + ["atima", -1, 120], + ["ikatima", 324, 68], + ["latima", 324, 69], + ["etima", -1, 70], + ["evitima", -1, 92], + ["ovitima", -1, 93], + ["astima", -1, 94], + ["estima", -1, 71], + ["istima", -1, 72], + ["kstima", -1, 73], + ["ostima", -1, 74], + ["i\u0161tima", -1, 75], + ["avima", -1, 77], + ["evima", -1, 78], + ["ajevima", 337, 109], + ["cajevima", 338, 26], + ["lajevima", 338, 30], + ["rajevima", 338, 31], + ["\u0107ajevima", 338, 28], + ["\u010Dajevima", 338, 27], + ["\u0111ajevima", 338, 29], + ["ivima", -1, 79], + ["ovima", -1, 80], + ["govima", 346, 20], + ["ugovima", 347, 17], + ["lovima", 346, 82], + ["olovima", 349, 49], + ["movima", 346, 81], + ["onovima", 346, 12], + ["stvima", -1, 3], + ["\u0161tvima", -1, 4], + ["a\u0107ima", -1, 14], + ["e\u0107ima", -1, 15], + ["u\u0107ima", -1, 16], + ["ba\u0161ima", -1, 63], + ["ga\u0161ima", -1, 64], + ["ja\u0161ima", -1, 61], + ["ka\u0161ima", -1, 62], + ["na\u0161ima", -1, 60], + ["ta\u0161ima", -1, 59], + ["va\u0161ima", -1, 65], + ["e\u0161ima", -1, 66], + ["i\u0161ima", -1, 67], + ["o\u0161ima", -1, 91], + ["na", -1, 13], + ["ana", 368, 10], + ["acana", 369, 128], + ["urana", 369, 105], + ["tana", 369, 113], + ["avana", 369, 97], + ["evana", 369, 96], + ["ivana", 369, 98], + ["uvana", 369, 99], + ["a\u010Dana", 369, 102], + ["acena", 368, 124], + ["lucena", 368, 121], + ["a\u010Dena", 368, 101], + ["lu\u010Dena", 368, 117], + ["ina", 368, 11], + ["cina", 382, 137], + ["anina", 382, 10], + 
["\u010Dina", 382, 89], + ["ona", 368, 12], + ["ara", -1, 53], + ["dra", -1, 54], + ["era", -1, 55], + ["ora", -1, 56], + ["basa", -1, 135], + ["gasa", -1, 131], + ["jasa", -1, 129], + ["kasa", -1, 133], + ["nasa", -1, 132], + ["tasa", -1, 130], + ["vasa", -1, 134], + ["esa", -1, 57], + ["isa", -1, 58], + ["osa", -1, 123], + ["ata", -1, 120], + ["ikata", 401, 68], + ["lata", 401, 69], + ["eta", -1, 70], + ["evita", -1, 92], + ["ovita", -1, 93], + ["asta", -1, 94], + ["esta", -1, 71], + ["ista", -1, 72], + ["ksta", -1, 73], + ["osta", -1, 74], + ["nuta", -1, 13], + ["i\u0161ta", -1, 75], + ["ava", -1, 77], + ["eva", -1, 78], + ["ajeva", 415, 109], + ["cajeva", 416, 26], + ["lajeva", 416, 30], + ["rajeva", 416, 31], + ["\u0107ajeva", 416, 28], + ["\u010Dajeva", 416, 27], + ["\u0111ajeva", 416, 29], + ["iva", -1, 79], + ["ova", -1, 80], + ["gova", 424, 20], + ["ugova", 425, 17], + ["lova", 424, 82], + ["olova", 427, 49], + ["mova", 424, 81], + ["onova", 424, 12], + ["stva", -1, 3], + ["\u0161tva", -1, 4], + ["a\u0107a", -1, 14], + ["e\u0107a", -1, 15], + ["u\u0107a", -1, 16], + ["ba\u0161a", -1, 63], + ["ga\u0161a", -1, 64], + ["ja\u0161a", -1, 61], + ["ka\u0161a", -1, 62], + ["na\u0161a", -1, 60], + ["ta\u0161a", -1, 59], + ["va\u0161a", -1, 65], + ["e\u0161a", -1, 66], + ["i\u0161a", -1, 67], + ["o\u0161a", -1, 91], + ["ace", -1, 124], + ["ece", -1, 125], + ["uce", -1, 126], + ["luce", 448, 121], + ["astade", -1, 110], + ["istade", -1, 111], + ["ostade", -1, 112], + ["ge", -1, 20], + ["loge", 453, 19], + ["uge", 453, 18], + ["aje", -1, 104], + ["caje", 456, 26], + ["laje", 456, 30], + ["raje", 456, 31], + ["astaje", 456, 106], + ["istaje", 456, 107], + ["ostaje", 456, 108], + ["\u0107aje", 456, 28], + ["\u010Daje", 456, 27], + ["\u0111aje", 456, 29], + ["ije", -1, 116], + ["bije", 466, 32], + ["cije", 466, 33], + ["dije", 466, 34], + ["fije", 466, 40], + ["gije", 466, 39], + ["anjije", 466, 84], + ["enjije", 466, 85], + ["snjije", 466, 122], + ["\u0161njije", 466, 
86], + ["kije", 466, 95], + ["skije", 476, 1], + ["\u0161kije", 476, 2], + ["lije", 466, 35], + ["elije", 479, 83], + ["mije", 466, 37], + ["nije", 466, 13], + ["ganije", 482, 9], + ["manije", 482, 6], + ["panije", 482, 7], + ["ranije", 482, 8], + ["tanije", 482, 5], + ["pije", 466, 41], + ["rije", 466, 42], + ["sije", 466, 43], + ["osije", 490, 123], + ["tije", 466, 44], + ["atije", 492, 120], + ["evitije", 492, 92], + ["ovitije", 492, 93], + ["astije", 492, 94], + ["avije", 466, 77], + ["evije", 466, 78], + ["ivije", 466, 79], + ["ovije", 466, 80], + ["zije", 466, 45], + ["o\u0161ije", 466, 91], + ["\u017Eije", 466, 38], + ["anje", -1, 84], + ["enje", -1, 85], + ["snje", -1, 122], + ["\u0161nje", -1, 86], + ["uje", -1, 25], + ["lucuje", 508, 121], + ["iruje", 508, 100], + ["lu\u010Duje", 508, 117], + ["ke", -1, 95], + ["ske", 512, 1], + ["\u0161ke", 512, 2], + ["ale", -1, 104], + ["acale", 515, 128], + ["astajale", 515, 106], + ["istajale", 515, 107], + ["ostajale", 515, 108], + ["ijale", 515, 47], + ["injale", 515, 114], + ["nale", 515, 46], + ["irale", 515, 100], + ["urale", 515, 105], + ["tale", 515, 113], + ["astale", 525, 110], + ["istale", 525, 111], + ["ostale", 525, 112], + ["avale", 515, 97], + ["evale", 515, 96], + ["ivale", 515, 98], + ["ovale", 515, 76], + ["uvale", 515, 99], + ["a\u010Dale", 515, 102], + ["ele", -1, 83], + ["ile", -1, 116], + ["acile", 536, 124], + ["lucile", 536, 121], + ["nile", 536, 103], + ["rosile", 536, 127], + ["jetile", 536, 118], + ["ozile", 536, 48], + ["a\u010Dile", 536, 101], + ["lu\u010Dile", 536, 117], + ["ro\u0161ile", 536, 90], + ["ole", -1, 50], + ["asle", -1, 115], + ["nule", -1, 13], + ["rame", -1, 52], + ["leme", -1, 51], + ["acome", -1, 124], + ["ecome", -1, 125], + ["ucome", -1, 126], + ["anjome", -1, 84], + ["enjome", -1, 85], + ["snjome", -1, 122], + ["\u0161njome", -1, 86], + ["kome", -1, 95], + ["skome", 558, 1], + ["\u0161kome", 558, 2], + ["elome", -1, 83], + ["nome", -1, 13], + ["cinome", 562, 137], + 
["\u010Dinome", 562, 89], + ["osome", -1, 123], + ["atome", -1, 120], + ["evitome", -1, 92], + ["ovitome", -1, 93], + ["astome", -1, 94], + ["avome", -1, 77], + ["evome", -1, 78], + ["ivome", -1, 79], + ["ovome", -1, 80], + ["a\u0107ome", -1, 14], + ["e\u0107ome", -1, 15], + ["u\u0107ome", -1, 16], + ["o\u0161ome", -1, 91], + ["ne", -1, 13], + ["ane", 578, 10], + ["acane", 579, 128], + ["urane", 579, 105], + ["tane", 579, 113], + ["astane", 582, 110], + ["istane", 582, 111], + ["ostane", 582, 112], + ["avane", 579, 97], + ["evane", 579, 96], + ["ivane", 579, 98], + ["uvane", 579, 99], + ["a\u010Dane", 579, 102], + ["acene", 578, 124], + ["lucene", 578, 121], + ["a\u010Dene", 578, 101], + ["lu\u010Dene", 578, 117], + ["ine", 578, 11], + ["cine", 595, 137], + ["anine", 595, 10], + ["\u010Dine", 595, 89], + ["one", 578, 12], + ["are", -1, 53], + ["dre", -1, 54], + ["ere", -1, 55], + ["ore", -1, 56], + ["ase", -1, 161], + ["base", 604, 135], + ["acase", 604, 128], + ["gase", 604, 131], + ["jase", 604, 129], + ["astajase", 608, 138], + ["istajase", 608, 139], + ["ostajase", 608, 140], + ["injase", 608, 150], + ["kase", 604, 133], + ["nase", 604, 132], + ["irase", 604, 155], + ["urase", 604, 156], + ["tase", 604, 130], + ["vase", 604, 134], + ["avase", 618, 144], + ["evase", 618, 145], + ["ivase", 618, 146], + ["ovase", 618, 148], + ["uvase", 618, 147], + ["ese", -1, 57], + ["ise", -1, 58], + ["acise", 625, 124], + ["lucise", 625, 121], + ["rosise", 625, 127], + ["jetise", 625, 149], + ["ose", -1, 123], + ["astadose", 630, 141], + ["istadose", 630, 142], + ["ostadose", 630, 143], + ["ate", -1, 104], + ["acate", 634, 128], + ["ikate", 634, 68], + ["late", 634, 69], + ["irate", 634, 100], + ["urate", 634, 105], + ["tate", 634, 113], + ["avate", 634, 97], + ["evate", 634, 96], + ["ivate", 634, 98], + ["uvate", 634, 99], + ["a\u010Date", 634, 102], + ["ete", -1, 70], + ["astadete", 646, 110], + ["istadete", 646, 111], + ["ostadete", 646, 112], + ["astajete", 646, 106], + 
["istajete", 646, 107], + ["ostajete", 646, 108], + ["ijete", 646, 116], + ["injete", 646, 114], + ["ujete", 646, 25], + ["lucujete", 655, 121], + ["irujete", 655, 100], + ["lu\u010Dujete", 655, 117], + ["nete", 646, 13], + ["astanete", 659, 110], + ["istanete", 659, 111], + ["ostanete", 659, 112], + ["astete", 646, 115], + ["ite", -1, 116], + ["acite", 664, 124], + ["lucite", 664, 121], + ["nite", 664, 13], + ["astanite", 667, 110], + ["istanite", 667, 111], + ["ostanite", 667, 112], + ["rosite", 664, 127], + ["jetite", 664, 118], + ["astite", 664, 115], + ["evite", 664, 92], + ["ovite", 664, 93], + ["a\u010Dite", 664, 101], + ["lu\u010Dite", 664, 117], + ["ro\u0161ite", 664, 90], + ["ajte", -1, 104], + ["urajte", 679, 105], + ["tajte", 679, 113], + ["astajte", 681, 106], + ["istajte", 681, 107], + ["ostajte", 681, 108], + ["avajte", 679, 97], + ["evajte", 679, 96], + ["ivajte", 679, 98], + ["uvajte", 679, 99], + ["ijte", -1, 116], + ["lucujte", -1, 121], + ["irujte", -1, 100], + ["lu\u010Dujte", -1, 117], + ["aste", -1, 94], + ["acaste", 693, 128], + ["astajaste", 693, 106], + ["istajaste", 693, 107], + ["ostajaste", 693, 108], + ["injaste", 693, 114], + ["iraste", 693, 100], + ["uraste", 693, 105], + ["taste", 693, 113], + ["avaste", 693, 97], + ["evaste", 693, 96], + ["ivaste", 693, 98], + ["ovaste", 693, 76], + ["uvaste", 693, 99], + ["a\u010Daste", 693, 102], + ["este", -1, 71], + ["iste", -1, 72], + ["aciste", 709, 124], + ["luciste", 709, 121], + ["niste", 709, 103], + ["rosiste", 709, 127], + ["jetiste", 709, 118], + ["a\u010Diste", 709, 101], + ["lu\u010Diste", 709, 117], + ["ro\u0161iste", 709, 90], + ["kste", -1, 73], + ["oste", -1, 74], + ["astadoste", 719, 110], + ["istadoste", 719, 111], + ["ostadoste", 719, 112], + ["nuste", -1, 13], + ["i\u0161te", -1, 75], + ["ave", -1, 77], + ["eve", -1, 78], + ["ajeve", 726, 109], + ["cajeve", 727, 26], + ["lajeve", 727, 30], + ["rajeve", 727, 31], + ["\u0107ajeve", 727, 28], + ["\u010Dajeve", 727, 27], + 
["\u0111ajeve", 727, 29], + ["ive", -1, 79], + ["ove", -1, 80], + ["gove", 735, 20], + ["ugove", 736, 17], + ["love", 735, 82], + ["olove", 738, 49], + ["move", 735, 81], + ["onove", 735, 12], + ["a\u0107e", -1, 14], + ["e\u0107e", -1, 15], + ["u\u0107e", -1, 16], + ["a\u010De", -1, 101], + ["lu\u010De", -1, 117], + ["a\u0161e", -1, 104], + ["ba\u0161e", 747, 63], + ["ga\u0161e", 747, 64], + ["ja\u0161e", 747, 61], + ["astaja\u0161e", 750, 106], + ["istaja\u0161e", 750, 107], + ["ostaja\u0161e", 750, 108], + ["inja\u0161e", 750, 114], + ["ka\u0161e", 747, 62], + ["na\u0161e", 747, 60], + ["ira\u0161e", 747, 100], + ["ura\u0161e", 747, 105], + ["ta\u0161e", 747, 59], + ["va\u0161e", 747, 65], + ["ava\u0161e", 760, 97], + ["eva\u0161e", 760, 96], + ["iva\u0161e", 760, 98], + ["ova\u0161e", 760, 76], + ["uva\u0161e", 760, 99], + ["a\u010Da\u0161e", 747, 102], + ["e\u0161e", -1, 66], + ["i\u0161e", -1, 67], + ["jeti\u0161e", 768, 118], + ["a\u010Di\u0161e", 768, 101], + ["lu\u010Di\u0161e", 768, 117], + ["ro\u0161i\u0161e", 768, 90], + ["o\u0161e", -1, 91], + ["astado\u0161e", 773, 110], + ["istado\u0161e", 773, 111], + ["ostado\u0161e", 773, 112], + ["aceg", -1, 124], + ["eceg", -1, 125], + ["uceg", -1, 126], + ["anjijeg", -1, 84], + ["enjijeg", -1, 85], + ["snjijeg", -1, 122], + ["\u0161njijeg", -1, 86], + ["kijeg", -1, 95], + ["skijeg", 784, 1], + ["\u0161kijeg", 784, 2], + ["elijeg", -1, 83], + ["nijeg", -1, 13], + ["osijeg", -1, 123], + ["atijeg", -1, 120], + ["evitijeg", -1, 92], + ["ovitijeg", -1, 93], + ["astijeg", -1, 94], + ["avijeg", -1, 77], + ["evijeg", -1, 78], + ["ivijeg", -1, 79], + ["ovijeg", -1, 80], + ["o\u0161ijeg", -1, 91], + ["anjeg", -1, 84], + ["enjeg", -1, 85], + ["snjeg", -1, 122], + ["\u0161njeg", -1, 86], + ["keg", -1, 95], + ["eleg", -1, 83], + ["neg", -1, 13], + ["aneg", 805, 10], + ["eneg", 805, 87], + ["sneg", 805, 159], + ["\u0161neg", 805, 88], + ["oseg", -1, 123], + ["ateg", -1, 120], + ["aveg", -1, 77], + ["eveg", -1, 78], + ["iveg", 
-1, 79], + ["oveg", -1, 80], + ["a\u0107eg", -1, 14], + ["e\u0107eg", -1, 15], + ["u\u0107eg", -1, 16], + ["o\u0161eg", -1, 91], + ["acog", -1, 124], + ["ecog", -1, 125], + ["ucog", -1, 126], + ["anjog", -1, 84], + ["enjog", -1, 85], + ["snjog", -1, 122], + ["\u0161njog", -1, 86], + ["kog", -1, 95], + ["skog", 827, 1], + ["\u0161kog", 827, 2], + ["elog", -1, 83], + ["nog", -1, 13], + ["cinog", 831, 137], + ["\u010Dinog", 831, 89], + ["osog", -1, 123], + ["atog", -1, 120], + ["evitog", -1, 92], + ["ovitog", -1, 93], + ["astog", -1, 94], + ["avog", -1, 77], + ["evog", -1, 78], + ["ivog", -1, 79], + ["ovog", -1, 80], + ["a\u0107og", -1, 14], + ["e\u0107og", -1, 15], + ["u\u0107og", -1, 16], + ["o\u0161og", -1, 91], + ["ah", -1, 104], + ["acah", 847, 128], + ["astajah", 847, 106], + ["istajah", 847, 107], + ["ostajah", 847, 108], + ["injah", 847, 114], + ["irah", 847, 100], + ["urah", 847, 105], + ["tah", 847, 113], + ["avah", 847, 97], + ["evah", 847, 96], + ["ivah", 847, 98], + ["ovah", 847, 76], + ["uvah", 847, 99], + ["a\u010Dah", 847, 102], + ["ih", -1, 116], + ["acih", 862, 124], + ["ecih", 862, 125], + ["ucih", 862, 126], + ["lucih", 865, 121], + ["anjijih", 862, 84], + ["enjijih", 862, 85], + ["snjijih", 862, 122], + ["\u0161njijih", 862, 86], + ["kijih", 862, 95], + ["skijih", 871, 1], + ["\u0161kijih", 871, 2], + ["elijih", 862, 83], + ["nijih", 862, 13], + ["osijih", 862, 123], + ["atijih", 862, 120], + ["evitijih", 862, 92], + ["ovitijih", 862, 93], + ["astijih", 862, 94], + ["avijih", 862, 77], + ["evijih", 862, 78], + ["ivijih", 862, 79], + ["ovijih", 862, 80], + ["o\u0161ijih", 862, 91], + ["anjih", 862, 84], + ["enjih", 862, 85], + ["snjih", 862, 122], + ["\u0161njih", 862, 86], + ["kih", 862, 95], + ["skih", 890, 1], + ["\u0161kih", 890, 2], + ["elih", 862, 83], + ["nih", 862, 13], + ["cinih", 894, 137], + ["\u010Dinih", 894, 89], + ["osih", 862, 123], + ["rosih", 897, 127], + ["atih", 862, 120], + ["jetih", 862, 118], + ["evitih", 862, 92], + 
["ovitih", 862, 93], + ["astih", 862, 94], + ["avih", 862, 77], + ["evih", 862, 78], + ["ivih", 862, 79], + ["ovih", 862, 80], + ["a\u0107ih", 862, 14], + ["e\u0107ih", 862, 15], + ["u\u0107ih", 862, 16], + ["a\u010Dih", 862, 101], + ["lu\u010Dih", 862, 117], + ["o\u0161ih", 862, 91], + ["ro\u0161ih", 913, 90], + ["astadoh", -1, 110], + ["istadoh", -1, 111], + ["ostadoh", -1, 112], + ["acuh", -1, 124], + ["ecuh", -1, 125], + ["ucuh", -1, 126], + ["a\u0107uh", -1, 14], + ["e\u0107uh", -1, 15], + ["u\u0107uh", -1, 16], + ["aci", -1, 124], + ["aceci", -1, 124], + ["ieci", -1, 162], + ["ajuci", -1, 161], + ["irajuci", 927, 155], + ["urajuci", 927, 156], + ["astajuci", 927, 138], + ["istajuci", 927, 139], + ["ostajuci", 927, 140], + ["avajuci", 927, 144], + ["evajuci", 927, 145], + ["ivajuci", 927, 146], + ["uvajuci", 927, 147], + ["ujuci", -1, 157], + ["lucujuci", 937, 121], + ["irujuci", 937, 155], + ["luci", -1, 121], + ["nuci", -1, 164], + ["etuci", -1, 153], + ["astuci", -1, 136], + ["gi", -1, 20], + ["ugi", 944, 18], + ["aji", -1, 109], + ["caji", 946, 26], + ["laji", 946, 30], + ["raji", 946, 31], + ["\u0107aji", 946, 28], + ["\u010Daji", 946, 27], + ["\u0111aji", 946, 29], + ["biji", -1, 32], + ["ciji", -1, 33], + ["diji", -1, 34], + ["fiji", -1, 40], + ["giji", -1, 39], + ["anjiji", -1, 84], + ["enjiji", -1, 85], + ["snjiji", -1, 122], + ["\u0161njiji", -1, 86], + ["kiji", -1, 95], + ["skiji", 962, 1], + ["\u0161kiji", 962, 2], + ["liji", -1, 35], + ["eliji", 965, 83], + ["miji", -1, 37], + ["niji", -1, 13], + ["ganiji", 968, 9], + ["maniji", 968, 6], + ["paniji", 968, 7], + ["raniji", 968, 8], + ["taniji", 968, 5], + ["piji", -1, 41], + ["riji", -1, 42], + ["siji", -1, 43], + ["osiji", 976, 123], + ["tiji", -1, 44], + ["atiji", 978, 120], + ["evitiji", 978, 92], + ["ovitiji", 978, 93], + ["astiji", 978, 94], + ["aviji", -1, 77], + ["eviji", -1, 78], + ["iviji", -1, 79], + ["oviji", -1, 80], + ["ziji", -1, 45], + ["o\u0161iji", -1, 91], + ["\u017Eiji", -1, 38], 
+ ["anji", -1, 84], + ["enji", -1, 85], + ["snji", -1, 122], + ["\u0161nji", -1, 86], + ["ki", -1, 95], + ["ski", 994, 1], + ["\u0161ki", 994, 2], + ["ali", -1, 104], + ["acali", 997, 128], + ["astajali", 997, 106], + ["istajali", 997, 107], + ["ostajali", 997, 108], + ["ijali", 997, 47], + ["injali", 997, 114], + ["nali", 997, 46], + ["irali", 997, 100], + ["urali", 997, 105], + ["tali", 997, 113], + ["astali", 1007, 110], + ["istali", 1007, 111], + ["ostali", 1007, 112], + ["avali", 997, 97], + ["evali", 997, 96], + ["ivali", 997, 98], + ["ovali", 997, 76], + ["uvali", 997, 99], + ["a\u010Dali", 997, 102], + ["eli", -1, 83], + ["ili", -1, 116], + ["acili", 1018, 124], + ["lucili", 1018, 121], + ["nili", 1018, 103], + ["rosili", 1018, 127], + ["jetili", 1018, 118], + ["ozili", 1018, 48], + ["a\u010Dili", 1018, 101], + ["lu\u010Dili", 1018, 117], + ["ro\u0161ili", 1018, 90], + ["oli", -1, 50], + ["asli", -1, 115], + ["nuli", -1, 13], + ["rami", -1, 52], + ["lemi", -1, 51], + ["ni", -1, 13], + ["ani", 1033, 10], + ["acani", 1034, 128], + ["urani", 1034, 105], + ["tani", 1034, 113], + ["avani", 1034, 97], + ["evani", 1034, 96], + ["ivani", 1034, 98], + ["uvani", 1034, 99], + ["a\u010Dani", 1034, 102], + ["aceni", 1033, 124], + ["luceni", 1033, 121], + ["a\u010Deni", 1033, 101], + ["lu\u010Deni", 1033, 117], + ["ini", 1033, 11], + ["cini", 1047, 137], + ["\u010Dini", 1047, 89], + ["oni", 1033, 12], + ["ari", -1, 53], + ["dri", -1, 54], + ["eri", -1, 55], + ["ori", -1, 56], + ["basi", -1, 135], + ["gasi", -1, 131], + ["jasi", -1, 129], + ["kasi", -1, 133], + ["nasi", -1, 132], + ["tasi", -1, 130], + ["vasi", -1, 134], + ["esi", -1, 152], + ["isi", -1, 154], + ["osi", -1, 123], + ["avsi", -1, 161], + ["acavsi", 1065, 128], + ["iravsi", 1065, 155], + ["tavsi", 1065, 160], + ["etavsi", 1068, 153], + ["astavsi", 1068, 141], + ["istavsi", 1068, 142], + ["ostavsi", 1068, 143], + ["ivsi", -1, 162], + ["nivsi", 1073, 158], + ["rosivsi", 1073, 127], + ["nuvsi", -1, 164], + 
["ati", -1, 104], + ["acati", 1077, 128], + ["astajati", 1077, 106], + ["istajati", 1077, 107], + ["ostajati", 1077, 108], + ["injati", 1077, 114], + ["ikati", 1077, 68], + ["lati", 1077, 69], + ["irati", 1077, 100], + ["urati", 1077, 105], + ["tati", 1077, 113], + ["astati", 1087, 110], + ["istati", 1087, 111], + ["ostati", 1087, 112], + ["avati", 1077, 97], + ["evati", 1077, 96], + ["ivati", 1077, 98], + ["ovati", 1077, 76], + ["uvati", 1077, 99], + ["a\u010Dati", 1077, 102], + ["eti", -1, 70], + ["iti", -1, 116], + ["aciti", 1098, 124], + ["luciti", 1098, 121], + ["niti", 1098, 103], + ["rositi", 1098, 127], + ["jetiti", 1098, 118], + ["eviti", 1098, 92], + ["oviti", 1098, 93], + ["a\u010Diti", 1098, 101], + ["lu\u010Diti", 1098, 117], + ["ro\u0161iti", 1098, 90], + ["asti", -1, 94], + ["esti", -1, 71], + ["isti", -1, 72], + ["ksti", -1, 73], + ["osti", -1, 74], + ["nuti", -1, 13], + ["avi", -1, 77], + ["evi", -1, 78], + ["ajevi", 1116, 109], + ["cajevi", 1117, 26], + ["lajevi", 1117, 30], + ["rajevi", 1117, 31], + ["\u0107ajevi", 1117, 28], + ["\u010Dajevi", 1117, 27], + ["\u0111ajevi", 1117, 29], + ["ivi", -1, 79], + ["ovi", -1, 80], + ["govi", 1125, 20], + ["ugovi", 1126, 17], + ["lovi", 1125, 82], + ["olovi", 1128, 49], + ["movi", 1125, 81], + ["onovi", 1125, 12], + ["ie\u0107i", -1, 116], + ["a\u010De\u0107i", -1, 101], + ["aju\u0107i", -1, 104], + ["iraju\u0107i", 1134, 100], + ["uraju\u0107i", 1134, 105], + ["astaju\u0107i", 1134, 106], + ["istaju\u0107i", 1134, 107], + ["ostaju\u0107i", 1134, 108], + ["avaju\u0107i", 1134, 97], + ["evaju\u0107i", 1134, 96], + ["ivaju\u0107i", 1134, 98], + ["uvaju\u0107i", 1134, 99], + ["uju\u0107i", -1, 25], + ["iruju\u0107i", 1144, 100], + ["lu\u010Duju\u0107i", 1144, 117], + ["nu\u0107i", -1, 13], + ["etu\u0107i", -1, 70], + ["astu\u0107i", -1, 115], + ["a\u010Di", -1, 101], + ["lu\u010Di", -1, 117], + ["ba\u0161i", -1, 63], + ["ga\u0161i", -1, 64], + ["ja\u0161i", -1, 61], + ["ka\u0161i", -1, 62], + ["na\u0161i", -1, 
60], + ["ta\u0161i", -1, 59], + ["va\u0161i", -1, 65], + ["e\u0161i", -1, 66], + ["i\u0161i", -1, 67], + ["o\u0161i", -1, 91], + ["av\u0161i", -1, 104], + ["irav\u0161i", 1162, 100], + ["tav\u0161i", 1162, 113], + ["etav\u0161i", 1164, 70], + ["astav\u0161i", 1164, 110], + ["istav\u0161i", 1164, 111], + ["ostav\u0161i", 1164, 112], + ["a\u010Dav\u0161i", 1162, 102], + ["iv\u0161i", -1, 116], + ["niv\u0161i", 1170, 103], + ["ro\u0161iv\u0161i", 1170, 90], + ["nuv\u0161i", -1, 13], + ["aj", -1, 104], + ["uraj", 1174, 105], + ["taj", 1174, 113], + ["avaj", 1174, 97], + ["evaj", 1174, 96], + ["ivaj", 1174, 98], + ["uvaj", 1174, 99], + ["ij", -1, 116], + ["acoj", -1, 124], + ["ecoj", -1, 125], + ["ucoj", -1, 126], + ["anjijoj", -1, 84], + ["enjijoj", -1, 85], + ["snjijoj", -1, 122], + ["\u0161njijoj", -1, 86], + ["kijoj", -1, 95], + ["skijoj", 1189, 1], + ["\u0161kijoj", 1189, 2], + ["elijoj", -1, 83], + ["nijoj", -1, 13], + ["osijoj", -1, 123], + ["evitijoj", -1, 92], + ["ovitijoj", -1, 93], + ["astijoj", -1, 94], + ["avijoj", -1, 77], + ["evijoj", -1, 78], + ["ivijoj", -1, 79], + ["ovijoj", -1, 80], + ["o\u0161ijoj", -1, 91], + ["anjoj", -1, 84], + ["enjoj", -1, 85], + ["snjoj", -1, 122], + ["\u0161njoj", -1, 86], + ["koj", -1, 95], + ["skoj", 1207, 1], + ["\u0161koj", 1207, 2], + ["aloj", -1, 104], + ["eloj", -1, 83], + ["noj", -1, 13], + ["cinoj", 1212, 137], + ["\u010Dinoj", 1212, 89], + ["osoj", -1, 123], + ["atoj", -1, 120], + ["evitoj", -1, 92], + ["ovitoj", -1, 93], + ["astoj", -1, 94], + ["avoj", -1, 77], + ["evoj", -1, 78], + ["ivoj", -1, 79], + ["ovoj", -1, 80], + ["a\u0107oj", -1, 14], + ["e\u0107oj", -1, 15], + ["u\u0107oj", -1, 16], + ["o\u0161oj", -1, 91], + ["lucuj", -1, 121], + ["iruj", -1, 100], + ["lu\u010Duj", -1, 117], + ["al", -1, 104], + ["iral", 1231, 100], + ["ural", 1231, 105], + ["el", -1, 119], + ["il", -1, 116], + ["am", -1, 104], + ["acam", 1236, 128], + ["iram", 1236, 100], + ["uram", 1236, 105], + ["tam", 1236, 113], + ["avam", 1236, 
97], + ["evam", 1236, 96], + ["ivam", 1236, 98], + ["uvam", 1236, 99], + ["a\u010Dam", 1236, 102], + ["em", -1, 119], + ["acem", 1246, 124], + ["ecem", 1246, 125], + ["ucem", 1246, 126], + ["astadem", 1246, 110], + ["istadem", 1246, 111], + ["ostadem", 1246, 112], + ["ajem", 1246, 104], + ["cajem", 1253, 26], + ["lajem", 1253, 30], + ["rajem", 1253, 31], + ["astajem", 1253, 106], + ["istajem", 1253, 107], + ["ostajem", 1253, 108], + ["\u0107ajem", 1253, 28], + ["\u010Dajem", 1253, 27], + ["\u0111ajem", 1253, 29], + ["ijem", 1246, 116], + ["anjijem", 1263, 84], + ["enjijem", 1263, 85], + ["snjijem", 1263, 123], + ["\u0161njijem", 1263, 86], + ["kijem", 1263, 95], + ["skijem", 1268, 1], + ["\u0161kijem", 1268, 2], + ["lijem", 1263, 24], + ["elijem", 1271, 83], + ["nijem", 1263, 13], + ["rarijem", 1263, 21], + ["sijem", 1263, 23], + ["osijem", 1275, 123], + ["atijem", 1263, 120], + ["evitijem", 1263, 92], + ["ovitijem", 1263, 93], + ["otijem", 1263, 22], + ["astijem", 1263, 94], + ["avijem", 1263, 77], + ["evijem", 1263, 78], + ["ivijem", 1263, 79], + ["ovijem", 1263, 80], + ["o\u0161ijem", 1263, 91], + ["anjem", 1246, 84], + ["enjem", 1246, 85], + ["injem", 1246, 114], + ["snjem", 1246, 122], + ["\u0161njem", 1246, 86], + ["ujem", 1246, 25], + ["lucujem", 1292, 121], + ["irujem", 1292, 100], + ["lu\u010Dujem", 1292, 117], + ["kem", 1246, 95], + ["skem", 1296, 1], + ["\u0161kem", 1296, 2], + ["elem", 1246, 83], + ["nem", 1246, 13], + ["anem", 1300, 10], + ["astanem", 1301, 110], + ["istanem", 1301, 111], + ["ostanem", 1301, 112], + ["enem", 1300, 87], + ["snem", 1300, 159], + ["\u0161nem", 1300, 88], + ["basem", 1246, 135], + ["gasem", 1246, 131], + ["jasem", 1246, 129], + ["kasem", 1246, 133], + ["nasem", 1246, 132], + ["tasem", 1246, 130], + ["vasem", 1246, 134], + ["esem", 1246, 152], + ["isem", 1246, 154], + ["osem", 1246, 123], + ["atem", 1246, 120], + ["etem", 1246, 70], + ["evitem", 1246, 92], + ["ovitem", 1246, 93], + ["astem", 1246, 94], + ["istem", 1246, 
151], + ["i\u0161tem", 1246, 75], + ["avem", 1246, 77], + ["evem", 1246, 78], + ["ivem", 1246, 79], + ["a\u0107em", 1246, 14], + ["e\u0107em", 1246, 15], + ["u\u0107em", 1246, 16], + ["ba\u0161em", 1246, 63], + ["ga\u0161em", 1246, 64], + ["ja\u0161em", 1246, 61], + ["ka\u0161em", 1246, 62], + ["na\u0161em", 1246, 60], + ["ta\u0161em", 1246, 59], + ["va\u0161em", 1246, 65], + ["e\u0161em", 1246, 66], + ["i\u0161em", 1246, 67], + ["o\u0161em", 1246, 91], + ["im", -1, 116], + ["acim", 1341, 124], + ["ecim", 1341, 125], + ["ucim", 1341, 126], + ["lucim", 1344, 121], + ["anjijim", 1341, 84], + ["enjijim", 1341, 85], + ["snjijim", 1341, 122], + ["\u0161njijim", 1341, 86], + ["kijim", 1341, 95], + ["skijim", 1350, 1], + ["\u0161kijim", 1350, 2], + ["elijim", 1341, 83], + ["nijim", 1341, 13], + ["osijim", 1341, 123], + ["atijim", 1341, 120], + ["evitijim", 1341, 92], + ["ovitijim", 1341, 93], + ["astijim", 1341, 94], + ["avijim", 1341, 77], + ["evijim", 1341, 78], + ["ivijim", 1341, 79], + ["ovijim", 1341, 80], + ["o\u0161ijim", 1341, 91], + ["anjim", 1341, 84], + ["enjim", 1341, 85], + ["snjim", 1341, 122], + ["\u0161njim", 1341, 86], + ["kim", 1341, 95], + ["skim", 1369, 1], + ["\u0161kim", 1369, 2], + ["elim", 1341, 83], + ["nim", 1341, 13], + ["cinim", 1373, 137], + ["\u010Dinim", 1373, 89], + ["osim", 1341, 123], + ["rosim", 1376, 127], + ["atim", 1341, 120], + ["jetim", 1341, 118], + ["evitim", 1341, 92], + ["ovitim", 1341, 93], + ["astim", 1341, 94], + ["avim", 1341, 77], + ["evim", 1341, 78], + ["ivim", 1341, 79], + ["ovim", 1341, 80], + ["a\u0107im", 1341, 14], + ["e\u0107im", 1341, 15], + ["u\u0107im", 1341, 16], + ["a\u010Dim", 1341, 101], + ["lu\u010Dim", 1341, 117], + ["o\u0161im", 1341, 91], + ["ro\u0161im", 1392, 90], + ["acom", -1, 124], + ["ecom", -1, 125], + ["ucom", -1, 126], + ["gom", -1, 20], + ["logom", 1397, 19], + ["ugom", 1397, 18], + ["bijom", -1, 32], + ["cijom", -1, 33], + ["dijom", -1, 34], + ["fijom", -1, 40], + ["gijom", -1, 39], + ["lijom", 
-1, 35], + ["mijom", -1, 37], + ["nijom", -1, 36], + ["ganijom", 1407, 9], + ["manijom", 1407, 6], + ["panijom", 1407, 7], + ["ranijom", 1407, 8], + ["tanijom", 1407, 5], + ["pijom", -1, 41], + ["rijom", -1, 42], + ["sijom", -1, 43], + ["tijom", -1, 44], + ["zijom", -1, 45], + ["\u017Eijom", -1, 38], + ["anjom", -1, 84], + ["enjom", -1, 85], + ["snjom", -1, 122], + ["\u0161njom", -1, 86], + ["kom", -1, 95], + ["skom", 1423, 1], + ["\u0161kom", 1423, 2], + ["alom", -1, 104], + ["ijalom", 1426, 47], + ["nalom", 1426, 46], + ["elom", -1, 83], + ["ilom", -1, 116], + ["ozilom", 1430, 48], + ["olom", -1, 50], + ["ramom", -1, 52], + ["lemom", -1, 51], + ["nom", -1, 13], + ["anom", 1435, 10], + ["inom", 1435, 11], + ["cinom", 1437, 137], + ["aninom", 1437, 10], + ["\u010Dinom", 1437, 89], + ["onom", 1435, 12], + ["arom", -1, 53], + ["drom", -1, 54], + ["erom", -1, 55], + ["orom", -1, 56], + ["basom", -1, 135], + ["gasom", -1, 131], + ["jasom", -1, 129], + ["kasom", -1, 133], + ["nasom", -1, 132], + ["tasom", -1, 130], + ["vasom", -1, 134], + ["esom", -1, 57], + ["isom", -1, 58], + ["osom", -1, 123], + ["atom", -1, 120], + ["ikatom", 1456, 68], + ["latom", 1456, 69], + ["etom", -1, 70], + ["evitom", -1, 92], + ["ovitom", -1, 93], + ["astom", -1, 94], + ["estom", -1, 71], + ["istom", -1, 72], + ["kstom", -1, 73], + ["ostom", -1, 74], + ["avom", -1, 77], + ["evom", -1, 78], + ["ivom", -1, 79], + ["ovom", -1, 80], + ["lovom", 1470, 82], + ["movom", 1470, 81], + ["stvom", -1, 3], + ["\u0161tvom", -1, 4], + ["a\u0107om", -1, 14], + ["e\u0107om", -1, 15], + ["u\u0107om", -1, 16], + ["ba\u0161om", -1, 63], + ["ga\u0161om", -1, 64], + ["ja\u0161om", -1, 61], + ["ka\u0161om", -1, 62], + ["na\u0161om", -1, 60], + ["ta\u0161om", -1, 59], + ["va\u0161om", -1, 65], + ["e\u0161om", -1, 66], + ["i\u0161om", -1, 67], + ["o\u0161om", -1, 91], + ["an", -1, 104], + ["acan", 1488, 128], + ["iran", 1488, 100], + ["uran", 1488, 105], + ["tan", 1488, 113], + ["avan", 1488, 97], + ["evan", 1488, 
96], + ["ivan", 1488, 98], + ["uvan", 1488, 99], + ["a\u010Dan", 1488, 102], + ["acen", -1, 124], + ["lucen", -1, 121], + ["a\u010Den", -1, 101], + ["lu\u010Den", -1, 117], + ["anin", -1, 10], + ["ao", -1, 104], + ["acao", 1503, 128], + ["astajao", 1503, 106], + ["istajao", 1503, 107], + ["ostajao", 1503, 108], + ["injao", 1503, 114], + ["irao", 1503, 100], + ["urao", 1503, 105], + ["tao", 1503, 113], + ["astao", 1511, 110], + ["istao", 1511, 111], + ["ostao", 1511, 112], + ["avao", 1503, 97], + ["evao", 1503, 96], + ["ivao", 1503, 98], + ["ovao", 1503, 76], + ["uvao", 1503, 99], + ["a\u010Dao", 1503, 102], + ["go", -1, 20], + ["ugo", 1521, 18], + ["io", -1, 116], + ["acio", 1523, 124], + ["lucio", 1523, 121], + ["lio", 1523, 24], + ["nio", 1523, 103], + ["rario", 1523, 21], + ["sio", 1523, 23], + ["rosio", 1529, 127], + ["jetio", 1523, 118], + ["otio", 1523, 22], + ["a\u010Dio", 1523, 101], + ["lu\u010Dio", 1523, 117], + ["ro\u0161io", 1523, 90], + ["bijo", -1, 32], + ["cijo", -1, 33], + ["dijo", -1, 34], + ["fijo", -1, 40], + ["gijo", -1, 39], + ["lijo", -1, 35], + ["mijo", -1, 37], + ["nijo", -1, 36], + ["pijo", -1, 41], + ["rijo", -1, 42], + ["sijo", -1, 43], + ["tijo", -1, 44], + ["zijo", -1, 45], + ["\u017Eijo", -1, 38], + ["anjo", -1, 84], + ["enjo", -1, 85], + ["snjo", -1, 122], + ["\u0161njo", -1, 86], + ["ko", -1, 95], + ["sko", 1554, 1], + ["\u0161ko", 1554, 2], + ["alo", -1, 104], + ["acalo", 1557, 128], + ["astajalo", 1557, 106], + ["istajalo", 1557, 107], + ["ostajalo", 1557, 108], + ["ijalo", 1557, 47], + ["injalo", 1557, 114], + ["nalo", 1557, 46], + ["iralo", 1557, 100], + ["uralo", 1557, 105], + ["talo", 1557, 113], + ["astalo", 1567, 110], + ["istalo", 1567, 111], + ["ostalo", 1567, 112], + ["avalo", 1557, 97], + ["evalo", 1557, 96], + ["ivalo", 1557, 98], + ["ovalo", 1557, 76], + ["uvalo", 1557, 99], + ["a\u010Dalo", 1557, 102], + ["elo", -1, 83], + ["ilo", -1, 116], + ["acilo", 1578, 124], + ["lucilo", 1578, 121], + ["nilo", 1578, 103], + 
["rosilo", 1578, 127], + ["jetilo", 1578, 118], + ["a\u010Dilo", 1578, 101], + ["lu\u010Dilo", 1578, 117], + ["ro\u0161ilo", 1578, 90], + ["aslo", -1, 115], + ["nulo", -1, 13], + ["amo", -1, 104], + ["acamo", 1589, 128], + ["ramo", 1589, 52], + ["iramo", 1591, 100], + ["uramo", 1591, 105], + ["tamo", 1589, 113], + ["avamo", 1589, 97], + ["evamo", 1589, 96], + ["ivamo", 1589, 98], + ["uvamo", 1589, 99], + ["a\u010Damo", 1589, 102], + ["emo", -1, 119], + ["astademo", 1600, 110], + ["istademo", 1600, 111], + ["ostademo", 1600, 112], + ["astajemo", 1600, 106], + ["istajemo", 1600, 107], + ["ostajemo", 1600, 108], + ["ijemo", 1600, 116], + ["injemo", 1600, 114], + ["ujemo", 1600, 25], + ["lucujemo", 1609, 121], + ["irujemo", 1609, 100], + ["lu\u010Dujemo", 1609, 117], + ["lemo", 1600, 51], + ["nemo", 1600, 13], + ["astanemo", 1614, 110], + ["istanemo", 1614, 111], + ["ostanemo", 1614, 112], + ["etemo", 1600, 70], + ["astemo", 1600, 115], + ["imo", -1, 116], + ["acimo", 1620, 124], + ["lucimo", 1620, 121], + ["nimo", 1620, 13], + ["astanimo", 1623, 110], + ["istanimo", 1623, 111], + ["ostanimo", 1623, 112], + ["rosimo", 1620, 127], + ["etimo", 1620, 70], + ["jetimo", 1628, 118], + ["astimo", 1620, 115], + ["a\u010Dimo", 1620, 101], + ["lu\u010Dimo", 1620, 117], + ["ro\u0161imo", 1620, 90], + ["ajmo", -1, 104], + ["urajmo", 1634, 105], + ["tajmo", 1634, 113], + ["astajmo", 1636, 106], + ["istajmo", 1636, 107], + ["ostajmo", 1636, 108], + ["avajmo", 1634, 97], + ["evajmo", 1634, 96], + ["ivajmo", 1634, 98], + ["uvajmo", 1634, 99], + ["ijmo", -1, 116], + ["ujmo", -1, 25], + ["lucujmo", 1645, 121], + ["irujmo", 1645, 100], + ["lu\u010Dujmo", 1645, 117], + ["asmo", -1, 104], + ["acasmo", 1649, 128], + ["astajasmo", 1649, 106], + ["istajasmo", 1649, 107], + ["ostajasmo", 1649, 108], + ["injasmo", 1649, 114], + ["irasmo", 1649, 100], + ["urasmo", 1649, 105], + ["tasmo", 1649, 113], + ["avasmo", 1649, 97], + ["evasmo", 1649, 96], + ["ivasmo", 1649, 98], + ["ovasmo", 1649, 76], + 
["uvasmo", 1649, 99], + ["a\u010Dasmo", 1649, 102], + ["ismo", -1, 116], + ["acismo", 1664, 124], + ["lucismo", 1664, 121], + ["nismo", 1664, 103], + ["rosismo", 1664, 127], + ["jetismo", 1664, 118], + ["a\u010Dismo", 1664, 101], + ["lu\u010Dismo", 1664, 117], + ["ro\u0161ismo", 1664, 90], + ["astadosmo", -1, 110], + ["istadosmo", -1, 111], + ["ostadosmo", -1, 112], + ["nusmo", -1, 13], + ["no", -1, 13], + ["ano", 1677, 104], + ["acano", 1678, 128], + ["urano", 1678, 105], + ["tano", 1678, 113], + ["avano", 1678, 97], + ["evano", 1678, 96], + ["ivano", 1678, 98], + ["uvano", 1678, 99], + ["a\u010Dano", 1678, 102], + ["aceno", 1677, 124], + ["luceno", 1677, 121], + ["a\u010Deno", 1677, 101], + ["lu\u010Deno", 1677, 117], + ["ino", 1677, 11], + ["cino", 1691, 137], + ["\u010Dino", 1691, 89], + ["ato", -1, 120], + ["ikato", 1694, 68], + ["lato", 1694, 69], + ["eto", -1, 70], + ["evito", -1, 92], + ["ovito", -1, 93], + ["asto", -1, 94], + ["esto", -1, 71], + ["isto", -1, 72], + ["ksto", -1, 73], + ["osto", -1, 74], + ["nuto", -1, 13], + ["nuo", -1, 13], + ["avo", -1, 77], + ["evo", -1, 78], + ["ivo", -1, 79], + ["ovo", -1, 80], + ["stvo", -1, 3], + ["\u0161tvo", -1, 4], + ["as", -1, 161], + ["acas", 1713, 128], + ["iras", 1713, 155], + ["uras", 1713, 156], + ["tas", 1713, 160], + ["avas", 1713, 144], + ["evas", 1713, 145], + ["ivas", 1713, 146], + ["uvas", 1713, 147], + ["es", -1, 163], + ["astades", 1722, 141], + ["istades", 1722, 142], + ["ostades", 1722, 143], + ["astajes", 1722, 138], + ["istajes", 1722, 139], + ["ostajes", 1722, 140], + ["ijes", 1722, 162], + ["injes", 1722, 150], + ["ujes", 1722, 157], + ["lucujes", 1731, 121], + ["irujes", 1731, 155], + ["nes", 1722, 164], + ["astanes", 1734, 141], + ["istanes", 1734, 142], + ["ostanes", 1734, 143], + ["etes", 1722, 153], + ["astes", 1722, 136], + ["is", -1, 162], + ["acis", 1740, 124], + ["lucis", 1740, 121], + ["nis", 1740, 158], + ["rosis", 1740, 127], + ["jetis", 1740, 149], + ["at", -1, 104], + ["acat", 
1746, 128], + ["astajat", 1746, 106], + ["istajat", 1746, 107], + ["ostajat", 1746, 108], + ["injat", 1746, 114], + ["irat", 1746, 100], + ["urat", 1746, 105], + ["tat", 1746, 113], + ["astat", 1754, 110], + ["istat", 1754, 111], + ["ostat", 1754, 112], + ["avat", 1746, 97], + ["evat", 1746, 96], + ["ivat", 1746, 98], + ["irivat", 1760, 100], + ["ovat", 1746, 76], + ["uvat", 1746, 99], + ["a\u010Dat", 1746, 102], + ["it", -1, 116], + ["acit", 1765, 124], + ["lucit", 1765, 121], + ["rosit", 1765, 127], + ["jetit", 1765, 118], + ["a\u010Dit", 1765, 101], + ["lu\u010Dit", 1765, 117], + ["ro\u0161it", 1765, 90], + ["nut", -1, 13], + ["astadu", -1, 110], + ["istadu", -1, 111], + ["ostadu", -1, 112], + ["gu", -1, 20], + ["logu", 1777, 19], + ["ugu", 1777, 18], + ["ahu", -1, 104], + ["acahu", 1780, 128], + ["astajahu", 1780, 106], + ["istajahu", 1780, 107], + ["ostajahu", 1780, 108], + ["injahu", 1780, 114], + ["irahu", 1780, 100], + ["urahu", 1780, 105], + ["avahu", 1780, 97], + ["evahu", 1780, 96], + ["ivahu", 1780, 98], + ["ovahu", 1780, 76], + ["uvahu", 1780, 99], + ["a\u010Dahu", 1780, 102], + ["aju", -1, 104], + ["caju", 1794, 26], + ["acaju", 1795, 128], + ["laju", 1794, 30], + ["raju", 1794, 31], + ["iraju", 1798, 100], + ["uraju", 1798, 105], + ["taju", 1794, 113], + ["astaju", 1801, 106], + ["istaju", 1801, 107], + ["ostaju", 1801, 108], + ["avaju", 1794, 97], + ["evaju", 1794, 96], + ["ivaju", 1794, 98], + ["uvaju", 1794, 99], + ["\u0107aju", 1794, 28], + ["\u010Daju", 1794, 27], + ["a\u010Daju", 1810, 102], + ["\u0111aju", 1794, 29], + ["iju", -1, 116], + ["biju", 1813, 32], + ["ciju", 1813, 33], + ["diju", 1813, 34], + ["fiju", 1813, 40], + ["giju", 1813, 39], + ["anjiju", 1813, 84], + ["enjiju", 1813, 85], + ["snjiju", 1813, 122], + ["\u0161njiju", 1813, 86], + ["kiju", 1813, 95], + ["liju", 1813, 24], + ["eliju", 1824, 83], + ["miju", 1813, 37], + ["niju", 1813, 13], + ["ganiju", 1827, 9], + ["maniju", 1827, 6], + ["paniju", 1827, 7], + ["raniju", 1827, 8], 
+ ["taniju", 1827, 5], + ["piju", 1813, 41], + ["riju", 1813, 42], + ["rariju", 1834, 21], + ["siju", 1813, 23], + ["osiju", 1836, 123], + ["tiju", 1813, 44], + ["atiju", 1838, 120], + ["otiju", 1838, 22], + ["aviju", 1813, 77], + ["eviju", 1813, 78], + ["iviju", 1813, 79], + ["oviju", 1813, 80], + ["ziju", 1813, 45], + ["o\u0161iju", 1813, 91], + ["\u017Eiju", 1813, 38], + ["anju", -1, 84], + ["enju", -1, 85], + ["snju", -1, 122], + ["\u0161nju", -1, 86], + ["uju", -1, 25], + ["lucuju", 1852, 121], + ["iruju", 1852, 100], + ["lu\u010Duju", 1852, 117], + ["ku", -1, 95], + ["sku", 1856, 1], + ["\u0161ku", 1856, 2], + ["alu", -1, 104], + ["ijalu", 1859, 47], + ["nalu", 1859, 46], + ["elu", -1, 83], + ["ilu", -1, 116], + ["ozilu", 1863, 48], + ["olu", -1, 50], + ["ramu", -1, 52], + ["acemu", -1, 124], + ["ecemu", -1, 125], + ["ucemu", -1, 126], + ["anjijemu", -1, 84], + ["enjijemu", -1, 85], + ["snjijemu", -1, 122], + ["\u0161njijemu", -1, 86], + ["kijemu", -1, 95], + ["skijemu", 1874, 1], + ["\u0161kijemu", 1874, 2], + ["elijemu", -1, 83], + ["nijemu", -1, 13], + ["osijemu", -1, 123], + ["atijemu", -1, 120], + ["evitijemu", -1, 92], + ["ovitijemu", -1, 93], + ["astijemu", -1, 94], + ["avijemu", -1, 77], + ["evijemu", -1, 78], + ["ivijemu", -1, 79], + ["ovijemu", -1, 80], + ["o\u0161ijemu", -1, 91], + ["anjemu", -1, 84], + ["enjemu", -1, 85], + ["snjemu", -1, 122], + ["\u0161njemu", -1, 86], + ["kemu", -1, 95], + ["skemu", 1893, 1], + ["\u0161kemu", 1893, 2], + ["lemu", -1, 51], + ["elemu", 1896, 83], + ["nemu", -1, 13], + ["anemu", 1898, 10], + ["enemu", 1898, 87], + ["snemu", 1898, 159], + ["\u0161nemu", 1898, 88], + ["osemu", -1, 123], + ["atemu", -1, 120], + ["evitemu", -1, 92], + ["ovitemu", -1, 93], + ["astemu", -1, 94], + ["avemu", -1, 77], + ["evemu", -1, 78], + ["ivemu", -1, 79], + ["ovemu", -1, 80], + ["a\u0107emu", -1, 14], + ["e\u0107emu", -1, 15], + ["u\u0107emu", -1, 16], + ["o\u0161emu", -1, 91], + ["acomu", -1, 124], + ["ecomu", -1, 125], + ["ucomu", 
-1, 126], + ["anjomu", -1, 84], + ["enjomu", -1, 85], + ["snjomu", -1, 122], + ["\u0161njomu", -1, 86], + ["komu", -1, 95], + ["skomu", 1923, 1], + ["\u0161komu", 1923, 2], + ["elomu", -1, 83], + ["nomu", -1, 13], + ["cinomu", 1927, 137], + ["\u010Dinomu", 1927, 89], + ["osomu", -1, 123], + ["atomu", -1, 120], + ["evitomu", -1, 92], + ["ovitomu", -1, 93], + ["astomu", -1, 94], + ["avomu", -1, 77], + ["evomu", -1, 78], + ["ivomu", -1, 79], + ["ovomu", -1, 80], + ["a\u0107omu", -1, 14], + ["e\u0107omu", -1, 15], + ["u\u0107omu", -1, 16], + ["o\u0161omu", -1, 91], + ["nu", -1, 13], + ["anu", 1943, 10], + ["astanu", 1944, 110], + ["istanu", 1944, 111], + ["ostanu", 1944, 112], + ["inu", 1943, 11], + ["cinu", 1948, 137], + ["aninu", 1948, 10], + ["\u010Dinu", 1948, 89], + ["onu", 1943, 12], + ["aru", -1, 53], + ["dru", -1, 54], + ["eru", -1, 55], + ["oru", -1, 56], + ["basu", -1, 135], + ["gasu", -1, 131], + ["jasu", -1, 129], + ["kasu", -1, 133], + ["nasu", -1, 132], + ["tasu", -1, 130], + ["vasu", -1, 134], + ["esu", -1, 57], + ["isu", -1, 58], + ["osu", -1, 123], + ["atu", -1, 120], + ["ikatu", 1967, 68], + ["latu", 1967, 69], + ["etu", -1, 70], + ["evitu", -1, 92], + ["ovitu", -1, 93], + ["astu", -1, 94], + ["estu", -1, 71], + ["istu", -1, 72], + ["kstu", -1, 73], + ["ostu", -1, 74], + ["i\u0161tu", -1, 75], + ["avu", -1, 77], + ["evu", -1, 78], + ["ivu", -1, 79], + ["ovu", -1, 80], + ["lovu", 1982, 82], + ["movu", 1982, 81], + ["stvu", -1, 3], + ["\u0161tvu", -1, 4], + ["ba\u0161u", -1, 63], + ["ga\u0161u", -1, 64], + ["ja\u0161u", -1, 61], + ["ka\u0161u", -1, 62], + ["na\u0161u", -1, 60], + ["ta\u0161u", -1, 59], + ["va\u0161u", -1, 65], + ["e\u0161u", -1, 66], + ["i\u0161u", -1, 67], + ["o\u0161u", -1, 91], + ["avav", -1, 97], + ["evav", -1, 96], + ["ivav", -1, 98], + ["uvav", -1, 99], + ["kov", -1, 95], + ["a\u0161", -1, 104], + ["ira\u0161", 2002, 100], + ["ura\u0161", 2002, 105], + ["ta\u0161", 2002, 113], + ["ava\u0161", 2002, 97], + ["eva\u0161", 2002, 96], 
+ ["iva\u0161", 2002, 98], + ["uva\u0161", 2002, 99], + ["a\u010Da\u0161", 2002, 102], + ["e\u0161", -1, 119], + ["astade\u0161", 2011, 110], + ["istade\u0161", 2011, 111], + ["ostade\u0161", 2011, 112], + ["astaje\u0161", 2011, 106], + ["istaje\u0161", 2011, 107], + ["ostaje\u0161", 2011, 108], + ["ije\u0161", 2011, 116], + ["inje\u0161", 2011, 114], + ["uje\u0161", 2011, 25], + ["iruje\u0161", 2020, 100], + ["lu\u010Duje\u0161", 2020, 117], + ["ne\u0161", 2011, 13], + ["astane\u0161", 2023, 110], + ["istane\u0161", 2023, 111], + ["ostane\u0161", 2023, 112], + ["ete\u0161", 2011, 70], + ["aste\u0161", 2011, 115], + ["i\u0161", -1, 116], + ["ni\u0161", 2029, 103], + ["jeti\u0161", 2029, 118], + ["a\u010Di\u0161", 2029, 101], + ["lu\u010Di\u0161", 2029, 117], + ["ro\u0161i\u0161", 2029, 90] + ]; + + /** @const */ var a_3 = [ + ["a", -1, 1], + ["oga", 0, 1], + ["ama", 0, 1], + ["ima", 0, 1], + ["ena", 0, 1], + ["e", -1, 1], + ["og", -1, 1], + ["anog", 6, 1], + ["enog", 6, 1], + ["anih", -1, 1], + ["enih", -1, 1], + ["i", -1, 1], + ["ani", 11, 1], + ["eni", 11, 1], + ["anoj", -1, 1], + ["enoj", -1, 1], + ["anim", -1, 1], + ["enim", -1, 1], + ["om", -1, 1], + ["enom", 18, 1], + ["o", -1, 1], + ["ano", 20, 1], + ["eno", 20, 1], + ["ost", -1, 1], + ["u", -1, 1], + ["enu", 24, 1] + ]; + + /** @const */ var /** Array */ g_v = [17, 65, 16]; + + /** @const */ var /** Array */ g_sa = [65, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 128]; + + /** @const */ var /** Array */ g_ca = [119, 95, 23, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16]; + + /** @const */ var /** Array */ g_rg = [1]; + + var /** number */ I_p1 = 0; + var /** boolean */ B_no_diacritics = false; + + + /** @return {boolean} */ + function r_cyr_to_lat() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + golab2: while(true) + { + /** 
@const */ var /** number */ v_3 = base.cursor; + lab3: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + if (among_var == 0) + { + break lab3; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("a")) + { + return false; + } + break; + case 2: + if (!base.slice_from("b")) + { + return false; + } + break; + case 3: + if (!base.slice_from("v")) + { + return false; + } + break; + case 4: + if (!base.slice_from("g")) + { + return false; + } + break; + case 5: + if (!base.slice_from("d")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u0111")) + { + return false; + } + break; + case 7: + if (!base.slice_from("e")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u017E")) + { + return false; + } + break; + case 9: + if (!base.slice_from("z")) + { + return false; + } + break; + case 10: + if (!base.slice_from("i")) + { + return false; + } + break; + case 11: + if (!base.slice_from("j")) + { + return false; + } + break; + case 12: + if (!base.slice_from("k")) + { + return false; + } + break; + case 13: + if (!base.slice_from("l")) + { + return false; + } + break; + case 14: + if (!base.slice_from("lj")) + { + return false; + } + break; + case 15: + if (!base.slice_from("m")) + { + return false; + } + break; + case 16: + if (!base.slice_from("n")) + { + return false; + } + break; + case 17: + if (!base.slice_from("nj")) + { + return false; + } + break; + case 18: + if (!base.slice_from("o")) + { + return false; + } + break; + case 19: + if (!base.slice_from("p")) + { + return false; + } + break; + case 20: + if (!base.slice_from("r")) + { + return false; + } + break; + case 21: + if (!base.slice_from("s")) + { + return false; + } + break; + case 22: + if (!base.slice_from("t")) + { + return false; + } + break; + case 23: + if (!base.slice_from("\u0107")) + { + return false; + } + break; + case 24: + if (!base.slice_from("u")) + { + return false; + } + break; + case 25: + if 
(!base.slice_from("f")) + { + return false; + } + break; + case 26: + if (!base.slice_from("h")) + { + return false; + } + break; + case 27: + if (!base.slice_from("c")) + { + return false; + } + break; + case 28: + if (!base.slice_from("\u010D")) + { + return false; + } + break; + case 29: + if (!base.slice_from("d\u017E")) + { + return false; + } + break; + case 30: + if (!base.slice_from("\u0161")) + { + return false; + } + break; + } + base.cursor = v_3; + break golab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + break lab1; + } + base.cursor++; + } + continue; + } + base.cursor = v_2; + break; + } + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_prelude() { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + golab2: while(true) + { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + if (!(base.in_grouping(g_ca, 98, 382))) + { + break lab3; + } + base.bra = base.cursor; + if (!(base.eq_s("ije"))) + { + break lab3; + } + base.ket = base.cursor; + if (!(base.in_grouping(g_ca, 98, 382))) + { + break lab3; + } + if (!base.slice_from("e")) + { + return false; + } + base.cursor = v_3; + break golab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + break lab1; + } + base.cursor++; + } + continue; + } + base.cursor = v_2; + break; + } + } + base.cursor = v_1; + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { + while(true) + { + /** @const */ var /** number */ v_5 = base.cursor; + lab5: { + golab6: while(true) + { + /** @const */ var /** number */ v_6 = base.cursor; + lab7: { + if (!(base.in_grouping(g_ca, 98, 382))) + { + break lab7; + } + base.bra = base.cursor; + if (!(base.eq_s("je"))) + { + break lab7; + } + base.ket = base.cursor; + if (!(base.in_grouping(g_ca, 98, 382))) + { + break lab7; + } + if (!base.slice_from("e")) + { + return false; + } + base.cursor = v_6; + break 
golab6; + } + base.cursor = v_6; + if (base.cursor >= base.limit) + { + break lab5; + } + base.cursor++; + } + continue; + } + base.cursor = v_5; + break; + } + } + base.cursor = v_4; + /** @const */ var /** number */ v_7 = base.cursor; + lab8: { + while(true) + { + /** @const */ var /** number */ v_8 = base.cursor; + lab9: { + golab10: while(true) + { + /** @const */ var /** number */ v_9 = base.cursor; + lab11: { + base.bra = base.cursor; + if (!(base.eq_s("dj"))) + { + break lab11; + } + base.ket = base.cursor; + if (!base.slice_from("\u0111")) + { + return false; + } + base.cursor = v_9; + break golab10; + } + base.cursor = v_9; + if (base.cursor >= base.limit) + { + break lab9; + } + base.cursor++; + } + continue; + } + base.cursor = v_8; + break; + } + } + base.cursor = v_7; + return true; + }; + + /** @return {boolean} */ + function r_mark_regions() { + B_no_diacritics = true; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!base.go_out_grouping(g_sa, 263, 382)) + { + break lab0; + } + base.cursor++; + B_no_diacritics = false; + } + base.cursor = v_1; + I_p1 = base.limit; + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + if (!base.go_out_grouping(g_v, 97, 117)) + { + break lab1; + } + base.cursor++; + I_p1 = base.cursor; + if (I_p1 >= 2) + { + break lab1; + } + if (!base.go_in_grouping(g_v, 97, 117)) + { + break lab1; + } + base.cursor++; + I_p1 = base.cursor; + } + base.cursor = v_2; + /** @const */ var /** number */ v_3 = base.cursor; + lab2: { + golab3: while(true) + { + lab4: { + if (!(base.eq_s("r"))) + { + break lab4; + } + break golab3; + } + if (base.cursor >= base.limit) + { + break lab2; + } + base.cursor++; + } + lab5: { + /** @const */ var /** number */ v_4 = base.cursor; + lab6: { + if (base.cursor < 2) + { + break lab6; + } + break lab5; + } + base.cursor = v_4; + if (!base.go_in_grouping(g_rg, 114, 114)) + { + break lab2; + } + base.cursor++; + } + if ((I_p1 - base.cursor) <= 1) + { + break lab2; + } + I_p1 
= base.cursor; + } + base.cursor = v_3; + return true; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_Step_1() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_1); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("loga")) + { + return false; + } + break; + case 2: + if (!base.slice_from("peh")) + { + return false; + } + break; + case 3: + if (!base.slice_from("vojka")) + { + return false; + } + break; + case 4: + if (!base.slice_from("bojka")) + { + return false; + } + break; + case 5: + if (!base.slice_from("jak")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u010Dajni")) + { + return false; + } + break; + case 7: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("cajni")) + { + return false; + } + break; + case 8: + if (!base.slice_from("erni")) + { + return false; + } + break; + case 9: + if (!base.slice_from("larni")) + { + return false; + } + break; + case 10: + if (!base.slice_from("esni")) + { + return false; + } + break; + case 11: + if (!base.slice_from("anjca")) + { + return false; + } + break; + case 12: + if (!base.slice_from("ajca")) + { + return false; + } + break; + case 13: + if (!base.slice_from("ljca")) + { + return false; + } + break; + case 14: + if (!base.slice_from("ejca")) + { + return false; + } + break; + case 15: + if (!base.slice_from("ojca")) + { + return false; + } + break; + case 16: + if (!base.slice_from("ajka")) + { + return false; + } + break; + case 17: + if (!base.slice_from("ojka")) + { + return false; + } + break; + case 18: + if (!base.slice_from("\u0161ca")) + { + return false; + } + break; + case 19: + if (!base.slice_from("ing")) + { + return false; + } + break; + case 20: + if (!base.slice_from("tvenik")) + { + return false; + } + break; + case 21: + if 
(!base.slice_from("tetika")) + { + return false; + } + break; + case 22: + if (!base.slice_from("nstva")) + { + return false; + } + break; + case 23: + if (!base.slice_from("nik")) + { + return false; + } + break; + case 24: + if (!base.slice_from("tik")) + { + return false; + } + break; + case 25: + if (!base.slice_from("zik")) + { + return false; + } + break; + case 26: + if (!base.slice_from("snik")) + { + return false; + } + break; + case 27: + if (!base.slice_from("kusi")) + { + return false; + } + break; + case 28: + if (!base.slice_from("kusni")) + { + return false; + } + break; + case 29: + if (!base.slice_from("kustva")) + { + return false; + } + break; + case 30: + if (!base.slice_from("du\u0161ni")) + { + return false; + } + break; + case 31: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("dusni")) + { + return false; + } + break; + case 32: + if (!base.slice_from("antni")) + { + return false; + } + break; + case 33: + if (!base.slice_from("bilni")) + { + return false; + } + break; + case 34: + if (!base.slice_from("tilni")) + { + return false; + } + break; + case 35: + if (!base.slice_from("avilni")) + { + return false; + } + break; + case 36: + if (!base.slice_from("silni")) + { + return false; + } + break; + case 37: + if (!base.slice_from("gilni")) + { + return false; + } + break; + case 38: + if (!base.slice_from("rilni")) + { + return false; + } + break; + case 39: + if (!base.slice_from("nilni")) + { + return false; + } + break; + case 40: + if (!base.slice_from("alni")) + { + return false; + } + break; + case 41: + if (!base.slice_from("ozni")) + { + return false; + } + break; + case 42: + if (!base.slice_from("ravi")) + { + return false; + } + break; + case 43: + if (!base.slice_from("stavni")) + { + return false; + } + break; + case 44: + if (!base.slice_from("pravni")) + { + return false; + } + break; + case 45: + if (!base.slice_from("tivni")) + { + return false; + } + break; + case 46: + if (!base.slice_from("sivni")) 
+ { + return false; + } + break; + case 47: + if (!base.slice_from("atni")) + { + return false; + } + break; + case 48: + if (!base.slice_from("enta")) + { + return false; + } + break; + case 49: + if (!base.slice_from("tetni")) + { + return false; + } + break; + case 50: + if (!base.slice_from("pletni")) + { + return false; + } + break; + case 51: + if (!base.slice_from("\u0161avi")) + { + return false; + } + break; + case 52: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("savi")) + { + return false; + } + break; + case 53: + if (!base.slice_from("anta")) + { + return false; + } + break; + case 54: + if (!base.slice_from("a\u010Dka")) + { + return false; + } + break; + case 55: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("acka")) + { + return false; + } + break; + case 56: + if (!base.slice_from("u\u0161ka")) + { + return false; + } + break; + case 57: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("uska")) + { + return false; + } + break; + case 58: + if (!base.slice_from("atka")) + { + return false; + } + break; + case 59: + if (!base.slice_from("etka")) + { + return false; + } + break; + case 60: + if (!base.slice_from("itka")) + { + return false; + } + break; + case 61: + if (!base.slice_from("otka")) + { + return false; + } + break; + case 62: + if (!base.slice_from("utka")) + { + return false; + } + break; + case 63: + if (!base.slice_from("eskna")) + { + return false; + } + break; + case 64: + if (!base.slice_from("ti\u010Dni")) + { + return false; + } + break; + case 65: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ticni")) + { + return false; + } + break; + case 66: + if (!base.slice_from("ojska")) + { + return false; + } + break; + case 67: + if (!base.slice_from("esma")) + { + return false; + } + break; + case 68: + if (!base.slice_from("metra")) + { + return false; + } + break; + case 69: + if (!base.slice_from("centra")) + { + return false; + } + 
break; + case 70: + if (!base.slice_from("istra")) + { + return false; + } + break; + case 71: + if (!base.slice_from("osti")) + { + return false; + } + break; + case 72: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("osti")) + { + return false; + } + break; + case 73: + if (!base.slice_from("dba")) + { + return false; + } + break; + case 74: + if (!base.slice_from("\u010Dka")) + { + return false; + } + break; + case 75: + if (!base.slice_from("mca")) + { + return false; + } + break; + case 76: + if (!base.slice_from("nca")) + { + return false; + } + break; + case 77: + if (!base.slice_from("voljni")) + { + return false; + } + break; + case 78: + if (!base.slice_from("anki")) + { + return false; + } + break; + case 79: + if (!base.slice_from("vca")) + { + return false; + } + break; + case 80: + if (!base.slice_from("sca")) + { + return false; + } + break; + case 81: + if (!base.slice_from("rca")) + { + return false; + } + break; + case 82: + if (!base.slice_from("alca")) + { + return false; + } + break; + case 83: + if (!base.slice_from("elca")) + { + return false; + } + break; + case 84: + if (!base.slice_from("olca")) + { + return false; + } + break; + case 85: + if (!base.slice_from("njca")) + { + return false; + } + break; + case 86: + if (!base.slice_from("ekta")) + { + return false; + } + break; + case 87: + if (!base.slice_from("izma")) + { + return false; + } + break; + case 88: + if (!base.slice_from("jebi")) + { + return false; + } + break; + case 89: + if (!base.slice_from("baci")) + { + return false; + } + break; + case 90: + if (!base.slice_from("a\u0161ni")) + { + return false; + } + break; + case 91: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("asni")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_2() { + var /** number */ among_var; + base.ket = base.cursor; + among_var = base.find_among_b(a_2); + if (among_var == 0) + { + return false; + } 
+ base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + switch (among_var) { + case 1: + if (!base.slice_from("sk")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u0161k")) + { + return false; + } + break; + case 3: + if (!base.slice_from("stv")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u0161tv")) + { + return false; + } + break; + case 5: + if (!base.slice_from("tanij")) + { + return false; + } + break; + case 6: + if (!base.slice_from("manij")) + { + return false; + } + break; + case 7: + if (!base.slice_from("panij")) + { + return false; + } + break; + case 8: + if (!base.slice_from("ranij")) + { + return false; + } + break; + case 9: + if (!base.slice_from("ganij")) + { + return false; + } + break; + case 10: + if (!base.slice_from("an")) + { + return false; + } + break; + case 11: + if (!base.slice_from("in")) + { + return false; + } + break; + case 12: + if (!base.slice_from("on")) + { + return false; + } + break; + case 13: + if (!base.slice_from("n")) + { + return false; + } + break; + case 14: + if (!base.slice_from("a\u0107")) + { + return false; + } + break; + case 15: + if (!base.slice_from("e\u0107")) + { + return false; + } + break; + case 16: + if (!base.slice_from("u\u0107")) + { + return false; + } + break; + case 17: + if (!base.slice_from("ugov")) + { + return false; + } + break; + case 18: + if (!base.slice_from("ug")) + { + return false; + } + break; + case 19: + if (!base.slice_from("log")) + { + return false; + } + break; + case 20: + if (!base.slice_from("g")) + { + return false; + } + break; + case 21: + if (!base.slice_from("rari")) + { + return false; + } + break; + case 22: + if (!base.slice_from("oti")) + { + return false; + } + break; + case 23: + if (!base.slice_from("si")) + { + return false; + } + break; + case 24: + if (!base.slice_from("li")) + { + return false; + } + break; + case 25: + if (!base.slice_from("uj")) + { + return false; + } + break; + case 26: + if 
(!base.slice_from("caj")) + { + return false; + } + break; + case 27: + if (!base.slice_from("\u010Daj")) + { + return false; + } + break; + case 28: + if (!base.slice_from("\u0107aj")) + { + return false; + } + break; + case 29: + if (!base.slice_from("\u0111aj")) + { + return false; + } + break; + case 30: + if (!base.slice_from("laj")) + { + return false; + } + break; + case 31: + if (!base.slice_from("raj")) + { + return false; + } + break; + case 32: + if (!base.slice_from("bij")) + { + return false; + } + break; + case 33: + if (!base.slice_from("cij")) + { + return false; + } + break; + case 34: + if (!base.slice_from("dij")) + { + return false; + } + break; + case 35: + if (!base.slice_from("lij")) + { + return false; + } + break; + case 36: + if (!base.slice_from("nij")) + { + return false; + } + break; + case 37: + if (!base.slice_from("mij")) + { + return false; + } + break; + case 38: + if (!base.slice_from("\u017Eij")) + { + return false; + } + break; + case 39: + if (!base.slice_from("gij")) + { + return false; + } + break; + case 40: + if (!base.slice_from("fij")) + { + return false; + } + break; + case 41: + if (!base.slice_from("pij")) + { + return false; + } + break; + case 42: + if (!base.slice_from("rij")) + { + return false; + } + break; + case 43: + if (!base.slice_from("sij")) + { + return false; + } + break; + case 44: + if (!base.slice_from("tij")) + { + return false; + } + break; + case 45: + if (!base.slice_from("zij")) + { + return false; + } + break; + case 46: + if (!base.slice_from("nal")) + { + return false; + } + break; + case 47: + if (!base.slice_from("ijal")) + { + return false; + } + break; + case 48: + if (!base.slice_from("ozil")) + { + return false; + } + break; + case 49: + if (!base.slice_from("olov")) + { + return false; + } + break; + case 50: + if (!base.slice_from("ol")) + { + return false; + } + break; + case 51: + if (!base.slice_from("lem")) + { + return false; + } + break; + case 52: + if (!base.slice_from("ram")) + 
{ + return false; + } + break; + case 53: + if (!base.slice_from("ar")) + { + return false; + } + break; + case 54: + if (!base.slice_from("dr")) + { + return false; + } + break; + case 55: + if (!base.slice_from("er")) + { + return false; + } + break; + case 56: + if (!base.slice_from("or")) + { + return false; + } + break; + case 57: + if (!base.slice_from("es")) + { + return false; + } + break; + case 58: + if (!base.slice_from("is")) + { + return false; + } + break; + case 59: + if (!base.slice_from("ta\u0161")) + { + return false; + } + break; + case 60: + if (!base.slice_from("na\u0161")) + { + return false; + } + break; + case 61: + if (!base.slice_from("ja\u0161")) + { + return false; + } + break; + case 62: + if (!base.slice_from("ka\u0161")) + { + return false; + } + break; + case 63: + if (!base.slice_from("ba\u0161")) + { + return false; + } + break; + case 64: + if (!base.slice_from("ga\u0161")) + { + return false; + } + break; + case 65: + if (!base.slice_from("va\u0161")) + { + return false; + } + break; + case 66: + if (!base.slice_from("e\u0161")) + { + return false; + } + break; + case 67: + if (!base.slice_from("i\u0161")) + { + return false; + } + break; + case 68: + if (!base.slice_from("ikat")) + { + return false; + } + break; + case 69: + if (!base.slice_from("lat")) + { + return false; + } + break; + case 70: + if (!base.slice_from("et")) + { + return false; + } + break; + case 71: + if (!base.slice_from("est")) + { + return false; + } + break; + case 72: + if (!base.slice_from("ist")) + { + return false; + } + break; + case 73: + if (!base.slice_from("kst")) + { + return false; + } + break; + case 74: + if (!base.slice_from("ost")) + { + return false; + } + break; + case 75: + if (!base.slice_from("i\u0161t")) + { + return false; + } + break; + case 76: + if (!base.slice_from("ova")) + { + return false; + } + break; + case 77: + if (!base.slice_from("av")) + { + return false; + } + break; + case 78: + if (!base.slice_from("ev")) + { + 
return false; + } + break; + case 79: + if (!base.slice_from("iv")) + { + return false; + } + break; + case 80: + if (!base.slice_from("ov")) + { + return false; + } + break; + case 81: + if (!base.slice_from("mov")) + { + return false; + } + break; + case 82: + if (!base.slice_from("lov")) + { + return false; + } + break; + case 83: + if (!base.slice_from("el")) + { + return false; + } + break; + case 84: + if (!base.slice_from("anj")) + { + return false; + } + break; + case 85: + if (!base.slice_from("enj")) + { + return false; + } + break; + case 86: + if (!base.slice_from("\u0161nj")) + { + return false; + } + break; + case 87: + if (!base.slice_from("en")) + { + return false; + } + break; + case 88: + if (!base.slice_from("\u0161n")) + { + return false; + } + break; + case 89: + if (!base.slice_from("\u010Din")) + { + return false; + } + break; + case 90: + if (!base.slice_from("ro\u0161i")) + { + return false; + } + break; + case 91: + if (!base.slice_from("o\u0161")) + { + return false; + } + break; + case 92: + if (!base.slice_from("evit")) + { + return false; + } + break; + case 93: + if (!base.slice_from("ovit")) + { + return false; + } + break; + case 94: + if (!base.slice_from("ast")) + { + return false; + } + break; + case 95: + if (!base.slice_from("k")) + { + return false; + } + break; + case 96: + if (!base.slice_from("eva")) + { + return false; + } + break; + case 97: + if (!base.slice_from("ava")) + { + return false; + } + break; + case 98: + if (!base.slice_from("iva")) + { + return false; + } + break; + case 99: + if (!base.slice_from("uva")) + { + return false; + } + break; + case 100: + if (!base.slice_from("ir")) + { + return false; + } + break; + case 101: + if (!base.slice_from("a\u010D")) + { + return false; + } + break; + case 102: + if (!base.slice_from("a\u010Da")) + { + return false; + } + break; + case 103: + if (!base.slice_from("ni")) + { + return false; + } + break; + case 104: + if (!base.slice_from("a")) + { + return false; + } + 
break; + case 105: + if (!base.slice_from("ur")) + { + return false; + } + break; + case 106: + if (!base.slice_from("astaj")) + { + return false; + } + break; + case 107: + if (!base.slice_from("istaj")) + { + return false; + } + break; + case 108: + if (!base.slice_from("ostaj")) + { + return false; + } + break; + case 109: + if (!base.slice_from("aj")) + { + return false; + } + break; + case 110: + if (!base.slice_from("asta")) + { + return false; + } + break; + case 111: + if (!base.slice_from("ista")) + { + return false; + } + break; + case 112: + if (!base.slice_from("osta")) + { + return false; + } + break; + case 113: + if (!base.slice_from("ta")) + { + return false; + } + break; + case 114: + if (!base.slice_from("inj")) + { + return false; + } + break; + case 115: + if (!base.slice_from("as")) + { + return false; + } + break; + case 116: + if (!base.slice_from("i")) + { + return false; + } + break; + case 117: + if (!base.slice_from("lu\u010D")) + { + return false; + } + break; + case 118: + if (!base.slice_from("jeti")) + { + return false; + } + break; + case 119: + if (!base.slice_from("e")) + { + return false; + } + break; + case 120: + if (!base.slice_from("at")) + { + return false; + } + break; + case 121: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("luc")) + { + return false; + } + break; + case 122: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("snj")) + { + return false; + } + break; + case 123: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("os")) + { + return false; + } + break; + case 124: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ac")) + { + return false; + } + break; + case 125: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ec")) + { + return false; + } + break; + case 126: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("uc")) + { + return false; + } + break; + case 127: + if 
(!B_no_diacritics) + { + return false; + } + if (!base.slice_from("rosi")) + { + return false; + } + break; + case 128: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("aca")) + { + return false; + } + break; + case 129: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("jas")) + { + return false; + } + break; + case 130: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("tas")) + { + return false; + } + break; + case 131: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("gas")) + { + return false; + } + break; + case 132: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("nas")) + { + return false; + } + break; + case 133: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("kas")) + { + return false; + } + break; + case 134: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("vas")) + { + return false; + } + break; + case 135: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("bas")) + { + return false; + } + break; + case 136: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("as")) + { + return false; + } + break; + case 137: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("cin")) + { + return false; + } + break; + case 138: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("astaj")) + { + return false; + } + break; + case 139: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("istaj")) + { + return false; + } + break; + case 140: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ostaj")) + { + return false; + } + break; + case 141: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("asta")) + { + return false; + } + break; + case 142: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ista")) + { + return false; + } + break; + case 143: + if 
(!B_no_diacritics) + { + return false; + } + if (!base.slice_from("osta")) + { + return false; + } + break; + case 144: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ava")) + { + return false; + } + break; + case 145: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("eva")) + { + return false; + } + break; + case 146: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("iva")) + { + return false; + } + break; + case 147: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("uva")) + { + return false; + } + break; + case 148: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ova")) + { + return false; + } + break; + case 149: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("jeti")) + { + return false; + } + break; + case 150: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("inj")) + { + return false; + } + break; + case 151: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ist")) + { + return false; + } + break; + case 152: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("es")) + { + return false; + } + break; + case 153: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("et")) + { + return false; + } + break; + case 154: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("is")) + { + return false; + } + break; + case 155: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ir")) + { + return false; + } + break; + case 156: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ur")) + { + return false; + } + break; + case 157: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("uj")) + { + return false; + } + break; + case 158: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ni")) + { + return false; + } + break; + case 159: + if (!B_no_diacritics) + 
{ + return false; + } + if (!base.slice_from("sn")) + { + return false; + } + break; + case 160: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("ta")) + { + return false; + } + break; + case 161: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("a")) + { + return false; + } + break; + case 162: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("i")) + { + return false; + } + break; + case 163: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("e")) + { + return false; + } + break; + case 164: + if (!B_no_diacritics) + { + return false; + } + if (!base.slice_from("n")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_Step_3() { + base.ket = base.cursor; + if (base.find_among_b(a_3) == 0) + { + return false; + } + base.bra = base.cursor; + if (!r_R1()) + { + return false; + } + if (!base.slice_from("")) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + r_cyr_to_lat(); + r_prelude(); + r_mark_regions(); + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + r_Step_1(); + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab0: { + lab1: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!r_Step_2()) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_3; + if (!r_Step_3()) + { + break lab0; + } + } + } + base.cursor = base.limit - v_2; + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/spanish-stemmer.js b/sphinx/search/non-minified-js/spanish-stemmer.js index fffd6160b13..f800db7467d 100644 --- 
a/sphinx/search/non-minified-js/spanish-stemmer.js +++ b/sphinx/search/non-minified-js/spanish-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from spanish.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -SpanishStemmer = function() { +var SpanishStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["", -1, 6], ["\u00E1", 0, 1], @@ -77,6 +78,8 @@ SpanishStemmer = function() { ["ante", -1, 2], ["mente", -1, 7], ["amente", 13, 6], + ["acion", -1, 2], + ["ucion", -1, 4], ["aci\u00F3n", -1, 2], ["uci\u00F3n", -1, 4], ["ico", -1, 1], @@ -247,37 +250,27 @@ SpanishStemmer = function() { I_pV = base.limit; I_p1 = base.limit; I_p2 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { lab1: { - var /** number */ v_2 = base.cursor; + /** @const */ var /** number */ v_2 = base.cursor; lab2: { if (!(base.in_grouping(g_v, 97, 252))) { break lab2; } lab3: { - var /** number */ v_3 = base.cursor; + /** @const */ var /** number */ v_3 = base.cursor; lab4: { if (!(base.out_grouping(g_v, 97, 252))) { break lab4; } - golab5: while(true) + if (!base.go_out_grouping(g_v, 97, 252)) { - lab6: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab6; - } - break golab5; - } - if (base.cursor >= base.limit) - { - break lab4; - } - base.cursor++; + break lab4; } + base.cursor++; break lab3; } base.cursor = v_3; @@ -285,21 +278,11 @@ SpanishStemmer = function() { { break lab2; } - golab7: while(true) + if (!base.go_in_grouping(g_v, 97, 252)) { - lab8: { - if (!(base.out_grouping(g_v, 97, 252))) - { - break lab8; - } - break golab7; - } - if (base.cursor >= base.limit) - { - break lab2; - } - base.cursor++; + break lab2; } + base.cursor++; } break lab1; } @@ -308,31 +291,21 @@ SpanishStemmer = function() { { break lab0; } - lab9: { - var /** number */ v_6 = base.cursor; - lab10: { + lab5: { + /** @const */ var /** number */ v_4 = 
base.cursor; + lab6: { if (!(base.out_grouping(g_v, 97, 252))) { - break lab10; + break lab6; } - golab11: while(true) + if (!base.go_out_grouping(g_v, 97, 252)) { - lab12: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab12; - } - break golab11; - } - if (base.cursor >= base.limit) - { - break lab10; - } - base.cursor++; + break lab6; } - break lab9; + base.cursor++; + break lab5; } - base.cursor = v_6; + base.cursor = v_4; if (!(base.in_grouping(g_v, 97, 252))) { break lab0; @@ -347,72 +320,32 @@ SpanishStemmer = function() { I_pV = base.cursor; } base.cursor = v_1; - var /** number */ v_8 = base.cursor; - lab13: { - golab14: while(true) + /** @const */ var /** number */ v_5 = base.cursor; + lab7: { + if (!base.go_out_grouping(g_v, 97, 252)) { - lab15: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab15; - } - break golab14; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab16: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) { - lab17: { - if (!(base.out_grouping(g_v, 97, 252))) - { - break lab17; - } - break golab16; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p1 = base.cursor; - golab18: while(true) + if (!base.go_out_grouping(g_v, 97, 252)) { - lab19: { - if (!(base.in_grouping(g_v, 97, 252))) - { - break lab19; - } - break golab18; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } - golab20: while(true) + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 252)) { - lab21: { - if (!(base.out_grouping(g_v, 97, 252))) - { - break lab21; - } - break golab20; - } - if (base.cursor >= base.limit) - { - break lab13; - } - base.cursor++; + break lab7; } + base.cursor++; I_p2 = base.cursor; } - base.cursor = v_8; + base.cursor = v_5; return true; }; @@ -421,14 +354,10 @@ SpanishStemmer = function() { var /** number */ among_var; while(true) { - var /** number */ v_1 = 
base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; lab0: { base.bra = base.cursor; among_var = base.find_among(a_0); - if (among_var == 0) - { - break lab0; - } base.ket = base.cursor; switch (among_var) { case 1: @@ -479,29 +408,17 @@ SpanishStemmer = function() { /** @return {boolean} */ function r_RV() { - if (!(I_pV <= base.cursor)) - { - return false; - } - return true; + return I_pV <= base.cursor; }; /** @return {boolean} */ function r_R1() { - if (!(I_p1 <= base.cursor)) - { - return false; - } - return true; + return I_p1 <= base.cursor; }; /** @return {boolean} */ function r_R2() { - if (!(I_p2 <= base.cursor)) - { - return false; - } - return true; + return I_p2 <= base.cursor; }; /** @return {boolean} */ @@ -608,7 +525,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("ic"))) @@ -667,7 +584,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab1: { base.ket = base.cursor; among_var = base.find_among_b(a_3); @@ -717,7 +634,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { base.ket = base.cursor; if (base.find_among_b(a_4) == 0) @@ -746,7 +663,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab3: { base.ket = base.cursor; if (base.find_among_b(a_5) == 0) @@ -775,7 +692,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab4: { base.ket = base.cursor; if (!(base.eq_s_b("at"))) @@ -805,16 
+722,16 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; if (base.find_among_b(a_7) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; if (!(base.eq_s_b("u"))) { return false; @@ -833,33 +750,33 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_pV; base.ket = base.cursor; among_var = base.find_among_b(a_8); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("u"))) { - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; break lab0; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; if (!(base.eq_s_b("g"))) { - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; break lab0; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; } base.bra = base.cursor; if (!base.slice_del()) @@ -907,7 +824,7 @@ SpanishStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { base.ket = base.cursor; if (!(base.eq_s_b("u"))) @@ -916,7 +833,7 @@ SpanishStemmer = function() { break lab0; } base.bra = base.cursor; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.eq_s_b("g"))) { base.cursor = 
base.limit - v_1; @@ -941,13 +858,13 @@ SpanishStemmer = function() { this.stem = /** @return {boolean} */ function() { r_mark_regions(); base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; r_attached_pronoun(); - base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab0: { lab1: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { if (!r_standard_suffix()) { @@ -955,7 +872,7 @@ SpanishStemmer = function() { } break lab1; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; lab3: { if (!r_y_verb_suffix()) { @@ -963,21 +880,21 @@ SpanishStemmer = function() { } break lab1; } - base.cursor = base.limit - v_4; + base.cursor = base.limit - v_3; if (!r_verb_suffix()) { break lab0; } } } - base.cursor = base.limit - v_3; - var /** number */ v_5 = base.limit - base.cursor; + base.cursor = base.limit - v_2; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_residual_suffix(); - base.cursor = base.limit - v_5; + base.cursor = base.limit - v_4; base.cursor = base.limit_backward; - var /** number */ v_6 = base.cursor; + /** @const */ var /** number */ v_5 = base.cursor; r_postlude(); - base.cursor = v_6; + base.cursor = v_5; return true; }; diff --git a/sphinx/search/non-minified-js/swedish-stemmer.js b/sphinx/search/non-minified-js/swedish-stemmer.js index 4d7d49fc0e5..bf1a64268d2 100644 --- a/sphinx/search/non-minified-js/swedish-stemmer.js +++ b/sphinx/search/non-minified-js/swedish-stemmer.js @@ -1,9 +1,34 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from swedish.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -SwedishStemmer = function() { +var SwedishStemmer 
= function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ + ["fab", -1, -1], + ["h", -1, -1], + ["pak", -1, -1], + ["rak", -1, -1], + ["stak", -1, -1], + ["kom", -1, -1], + ["iet", -1, -1], + ["cit", -1, -1], + ["dit", -1, -1], + ["alit", -1, -1], + ["ilit", -1, -1], + ["mit", -1, -1], + ["nit", -1, -1], + ["pit", -1, -1], + ["rit", -1, -1], + ["sit", -1, -1], + ["tit", -1, -1], + ["uit", -1, -1], + ["ivit", -1, -1], + ["kvit", -1, -1], + ["xit", -1, -1] + ]; + + /** @const */ var a_1 = [ ["a", -1, 1], ["arna", 0, 1], ["erna", 0, 1], @@ -38,12 +63,13 @@ SwedishStemmer = function() { ["hetens", 29, 1], ["erns", 21, 1], ["at", -1, 1], - ["andet", -1, 1], - ["het", -1, 1], + ["et", -1, 3], + ["andet", 34, 1], + ["het", 34, 1], ["ast", -1, 1] ]; - /** @const */ var a_1 = [ + /** @const */ var a_2 = [ ["dd", -1, -1], ["gd", -1, -1], ["nn", -1, -1], @@ -53,18 +79,20 @@ SwedishStemmer = function() { ["tt", -1, -1] ]; - /** @const */ var a_2 = [ + /** @const */ var a_3 = [ ["ig", -1, 1], ["lig", 0, 1], ["els", -1, 1], ["fullt", -1, 3], - ["l\u00F6st", -1, 2] + ["\u00F6st", -1, 2] ]; /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 32]; /** @const */ var /** Array */ g_s_ending = [119, 127, 149]; + /** @const */ var /** Array */ g_ost_ending = [173, 58]; + var /** number */ I_x = 0; var /** number */ I_p1 = 0; @@ -72,9 +100,9 @@ SwedishStemmer = function() { /** @return {boolean} */ function r_mark_regions() { I_p1 = base.limit; - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; { - var /** number */ c1 = base.cursor + 3; + /** @const */ var /** number */ c1 = base.cursor + 3; if (c1 > base.limit) { return false; @@ -83,46 +111,56 @@ SwedishStemmer = function() { } I_x = base.cursor; base.cursor = v_1; - golab0: while(true) + if (!base.go_out_grouping(g_v, 97, 246)) { - var /** number */ v_2 = base.cursor; - lab1: { - if (!(base.in_grouping(g_v, 97, 246))) - { - break 
lab1; - } - base.cursor = v_2; - break golab0; + return false; + } + base.cursor++; + if (!base.go_in_grouping(g_v, 97, 246)) + { + return false; + } + base.cursor++; + I_p1 = base.cursor; + lab0: { + if (I_p1 >= I_x) + { + break lab0; } - base.cursor = v_2; - if (base.cursor >= base.limit) + I_p1 = I_x; + } + return true; + }; + + /** @return {boolean} */ + function r_et_condition() { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (!(base.out_grouping_b(g_v, 97, 246))) + { + return false; + } + if (!(base.in_grouping_b(g_v, 97, 246))) + { + return false; + } + lab0: { + if (base.cursor > base.limit_backward) { - return false; + break lab0; } - base.cursor++; + return false; } - golab2: while(true) + base.cursor = base.limit - v_1; { - lab3: { - if (!(base.out_grouping(g_v, 97, 246))) + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (base.find_among_b(a_0) == 0) { - break lab3; + break lab1; } - break golab2; - } - if (base.cursor >= base.limit) - { return false; } - base.cursor++; - } - I_p1 = base.cursor; - lab4: { - if (!(I_p1 < I_x)) - { - break lab4; - } - I_p1 = I_x; + base.cursor = base.limit - v_2; } return true; }; @@ -134,17 +172,17 @@ SwedishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; - among_var = base.find_among_b(a_0); + among_var = base.find_among_b(a_1); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; - base.limit_backward = v_2; + base.limit_backward = v_1; switch (among_var) { case 1: if (!base.slice_del()) @@ -153,7 +191,33 @@ SwedishStemmer = function() { } break; case 2: - if (!(base.in_grouping_b(g_s_ending, 98, 121))) + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { + if (!(base.eq_s_b("et"))) + { + break lab1; + } + if 
(!r_et_condition()) + { + break lab1; + } + base.bra = base.cursor; + break lab0; + } + base.cursor = base.limit - v_2; + if (!(base.in_grouping_b(g_s_ending, 98, 121))) + { + return false; + } + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if (!r_et_condition()) { return false; } @@ -172,19 +236,19 @@ SwedishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; - var /** number */ v_3 = base.limit - base.cursor; - if (base.find_among_b(a_1) == 0) + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + if (base.find_among_b(a_2) == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; base.ket = base.cursor; if (base.cursor <= base.limit_backward) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.cursor--; @@ -193,7 +257,7 @@ SwedishStemmer = function() { { return false; } - base.limit_backward = v_2; + base.limit_backward = v_1; return true; }; @@ -204,16 +268,17 @@ SwedishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit_backward; + /** @const */ var /** number */ v_1 = base.limit_backward; base.limit_backward = I_p1; base.ket = base.cursor; - among_var = base.find_among_b(a_2); + among_var = base.find_among_b(a_3); if (among_var == 0) { - base.limit_backward = v_2; + base.limit_backward = v_1; return false; } base.bra = base.cursor; + base.limit_backward = v_1; switch (among_var) { case 1: if (!base.slice_del()) @@ -222,7 +287,11 @@ SwedishStemmer = function() { } break; case 2: - if (!base.slice_from("l\u00F6s")) + if (!(base.in_grouping_b(g_ost_ending, 105, 118))) + { + return false; + } + if (!base.slice_from("\u00F6s")) { return false; } @@ -234,22 +303,21 @@ SwedishStemmer = function() { } break; } - base.limit_backward = v_2; return true; }; 
this.stem = /** @return {boolean} */ function() { - var /** number */ v_1 = base.cursor; + /** @const */ var /** number */ v_1 = base.cursor; r_mark_regions(); base.cursor = v_1; base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_main_suffix(); base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_consonant_pair(); base.cursor = base.limit - v_3; - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; r_other_suffix(); base.cursor = base.limit - v_4; base.cursor = base.limit_backward; diff --git a/sphinx/search/non-minified-js/tamil-stemmer.js b/sphinx/search/non-minified-js/tamil-stemmer.js new file mode 100644 index 00000000000..2ae474784d7 --- /dev/null +++ b/sphinx/search/non-minified-js/tamil-stemmer.js @@ -0,0 +1,1189 @@ +// Generated from tamil.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var TamilStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u0BB5\u0BC1", -1, 3], + ["\u0BB5\u0BC2", -1, 4], + ["\u0BB5\u0BCA", -1, 2], + ["\u0BB5\u0BCB", -1, 1] + ]; + + /** @const */ var a_1 = [ + ["\u0B95", -1, -1], + ["\u0B99", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9E", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BA8", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BAE", -1, -1], + ["\u0BAF", -1, -1], + ["\u0BB5", -1, -1] + ]; + + /** @const */ var a_2 = [ + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_3 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", -1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_4 = [ + ["", -1, 2], + ["\u0BC8", 0, 1], + ["\u0BCD", 0, 1] + ]; + + /** @const 
*/ var a_5 = [ + ["\u0BA8\u0BCD\u0BA4", -1, 1], + ["\u0BAF", -1, 1], + ["\u0BB5", -1, 1], + ["\u0BA9\u0BC1", -1, 8], + ["\u0BC1\u0B95\u0BCD", -1, 7], + ["\u0BC1\u0B95\u0BCD\u0B95\u0BCD", -1, 7], + ["\u0B9F\u0BCD\u0B95\u0BCD", -1, 3], + ["\u0BB1\u0BCD\u0B95\u0BCD", -1, 4], + ["\u0B99\u0BCD", -1, 9], + ["\u0B9F\u0BCD\u0B9F\u0BCD", -1, 5], + ["\u0BA4\u0BCD\u0BA4\u0BCD", -1, 6], + ["\u0BA8\u0BCD\u0BA4\u0BCD", -1, 1], + ["\u0BA8\u0BCD", -1, 1], + ["\u0B9F\u0BCD\u0BAA\u0BCD", -1, 3], + ["\u0BAF\u0BCD", -1, 2], + ["\u0BA9\u0BCD\u0BB1\u0BCD", -1, 4], + ["\u0BB5\u0BCD", -1, 1] + ]; + + /** @const */ var a_6 = [ + ["\u0B95", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9F", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BB1", -1, -1] + ]; + + /** @const */ var a_7 = [ + ["\u0B95", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9F", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BB1", -1, -1] + ]; + + /** @const */ var a_8 = [ + ["\u0B9E", -1, -1], + ["\u0BA3", -1, -1], + ["\u0BA8", -1, -1], + ["\u0BA9", -1, -1], + ["\u0BAE", -1, -1], + ["\u0BAF", -1, -1], + ["\u0BB0", -1, -1], + ["\u0BB2", -1, -1], + ["\u0BB3", -1, -1], + ["\u0BB4", -1, -1], + ["\u0BB5", -1, -1] + ]; + + /** @const */ var a_9 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", -1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1], + ["\u0BCD", -1, -1] + ]; + + /** @const */ var a_10 = [ + ["\u0B85", -1, -1], + ["\u0B87", -1, -1], + ["\u0B89", -1, -1] + ]; + + /** @const */ var a_11 = [ + ["\u0B95", -1, -1], + ["\u0B99", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9E", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BA8", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BAE", -1, -1], + ["\u0BAF", -1, -1], + ["\u0BB5", -1, -1] + ]; + + /** @const */ var a_12 = [ + ["\u0B95", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9F", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BB1", -1, -1] + ]; + + /** @const */ var a_13 = [ + ["\u0B95\u0BB3\u0BCD", 
-1, 4], + ["\u0BC1\u0B99\u0BCD\u0B95\u0BB3\u0BCD", 0, 1], + ["\u0B9F\u0BCD\u0B95\u0BB3\u0BCD", 0, 3], + ["\u0BB1\u0BCD\u0B95\u0BB3\u0BCD", 0, 2] + ]; + + /** @const */ var a_14 = [ + ["\u0BBE", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BCB", -1, -1] + ]; + + /** @const */ var a_15 = [ + ["\u0BAA\u0BBF", -1, -1], + ["\u0BB5\u0BBF", -1, -1] + ]; + + /** @const */ var a_16 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", -1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_17 = [ + ["\u0BAA\u0B9F\u0BCD\u0B9F", -1, 3], + ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA3", -1, 3], + ["\u0BA4\u0BBE\u0BA9", -1, 3], + ["\u0BAA\u0B9F\u0BBF\u0BA4\u0BBE\u0BA9", 2, 3], + ["\u0BC6\u0BA9", -1, 1], + ["\u0BBE\u0B95\u0BBF\u0BAF", -1, 1], + ["\u0B95\u0BC1\u0BB0\u0BBF\u0BAF", -1, 3], + ["\u0BC1\u0B9F\u0BC8\u0BAF", -1, 1], + ["\u0BB2\u0BCD\u0BB2", -1, 2], + ["\u0BC1\u0BB3\u0BCD\u0BB3", -1, 1], + ["\u0BBE\u0B95\u0BBF", -1, 1], + ["\u0BAA\u0B9F\u0BBF", -1, 3], + ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BBF", -1, 1], + ["\u0BAA\u0BB1\u0BCD\u0BB1\u0BBF", -1, 3], + ["\u0BAA\u0B9F\u0BC1", -1, 3], + ["\u0BB5\u0BBF\u0B9F\u0BC1", -1, 3], + ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3], + ["\u0BB5\u0BBF\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3], + ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1", -1, 3], + ["\u0BC6\u0BA9\u0BCD\u0BB1\u0BC1", -1, 1], + ["\u0BC1\u0B9F\u0BC8", -1, 1], + ["\u0BBF\u0BB2\u0BCD\u0BB2\u0BC8", -1, 1], + ["\u0BC1\u0B9F\u0BA9\u0BCD", -1, 1], + ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 1], + ["\u0BC6\u0BB2\u0BCD\u0BB2\u0BBE\u0BAE\u0BCD", -1, 3], + ["\u0BC6\u0BA9\u0BC1\u0BAE\u0BCD", -1, 1] + ]; + + /** @const */ var a_18 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", -1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_19 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", 
-1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_20 = [ + ["\u0BB5\u0BBF\u0B9F", -1, 2], + ["\u0BC0", -1, 7], + ["\u0BCA\u0B9F\u0BC1", -1, 2], + ["\u0BCB\u0B9F\u0BC1", -1, 2], + ["\u0BA4\u0BC1", -1, 6], + ["\u0BBF\u0BB0\u0BC1\u0BA8\u0BCD\u0BA4\u0BC1", 4, 2], + ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BC1", -1, 2], + ["\u0BC1\u0B9F\u0BC8", -1, 2], + ["\u0BA9\u0BC8", -1, 1], + ["\u0B95\u0BA3\u0BCD", -1, 1], + ["\u0BBF\u0BA9\u0BCD", -1, 3], + ["\u0BAE\u0BC1\u0BA9\u0BCD", -1, 1], + ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 4], + ["\u0BBF\u0BB1\u0BCD", -1, 2], + ["\u0BAE\u0BC7\u0BB1\u0BCD", -1, 1], + ["\u0BB2\u0BCD", -1, 5], + ["\u0BBE\u0BAE\u0BB2\u0BCD", 15, 2], + ["\u0BBE\u0BB2\u0BCD", 15, 2], + ["\u0BBF\u0BB2\u0BCD", 15, 2], + ["\u0BAE\u0BC7\u0BB2\u0BCD", 15, 1], + ["\u0BC1\u0BB3\u0BCD", -1, 2], + ["\u0B95\u0BC0\u0BB4\u0BCD", -1, 1] + ]; + + /** @const */ var a_21 = [ + ["\u0B95", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9F", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BB1", -1, -1] + ]; + + /** @const */ var a_22 = [ + ["\u0B95", -1, -1], + ["\u0B9A", -1, -1], + ["\u0B9F", -1, -1], + ["\u0BA4", -1, -1], + ["\u0BAA", -1, -1], + ["\u0BB1", -1, -1] + ]; + + /** @const */ var a_23 = [ + ["\u0B85", -1, -1], + ["\u0B86", -1, -1], + ["\u0B87", -1, -1], + ["\u0B88", -1, -1], + ["\u0B89", -1, -1], + ["\u0B8A", -1, -1], + ["\u0B8E", -1, -1], + ["\u0B8F", -1, -1], + ["\u0B90", -1, -1], + ["\u0B92", -1, -1], + ["\u0B93", -1, -1], + ["\u0B94", -1, -1] + ]; + + /** @const */ var a_24 = [ + ["\u0BBE", -1, -1], + ["\u0BBF", -1, -1], + ["\u0BC0", -1, -1], + ["\u0BC1", -1, -1], + ["\u0BC2", -1, -1], + ["\u0BC6", -1, -1], + ["\u0BC7", -1, -1], + ["\u0BC8", -1, -1] + ]; + + /** @const */ var a_25 = [ + ["\u0B95", -1, 1], + ["\u0BA4", -1, 1], + ["\u0BA9", -1, 1], + ["\u0BAA", -1, 1], + ["\u0BAF", -1, 1], + ["\u0BBE", -1, 5], + ["\u0B95\u0BC1", -1, 6], + ["\u0BAA\u0B9F\u0BC1", -1, 1], + ["\u0BA4\u0BC1", -1, 3], + 
["\u0BBF\u0BB1\u0BCD\u0BB1\u0BC1", -1, 1], + ["\u0BA9\u0BC8", -1, 1], + ["\u0BB5\u0BC8", -1, 1], + ["\u0BA9\u0BA9\u0BCD", -1, 1], + ["\u0BAA\u0BA9\u0BCD", -1, 1], + ["\u0BB5\u0BA9\u0BCD", -1, 2], + ["\u0BBE\u0BA9\u0BCD", -1, 4], + ["\u0BA9\u0BBE\u0BA9\u0BCD", 15, 1], + ["\u0BAE\u0BBF\u0BA9\u0BCD", -1, 1], + ["\u0BA9\u0BC6\u0BA9\u0BCD", -1, 1], + ["\u0BC7\u0BA9\u0BCD", -1, 5], + ["\u0BA9\u0BAE\u0BCD", -1, 1], + ["\u0BAA\u0BAE\u0BCD", -1, 1], + ["\u0BBE\u0BAE\u0BCD", -1, 5], + ["\u0B95\u0BC1\u0BAE\u0BCD", -1, 1], + ["\u0B9F\u0BC1\u0BAE\u0BCD", -1, 5], + ["\u0BA4\u0BC1\u0BAE\u0BCD", -1, 1], + ["\u0BB1\u0BC1\u0BAE\u0BCD", -1, 1], + ["\u0BC6\u0BAE\u0BCD", -1, 5], + ["\u0BC7\u0BAE\u0BCD", -1, 5], + ["\u0BCB\u0BAE\u0BCD", -1, 5], + ["\u0BBE\u0BAF\u0BCD", -1, 5], + ["\u0BA9\u0BB0\u0BCD", -1, 1], + ["\u0BAA\u0BB0\u0BCD", -1, 1], + ["\u0BC0\u0BAF\u0BB0\u0BCD", -1, 5], + ["\u0BB5\u0BB0\u0BCD", -1, 1], + ["\u0BBE\u0BB0\u0BCD", -1, 5], + ["\u0BA9\u0BBE\u0BB0\u0BCD", 35, 1], + ["\u0BAE\u0BBE\u0BB0\u0BCD", 35, 1], + ["\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BCD", -1, 1], + ["\u0BA9\u0BBF\u0BB0\u0BCD", -1, 5], + ["\u0BC0\u0BB0\u0BCD", -1, 5], + ["\u0BA9\u0BB3\u0BCD", -1, 1], + ["\u0BAA\u0BB3\u0BCD", -1, 1], + ["\u0BB5\u0BB3\u0BCD", -1, 1], + ["\u0BBE\u0BB3\u0BCD", -1, 5], + ["\u0BA9\u0BBE\u0BB3\u0BCD", 44, 1] + ]; + + /** @const */ var a_26 = [ + ["\u0B95\u0BBF\u0BB1", -1, -1], + ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1], + ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1], + ["\u0B95\u0BBF\u0BB1\u0BCD", -1, -1], + ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1], + ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1] + ]; + + var /** boolean */ B_found_vetrumai_urupu = false; + var /** boolean */ B_found_a_match = false; + + + /** @return {boolean} */ + function r_has_min_length() { + return base.current.length > 4; + }; + + /** @return {boolean} */ + function r_fix_va_start() { + var /** number */ among_var; + base.bra = base.cursor; + among_var = base.find_among(a_0); + 
if (among_var == 0) + { + return false; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u0B93")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u0B92")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u0B89")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u0B8A")) + { + return false; + } + break; + } + return true; + }; + + /** @return {boolean} */ + function r_fix_endings() { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + if (!r_fix_ending()) + { + break lab1; + } + continue; + } + base.cursor = v_2; + break; + } + } + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_remove_question_prefixes() { + base.bra = base.cursor; + if (!(base.eq_s("\u0B8E"))) + { + return false; + } + if (base.find_among(a_1) == 0) + { + return false; + } + if (!(base.eq_s("\u0BCD"))) + { + return false; + } + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + /** @const */ var /** number */ v_1 = base.cursor; + r_fix_va_start(); + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_fix_ending() { + var /** number */ among_var; + if (base.current.length <= 3) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + base.ket = base.cursor; + among_var = base.find_among_b(a_5); + if (among_var == 0) + { + break lab1; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + if (base.find_among_b(a_2) == 0) + { + break lab1; + } + base.cursor = base.limit - v_2; + if (!base.slice_del()) + { + return false; + } + break; + case 3: + if 
(!base.slice_from("\u0BB3\u0BCD")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u0BB2\u0BCD")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u0B9F\u0BC1")) + { + return false; + } + break; + case 6: + if (!B_found_vetrumai_urupu) + { + break lab1; + } + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("\u0BC8"))) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_3; + } + if (!base.slice_from("\u0BAE\u0BCD")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 8: + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab3: { + if (base.find_among_b(a_3) == 0) + { + break lab3; + } + break lab1; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_del()) + { + return false; + } + break; + case 9: + among_var = base.find_among_b(a_4); + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u0BAE\u0BCD")) + { + return false; + } + break; + } + break; + } + break lab0; + } + base.cursor = base.limit - v_1; + base.ket = base.cursor; + if (!(base.eq_s_b("\u0BCD"))) + { + return false; + } + lab4: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab5: { + if (base.find_among_b(a_6) == 0) + { + break lab5; + } + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab6: { + if (!(base.eq_s_b("\u0BCD"))) + { + base.cursor = base.limit - v_6; + break lab6; + } + if (base.find_among_b(a_7) == 0) + { + base.cursor = base.limit - v_6; + break lab6; + } + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + break lab4; + } + base.cursor = base.limit - v_5; + lab7: { + if (base.find_among_b(a_8) == 0) + { + break lab7; + } + base.bra = base.cursor; + if (!(base.eq_s_b("\u0BCD"))) + { + break lab7; + } + if (!base.slice_del()) + { + return false; + } + 
break lab4; + } + base.cursor = base.limit - v_5; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + if (base.find_among_b(a_9) == 0) + { + return false; + } + base.cursor = base.limit - v_7; + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + } + base.cursor = base.limit_backward; + return true; + }; + + /** @return {boolean} */ + function r_remove_pronoun_prefixes() { + base.bra = base.cursor; + if (base.find_among(a_10) == 0) + { + return false; + } + if (base.find_among(a_11) == 0) + { + return false; + } + if (!(base.eq_s("\u0BCD"))) + { + return false; + } + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + /** @const */ var /** number */ v_1 = base.cursor; + r_fix_va_start(); + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_remove_plural_suffix() { + var /** number */ among_var; + base.limit_backward = base.cursor; base.cursor = base.limit; + base.ket = base.cursor; + among_var = base.find_among_b(a_13); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + if (base.find_among_b(a_12) == 0) + { + break lab1; + } + if (!base.slice_from("\u0BC1\u0B99\u0BCD")) + { + return false; + } + break lab0; + } + base.cursor = base.limit - v_1; + if (!base.slice_from("\u0BCD")) + { + return false; + } + } + break; + case 2: + if (!base.slice_from("\u0BB2\u0BCD")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u0BB3\u0BCD")) + { + return false; + } + break; + case 4: + if (!base.slice_del()) + { + return false; + } + break; + } + base.cursor = base.limit_backward; + return true; + }; + + /** @return {boolean} */ + function r_remove_question_suffixes() { + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = 
base.limit - base.cursor; + lab0: { + base.ket = base.cursor; + if (base.find_among_b(a_14) == 0) + { + break lab0; + } + base.bra = base.cursor; + if (!base.slice_from("\u0BCD")) + { + return false; + } + } + base.cursor = base.limit - v_1; + base.cursor = base.limit_backward; + r_fix_endings(); + return true; + }; + + /** @return {boolean} */ + function r_remove_command_suffixes() { + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + base.ket = base.cursor; + if (base.find_among_b(a_15) == 0) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.cursor = base.limit_backward; + return true; + }; + + /** @return {boolean} */ + function r_remove_um() { + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + base.ket = base.cursor; + if (!(base.eq_s_b("\u0BC1\u0BAE\u0BCD"))) + { + return false; + } + base.bra = base.cursor; + if (!base.slice_from("\u0BCD")) + { + return false; + } + base.cursor = base.limit_backward; + /** @const */ var /** number */ v_1 = base.cursor; + r_fix_ending(); + base.cursor = v_1; + return true; + }; + + /** @return {boolean} */ + function r_remove_common_word_endings() { + var /** number */ among_var; + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + base.ket = base.cursor; + among_var = base.find_among_b(a_17); + if (among_var == 0) + { + return false; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 2: + { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + if (base.find_among_b(a_16) == 0) + { + break lab0; + } + return false; + } + base.cursor = base.limit - v_1; + } + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 3: + if (!base.slice_del()) + { + return false; 
+ } + break; + } + base.cursor = base.limit_backward; + r_fix_endings(); + return true; + }; + + /** @return {boolean} */ + function r_remove_vetrumai_urupukal() { + var /** number */ among_var; + B_found_vetrumai_urupu = false; + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + lab0: { + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + base.ket = base.cursor; + among_var = base.find_among_b(a_20); + if (among_var == 0) + { + break lab1; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 3: + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab2: { + if (!(base.eq_s_b("\u0BAE"))) + { + break lab2; + } + break lab1; + } + base.cursor = base.limit - v_3; + } + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 4: + if (base.current.length < 7) + { + break lab1; + } + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 5: + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab3: { + if (base.find_among_b(a_18) == 0) + { + break lab3; + } + break lab1; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 6: + { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab4: { + if (base.find_among_b(a_19) == 0) + { + break lab4; + } + break lab1; + } + base.cursor = base.limit - v_5; + } + if (!base.slice_del()) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u0BBF")) + { + return false; + } + break; + } + base.cursor = base.limit - v_2; + break lab0; + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + base.ket = base.cursor; + if 
(!(base.eq_s_b("\u0BC8"))) + { + return false; + } + lab5: { + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab6: { + { + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab7: { + if (base.find_among_b(a_21) == 0) + { + break lab7; + } + break lab6; + } + base.cursor = base.limit - v_8; + } + break lab5; + } + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + if (base.find_among_b(a_22) == 0) + { + return false; + } + if (!(base.eq_s_b("\u0BCD"))) + { + return false; + } + base.cursor = base.limit - v_9; + } + base.bra = base.cursor; + if (!base.slice_from("\u0BCD")) + { + return false; + } + base.cursor = base.limit - v_6; + } + B_found_vetrumai_urupu = true; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + lab8: { + base.ket = base.cursor; + if (!(base.eq_s_b("\u0BBF\u0BA9\u0BCD"))) + { + break lab8; + } + base.bra = base.cursor; + if (!base.slice_from("\u0BCD")) + { + return false; + } + } + base.cursor = base.limit - v_10; + base.cursor = base.limit_backward; + r_fix_endings(); + return true; + }; + + /** @return {boolean} */ + function r_remove_tense_suffixes() { + while(true) + { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + if (!r_remove_tense_suffix()) + { + break lab0; + } + continue; + } + base.cursor = v_1; + break; + } + return true; + }; + + /** @return {boolean} */ + function r_remove_tense_suffix() { + var /** number */ among_var; + B_found_a_match = false; + if (!r_has_min_length()) + { + return false; + } + base.limit_backward = base.cursor; base.cursor = base.limit; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + base.ket = base.cursor; + among_var = base.find_among_b(a_25); + if (among_var == 0) + { + break lab0; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_del()) + { + return false; + } + 
break; + case 2: + { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab1: { + if (base.find_among_b(a_23) == 0) + { + break lab1; + } + break lab0; + } + base.cursor = base.limit - v_3; + } + if (!base.slice_del()) + { + return false; + } + break; + case 3: + { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab2: { + if (base.find_among_b(a_24) == 0) + { + break lab2; + } + break lab0; + } + base.cursor = base.limit - v_4; + } + if (!base.slice_del()) + { + return false; + } + break; + case 4: + { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab3: { + if (!(base.eq_s_b("\u0B9A"))) + { + break lab3; + } + break lab0; + } + base.cursor = base.limit - v_5; + } + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u0BCD")) + { + return false; + } + break; + case 6: + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + if (!(base.eq_s_b("\u0BCD"))) + { + break lab0; + } + base.cursor = base.limit - v_6; + if (!base.slice_del()) + { + return false; + } + break; + } + B_found_a_match = true; + base.cursor = base.limit - v_2; + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab4: { + base.ket = base.cursor; + if (base.find_among_b(a_26) == 0) + { + break lab4; + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + B_found_a_match = true; + } + base.cursor = base.limit - v_7; + base.cursor = base.limit_backward; + r_fix_endings(); + if (!B_found_a_match) + { + return false; + } + return true; + }; + + this.stem = /** @return {boolean} */ function() { + B_found_vetrumai_urupu = false; + /** @const */ var /** number */ v_1 = base.cursor; + r_fix_ending(); + base.cursor = v_1; + if (!r_has_min_length()) + { + return false; + } + /** @const */ var /** number */ v_2 = base.cursor; + r_remove_question_prefixes(); + base.cursor = v_2; + /** @const */ var /** number */ v_3 = 
base.cursor; + r_remove_pronoun_prefixes(); + base.cursor = v_3; + /** @const */ var /** number */ v_4 = base.cursor; + r_remove_question_suffixes(); + base.cursor = v_4; + /** @const */ var /** number */ v_5 = base.cursor; + r_remove_um(); + base.cursor = v_5; + /** @const */ var /** number */ v_6 = base.cursor; + r_remove_common_word_endings(); + base.cursor = v_6; + /** @const */ var /** number */ v_7 = base.cursor; + r_remove_vetrumai_urupukal(); + base.cursor = v_7; + /** @const */ var /** number */ v_8 = base.cursor; + r_remove_plural_suffix(); + base.cursor = v_8; + /** @const */ var /** number */ v_9 = base.cursor; + r_remove_command_suffixes(); + base.cursor = v_9; + /** @const */ var /** number */ v_10 = base.cursor; + r_remove_tense_suffixes(); + base.cursor = v_10; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; diff --git a/sphinx/search/non-minified-js/turkish-stemmer.js b/sphinx/search/non-minified-js/turkish-stemmer.js index 8ba74b9218e..c57ba798066 100644 --- a/sphinx/search/non-minified-js/turkish-stemmer.js +++ b/sphinx/search/non-minified-js/turkish-stemmer.js @@ -1,8 +1,9 @@ -// Generated by Snowball 2.1.0 - https://snowballstem.org/ +// Generated from turkish.sbl by Snowball 3.0.1 - https://snowballstem.org/ /**@constructor*/ -TurkishStemmer = function() { +var TurkishStemmer = function() { var base = new BaseStemmer(); + /** @const */ var a_0 = [ ["m", -1, -1], ["n", -1, -1], @@ -216,230 +217,104 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_check_vowel_harmony() { - var /** number */ v_1 = base.limit - base.cursor; - golab0: while(true) + /** @const */ var /** number */ v_1 = base.limit - base.cursor; + if (!base.go_out_grouping_b(g_vowel, 97, 305)) { - var /** number */ v_2 = base.limit - base.cursor; - lab1: { - if (!(base.in_grouping_b(g_vowel, 97, 305))) - { - break lab1; - } - base.cursor = 
base.limit - v_2; - break golab0; - } - base.cursor = base.limit - v_2; - if (base.cursor <= base.limit_backward) - { - return false; - } - base.cursor--; + return false; } - lab2: { - var /** number */ v_3 = base.limit - base.cursor; - lab3: { + lab0: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab1: { if (!(base.eq_s_b("a"))) { - break lab3; + break lab1; } - golab4: while(true) + if (!base.go_out_grouping_b(g_vowel1, 97, 305)) { - var /** number */ v_4 = base.limit - base.cursor; - lab5: { - if (!(base.in_grouping_b(g_vowel1, 97, 305))) - { - break lab5; - } - base.cursor = base.limit - v_4; - break golab4; - } - base.cursor = base.limit - v_4; - if (base.cursor <= base.limit_backward) - { - break lab3; - } - base.cursor--; + break lab1; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab6: { + base.cursor = base.limit - v_2; + lab2: { if (!(base.eq_s_b("e"))) { - break lab6; + break lab2; } - golab7: while(true) + if (!base.go_out_grouping_b(g_vowel2, 101, 252)) { - var /** number */ v_5 = base.limit - base.cursor; - lab8: { - if (!(base.in_grouping_b(g_vowel2, 101, 252))) - { - break lab8; - } - base.cursor = base.limit - v_5; - break golab7; - } - base.cursor = base.limit - v_5; - if (base.cursor <= base.limit_backward) - { - break lab6; - } - base.cursor--; + break lab2; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab9: { + base.cursor = base.limit - v_2; + lab3: { if (!(base.eq_s_b("\u0131"))) { - break lab9; + break lab3; } - golab10: while(true) + if (!base.go_out_grouping_b(g_vowel3, 97, 305)) { - var /** number */ v_6 = base.limit - base.cursor; - lab11: { - if (!(base.in_grouping_b(g_vowel3, 97, 305))) - { - break lab11; - } - base.cursor = base.limit - v_6; - break golab10; - } - base.cursor = base.limit - v_6; - if (base.cursor <= base.limit_backward) - { - break lab9; - } - base.cursor--; + break lab3; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab12: { + 
base.cursor = base.limit - v_2; + lab4: { if (!(base.eq_s_b("i"))) { - break lab12; + break lab4; } - golab13: while(true) + if (!base.go_out_grouping_b(g_vowel4, 101, 105)) { - var /** number */ v_7 = base.limit - base.cursor; - lab14: { - if (!(base.in_grouping_b(g_vowel4, 101, 105))) - { - break lab14; - } - base.cursor = base.limit - v_7; - break golab13; - } - base.cursor = base.limit - v_7; - if (base.cursor <= base.limit_backward) - { - break lab12; - } - base.cursor--; + break lab4; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab15: { + base.cursor = base.limit - v_2; + lab5: { if (!(base.eq_s_b("o"))) { - break lab15; + break lab5; } - golab16: while(true) + if (!base.go_out_grouping_b(g_vowel5, 111, 117)) { - var /** number */ v_8 = base.limit - base.cursor; - lab17: { - if (!(base.in_grouping_b(g_vowel5, 111, 117))) - { - break lab17; - } - base.cursor = base.limit - v_8; - break golab16; - } - base.cursor = base.limit - v_8; - if (base.cursor <= base.limit_backward) - { - break lab15; - } - base.cursor--; + break lab5; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab18: { + base.cursor = base.limit - v_2; + lab6: { if (!(base.eq_s_b("\u00F6"))) { - break lab18; + break lab6; } - golab19: while(true) + if (!base.go_out_grouping_b(g_vowel6, 246, 252)) { - var /** number */ v_9 = base.limit - base.cursor; - lab20: { - if (!(base.in_grouping_b(g_vowel6, 246, 252))) - { - break lab20; - } - base.cursor = base.limit - v_9; - break golab19; - } - base.cursor = base.limit - v_9; - if (base.cursor <= base.limit_backward) - { - break lab18; - } - base.cursor--; + break lab6; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; - lab21: { + base.cursor = base.limit - v_2; + lab7: { if (!(base.eq_s_b("u"))) { - break lab21; + break lab7; } - golab22: while(true) + if (!base.go_out_grouping_b(g_vowel5, 111, 117)) { - var /** number */ v_10 = base.limit - base.cursor; - lab23: { - if 
(!(base.in_grouping_b(g_vowel5, 111, 117))) - { - break lab23; - } - base.cursor = base.limit - v_10; - break golab22; - } - base.cursor = base.limit - v_10; - if (base.cursor <= base.limit_backward) - { - break lab21; - } - base.cursor--; + break lab7; } - break lab2; + break lab0; } - base.cursor = base.limit - v_3; + base.cursor = base.limit - v_2; if (!(base.eq_s_b("\u00FC"))) { return false; } - golab24: while(true) + if (!base.go_out_grouping_b(g_vowel6, 246, 252)) { - var /** number */ v_11 = base.limit - base.cursor; - lab25: { - if (!(base.in_grouping_b(g_vowel6, 246, 252))) - { - break lab25; - } - base.cursor = base.limit - v_11; - break golab24; - } - base.cursor = base.limit - v_11; - if (base.cursor <= base.limit_backward) - { - return false; - } - base.cursor--; + return false; } } base.cursor = base.limit - v_1; @@ -449,13 +324,13 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_mark_suffix_with_optional_n_consonant() { lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("n"))) { break lab1; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.in_grouping_b(g_vowel, 97, 305))) { break lab1; @@ -465,9 +340,9 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_1; { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; if (!(base.eq_s_b("n"))) { break lab2; @@ -477,7 +352,7 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_3; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; if (base.cursor <= base.limit_backward) { return false; @@ -495,13 +370,13 @@ TurkishStemmer = 
function() { /** @return {boolean} */ function r_mark_suffix_with_optional_s_consonant() { lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("s"))) { break lab1; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.in_grouping_b(g_vowel, 97, 305))) { break lab1; @@ -511,9 +386,9 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_1; { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; if (!(base.eq_s_b("s"))) { break lab2; @@ -523,7 +398,7 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_3; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; if (base.cursor <= base.limit_backward) { return false; @@ -541,13 +416,13 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_mark_suffix_with_optional_y_consonant() { lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("y"))) { break lab1; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.in_grouping_b(g_vowel, 97, 305))) { break lab1; @@ -557,9 +432,9 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_1; { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; if (!(base.eq_s_b("y"))) { break lab2; @@ -569,7 +444,7 @@ TurkishStemmer = function() { } base.cursor = base.limit 
- v_3; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; if (base.cursor <= base.limit_backward) { return false; @@ -587,13 +462,13 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_mark_suffix_with_optional_U_vowel() { lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.in_grouping_b(g_U, 105, 305))) { break lab1; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; if (!(base.out_grouping_b(g_vowel, 97, 305))) { break lab1; @@ -603,9 +478,9 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_1; { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab2: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; if (!(base.in_grouping_b(g_U, 105, 305))) { break lab2; @@ -615,7 +490,7 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_3; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; if (base.cursor <= base.limit_backward) { return false; @@ -1010,10 +885,10 @@ TurkishStemmer = function() { base.ket = base.cursor; B_continue_stemming_noun_suffixes = true; lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { lab2: { - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab3: { if (!r_mark_ymUs_()) { @@ -1052,7 +927,7 @@ TurkishStemmer = function() { break lab6; } lab7: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab8: { if (!r_mark_sUnUz()) { @@ -1111,11 +986,11 @@ TurkishStemmer = 
function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab14: { base.ket = base.cursor; lab15: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab16: { if (!r_mark_DUr()) { @@ -1157,7 +1032,7 @@ TurkishStemmer = function() { break lab19; } lab20: { - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab21: { if (!r_mark_yDU()) { @@ -1176,7 +1051,7 @@ TurkishStemmer = function() { base.cursor = base.limit - v_1; lab22: { lab23: { - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab24: { if (!r_mark_sUnUz()) { @@ -1211,7 +1086,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab27: { base.ket = base.cursor; if (!r_mark_ymUs_()) @@ -1232,11 +1107,11 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; lab28: { base.ket = base.cursor; lab29: { - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; lab30: { if (!r_mark_sUnUz()) { @@ -1301,7 +1176,7 @@ TurkishStemmer = function() { return false; } lab0: { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!r_mark_DA()) { @@ -1312,11 +1187,11 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab2: { base.ket = base.cursor; lab3: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - 
base.cursor; lab4: { if (!r_mark_lAr()) { @@ -1327,7 +1202,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab5: { if (!r_stem_suffix_chain_before_ki()) { @@ -1348,7 +1223,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab6: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1382,11 +1257,11 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab8: { base.ket = base.cursor; lab9: { - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab10: { if (!r_mark_lArI()) { @@ -1403,7 +1278,7 @@ TurkishStemmer = function() { lab11: { base.ket = base.cursor; lab12: { - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab13: { if (!r_mark_possessives()) { @@ -1422,7 +1297,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; lab14: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1459,7 +1334,7 @@ TurkishStemmer = function() { return false; } lab15: { - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; lab16: { if (!r_mark_lArI()) { @@ -1483,7 +1358,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_11 = base.limit - base.cursor; + /** @const */ var /** number */ v_11 = base.limit - base.cursor; lab18: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1517,7 +1392,7 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_stem_noun_suffixes() { lab0: { - var /** number */ v_1 
= base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1529,7 +1404,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab2: { if (!r_stem_suffix_chain_before_ki()) { @@ -1551,10 +1426,10 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab4: { lab5: { - var /** number */ v_4 = base.limit - base.cursor; + /** @const */ var /** number */ v_4 = base.limit - base.cursor; lab6: { base.ket = base.cursor; if (!r_mark_lArI()) @@ -1572,7 +1447,7 @@ TurkishStemmer = function() { lab7: { base.ket = base.cursor; lab8: { - var /** number */ v_5 = base.limit - base.cursor; + /** @const */ var /** number */ v_5 = base.limit - base.cursor; lab9: { if (!r_mark_possessives()) { @@ -1591,7 +1466,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_6 = base.limit - base.cursor; + /** @const */ var /** number */ v_6 = base.limit - base.cursor; lab10: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1637,7 +1512,7 @@ TurkishStemmer = function() { lab11: { base.ket = base.cursor; lab12: { - var /** number */ v_7 = base.limit - base.cursor; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; lab13: { if (!r_mark_ndA()) { @@ -1652,7 +1527,7 @@ TurkishStemmer = function() { } } lab14: { - var /** number */ v_8 = base.limit - base.cursor; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; lab15: { if (!r_mark_lArI()) { @@ -1676,7 +1551,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_9 = base.limit - base.cursor; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; lab17: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1709,7 +1584,7 @@ TurkishStemmer = function() { lab18: 
{ base.ket = base.cursor; lab19: { - var /** number */ v_10 = base.limit - base.cursor; + /** @const */ var /** number */ v_10 = base.limit - base.cursor; lab20: { if (!r_mark_ndAn()) { @@ -1724,7 +1599,7 @@ TurkishStemmer = function() { } } lab21: { - var /** number */ v_11 = base.limit - base.cursor; + /** @const */ var /** number */ v_11 = base.limit - base.cursor; lab22: { if (!r_mark_sU()) { @@ -1735,7 +1610,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_12 = base.limit - base.cursor; + /** @const */ var /** number */ v_12 = base.limit - base.cursor; lab23: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1776,11 +1651,11 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_13 = base.limit - base.cursor; + /** @const */ var /** number */ v_13 = base.limit - base.cursor; lab25: { base.ket = base.cursor; lab26: { - var /** number */ v_14 = base.limit - base.cursor; + /** @const */ var /** number */ v_14 = base.limit - base.cursor; lab27: { if (!r_mark_possessives()) { @@ -1791,7 +1666,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_15 = base.limit - base.cursor; + /** @const */ var /** number */ v_15 = base.limit - base.cursor; lab28: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1823,7 +1698,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_16 = base.limit - base.cursor; + /** @const */ var /** number */ v_16 = base.limit - base.cursor; lab30: { if (!r_stem_suffix_chain_before_ki()) { @@ -1847,7 +1722,7 @@ TurkishStemmer = function() { lab31: { base.ket = base.cursor; lab32: { - var /** number */ v_17 = base.limit - base.cursor; + /** @const */ var /** number */ v_17 = base.limit - base.cursor; lab33: { if (!r_mark_nUn()) { @@ -1866,10 +1741,10 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_18 = base.limit - base.cursor; + /** @const */ var /** number */ v_18 = base.limit - base.cursor; lab34: { lab35: { - var /** number */ v_19 = 
base.limit - base.cursor; + /** @const */ var /** number */ v_19 = base.limit - base.cursor; lab36: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1891,7 +1766,7 @@ TurkishStemmer = function() { lab37: { base.ket = base.cursor; lab38: { - var /** number */ v_20 = base.limit - base.cursor; + /** @const */ var /** number */ v_20 = base.limit - base.cursor; lab39: { if (!r_mark_possessives()) { @@ -1910,7 +1785,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_21 = base.limit - base.cursor; + /** @const */ var /** number */ v_21 = base.limit - base.cursor; lab40: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -1967,7 +1842,7 @@ TurkishStemmer = function() { lab43: { base.ket = base.cursor; lab44: { - var /** number */ v_22 = base.limit - base.cursor; + /** @const */ var /** number */ v_22 = base.limit - base.cursor; lab45: { if (!r_mark_DA()) { @@ -1994,11 +1869,11 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_23 = base.limit - base.cursor; + /** @const */ var /** number */ v_23 = base.limit - base.cursor; lab47: { base.ket = base.cursor; lab48: { - var /** number */ v_24 = base.limit - base.cursor; + /** @const */ var /** number */ v_24 = base.limit - base.cursor; lab49: { if (!r_mark_possessives()) { @@ -2009,7 +1884,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_25 = base.limit - base.cursor; + /** @const */ var /** number */ v_25 = base.limit - base.cursor; lab50: { base.ket = base.cursor; if (!r_mark_lAr()) @@ -2044,7 +1919,7 @@ TurkishStemmer = function() { base.cursor = base.limit - v_1; base.ket = base.cursor; lab51: { - var /** number */ v_26 = base.limit - base.cursor; + /** @const */ var /** number */ v_26 = base.limit - base.cursor; lab52: { if (!r_mark_possessives()) { @@ -2063,7 +1938,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_27 = base.limit - base.cursor; + /** @const */ var /** number */ v_27 = base.limit - base.cursor; lab53: { base.ket 
= base.cursor; if (!r_mark_lAr()) @@ -2127,9 +2002,10 @@ TurkishStemmer = function() { /** @return {boolean} */ function r_append_U_to_stems_ending_with_d_or_g() { - var /** number */ v_1 = base.limit - base.cursor; + base.ket = base.cursor; + base.bra = base.cursor; lab0: { - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab1: { if (!(base.eq_s_b("d"))) { @@ -2137,186 +2013,105 @@ TurkishStemmer = function() { } break lab0; } - base.cursor = base.limit - v_2; + base.cursor = base.limit - v_1; if (!(base.eq_s_b("g"))) { return false; } } - base.cursor = base.limit - v_1; + if (!base.go_out_grouping_b(g_vowel, 97, 305)) + { + return false; + } lab2: { - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; lab3: { - var /** number */ v_4 = base.limit - base.cursor; - golab4: while(true) - { - var /** number */ v_5 = base.limit - base.cursor; + lab4: { + /** @const */ var /** number */ v_3 = base.limit - base.cursor; lab5: { - if (!(base.in_grouping_b(g_vowel, 97, 305))) - { - break lab5; - } - base.cursor = base.limit - v_5; - break golab4; - } - base.cursor = base.limit - v_5; - if (base.cursor <= base.limit_backward) - { - break lab3; - } - base.cursor--; - } - lab6: { - var /** number */ v_6 = base.limit - base.cursor; - lab7: { if (!(base.eq_s_b("a"))) { - break lab7; + break lab5; } - break lab6; + break lab4; } - base.cursor = base.limit - v_6; + base.cursor = base.limit - v_3; if (!(base.eq_s_b("\u0131"))) { break lab3; } } - base.cursor = base.limit - v_4; + if (!base.slice_from("\u0131")) { - var /** number */ c1 = base.cursor; - base.insert(base.cursor, base.cursor, "\u0131"); - base.cursor = c1; + return false; } break lab2; } - base.cursor = base.limit - v_3; - lab8: { - var /** number */ v_7 = base.limit - base.cursor; - golab9: while(true) - { - var /** number */ v_8 = base.limit - base.cursor; - lab10: { - if 
(!(base.in_grouping_b(g_vowel, 97, 305))) - { - break lab10; - } - base.cursor = base.limit - v_8; - break golab9; - } - base.cursor = base.limit - v_8; - if (base.cursor <= base.limit_backward) - { - break lab8; - } - base.cursor--; - } - lab11: { - var /** number */ v_9 = base.limit - base.cursor; - lab12: { + base.cursor = base.limit - v_2; + lab6: { + lab7: { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab8: { if (!(base.eq_s_b("e"))) { - break lab12; + break lab8; } - break lab11; + break lab7; } - base.cursor = base.limit - v_9; + base.cursor = base.limit - v_4; if (!(base.eq_s_b("i"))) { - break lab8; + break lab6; } } - base.cursor = base.limit - v_7; + if (!base.slice_from("i")) { - var /** number */ c2 = base.cursor; - base.insert(base.cursor, base.cursor, "i"); - base.cursor = c2; + return false; } break lab2; } - base.cursor = base.limit - v_3; - lab13: { - var /** number */ v_10 = base.limit - base.cursor; - golab14: while(true) - { - var /** number */ v_11 = base.limit - base.cursor; - lab15: { - if (!(base.in_grouping_b(g_vowel, 97, 305))) - { - break lab15; - } - base.cursor = base.limit - v_11; - break golab14; - } - base.cursor = base.limit - v_11; - if (base.cursor <= base.limit_backward) - { - break lab13; - } - base.cursor--; - } - lab16: { - var /** number */ v_12 = base.limit - base.cursor; - lab17: { + base.cursor = base.limit - v_2; + lab9: { + lab10: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab11: { if (!(base.eq_s_b("o"))) { - break lab17; + break lab11; } - break lab16; + break lab10; } - base.cursor = base.limit - v_12; + base.cursor = base.limit - v_5; if (!(base.eq_s_b("u"))) { - break lab13; - } - } - base.cursor = base.limit - v_10; - { - var /** number */ c3 = base.cursor; - base.insert(base.cursor, base.cursor, "u"); - base.cursor = c3; - } - break lab2; - } - base.cursor = base.limit - v_3; - var /** number */ v_13 = base.limit - base.cursor; - golab18: while(true) - { - var 
/** number */ v_14 = base.limit - base.cursor; - lab19: { - if (!(base.in_grouping_b(g_vowel, 97, 305))) - { - break lab19; + break lab9; } - base.cursor = base.limit - v_14; - break golab18; } - base.cursor = base.limit - v_14; - if (base.cursor <= base.limit_backward) + if (!base.slice_from("u")) { return false; } - base.cursor--; + break lab2; } - lab20: { - var /** number */ v_15 = base.limit - base.cursor; - lab21: { + base.cursor = base.limit - v_2; + lab12: { + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab13: { if (!(base.eq_s_b("\u00F6"))) { - break lab21; + break lab13; } - break lab20; + break lab12; } - base.cursor = base.limit - v_15; + base.cursor = base.limit - v_6; if (!(base.eq_s_b("\u00FC"))) { return false; } } - base.cursor = base.limit - v_13; + if (!base.slice_from("\u00FC")) { - var /** number */ c4 = base.cursor; - base.insert(base.cursor, base.cursor, "\u00FC"); - base.cursor = c4; + return false; } } return true; @@ -2328,7 +2123,7 @@ TurkishStemmer = function() { { return false; } - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { if (!(base.eq_s_b("soy"))) { @@ -2344,39 +2139,92 @@ TurkishStemmer = function() { }; /** @return {boolean} */ - function r_more_than_one_syllable_word() { - var /** number */ v_1 = base.cursor; - { - var v_2 = 2; - while(true) + function r_remove_proper_noun_suffix() { + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + golab1: while(true) { - var /** number */ v_3 = base.cursor; - lab0: { - golab1: while(true) + /** @const */ var /** number */ v_2 = base.cursor; + lab2: { { - lab2: { - if (!(base.in_grouping(g_vowel, 97, 305))) + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + if (!(base.eq_s("'"))) { - break lab2; + break lab3; } - break golab1; - } - if (base.cursor >= base.limit) - { - break lab0; + break lab2; } - base.cursor++; + base.cursor = v_3; } - 
v_2--; - continue; + base.cursor = v_2; + break golab1; } - base.cursor = v_3; - break; + base.cursor = v_2; + if (base.cursor >= base.limit) + { + break lab0; + } + base.cursor++; } - if (v_2 > 0) + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + base.cursor = v_1; + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { + { + /** @const */ var /** number */ c1 = base.cursor + 2; + if (c1 > base.limit) + { + break lab4; + } + base.cursor = c1; + } + golab5: while(true) + { + /** @const */ var /** number */ v_5 = base.cursor; + lab6: { + if (!(base.eq_s("'"))) + { + break lab6; + } + base.cursor = v_5; + break golab5; + } + base.cursor = v_5; + if (base.cursor >= base.limit) + { + break lab4; + } + base.cursor++; + } + base.bra = base.cursor; + base.cursor = base.limit; + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + } + base.cursor = v_4; + return true; + }; + + /** @return {boolean} */ + function r_more_than_one_syllable_word() { + /** @const */ var /** number */ v_1 = base.cursor; + for (var /** number */ v_2 = 2; v_2 > 0; v_2--) + { + if (!base.go_out_grouping(g_vowel, 97, 305)) { return false; } + base.cursor++; } base.cursor = v_1; return true; @@ -2386,7 +2234,7 @@ TurkishStemmer = function() { function r_postlude() { base.limit_backward = base.cursor; base.cursor = base.limit; { - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; lab0: { if (!r_is_reserved_word()) { @@ -2396,10 +2244,10 @@ TurkishStemmer = function() { } base.cursor = base.limit - v_1; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_append_U_to_stems_ending_with_d_or_g(); base.cursor = base.limit - v_2; - var /** number */ v_3 = base.limit - base.cursor; + /** @const */ var /** number */ v_3 = base.limit - base.cursor; r_post_process_last_consonants(); base.cursor = base.limit - v_3; 
base.cursor = base.limit_backward; @@ -2407,19 +2255,20 @@ TurkishStemmer = function() { }; this.stem = /** @return {boolean} */ function() { + r_remove_proper_noun_suffix(); if (!r_more_than_one_syllable_word()) { return false; } base.limit_backward = base.cursor; base.cursor = base.limit; - var /** number */ v_1 = base.limit - base.cursor; + /** @const */ var /** number */ v_1 = base.limit - base.cursor; r_stem_nominal_verb_suffixes(); base.cursor = base.limit - v_1; if (!B_continue_stemming_noun_suffixes) { return false; } - var /** number */ v_2 = base.limit - base.cursor; + /** @const */ var /** number */ v_2 = base.limit - base.cursor; r_stem_noun_suffixes(); base.cursor = base.limit - v_2; base.cursor = base.limit_backward; diff --git a/sphinx/search/non-minified-js/yiddish-stemmer.js b/sphinx/search/non-minified-js/yiddish-stemmer.js new file mode 100644 index 00000000000..b9a7ddb411c --- /dev/null +++ b/sphinx/search/non-minified-js/yiddish-stemmer.js @@ -0,0 +1,1160 @@ +// Generated from yiddish.sbl by Snowball 3.0.1 - https://snowballstem.org/ + +/**@constructor*/ +var YiddishStemmer = function() { + var base = new BaseStemmer(); + + /** @const */ var a_0 = [ + ["\u05D5\u05D5", -1, 1], + ["\u05D5\u05D9", -1, 2], + ["\u05D9\u05D9", -1, 3], + ["\u05DA", -1, 4], + ["\u05DD", -1, 5], + ["\u05DF", -1, 6], + ["\u05E3", -1, 7], + ["\u05E5", -1, 8] + ]; + + /** @const */ var a_1 = [ + ["\u05D0\u05D3\u05D5\u05E8\u05DB", -1, 1], + ["\u05D0\u05D4\u05D9\u05E0", -1, 1], + ["\u05D0\u05D4\u05E2\u05E8", -1, 1], + ["\u05D0\u05D4\u05F2\u05DE", -1, 1], + ["\u05D0\u05D5\u05DE", -1, 1], + ["\u05D0\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1], + ["\u05D0\u05D9\u05D1\u05E2\u05E8", -1, 1], + ["\u05D0\u05E0", -1, 1], + ["\u05D0\u05E0\u05D8", 7, 1], + ["\u05D0\u05E0\u05D8\u05E7\u05E2\u05D2\u05E0", 8, 1], + ["\u05D0\u05E0\u05D9\u05D3\u05E2\u05E8", 7, 1], + ["\u05D0\u05E4", -1, 1], + ["\u05D0\u05E4\u05D9\u05E8", 11, 1], + ["\u05D0\u05E7\u05E2\u05D2\u05E0", -1, 1], + 
["\u05D0\u05E8\u05D0\u05E4", -1, 1], + ["\u05D0\u05E8\u05D5\u05DE", -1, 1], + ["\u05D0\u05E8\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1], + ["\u05D0\u05E8\u05D9\u05D1\u05E2\u05E8", -1, 1], + ["\u05D0\u05E8\u05F1\u05E1", -1, 1], + ["\u05D0\u05E8\u05F1\u05E4", -1, 1], + ["\u05D0\u05E8\u05F2\u05E0", -1, 1], + ["\u05D0\u05F0\u05E2\u05E7", -1, 1], + ["\u05D0\u05F1\u05E1", -1, 1], + ["\u05D0\u05F1\u05E4", -1, 1], + ["\u05D0\u05F2\u05E0", -1, 1], + ["\u05D1\u05D0", -1, 1], + ["\u05D1\u05F2", -1, 1], + ["\u05D3\u05D5\u05E8\u05DB", -1, 1], + ["\u05D3\u05E2\u05E8", -1, 1], + ["\u05DE\u05D9\u05D8", -1, 1], + ["\u05E0\u05D0\u05DB", -1, 1], + ["\u05E4\u05D0\u05E8", -1, 1], + ["\u05E4\u05D0\u05E8\u05D1\u05F2", 31, 1], + ["\u05E4\u05D0\u05E8\u05F1\u05E1", 31, 1], + ["\u05E4\u05D5\u05E0\u05D0\u05E0\u05D3\u05E2\u05E8", -1, 1], + ["\u05E6\u05D5", -1, 1], + ["\u05E6\u05D5\u05D6\u05D0\u05DE\u05E2\u05E0", 35, 1], + ["\u05E6\u05D5\u05E0\u05F1\u05E4", 35, 1], + ["\u05E6\u05D5\u05E8\u05D9\u05E7", 35, 1], + ["\u05E6\u05E2", -1, 1] + ]; + + /** @const */ var a_2 = [ + ["\u05D3\u05D6\u05E9", -1, -1], + ["\u05E9\u05D8\u05E8", -1, -1], + ["\u05E9\u05D8\u05E9", -1, -1], + ["\u05E9\u05E4\u05E8", -1, -1] + ]; + + /** @const */ var a_3 = [ + ["\u05E7\u05DC\u05D9\u05D1", -1, 9], + ["\u05E8\u05D9\u05D1", -1, 10], + ["\u05D8\u05E8\u05D9\u05D1", 1, 7], + ["\u05E9\u05E8\u05D9\u05D1", 1, 15], + ["\u05D4\u05F1\u05D1", -1, 23], + ["\u05E9\u05F0\u05D9\u05D2", -1, 12], + ["\u05D2\u05D0\u05E0\u05D2", -1, 1], + ["\u05D6\u05D5\u05E0\u05D2", -1, 18], + ["\u05E9\u05DC\u05D5\u05E0\u05D2", -1, 21], + ["\u05E6\u05F0\u05D5\u05E0\u05D2", -1, 20], + ["\u05D1\u05F1\u05D2", -1, 22], + ["\u05D1\u05D5\u05E0\u05D3", -1, 16], + ["\u05F0\u05D9\u05D6", -1, 6], + ["\u05D1\u05D9\u05D8", -1, 4], + ["\u05DC\u05D9\u05D8", -1, 8], + ["\u05DE\u05D9\u05D8", -1, 3], + ["\u05E9\u05E0\u05D9\u05D8", -1, 14], + ["\u05E0\u05D5\u05DE", -1, 2], + ["\u05E9\u05D8\u05D0\u05E0", -1, 25], + ["\u05D1\u05D9\u05E1", -1, 5], + ["\u05E9\u05DE\u05D9\u05E1", 
-1, 13], + ["\u05E8\u05D9\u05E1", -1, 11], + ["\u05D8\u05E8\u05D5\u05E0\u05E7", -1, 19], + ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8", -1, 24], + ["\u05E9\u05F0\u05F1\u05E8", -1, 26], + ["\u05F0\u05D5\u05D8\u05E9", -1, 17] + ]; + + /** @const */ var a_4 = [ + ["\u05D5\u05E0\u05D2", -1, 1], + ["\u05E1\u05D8\u05D5", -1, 1], + ["\u05D8", -1, 1], + ["\u05D1\u05E8\u05D0\u05DB\u05D8", 2, 31], + ["\u05E1\u05D8", 2, 1], + ["\u05D9\u05E1\u05D8", 4, 33], + ["\u05E2\u05D8", 2, 1], + ["\u05E9\u05D0\u05E4\u05D8", 2, 1], + ["\u05D4\u05F2\u05D8", 2, 1], + ["\u05E7\u05F2\u05D8", 2, 1], + ["\u05D9\u05E7\u05F2\u05D8", 9, 1], + ["\u05DC\u05E2\u05DB", -1, 1], + ["\u05E2\u05DC\u05E2\u05DB", 11, 1], + ["\u05D9\u05D6\u05DE", -1, 1], + ["\u05D9\u05DE", -1, 1], + ["\u05E2\u05DE", -1, 1], + ["\u05E2\u05E0\u05E2\u05DE", 15, 3], + ["\u05D8\u05E2\u05E0\u05E2\u05DE", 16, 4], + ["\u05E0", -1, 1], + ["\u05E7\u05DC\u05D9\u05D1\u05E0", 18, 14], + ["\u05E8\u05D9\u05D1\u05E0", 18, 15], + ["\u05D8\u05E8\u05D9\u05D1\u05E0", 20, 12], + ["\u05E9\u05E8\u05D9\u05D1\u05E0", 20, 7], + ["\u05D4\u05F1\u05D1\u05E0", 18, 27], + ["\u05E9\u05F0\u05D9\u05D2\u05E0", 18, 17], + ["\u05D6\u05D5\u05E0\u05D2\u05E0", 18, 22], + ["\u05E9\u05DC\u05D5\u05E0\u05D2\u05E0", 18, 25], + ["\u05E6\u05F0\u05D5\u05E0\u05D2\u05E0", 18, 24], + ["\u05D1\u05F1\u05D2\u05E0", 18, 26], + ["\u05D1\u05D5\u05E0\u05D3\u05E0", 18, 20], + ["\u05F0\u05D9\u05D6\u05E0", 18, 11], + ["\u05D8\u05E0", 18, 4], + ["GE\u05D1\u05D9\u05D8\u05E0", 31, 9], + ["GE\u05DC\u05D9\u05D8\u05E0", 31, 13], + ["GE\u05DE\u05D9\u05D8\u05E0", 31, 8], + ["\u05E9\u05E0\u05D9\u05D8\u05E0", 31, 19], + ["\u05E1\u05D8\u05E0", 31, 1], + ["\u05D9\u05E1\u05D8\u05E0", 36, 1], + ["\u05E2\u05D8\u05E0", 31, 1], + ["GE\u05D1\u05D9\u05E1\u05E0", 18, 10], + ["\u05E9\u05DE\u05D9\u05E1\u05E0", 18, 18], + ["GE\u05E8\u05D9\u05E1\u05E0", 18, 16], + ["\u05E2\u05E0", 18, 1], + ["\u05D2\u05D0\u05E0\u05D2\u05E2\u05E0", 42, 5], + ["\u05E2\u05DC\u05E2\u05E0", 42, 1], + 
["\u05E0\u05D5\u05DE\u05E2\u05E0", 42, 6], + ["\u05D9\u05D6\u05DE\u05E2\u05E0", 42, 1], + ["\u05E9\u05D8\u05D0\u05E0\u05E2\u05E0", 42, 29], + ["\u05D8\u05E8\u05D5\u05E0\u05E7\u05E0", 18, 23], + ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8\u05E0", 18, 28], + ["\u05E9\u05F0\u05F1\u05E8\u05E0", 18, 30], + ["\u05F0\u05D5\u05D8\u05E9\u05E0", 18, 21], + ["\u05D2\u05F2\u05E0", 18, 5], + ["\u05E1", -1, 1], + ["\u05D8\u05E1", 53, 4], + ["\u05E2\u05D8\u05E1", 54, 1], + ["\u05E0\u05E1", 53, 1], + ["\u05D8\u05E0\u05E1", 56, 4], + ["\u05E2\u05E0\u05E1", 56, 3], + ["\u05E2\u05E1", 53, 1], + ["\u05D9\u05E2\u05E1", 59, 2], + ["\u05E2\u05DC\u05E2\u05E1", 59, 1], + ["\u05E2\u05E8\u05E1", 53, 1], + ["\u05E2\u05E0\u05E2\u05E8\u05E1", 62, 1], + ["\u05E2", -1, 1], + ["\u05D8\u05E2", 64, 4], + ["\u05E1\u05D8\u05E2", 65, 1], + ["\u05E2\u05D8\u05E2", 65, 1], + ["\u05D9\u05E2", 64, -1], + ["\u05E2\u05DC\u05E2", 64, 1], + ["\u05E2\u05E0\u05E2", 64, 3], + ["\u05D8\u05E2\u05E0\u05E2", 70, 4], + ["\u05E2\u05E8", -1, 1], + ["\u05D8\u05E2\u05E8", 72, 4], + ["\u05E1\u05D8\u05E2\u05E8", 73, 1], + ["\u05E2\u05D8\u05E2\u05E8", 73, 1], + ["\u05E2\u05E0\u05E2\u05E8", 72, 3], + ["\u05D8\u05E2\u05E0\u05E2\u05E8", 76, 4], + ["\u05D5\u05EA", -1, 32] + ]; + + /** @const */ var a_5 = [ + ["\u05D5\u05E0\u05D2", -1, 1], + ["\u05E9\u05D0\u05E4\u05D8", -1, 1], + ["\u05D4\u05F2\u05D8", -1, 1], + ["\u05E7\u05F2\u05D8", -1, 1], + ["\u05D9\u05E7\u05F2\u05D8", 3, 1], + ["\u05DC", -1, 2] + ]; + + /** @const */ var a_6 = [ + ["\u05D9\u05D2", -1, 1], + ["\u05D9\u05E7", -1, 1], + ["\u05D3\u05D9\u05E7", 1, 1], + ["\u05E0\u05D3\u05D9\u05E7", 2, 1], + ["\u05E2\u05E0\u05D3\u05D9\u05E7", 3, 1], + ["\u05D1\u05DC\u05D9\u05E7", 1, -1], + ["\u05D2\u05DC\u05D9\u05E7", 1, -1], + ["\u05E0\u05D9\u05E7", 1, 1], + ["\u05D9\u05E9", -1, 1] + ]; + + /** @const */ var /** Array */ g_niked = [255, 155, 6]; + + /** @const */ var /** Array */ g_vowel = [33, 2, 4, 0, 6]; + + /** @const */ var /** Array */ g_consonant = [239, 254, 253, 131]; + + var 
/** number */ I_x = 0; + var /** number */ I_p1 = 0; + + + /** @return {boolean} */ + function r_prelude() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + while(true) + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + golab2: while(true) + { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + base.bra = base.cursor; + among_var = base.find_among(a_0); + if (among_var == 0) + { + break lab3; + } + base.ket = base.cursor; + switch (among_var) { + case 1: + { + /** @const */ var /** number */ v_4 = base.cursor; + lab4: { + if (!(base.eq_s("\u05BC"))) + { + break lab4; + } + break lab3; + } + base.cursor = v_4; + } + if (!base.slice_from("\u05F0")) + { + return false; + } + break; + case 2: + { + /** @const */ var /** number */ v_5 = base.cursor; + lab5: { + if (!(base.eq_s("\u05B4"))) + { + break lab5; + } + break lab3; + } + base.cursor = v_5; + } + if (!base.slice_from("\u05F1")) + { + return false; + } + break; + case 3: + { + /** @const */ var /** number */ v_6 = base.cursor; + lab6: { + if (!(base.eq_s("\u05B4"))) + { + break lab6; + } + break lab3; + } + base.cursor = v_6; + } + if (!base.slice_from("\u05F2")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u05DB")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u05DE")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u05E0")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u05E4")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u05E6")) + { + return false; + } + break; + } + base.cursor = v_3; + break golab2; + } + base.cursor = v_3; + if (base.cursor >= base.limit) + { + break lab1; + } + base.cursor++; + } + continue; + } + base.cursor = v_2; + break; + } + } + base.cursor = v_1; + /** @const */ var /** number */ v_7 = base.cursor; + lab7: { + while(true) + { + /** @const */ var /** number */ v_8 = base.cursor; + lab8: { + 
golab9: while(true) + { + /** @const */ var /** number */ v_9 = base.cursor; + lab10: { + base.bra = base.cursor; + if (!(base.in_grouping(g_niked, 1456, 1474))) + { + break lab10; + } + base.ket = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.cursor = v_9; + break golab9; + } + base.cursor = v_9; + if (base.cursor >= base.limit) + { + break lab8; + } + base.cursor++; + } + continue; + } + base.cursor = v_8; + break; + } + } + base.cursor = v_7; + return true; + }; + + /** @return {boolean} */ + function r_mark_regions() { + I_p1 = base.limit; + /** @const */ var /** number */ v_1 = base.cursor; + lab0: { + base.bra = base.cursor; + if (!(base.eq_s("\u05D2\u05E2"))) + { + base.cursor = v_1; + break lab0; + } + base.ket = base.cursor; + { + /** @const */ var /** number */ v_2 = base.cursor; + lab1: { + lab2: { + /** @const */ var /** number */ v_3 = base.cursor; + lab3: { + if (!(base.eq_s("\u05DC\u05D8"))) + { + break lab3; + } + break lab2; + } + base.cursor = v_3; + lab4: { + if (!(base.eq_s("\u05D1\u05E0"))) + { + break lab4; + } + break lab2; + } + base.cursor = v_3; + if (base.cursor < base.limit) + { + break lab1; + } + } + base.cursor = v_1; + break lab0; + } + base.cursor = v_2; + } + if (!base.slice_from("GE")) + { + return false; + } + } + /** @const */ var /** number */ v_4 = base.cursor; + lab5: { + if (base.find_among(a_1) == 0) + { + base.cursor = v_4; + break lab5; + } + lab6: { + /** @const */ var /** number */ v_5 = base.cursor; + lab7: { + /** @const */ var /** number */ v_6 = base.cursor; + lab8: { + /** @const */ var /** number */ v_7 = base.cursor; + lab9: { + if (!(base.eq_s("\u05E6\u05D5\u05D2\u05E0"))) + { + break lab9; + } + break lab8; + } + base.cursor = v_7; + lab10: { + if (!(base.eq_s("\u05E6\u05D5\u05E7\u05D8"))) + { + break lab10; + } + break lab8; + } + base.cursor = v_7; + if (!(base.eq_s("\u05E6\u05D5\u05E7\u05E0"))) + { + break lab7; + } + } + if (base.cursor < base.limit) + { + break lab7; + } + 
base.cursor = v_6; + break lab6; + } + base.cursor = v_5; + lab11: { + /** @const */ var /** number */ v_8 = base.cursor; + if (!(base.eq_s("\u05D2\u05E2\u05D1\u05E0"))) + { + break lab11; + } + base.cursor = v_8; + break lab6; + } + base.cursor = v_5; + lab12: { + base.bra = base.cursor; + if (!(base.eq_s("\u05D2\u05E2"))) + { + break lab12; + } + base.ket = base.cursor; + if (!base.slice_from("GE")) + { + return false; + } + break lab6; + } + base.cursor = v_5; + base.bra = base.cursor; + if (!(base.eq_s("\u05E6\u05D5"))) + { + base.cursor = v_4; + break lab5; + } + base.ket = base.cursor; + if (!base.slice_from("TSU")) + { + return false; + } + } + } + /** @const */ var /** number */ v_9 = base.cursor; + { + /** @const */ var /** number */ c1 = base.cursor + 3; + if (c1 > base.limit) + { + return false; + } + base.cursor = c1; + } + I_x = base.cursor; + base.cursor = v_9; + /** @const */ var /** number */ v_10 = base.cursor; + lab13: { + if (base.find_among(a_2) == 0) + { + base.cursor = v_10; + break lab13; + } + } + { + /** @const */ var /** number */ v_11 = base.cursor; + lab14: { + if (!(base.in_grouping(g_consonant, 1489, 1520))) + { + break lab14; + } + if (!(base.in_grouping(g_consonant, 1489, 1520))) + { + break lab14; + } + if (!(base.in_grouping(g_consonant, 1489, 1520))) + { + break lab14; + } + I_p1 = base.cursor; + return false; + } + base.cursor = v_11; + } + if (!base.go_out_grouping(g_vowel, 1488, 1522)) + { + return false; + } + base.cursor++; + if (!base.go_in_grouping(g_vowel, 1488, 1522)) + { + return false; + } + I_p1 = base.cursor; + lab15: { + if (I_p1 >= I_x) + { + break lab15; + } + I_p1 = I_x; + } + return true; + }; + + /** @return {boolean} */ + function r_R1() { + return I_p1 <= base.cursor; + }; + + /** @return {boolean} */ + function r_R1plus3() { + return I_p1 <= (base.cursor + 3); + }; + + /** @return {boolean} */ + function r_standard_suffix() { + var /** number */ among_var; + /** @const */ var /** number */ v_1 = base.limit - 
base.cursor; + lab0: { + base.ket = base.cursor; + among_var = base.find_among_b(a_4); + if (among_var == 0) + { + break lab0; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + break lab0; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + break lab0; + } + if (!base.slice_from("\u05D9\u05E2")) + { + return false; + } + break; + case 3: + if (!r_R1()) + { + break lab0; + } + if (!base.slice_del()) + { + return false; + } + base.ket = base.cursor; + among_var = base.find_among_b(a_3); + if (among_var == 0) + { + break lab0; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!base.slice_from("\u05D2\u05F2")) + { + return false; + } + break; + case 2: + if (!base.slice_from("\u05E0\u05E2\u05DE")) + { + return false; + } + break; + case 3: + if (!base.slice_from("\u05DE\u05F2\u05D3")) + { + return false; + } + break; + case 4: + if (!base.slice_from("\u05D1\u05F2\u05D8")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u05D1\u05F2\u05E1")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u05F0\u05F2\u05D6")) + { + return false; + } + break; + case 7: + if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u05DC\u05F2\u05D8")) + { + return false; + } + break; + case 9: + if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1")) + { + return false; + } + break; + case 10: + if (!base.slice_from("\u05E8\u05F2\u05D1")) + { + return false; + } + break; + case 11: + if (!base.slice_from("\u05E8\u05F2\u05E1")) + { + return false; + } + break; + case 12: + if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2")) + { + return false; + } + break; + case 13: + if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1")) + { + return false; + } + break; + case 14: + if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3")) + { + return false; + } + break; + case 15: + if (!base.slice_from("\u05E9\u05E8\u05F2\u05D1")) + { 
+ return false; + } + break; + case 16: + if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3")) + { + return false; + } + break; + case 17: + if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9")) + { + return false; + } + break; + case 18: + if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 19: + if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7")) + { + return false; + } + break; + case 20: + if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 21: + if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 22: + if (!base.slice_from("\u05D1\u05F2\u05D2")) + { + return false; + } + break; + case 23: + if (!base.slice_from("\u05D4\u05F2\u05D1")) + { + return false; + } + break; + case 24: + if (!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8")) + { + return false; + } + break; + case 25: + if (!base.slice_from("\u05E9\u05D8\u05F2")) + { + return false; + } + break; + case 26: + if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8")) + { + return false; + } + break; + } + break; + case 4: + lab1: { + /** @const */ var /** number */ v_2 = base.limit - base.cursor; + lab2: { + if (!r_R1()) + { + break lab2; + } + if (!base.slice_del()) + { + return false; + } + break lab1; + } + base.cursor = base.limit - v_2; + if (!base.slice_from("\u05D8")) + { + return false; + } + } + base.ket = base.cursor; + if (!(base.eq_s_b("\u05D1\u05E8\u05D0\u05DB"))) + { + break lab0; + } + /** @const */ var /** number */ v_3 = base.limit - base.cursor; + lab3: { + if (!(base.eq_s_b("\u05D2\u05E2"))) + { + base.cursor = base.limit - v_3; + break lab3; + } + } + base.bra = base.cursor; + if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2")) + { + return false; + } + break; + case 5: + if (!base.slice_from("\u05D2\u05F2")) + { + return false; + } + break; + case 6: + if (!base.slice_from("\u05E0\u05E2\u05DE")) + { + return false; + } + break; + case 7: + if 
(!base.slice_from("\u05E9\u05E8\u05F2\u05D1")) + { + return false; + } + break; + case 8: + if (!base.slice_from("\u05DE\u05F2\u05D3")) + { + return false; + } + break; + case 9: + if (!base.slice_from("\u05D1\u05F2\u05D8")) + { + return false; + } + break; + case 10: + if (!base.slice_from("\u05D1\u05F2\u05E1")) + { + return false; + } + break; + case 11: + if (!base.slice_from("\u05F0\u05F2\u05D6")) + { + return false; + } + break; + case 12: + if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1")) + { + return false; + } + break; + case 13: + if (!base.slice_from("\u05DC\u05F2\u05D8")) + { + return false; + } + break; + case 14: + if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1")) + { + return false; + } + break; + case 15: + if (!base.slice_from("\u05E8\u05F2\u05D1")) + { + return false; + } + break; + case 16: + if (!base.slice_from("\u05E8\u05F2\u05E1")) + { + return false; + } + break; + case 17: + if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2")) + { + return false; + } + break; + case 18: + if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1")) + { + return false; + } + break; + case 19: + if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3")) + { + return false; + } + break; + case 20: + if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3")) + { + return false; + } + break; + case 21: + if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9")) + { + return false; + } + break; + case 22: + if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 23: + if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7")) + { + return false; + } + break; + case 24: + if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 25: + if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2")) + { + return false; + } + break; + case 26: + if (!base.slice_from("\u05D1\u05F2\u05D2")) + { + return false; + } + break; + case 27: + if (!base.slice_from("\u05D4\u05F2\u05D1")) + { + return false; + } + break; + case 28: + if 
(!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8")) + { + return false; + } + break; + case 29: + if (!base.slice_from("\u05E9\u05D8\u05F2")) + { + return false; + } + break; + case 30: + if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8")) + { + return false; + } + break; + case 31: + if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2")) + { + return false; + } + break; + case 32: + if (!r_R1()) + { + break lab0; + } + if (!base.slice_from("\u05D4")) + { + return false; + } + break; + case 33: + lab4: { + /** @const */ var /** number */ v_4 = base.limit - base.cursor; + lab5: { + lab6: { + /** @const */ var /** number */ v_5 = base.limit - base.cursor; + lab7: { + if (!(base.eq_s_b("\u05D2"))) + { + break lab7; + } + break lab6; + } + base.cursor = base.limit - v_5; + if (!(base.eq_s_b("\u05E9"))) + { + break lab5; + } + } + /** @const */ var /** number */ v_6 = base.limit - base.cursor; + lab8: { + if (!r_R1plus3()) + { + base.cursor = base.limit - v_6; + break lab8; + } + if (!base.slice_from("\u05D9\u05E1")) + { + return false; + } + } + break lab4; + } + base.cursor = base.limit - v_4; + if (!r_R1()) + { + break lab0; + } + if (!base.slice_del()) + { + return false; + } + } + break; + } + } + base.cursor = base.limit - v_1; + /** @const */ var /** number */ v_7 = base.limit - base.cursor; + lab9: { + base.ket = base.cursor; + among_var = base.find_among_b(a_5); + if (among_var == 0) + { + break lab9; + } + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + break lab9; + } + if (!base.slice_del()) + { + return false; + } + break; + case 2: + if (!r_R1()) + { + break lab9; + } + if (!(base.in_grouping_b(g_consonant, 1489, 1520))) + { + break lab9; + } + if (!base.slice_del()) + { + return false; + } + break; + } + } + base.cursor = base.limit - v_7; + /** @const */ var /** number */ v_8 = base.limit - base.cursor; + lab10: { + base.ket = base.cursor; + among_var = base.find_among_b(a_6); + if (among_var == 0) + { + break lab10; + 
} + base.bra = base.cursor; + switch (among_var) { + case 1: + if (!r_R1()) + { + break lab10; + } + if (!base.slice_del()) + { + return false; + } + break; + } + } + base.cursor = base.limit - v_8; + /** @const */ var /** number */ v_9 = base.limit - base.cursor; + lab11: { + while(true) + { + /** @const */ var /** number */ v_10 = base.limit - base.cursor; + lab12: { + golab13: while(true) + { + /** @const */ var /** number */ v_11 = base.limit - base.cursor; + lab14: { + base.ket = base.cursor; + lab15: { + /** @const */ var /** number */ v_12 = base.limit - base.cursor; + lab16: { + if (!(base.eq_s_b("GE"))) + { + break lab16; + } + break lab15; + } + base.cursor = base.limit - v_12; + if (!(base.eq_s_b("TSU"))) + { + break lab14; + } + } + base.bra = base.cursor; + if (!base.slice_del()) + { + return false; + } + base.cursor = base.limit - v_11; + break golab13; + } + base.cursor = base.limit - v_11; + if (base.cursor <= base.limit_backward) + { + break lab12; + } + base.cursor--; + } + continue; + } + base.cursor = base.limit - v_10; + break; + } + } + base.cursor = base.limit - v_9; + return true; + }; + + this.stem = /** @return {boolean} */ function() { + r_prelude(); + /** @const */ var /** number */ v_1 = base.cursor; + r_mark_regions(); + base.cursor = v_1; + base.limit_backward = base.cursor; base.cursor = base.limit; + r_standard_suffix(); + base.cursor = base.limit_backward; + return true; + }; + + /**@return{string}*/ + this['stemWord'] = function(/**string*/word) { + base.setCurrent(word); + this.stem(); + return base.getCurrent(); + }; +}; From 4532958b9405d82a14da328d85d08991978464a5 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 19 May 2025 22:14:16 +0100 Subject: [PATCH 070/435] Remove ``const`` from ``BaseStemmer`` --- sphinx/search/minified-js/base-stemmer.js | 2 +- sphinx/search/non-minified-js/base-stemmer.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/sphinx/search/minified-js/base-stemmer.js b/sphinx/search/minified-js/base-stemmer.js index 69a4ad03787..9736db91588 100644 --- a/sphinx/search/minified-js/base-stemmer.js +++ b/sphinx/search/minified-js/base-stemmer.js @@ -1 +1 @@ -let BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var 
s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}}; \ No newline at end of file +BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return 
this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return 
this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}}; \ No newline at end of file diff --git a/sphinx/search/non-minified-js/base-stemmer.js b/sphinx/search/non-minified-js/base-stemmer.js index 8cf2d585582..e6fa0c49260 100644 --- a/sphinx/search/non-minified-js/base-stemmer.js +++ b/sphinx/search/non-minified-js/base-stemmer.js @@ -1,7 +1,7 @@ // @ts-check /**@constructor*/ -const BaseStemmer = function() { +BaseStemmer = function() { /** @protected */ this.current = ''; this.cursor = 0; From 954839afe3a3204a713d40fa4fa9a95da46c305b Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 19 May 2025 22:59:34 +0100 Subject: [PATCH 071/435] Use the more modern English stemmer (#13574) The 'Porter' stemmer is considered frozen. 
--- sphinx/search/da.py | 2 +- sphinx/search/de.py | 2 +- sphinx/search/en.py | 192 +-------------------- sphinx/search/es.py | 2 +- sphinx/search/fi.py | 2 +- sphinx/search/fr.py | 2 +- sphinx/search/hu.py | 2 +- sphinx/search/it.py | 2 +- sphinx/search/nl.py | 2 +- sphinx/search/no.py | 2 +- sphinx/search/pt.py | 2 +- sphinx/search/ro.py | 2 +- sphinx/search/ru.py | 2 +- sphinx/search/sv.py | 2 +- sphinx/search/tr.py | 2 +- sphinx/search/zh.py | 188 +------------------- tests/js/fixtures/cpp/searchindex.js | 2 +- tests/js/fixtures/multiterm/searchindex.js | 2 +- tests/js/fixtures/partial/searchindex.js | 2 +- tests/js/fixtures/titles/searchindex.js | 2 +- tests/test_search.py | 12 +- 21 files changed, 26 insertions(+), 402 deletions(-) diff --git a/sphinx/search/da.py b/sphinx/search/da.py index 3eb997af1c3..e632a97fb78 100644 --- a/sphinx/search/da.py +++ b/sphinx/search/da.py @@ -1,4 +1,4 @@ -"""Danish search language: includes the JS Danish stemmer.""" +"""Danish search language.""" from __future__ import annotations diff --git a/sphinx/search/de.py b/sphinx/search/de.py index 6875b9c7535..278d78fb487 100644 --- a/sphinx/search/de.py +++ b/sphinx/search/de.py @@ -1,4 +1,4 @@ -"""German search language: includes the JS German stemmer.""" +"""German search language.""" from __future__ import annotations diff --git a/sphinx/search/en.py b/sphinx/search/en.py index 30324c8832a..273a25a0272 100644 --- a/sphinx/search/en.py +++ b/sphinx/search/en.py @@ -1,4 +1,4 @@ -"""English search language: includes the JS porter stemmer.""" +"""English search language.""" from __future__ import annotations @@ -7,202 +7,16 @@ from sphinx.search import SearchLanguage from sphinx.search._stopwords.en import ENGLISH_STOPWORDS -js_porter_stemmer = """ -/** - * Porter Stemmer - */ -var Stemmer = function() { - - var step2list = { - ational: 'ate', - tional: 'tion', - enci: 'ence', - anci: 'ance', - izer: 'ize', - bli: 'ble', - alli: 'al', - entli: 'ent', - eli: 'e', - ousli: 'ous', - 
ization: 'ize', - ation: 'ate', - ator: 'ate', - alism: 'al', - iveness: 'ive', - fulness: 'ful', - ousness: 'ous', - aliti: 'al', - iviti: 'ive', - biliti: 'ble', - logi: 'log' - }; - - var step3list = { - icate: 'ic', - ative: '', - alize: 'al', - iciti: 'ic', - ical: 'ic', - ful: '', - ness: '' - }; - - var c = "[^aeiou]"; // consonant - var v = "[aeiouy]"; // vowel - var C = c + "[^aeiouy]*"; // consonant sequence - var V = v + "[aeiou]*"; // vowel sequence - - var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 - var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 - var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 - var s_v = "^(" + C + ")?" + v; // vowel in stem - - this.stemWord = function (w) { - var stem; - var suffix; - var firstch; - var origword = w; - - if (w.length < 3) - return w; - - var re; - var re2; - var re3; - var re4; - - firstch = w.substr(0,1); - if (firstch == "y") - w = firstch.toUpperCase() + w.substr(1); - - // Step 1a - re = /^(.+?)(ss|i)es$/; - re2 = /^(.+?)([^s])s$/; - - if (re.test(w)) - w = w.replace(re,"$1$2"); - else if (re2.test(w)) - w = w.replace(re2,"$1$2"); - - // Step 1b - re = /^(.+?)eed$/; - re2 = /^(.+?)(ed|ing)$/; - if (re.test(w)) { - var fp = re.exec(w); - re = new RegExp(mgr0); - if (re.test(fp[1])) { - re = /.$/; - w = w.replace(re,""); - } - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1]; - re2 = new RegExp(s_v); - if (re2.test(stem)) { - w = stem; - re2 = /(at|bl|iz)$/; - re3 = new RegExp("([^aeiouylsz])\\\\1$"); - re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re2.test(w)) - w = w + "e"; - else if (re3.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - else if (re4.test(w)) - w = w + "e"; - } - } - - // Step 1c - re = /^(.+?)y$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(s_v); - if (re.test(stem)) - w = stem + "i"; - } - - // Step 2 - re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\ 
-ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step2list[suffix]; - } - - // Step 3 - re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step3list[suffix]; - } - - // Step 4 - re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\ -iti|ous|ive|ize)$/; - re2 = /^(.+?)(s|t)(ion)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - if (re.test(stem)) - w = stem; - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1] + fp[2]; - re2 = new RegExp(mgr1); - if (re2.test(stem)) - w = stem; - } - - // Step 5 - re = /^(.+?)e$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - re2 = new RegExp(meq1); - re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) - w = stem; - } - re = /ll$/; - re2 = new RegExp(mgr1); - if (re.test(w) && re2.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - - // and turn initial Y back to y - if (firstch == "y") - w = firstch.toLowerCase() + w.substr(1); - return w; - } -} -""" - class SearchEnglish(SearchLanguage): lang = 'en' language_name = 'English' - js_stemmer_code = js_porter_stemmer + js_stemmer_rawcode = 'english-stemmer.js' stopwords = ENGLISH_STOPWORDS def __init__(self, options: dict[str, str]) -> None: super().__init__(options) - self.stemmer = snowballstemmer.stemmer('porter') + self.stemmer = snowballstemmer.stemmer('english') def stem(self, word: str) -> str: return self.stemmer.stemWord(word.lower()) diff --git a/sphinx/search/es.py b/sphinx/search/es.py index d11937ad0c6..c1b08ab1bad 100644 --- a/sphinx/search/es.py +++ b/sphinx/search/es.py @@ -1,4 +1,4 @@ -"""Spanish 
search language: includes the JS Spanish stemmer.""" +"""Spanish search language.""" from __future__ import annotations diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py index cd044b71a80..01c7e0ba126 100644 --- a/sphinx/search/fi.py +++ b/sphinx/search/fi.py @@ -1,4 +1,4 @@ -"""Finnish search language: includes the JS Finnish stemmer.""" +"""Finnish search language.""" from __future__ import annotations diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py index 11a2c70f5dc..e79976dfea1 100644 --- a/sphinx/search/fr.py +++ b/sphinx/search/fr.py @@ -1,4 +1,4 @@ -"""French search language: includes the JS French stemmer.""" +"""French search language.""" from __future__ import annotations diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py index e86159cb604..254ad488d78 100644 --- a/sphinx/search/hu.py +++ b/sphinx/search/hu.py @@ -1,4 +1,4 @@ -"""Hungarian search language: includes the JS Hungarian stemmer.""" +"""Hungarian search language.""" from __future__ import annotations diff --git a/sphinx/search/it.py b/sphinx/search/it.py index a7052c9ae82..d8a583f9d85 100644 --- a/sphinx/search/it.py +++ b/sphinx/search/it.py @@ -1,4 +1,4 @@ -"""Italian search language: includes the JS Italian stemmer.""" +"""Italian search language.""" from __future__ import annotations diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py index 0692920efc4..de1a7d1f17d 100644 --- a/sphinx/search/nl.py +++ b/sphinx/search/nl.py @@ -1,4 +1,4 @@ -"""Dutch search language: includes the JS porter stemmer.""" +"""Dutch search language.""" from __future__ import annotations diff --git a/sphinx/search/no.py b/sphinx/search/no.py index a2bb88ee9a4..45b202f0926 100644 --- a/sphinx/search/no.py +++ b/sphinx/search/no.py @@ -1,4 +1,4 @@ -"""Norwegian search language: includes the JS Norwegian stemmer.""" +"""Norwegian search language.""" from __future__ import annotations diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py index 9c5dfa05774..a10e4cd2b53 100644 --- 
a/sphinx/search/pt.py +++ b/sphinx/search/pt.py @@ -1,4 +1,4 @@ -"""Portuguese search language: includes the JS Portuguese stemmer.""" +"""Portuguese search language.""" from __future__ import annotations diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py index 6aebdc13249..e9d29602f4e 100644 --- a/sphinx/search/ro.py +++ b/sphinx/search/ro.py @@ -1,4 +1,4 @@ -"""Romanian search language: includes the JS Romanian stemmer.""" +"""Romanian search language.""" from __future__ import annotations diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py index 52ff533832e..584b19b9f79 100644 --- a/sphinx/search/ru.py +++ b/sphinx/search/ru.py @@ -1,4 +1,4 @@ -"""Russian search language: includes the JS Russian stemmer.""" +"""Russian search language.""" from __future__ import annotations diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py index bcfac2ba528..8b138ebdf80 100644 --- a/sphinx/search/sv.py +++ b/sphinx/search/sv.py @@ -1,4 +1,4 @@ -"""Swedish search language: includes the JS Swedish stemmer.""" +"""Swedish search language.""" from __future__ import annotations diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py index 674264f1928..40131f0e372 100644 --- a/sphinx/search/tr.py +++ b/sphinx/search/tr.py @@ -1,4 +1,4 @@ -"""Turkish search language: includes the JS Turkish stemmer.""" +"""Turkish search language.""" from __future__ import annotations diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py index d22f765d520..464e1e1fd47 100644 --- a/sphinx/search/zh.py +++ b/sphinx/search/zh.py @@ -33,199 +33,13 @@ def cut_for_search(sentence: str, HMM: bool = True) -> Iterator[str]: ) del jieba -js_porter_stemmer = """ -/** - * Porter Stemmer - */ -var Stemmer = function() { - - var step2list = { - ational: 'ate', - tional: 'tion', - enci: 'ence', - anci: 'ance', - izer: 'ize', - bli: 'ble', - alli: 'al', - entli: 'ent', - eli: 'e', - ousli: 'ous', - ization: 'ize', - ation: 'ate', - ator: 'ate', - alism: 'al', - iveness: 'ive', - fulness: 'ful', - ousness: 
'ous', - aliti: 'al', - iviti: 'ive', - biliti: 'ble', - logi: 'log' - }; - - var step3list = { - icate: 'ic', - ative: '', - alize: 'al', - iciti: 'ic', - ical: 'ic', - ful: '', - ness: '' - }; - - var c = "[^aeiou]"; // consonant - var v = "[aeiouy]"; // vowel - var C = c + "[^aeiouy]*"; // consonant sequence - var V = v + "[aeiou]*"; // vowel sequence - - var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 - var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 - var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 - var s_v = "^(" + C + ")?" + v; // vowel in stem - - this.stemWord = function (w) { - var stem; - var suffix; - var firstch; - var origword = w; - - if (w.length < 3) - return w; - - var re; - var re2; - var re3; - var re4; - - firstch = w.substr(0,1); - if (firstch == "y") - w = firstch.toUpperCase() + w.substr(1); - - // Step 1a - re = /^(.+?)(ss|i)es$/; - re2 = /^(.+?)([^s])s$/; - - if (re.test(w)) - w = w.replace(re,"$1$2"); - else if (re2.test(w)) - w = w.replace(re2,"$1$2"); - - // Step 1b - re = /^(.+?)eed$/; - re2 = /^(.+?)(ed|ing)$/; - if (re.test(w)) { - var fp = re.exec(w); - re = new RegExp(mgr0); - if (re.test(fp[1])) { - re = /.$/; - w = w.replace(re,""); - } - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1]; - re2 = new RegExp(s_v); - if (re2.test(stem)) { - w = stem; - re2 = /(at|bl|iz)$/; - re3 = new RegExp("([^aeiouylsz])\\\\1$"); - re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re2.test(w)) - w = w + "e"; - else if (re3.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - else if (re4.test(w)) - w = w + "e"; - } - } - - // Step 1c - re = /^(.+?)y$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(s_v); - if (re.test(stem)) - w = stem + "i"; - } - - // Step 2 - re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\ -ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; - if (re.test(w)) { - var fp = 
re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step2list[suffix]; - } - - // Step 3 - re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step3list[suffix]; - } - - // Step 4 - re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\ -iti|ous|ive|ize)$/; - re2 = /^(.+?)(s|t)(ion)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - if (re.test(stem)) - w = stem; - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1] + fp[2]; - re2 = new RegExp(mgr1); - if (re2.test(stem)) - w = stem; - } - - // Step 5 - re = /^(.+?)e$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - re2 = new RegExp(meq1); - re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) - w = stem; - } - re = /ll$/; - re2 = new RegExp(mgr1); - if (re.test(w) && re2.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - - // and turn initial Y back to y - if (firstch == "y") - w = firstch.toLowerCase() + w.substr(1); - return w; - } -} -""" - class SearchChinese(SearchLanguage): """Chinese search implementation""" lang = 'zh' language_name = 'Chinese' - js_stemmer_code = js_porter_stemmer + js_stemmer_rawcode = 'english-stemmer.js' stopwords = ENGLISH_STOPWORDS latin1_letters = re.compile(r'[a-zA-Z0-9_]+') diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js index 42adb88db92..81f14cc1895 100644 --- a/tests/js/fixtures/cpp/searchindex.js +++ b/tests/js/fixtures/cpp/searchindex.js @@ -1 +1 @@ 
-Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"gener":0,"i":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"thei":0,"thi":0,"token":0,"us":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}}) \ No newline at end of file +Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}}) \ No newline at end of file diff --git 
a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js index 6f27d39329b..2f3f5ec39a1 100644 --- a/tests/js/fixtures/multiterm/searchindex.js +++ b/tests/js/fixtures/multiterm/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"ar":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"i":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"successfulli":0,"support":0,"t":0,"term":0,"test":0,"thi":0,"time":0,"us":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}}) \ No newline at end of file +Search.setIndex({"alltitles":{"Main 
Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}}) \ No newline at end of file diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js index cd9dbabb149..5eb299eea63 100644 --- a/tests/js/fixtures/partial/searchindex.js +++ b/tests/js/fixtures/partial/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"ar":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"i":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"thi":0,"titl":0,"us":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}}) 
\ No newline at end of file +Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"titl":0,"use":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}}) \ No newline at end of file diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js index cb9abd1da07..fa59e11c884 100644 --- a/tests/js/fixtures/titles/searchindex.js +++ b/tests/js/fixtures/titles/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example 
attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"":[0,1],"A":1,"By":0,"For":[0,1],"In":[0,1],"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"ar":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpu":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"ha":1,"handl":0,"happen":1,"head":0,"help":0,"highli":[0,1],"how":0,"i":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"rel":0,"research":0,"result":1,"retriev":0,"sai":0,"same":1,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"thei":0,"them":0,"thi":0,"time":0,"titl":0,"two":0,"typic":0,"us":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}}) \ No newline at end of file +Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result 
Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"handl":0,"happen":1,"has":1,"head":0,"help":0,"high":[0,1],"how":0,"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"same":1,"say":0,"search":[0,1],"se
em":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"them":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}}) \ No newline at end of file diff --git a/tests/test_search.py b/tests/test_search.py index 22fa6ab7616..a8ad186a533 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -107,7 +107,7 @@ def test_meta_keys_are_handled_for_language_en(app: SphinxTestApp) -> None: searchindex = load_searchindex(app.outdir / 'searchindex.js') assert not is_registered_term(searchindex, 'thisnoteith') assert is_registered_term(searchindex, 'thisonetoo') - assert is_registered_term(searchindex, 'findthiskei') + assert is_registered_term(searchindex, 'findthiskey') assert is_registered_term(searchindex, 'thistoo') assert not is_registered_term(searchindex, 'onlygerman') assert is_registered_term(searchindex, 'notgerman') @@ -125,7 +125,7 @@ def test_meta_keys_are_handled_for_language_de(app: SphinxTestApp) -> None: searchindex = load_searchindex(app.outdir / 'searchindex.js') assert not is_registered_term(searchindex, 'thisnoteith') assert is_registered_term(searchindex, 'thisonetoo') - assert not is_registered_term(searchindex, 'findthiskei') + assert not is_registered_term(searchindex, 'findthiskey') assert not is_registered_term(searchindex, 'thistoo') assert is_registered_term(searchindex, 'onlygerman') assert not is_registered_term(searchindex, 'notgerman') @@ -144,7 +144,7 @@ def test_stemmer(app: SphinxTestApp) -> None: app.build(force_all=True) searchindex = load_searchindex(app.outdir / 'searchindex.js') print(searchindex) - assert is_registered_term(searchindex, 'findthisstemmedkei') + assert is_registered_term(searchindex, 'findthisstemmedkey') assert is_registered_term(searchindex, 'intern') @@ 
-219,7 +219,6 @@ def test_IndexBuilder(): # dictionaries below may be iterated in arbitrary order by Python at # runtime. assert index._mapping == { - 'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, 'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, 'comment': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, 'non': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'}, @@ -250,7 +249,6 @@ def test_IndexBuilder(): }, 'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'}, 'terms': { - 'ar': [0, 1, 2, 3], 'comment': [0, 1, 2, 3], 'fermion': [0, 1, 2, 3], 'index': [0, 1, 2, 3], @@ -309,7 +307,6 @@ def test_IndexBuilder(): 'docname2_2': 'filename2_2', } assert index._mapping == { - 'ar': {'docname1_2', 'docname2_2'}, 'fermion': {'docname1_2', 'docname2_2'}, 'comment': {'docname1_2', 'docname2_2'}, 'non': {'docname1_2', 'docname2_2'}, @@ -338,7 +335,6 @@ def test_IndexBuilder(): }, 'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'}, 'terms': { - 'ar': [0, 1], 'comment': [0, 1], 'fermion': [0, 1], 'index': [0, 1], @@ -466,7 +462,7 @@ def assert_is_sorted( assert_is_sorted(child, f'{path}[{i}]') -@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS) +@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS, ids=lambda p: p.name) def test_check_js_search_indexes(make_app, sphinx_test_tempdir, directory): app = make_app( 'html', From 75400aff91758c3f605a81a8d2afb4ff6a304d49 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 19 May 2025 23:49:41 +0100 Subject: [PATCH 072/435] Update stemming and Snowball (#13561) --- CHANGES.rst | 2 + doc/internals/contributing.rst | 12 +- sphinx/search/__init__.py | 23 +- sphinx/search/_stopwords/da.py | 3 + sphinx/search/_stopwords/da.txt | 9 +- sphinx/search/_stopwords/de.py | 3 + sphinx/search/_stopwords/de.txt | 9 +- sphinx/search/_stopwords/en.py | 148 +++++++++- sphinx/search/_stopwords/en.txt | 310 +++++++++++++++++++++ 
sphinx/search/_stopwords/es.py | 3 + sphinx/search/_stopwords/es.txt | 11 +- sphinx/search/_stopwords/fi.py | 4 + sphinx/search/_stopwords/fi.txt | 4 +- sphinx/search/_stopwords/fr.py | 13 +- sphinx/search/_stopwords/fr.txt | 25 +- sphinx/search/_stopwords/hu.py | 3 + sphinx/search/_stopwords/hu.txt | 4 +- sphinx/search/_stopwords/it.py | 4 + sphinx/search/_stopwords/it.txt | 6 +- sphinx/search/_stopwords/nl.py | 3 + sphinx/search/_stopwords/nl.txt | 12 +- sphinx/search/_stopwords/no.py | 3 + sphinx/search/_stopwords/no.txt | 16 +- sphinx/search/_stopwords/pt.py | 3 + sphinx/search/_stopwords/pt.txt | 11 +- sphinx/search/_stopwords/ru.py | 3 + sphinx/search/_stopwords/ru.txt | 11 +- sphinx/search/_stopwords/sv.py | 5 +- sphinx/search/_stopwords/sv.txt | 13 +- tests/js/fixtures/cpp/searchindex.js | 2 +- tests/js/fixtures/multiterm/searchindex.js | 2 +- tests/js/fixtures/partial/searchindex.js | 2 +- tests/js/fixtures/titles/searchindex.js | 2 +- utils/generate_snowball.py | 131 +++++++++ 34 files changed, 751 insertions(+), 64 deletions(-) create mode 100644 sphinx/search/_stopwords/en.txt create mode 100755 utils/generate_snowball.py diff --git a/CHANGES.rst b/CHANGES.rst index d26a93871a5..c0ed8089a60 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -20,6 +20,8 @@ Features added ``linkcheck_allowed_redirects = {}``. Patch by Adam Turner. * #13497: Support C domain objects in the table of contents. +* #13535: html search: Update to the latest version of Snowball (v3.0.1). + Patch by Adam Turner. 
Bugs fixed ---------- diff --git a/doc/internals/contributing.rst b/doc/internals/contributing.rst index 4b8ca84a945..de4224d7bc3 100644 --- a/doc/internals/contributing.rst +++ b/doc/internals/contributing.rst @@ -337,13 +337,15 @@ Updating generated files ------------------------ * JavaScript stemming algorithms in :file:`sphinx/search/non-minified-js/*.js` - are generated using `snowball `_ - by cloning the repository, executing ``make dist_libstemmer_js`` and then - unpacking the tarball which is generated in :file:`dist` directory. + and stopword files in :file:`sphinx/search/_stopwords/` + are generated from the `Snowball project`_ + by running :file:`utils/generate_snowball.py`. Minified files in :file:`sphinx/search/minified-js/*.js` are generated from - non-minified ones using :program:`uglifyjs` (installed via npm), with ``-m`` - option to enable mangling. + non-minified ones using :program:`uglifyjs` (installed via npm). + See :file:`sphinx/search/minified-js/README.rst`. + + .. _Snowball project: https://snowballstem.org/ * The :file:`searchindex.js` files found in the :file:`tests/js/fixtures/*` directories diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 1cb05bea0e2..cc997bf6456 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -117,10 +117,7 @@ def word_filter(self, word: str) -> bool: """Return true if the target word should be registered in the search index. This method is called after stemming. 
""" - return len(word) == 0 or not ( - ((len(word) < 3) and (12353 < ord(word[0]) < 12436)) - or (ord(word[0]) < 256 and (word in self.stopwords)) - ) + return not word.isdigit() and word not in self.stopwords # SearchEnglish imported after SearchLanguage is defined due to circular import @@ -583,17 +580,17 @@ def get_js_stemmer_rawcode(self) -> str | None: def get_js_stemmer_code(self) -> str: """Returns JS code that will be inserted into language_data.js.""" - if self.lang.js_stemmer_rawcode: - base_js_path = _NON_MINIFIED_JS_PATH / 'base-stemmer.js' - language_js_path = _NON_MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode - base_js = base_js_path.read_text(encoding='utf-8') - language_js = language_js_path.read_text(encoding='utf-8') - return ( - f'{base_js}\n{language_js}\nStemmer = {self.lang.language_name}Stemmer;' - ) - else: + if not self.lang.js_stemmer_rawcode: return self.lang.js_stemmer_code + base_js_path = _MINIFIED_JS_PATH / 'base-stemmer.js' + language_js_path = _MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode + return '\n'.join(( + base_js_path.read_text(encoding='utf-8'), + language_js_path.read_text(encoding='utf-8'), + f'window.Stemmer = {self.lang.language_name}Stemmer;', + )) + def _feed_visit_nodes( node: nodes.Node, diff --git a/sphinx/search/_stopwords/da.py b/sphinx/search/_stopwords/da.py index c31a51c6df2..de8fa937b8a 100644 --- a/sphinx/search/_stopwords/da.py +++ b/sphinx/search/_stopwords/da.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/danish/stop.txt + from __future__ import annotations DANISH_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/da.txt b/sphinx/search/_stopwords/da.txt index 6f2bd01afc2..37052042642 100644 --- a/sphinx/search/_stopwords/da.txt +++ b/sphinx/search/_stopwords/da.txt @@ -1,4 +1,11 @@ -| source: https://snowballstem.org/algorithms/danish/stop.txt + + | A Danish stop word list. Comments begin with vertical bar. 
Each stop + | word is at the start of a line. + + | This is a ranked list (commonest to rarest) of stopwords derived from + | a large text sample. + + og | and i | in jeg | I diff --git a/sphinx/search/_stopwords/de.py b/sphinx/search/_stopwords/de.py index 26ee3322ff3..d37e2105288 100644 --- a/sphinx/search/_stopwords/de.py +++ b/sphinx/search/_stopwords/de.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/german/stop.txt + from __future__ import annotations GERMAN_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/de.txt b/sphinx/search/_stopwords/de.txt index 94c4777bd05..c8935ae61c7 100644 --- a/sphinx/search/_stopwords/de.txt +++ b/sphinx/search/_stopwords/de.txt @@ -1,4 +1,11 @@ -|source: https://snowballstem.org/algorithms/german/stop.txt + + | A German stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | The number of forms in this list is reduced significantly by passing it + | through the German stemmer. 
+ + aber | but alle | all diff --git a/sphinx/search/_stopwords/en.py b/sphinx/search/_stopwords/en.py index 01bac4cf14e..a4d9f800a02 100644 --- a/sphinx/search/_stopwords/en.py +++ b/sphinx/search/_stopwords/en.py @@ -1,37 +1,181 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/english/stop.txt + from __future__ import annotations ENGLISH_STOPWORDS = frozenset({ 'a', + 'about', + 'above', + 'after', + 'again', + 'against', + 'all', + 'am', + 'an', 'and', + 'any', 'are', + "aren't", 'as', 'at', 'be', + 'because', + 'been', + 'before', + 'being', + 'below', + 'between', + 'both', 'but', 'by', + "can't", + 'cannot', + 'could', + "couldn't", + 'did', + "didn't", + 'do', + 'does', + "doesn't", + 'doing', + "don't", + 'down', + 'during', + 'each', + 'few', 'for', + 'from', + 'further', + 'had', + "hadn't", + 'has', + "hasn't", + 'have', + "haven't", + 'having', + 'he', + "he'd", + "he'll", + "he's", + 'her', + 'here', + "here's", + 'hers', + 'herself', + 'him', + 'himself', + 'his', + 'how', + "how's", + 'i', + "i'd", + "i'll", + "i'm", + "i've", 'if', 'in', 'into', 'is', + "isn't", 'it', - 'near', + "it's", + 'its', + 'itself', + "let's", + 'me', + 'more', + 'most', + "mustn't", + 'my', + 'myself', 'no', + 'nor', 'not', 'of', + 'off', 'on', + 'once', + 'only', 'or', + 'other', + 'ought', + 'our', + 'ours', + 'ourselves', + 'out', + 'over', + 'own', + 'same', + "shan't", + 'she', + "she'd", + "she'll", + "she's", + 'should', + "shouldn't", + 'so', + 'some', 'such', + 'than', 'that', + "that's", 'the', 'their', + 'theirs', + 'them', + 'themselves', 'then', 'there', + "there's", 'these', 'they', + "they'd", + "they'll", + "they're", + "they've", 'this', + 'those', + 'through', 'to', + 'too', + 'under', + 'until', + 'up', + 'very', 'was', - 'will', + "wasn't", + 'we', + "we'd", + "we'll", + "we're", + "we've", + 'were', + "weren't", + 'what', + "what's", + 'when', + "when's", + 'where', + "where's", + 'which', + 
'while', + 'who', + "who's", + 'whom', + 'why', + "why's", 'with', + "won't", + 'would', + "wouldn't", + 'you', + "you'd", + "you'll", + "you're", + "you've", + 'your', + 'yours', + 'yourself', + 'yourselves', }) diff --git a/sphinx/search/_stopwords/en.txt b/sphinx/search/_stopwords/en.txt new file mode 100644 index 00000000000..e40c8c8cd6e --- /dev/null +++ b/sphinx/search/_stopwords/en.txt @@ -0,0 +1,310 @@ + + | An English stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | Many of the forms below are quite rare (e.g. "yourselves") but included for + | completeness. + + | PRONOUNS FORMS + | 1st person sing + +i | subject, always in upper case of course + +me | object +my | possessive adjective + | the possessive pronoun `mine' is best suppressed, because of the + | sense of coal-mine etc. +myself | reflexive + | 1st person plural +we | subject + +| us | object + | care is required here because US = United States. It is usually + | safe to remove it if it is in lower case. +our | possessive adjective +ours | possessive pronoun +ourselves | reflexive + | second person (archaic `thou' forms not included) +you | subject and object +your | possessive adjective +yours | possessive pronoun +yourself | reflexive (singular) +yourselves | reflexive (plural) + | third person singular +he | subject +him | object +his | possessive adjective and pronoun +himself | reflexive + +she | subject +her | object and possessive adjective +hers | possessive pronoun +herself | reflexive + +it | subject and object +its | possessive adjective +itself | reflexive + | third person plural +they | subject +them | object +their | possessive adjective +theirs | possessive pronoun +themselves | reflexive + | other forms (demonstratives, interrogatives) +what +which +who +whom +this +that +these +those + + | VERB FORMS (using F.R. 
Palmer's nomenclature) + | BE +am | 1st person, present +is | -s form (3rd person, present) +are | present +was | 1st person, past +were | past +be | infinitive +been | past participle +being | -ing form + | HAVE +have | simple +has | -s form +had | past +having | -ing form + | DO +do | simple +does | -s form +did | past +doing | -ing form + + | The forms below are best omitted, because of the significant homonym forms: + + | He made a WILL + | old tin CAN + | merry month of MAY + | a smell of MUST + | fight the good fight with all thy MIGHT + + | would, could, should, ought might however be included + + | | AUXILIARIES + | | WILL + |will + +would + + | | SHALL + |shall + +should + + | | CAN + |can + +could + + | | MAY + |may + |might + | | MUST + |must + | | OUGHT + +ought + + | COMPOUND FORMS, increasingly encountered nowadays in 'formal' writing + | pronoun + verb + +i'm +you're +he's +she's +it's +we're +they're +i've +you've +we've +they've +i'd +you'd +he'd +she'd +we'd +they'd +i'll +you'll +he'll +she'll +we'll +they'll + + | verb + negation + +isn't +aren't +wasn't +weren't +hasn't +haven't +hadn't +doesn't +don't +didn't + + | auxiliary + negation + +won't +wouldn't +shan't +shouldn't +can't +cannot +couldn't +mustn't + + | miscellaneous forms + +let's +that's +who's +what's +here's +there's +when's +where's +why's +how's + + | rarer forms + + | daren't needn't + + | doubtful forms + + | oughtn't mightn't + + | ARTICLES +a +an +the + + | THE REST (Overlap among prepositions, conjunctions, adverbs etc is so + | high, that classification is pointless.) 
+and +but +if +or +because +as +until +while + +of +at +by +for +with +about +against +between +into +through +during +before +after +above +below +to +from +up +down +in +out +on +off +over +under + +again +further +then +once + +here +there +when +where +why +how + +all +any +both +each +few +more +most +other +some +such + +no +nor +not +only +own +same +so +than +too +very + + | Just for the record, the following words are among the commonest in English + + | one + | every + | least + | less + | many + | now + | ever + | never + | say + | says + | said + | also + | get + | go + | goes + | just + | made + | make + | put + | see + | seen + | whether + | like + | well + | back + | even + | still + | way + | take + | since + | another + | however + | two + | three + | four + | five + | first + | second + | new + | old + | high + | long diff --git a/sphinx/search/_stopwords/es.py b/sphinx/search/_stopwords/es.py index d70b317d032..5db38b0cd5b 100644 --- a/sphinx/search/_stopwords/es.py +++ b/sphinx/search/_stopwords/es.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/spanish/stop.txt + from __future__ import annotations SPANISH_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/es.txt b/sphinx/search/_stopwords/es.txt index d7047b93164..416c84d225a 100644 --- a/sphinx/search/_stopwords/es.txt +++ b/sphinx/search/_stopwords/es.txt @@ -1,4 +1,13 @@ -|source: https://snowballstem.org/algorithms/spanish/stop.txt + + | A Spanish stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + + | The following is a ranked list (commonest to rarest) of stopwords + | deriving from a large sample of text. + + | Extra words have been added at the end. 
+ de | from, of la | the, her que | who, that diff --git a/sphinx/search/_stopwords/fi.py b/sphinx/search/_stopwords/fi.py index d7586cba227..ed29c8a6f0a 100644 --- a/sphinx/search/_stopwords/fi.py +++ b/sphinx/search/_stopwords/fi.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/finnish/stop.txt + from __future__ import annotations FINNISH_STOPWORDS = frozenset({ @@ -52,6 +55,7 @@ 'jossa', 'josta', 'jota', + 'joten', 'jotka', 'kanssa', 'keiden', diff --git a/sphinx/search/_stopwords/fi.txt b/sphinx/search/_stopwords/fi.txt index 9aff8a79929..5ad14064e58 100644 --- a/sphinx/search/_stopwords/fi.txt +++ b/sphinx/search/_stopwords/fi.txt @@ -1,4 +1,4 @@ -| source: https://snowballstem.org/algorithms/finnish/stop.txt + | forms of BE olla @@ -60,6 +60,7 @@ jotka joiden joita joissa joista joihin joilla joilta joille joina että | that ja | and jos | if +joten | so koska | because kuin | than mutta | but @@ -83,6 +84,5 @@ yli | over, across | other kun | when -niin | so nyt | now itse | self diff --git a/sphinx/search/_stopwords/fr.py b/sphinx/search/_stopwords/fr.py index 7dfd86d7445..cb2e5ef9501 100644 --- a/sphinx/search/_stopwords/fr.py +++ b/sphinx/search/_stopwords/fr.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/french/stop.txt + from __future__ import annotations FRENCH_STOPWORDS = frozenset({ @@ -6,14 +9,11 @@ 'aient', 'aies', 'ait', - 'as', 'au', - 'aura', 'aurai', 'auraient', 'aurais', 'aurait', - 'auras', 'aurez', 'auriez', 'aurions', @@ -26,7 +26,6 @@ 'avec', 'avez', 'aviez', - 'avions', 'avons', 'ayant', 'ayez', @@ -47,7 +46,6 @@ 'elle', 'en', 'es', - 'est', 'et', 'eu', 'eue', @@ -73,7 +71,6 @@ 'fussions', 'fut', 'fûmes', - 'fût', 'fûtes', 'ici', 'il', @@ -133,8 +130,6 @@ 'soient', 'sois', 'soit', - 'sommes', - 'son', 'sont', 'soyez', 'soyons', @@ -160,9 +155,7 @@ 'étant', 'étiez', 'étions', - 'été', 'étée', 
'étées', - 'étés', 'êtes', }) diff --git a/sphinx/search/_stopwords/fr.txt b/sphinx/search/_stopwords/fr.txt index 7839ab57c86..9cb744c3c25 100644 --- a/sphinx/search/_stopwords/fr.txt +++ b/sphinx/search/_stopwords/fr.txt @@ -1,4 +1,7 @@ -| source: https://snowballstem.org/algorithms/french/stop.txt + + | A French stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + au | a + le aux | a + les avec | with @@ -40,7 +43,7 @@ qui | who sa | his, her (fem) se | oneself ses | his (pl) -son | his, her (masc) + | son | his, her (masc). Omitted because it is homonym of "sound" sur | on ta | thy (fem) te | thee @@ -68,15 +71,15 @@ t | t' y | there | forms of être (not including the infinitive): -été + | été - Omitted because it is homonym of "summer" étée étées -étés + | étés - Omitted because it is homonym of "summers" étant suis es -est -sommes + | est - Omitted because it is homonym of "east" + | sommes - Omitted because it is homonym of "sums" êtes sont serai @@ -107,7 +110,7 @@ soyez soient fusse fusses -fût + | fût - Omitted because it is homonym of "tap", like in "beer on tap" fussions fussiez fussent @@ -119,13 +122,13 @@ eue eues eus ai -as + | as - Omitted because it is homonym of "ace" avons avez ont aurai -auras -aura + | auras - Omitted because it is also the name of a kind of wind + | aura - Omitted because it is also the name of a kind of wind and homonym of "aura" aurons aurez auront @@ -136,7 +139,7 @@ auriez auraient avais avait -avions + | avions - Omitted because it is homonym of "planes" aviez avaient eut diff --git a/sphinx/search/_stopwords/hu.py b/sphinx/search/_stopwords/hu.py index 83bee011b0f..8f41b67b28a 100644 --- a/sphinx/search/_stopwords/hu.py +++ b/sphinx/search/_stopwords/hu.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/hungarian/stop.txt + from __future__ import annotations HUNGARIAN_STOPWORDS = frozenset({ diff --git 
a/sphinx/search/_stopwords/hu.txt b/sphinx/search/_stopwords/hu.txt index 658c6194f27..2599a8d1b96 100644 --- a/sphinx/search/_stopwords/hu.txt +++ b/sphinx/search/_stopwords/hu.txt @@ -1,5 +1,7 @@ -| source: https://snowballstem.org/algorithms/hungarian/stop.txt + +| Hungarian stop word list | prepared by Anna Tordai + a ahogy ahol diff --git a/sphinx/search/_stopwords/it.py b/sphinx/search/_stopwords/it.py index 4b0f522ac94..873a2c1f77b 100644 --- a/sphinx/search/_stopwords/it.py +++ b/sphinx/search/_stopwords/it.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/italian/stop.txt + from __future__ import annotations ITALIAN_STOPWORDS = frozenset({ @@ -213,6 +216,7 @@ 'sono', 'sta', 'stai', + 'stando', 'stanno', 'starai', 'staranno', diff --git a/sphinx/search/_stopwords/it.txt b/sphinx/search/_stopwords/it.txt index c8776836110..a20bb9528a5 100644 --- a/sphinx/search/_stopwords/it.txt +++ b/sphinx/search/_stopwords/it.txt @@ -1,4 +1,7 @@ -| source: https://snowballstem.org/algorithms/italian/stop.txt + + | An Italian stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. 
+ ad | a (to) before vowel al | a + il allo | a + lo @@ -289,3 +292,4 @@ stessi stesse stessimo stessero +stando diff --git a/sphinx/search/_stopwords/nl.py b/sphinx/search/_stopwords/nl.py index 1742ec8dad2..b8b9a4dcfcd 100644 --- a/sphinx/search/_stopwords/nl.py +++ b/sphinx/search/_stopwords/nl.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/dutch/stop.txt + from __future__ import annotations DUTCH_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/nl.txt b/sphinx/search/_stopwords/nl.txt index 64336d0623b..edf99730a2c 100644 --- a/sphinx/search/_stopwords/nl.txt +++ b/sphinx/search/_stopwords/nl.txt @@ -1,4 +1,14 @@ -| source: https://snowballstem.org/algorithms/dutch/stop.txt + + + | A Dutch stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | This is a ranked list (commonest to rarest) of stopwords derived from + | a large sample of Dutch text. + + | Dutch stop words frequently exhibit homonym clashes. These are indicated + | clearly below. + de | the en | and van | of, from diff --git a/sphinx/search/_stopwords/no.py b/sphinx/search/_stopwords/no.py index 9b9bfbea4c9..d06cfc4d798 100644 --- a/sphinx/search/_stopwords/no.py +++ b/sphinx/search/_stopwords/no.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/norwegian/stop.txt + from __future__ import annotations NORWEGIAN_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/no.txt b/sphinx/search/_stopwords/no.txt index 552ad326a55..c3d5da01e72 100644 --- a/sphinx/search/_stopwords/no.txt +++ b/sphinx/search/_stopwords/no.txt @@ -1,4 +1,12 @@ -| source: https://snowballstem.org/algorithms/norwegian/stop.txt + + | A Norwegian stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | This stop word list is for the dominant bokmål dialect. 
Words unique + | to nynorsk are marked *. + + | Revised by Jan Bruusgaard , Jan 2005 + og | and i | in jeg | I @@ -9,7 +17,7 @@ et | a/an den | it/this/that til | to er | is/am/are -som | who/that +som | who/which/that på | on de | they / you(formal) med | with @@ -68,7 +76,6 @@ noen | some noe | some ville | would dere | you -som | who/which/that deres | their/theirs kun | only/just ja | yes @@ -113,7 +120,6 @@ mange | many også | also slik | just vært | been -være | to be båe | both * begge | both siden | since @@ -139,7 +145,6 @@ hennar | her/hers hennes | hers hoss | how * hossen | how * -ikkje | not * ingi | noone * inkje | noone * korleis | how * @@ -161,7 +166,6 @@ noka | some (fem.) * nokor | some * noko | some * nokre | some * -si | his/hers * sia | since * sidan | since * so | so * diff --git a/sphinx/search/_stopwords/pt.py b/sphinx/search/_stopwords/pt.py index b79799d42a6..17b7f8ec733 100644 --- a/sphinx/search/_stopwords/pt.py +++ b/sphinx/search/_stopwords/pt.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/portuguese/stop.txt + from __future__ import annotations PORTUGUESE_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/pt.txt b/sphinx/search/_stopwords/pt.txt index 5ef15633d81..9c3c9ac76d7 100644 --- a/sphinx/search/_stopwords/pt.txt +++ b/sphinx/search/_stopwords/pt.txt @@ -1,4 +1,13 @@ -| source: https://snowballstem.org/algorithms/portuguese/stop.txt + + | A Portuguese stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + + | The following is a ranked list (commonest to rarest) of stopwords + | deriving from a large sample of text. + + | Extra words have been added at the end. 
+ de | of, from a | the; to, at; her o | the; him diff --git a/sphinx/search/_stopwords/ru.py b/sphinx/search/_stopwords/ru.py index cc275d5184a..ccd0be5badd 100644 --- a/sphinx/search/_stopwords/ru.py +++ b/sphinx/search/_stopwords/ru.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/russian/stop.txt + from __future__ import annotations RUSSIAN_STOPWORDS = frozenset({ diff --git a/sphinx/search/_stopwords/ru.txt b/sphinx/search/_stopwords/ru.txt index 43a73af0b55..96abb77073e 100644 --- a/sphinx/search/_stopwords/ru.txt +++ b/sphinx/search/_stopwords/ru.txt @@ -1,4 +1,13 @@ -| source: https://snowballstem.org/algorithms/russian/stop.txt + + + | a russian stop word list. comments begin with vertical bar. each stop + | word is at the start of a line. + + | this is a ranked list (commonest to rarest) of stopwords derived from + | a large text sample. + + | letter `ё' is translated to `е'. + и | and в | in/into во | alternative form diff --git a/sphinx/search/_stopwords/sv.py b/sphinx/search/_stopwords/sv.py index c1f10635e0b..0c6f48d2703 100644 --- a/sphinx/search/_stopwords/sv.py +++ b/sphinx/search/_stopwords/sv.py @@ -1,3 +1,6 @@ +# automatically generated by utils/generate-snowball.py +# from https://snowballstem.org/algorithms/swedish/stop.txt + from __future__ import annotations SWEDISH_STOPWORDS = frozenset({ @@ -80,7 +83,7 @@ 'sig', 'sin', 'sina', - 'sitta', + 'sitt', 'själv', 'skulle', 'som', diff --git a/sphinx/search/_stopwords/sv.txt b/sphinx/search/_stopwords/sv.txt index 850ae7474d6..77924c68dfe 100644 --- a/sphinx/search/_stopwords/sv.txt +++ b/sphinx/search/_stopwords/sv.txt @@ -1,4 +1,13 @@ -| source: https://snowballstem.org/algorithms/swedish/stop.txt + + | A Swedish stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | This is a ranked list (commonest to rarest) of stopwords derived from + | a large text sample. 
+ + | Swedish stop words occasionally exhibit homonym clashes. For example + | så = so, but also seed. These are indicated clearly below. + och | and det | it, this/that att | to (with infinitive) @@ -103,7 +112,7 @@ vilka | who, that ditt | thy vem | who vilket | who, that -sitta | his +sitt | his sådana | such a vart | each dina | thy diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js index 81f14cc1895..6c50cc9d99d 100644 --- a/tests/js/fixtures/cpp/searchindex.js +++ b/tests/js/fixtures/cpp/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}}) \ No newline at end of file +Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ 
class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0},"titles":["<no title>"],"titleterms":{}}) \ No newline at end of file diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js index 2f3f5ec39a1..a3a52b8cf14 100644 --- a/tests/js/fixtures/multiterm/searchindex.js +++ b/tests/js/fixtures/multiterm/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}}) \ No newline at end of file +Search.setIndex({"alltitles":{"Main 
Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"document":0,"doesn":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"will":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}}) \ No newline at end of file diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js index 5eb299eea63..02863d73d83 100644 --- a/tests/js/fixtures/partial/searchindex.js +++ b/tests/js/fixtures/partial/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"titl":0,"use":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}}) \ No newline at end of file 
+Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"term":0,"titl":0,"use":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}}) \ No newline at end of file diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js index fa59e11c884..9faeadf76c6 100644 --- a/tests/js/fixtures/titles/searchindex.js +++ b/tests/js/fixtures/titles/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python 
module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"handl":0,"happen":1,"has":1,"head":0,"help":0,"high":[0,1],"how":0,"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"same":1,"say":0,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"them":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}}) \ No newline at end of file +Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result 
Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"align":0,"also":1,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"function":1,"handl":0,"happen":1,"head":0,"help":0,"high":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"say":0,"search":[0,1],"seem":0,"softwar":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"time":
0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"whether":1,"will":0,"within":0,"word":0},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}}) \ No newline at end of file diff --git a/utils/generate_snowball.py b/utils/generate_snowball.py new file mode 100755 index 00000000000..f59183d7f21 --- /dev/null +++ b/utils/generate_snowball.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 + +"""Refresh and update language stemming data from the Snowball project.""" + +# /// script +# requires-python = ">=3.11" +# dependencies = [ +# "requests>=2.30", +# ] +# /// + +from __future__ import annotations + +import hashlib +import shutil +import subprocess +import sys +import tarfile +import tempfile +from io import BytesIO +from pathlib import Path + +import requests + +SNOWBALL_VERSION = '3.0.1' +SNOWBALL_URL = f'https://github.com/snowballstem/snowball/archive/refs/tags/v{SNOWBALL_VERSION}.tar.gz' +SNOWBALL_SHA256 = '80ac10ce40dc4fcfbfed8d085c457b5613da0e86a73611a3d5527d044a142d60' + +ROOT = Path(__file__).resolve().parent.parent +SEARCH_DIR = ROOT / 'sphinx' / 'search' +STOPWORDS_DIR = SEARCH_DIR / '_stopwords' +NON_MINIFIED_JS_DIR = SEARCH_DIR / 'non-minified-js' + +STOPWORD_URLS = ( + ('da', 'danish', 'https://snowballstem.org/algorithms/danish/stop.txt'), + ('de', 'german', 'https://snowballstem.org/algorithms/german/stop.txt'), + ('en', 'english', 'https://snowballstem.org/algorithms/english/stop.txt'), + ('es', 'spanish', 'https://snowballstem.org/algorithms/spanish/stop.txt'), + ('fi', 'finnish', 'https://snowballstem.org/algorithms/finnish/stop.txt'), + ('fr', 'french', 'https://snowballstem.org/algorithms/french/stop.txt'), + ('hu', 'hungarian', 'https://snowballstem.org/algorithms/hungarian/stop.txt'), + ('it', 'italian', 'https://snowballstem.org/algorithms/italian/stop.txt'), + ('nl', 'dutch', 'https://snowballstem.org/algorithms/dutch/stop.txt'), + ('no', 'norwegian', 
'https://snowballstem.org/algorithms/norwegian/stop.txt'), + ('pt', 'portuguese', 'https://snowballstem.org/algorithms/portuguese/stop.txt'), + # ('ro', 'romanian', ''), + ('ru', 'russian', 'https://snowballstem.org/algorithms/russian/stop.txt'), + ('sv', 'swedish', 'https://snowballstem.org/algorithms/swedish/stop.txt'), + # ('tr', 'turkish', ''), +) + + +def regenerate_stopwords() -> None: + STOPWORDS_DIR.mkdir(parents=True, exist_ok=True) + STOPWORDS_DIR.joinpath('__init__.py').touch() + + for lang_code, lang_name, url in STOPWORD_URLS: + data = requests.get(url, timeout=5).content.decode('utf-8') + + # record the original source of the stopwords list + txt_path = STOPWORDS_DIR / f'{lang_code}.txt' + txt_path.write_text(data.rstrip() + '\n', encoding='utf-8') + + # generate the Python stopwords set + stopwords = parse_stop_word(data) + with (STOPWORDS_DIR / f'{lang_code}.py').open('w', encoding='utf-8') as f: + f.write('# automatically generated by utils/generate-snowball.py\n') + f.write(f'# from {url}\n\n') + f.write('from __future__ import annotations\n\n') + f.write(f'{lang_name.upper()}_STOPWORDS = frozenset(') + if stopwords: + f.write('{\n') + for word in sorted(stopwords, key=str.casefold): + f.write(f' {word!r},\n') + f.write('}') + f.write(')\n') + + +def parse_stop_word(source: str) -> frozenset[str]: + """Collect the stopwords from a snowball style word list: + + .. 
code:: text + + list of space separated stop words | optional comment + """ + stop_words: set[str] = set() + for line in source.splitlines(): + stop_words.update(line.partition('|')[0].split()) # remove comment + return frozenset(stop_words) + + +def regenerate_javascript() -> None: + tmp_root = Path(tempfile.mkdtemp()) + + # Download and verify the snowball release + archive = requests.get(SNOWBALL_URL, timeout=60).content + digest = hashlib.sha256(archive).hexdigest() + if digest != SNOWBALL_SHA256: + msg = ( + f'data does not match expected checksum ' + f'(expected {SNOWBALL_SHA256}, saw {digest}).' + ) + raise RuntimeError(msg) + + # Extract the release archive + with tarfile.TarFile.gzopen( + 'snowball.tar.gz', mode='r', fileobj=BytesIO(archive) + ) as tar: + tar.extractall(tmp_root, filter='data') + snowball_root = tmp_root / f'snowball-{SNOWBALL_VERSION}' + snowball_dist = snowball_root / 'dist' + + # Generate JS stemmer files + cmd = ('make', '--jobs=8', 'dist_libstemmer_js') + subprocess.run(cmd, check=True, cwd=snowball_root) + with tarfile.open(snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}.tar.gz') as tar: + tar.extractall(snowball_dist, filter='data') + + # Copy generated JS to sphinx/search/ + NON_MINIFIED_JS_DIR.mkdir(exist_ok=True) + js_dir = snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}' / 'javascript' + shutil.copytree(js_dir, NON_MINIFIED_JS_DIR, dirs_exist_ok=True) + + # Clean up + shutil.rmtree(snowball_root) + + +if __name__ == '__main__': + regenerate_stopwords() + if sys.platform != 'win32': + regenerate_javascript() From 0eae573102502030de6523def99f0b52d2ca2f41 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 20 May 2025 00:05:07 +0100 Subject: [PATCH 073/435] Remove ``section_parents`` (#13560) Docutils revision r10129 [1] removes the member from the namespace. 
[1]: https://sourceforge.net/p/docutils/code/10129/ --- sphinx/directives/other.py | 14 +------------- sphinx/util/parsing.py | 12 ------------ .../test_util_docutils_sphinx_directive.py | 7 ------- 3 files changed, 1 insertion(+), 32 deletions(-) diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py index 5d6f5b778a6..8c66ed383b5 100644 --- a/sphinx/directives/other.py +++ b/sphinx/directives/other.py @@ -5,7 +5,6 @@ from pathlib import Path from typing import TYPE_CHECKING, cast -import docutils from docutils import nodes from docutils.parsers.rst import directives from docutils.parsers.rst.directives.misc import Class @@ -22,7 +21,7 @@ if TYPE_CHECKING: from collections.abc import Sequence - from typing import Any, ClassVar, Final + from typing import Any, ClassVar from docutils.nodes import Element, Node @@ -30,7 +29,6 @@ from sphinx.util.typing import ExtensionMetadata, OptionSpec -DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0) glob_re = re.compile(r'.*[*?\[].*') logger = logging.getLogger(__name__) @@ -332,14 +330,6 @@ def run(self) -> list[Node]: surrounding_section_level = memo.section_level memo.title_styles = [] memo.section_level = 0 - if DU_22_PLUS: - # https://github.com/sphinx-doc/sphinx/issues/13539 - # https://sourceforge.net/p/docutils/code/10093/ - # https://sourceforge.net/p/docutils/patches/213/ - surrounding_section_parents = memo.section_parents - memo.section_parents = [] - else: - surrounding_section_parents = [] try: self.state.nested_parse( self.content, self.content_offset, node, match_titles=True @@ -375,8 +365,6 @@ def run(self) -> list[Node]: return [] finally: memo.title_styles = surrounding_title_styles - if DU_22_PLUS: - memo.section_parents = surrounding_section_parents memo.section_level = surrounding_section_level diff --git a/sphinx/util/parsing.py b/sphinx/util/parsing.py index ec6649fc247..4c4a6477683 100644 --- a/sphinx/util/parsing.py +++ b/sphinx/util/parsing.py @@ -5,19 +5,15 @@ 
import contextlib from typing import TYPE_CHECKING -import docutils from docutils.nodes import Element from docutils.statemachine import StringList, string2lines if TYPE_CHECKING: from collections.abc import Iterator - from typing import Final from docutils.nodes import Node from docutils.parsers.rst.states import RSTState -DU_22_PLUS: Final = docutils.__version_info__ >= (0, 22, 0, 'alpha', 0) - def nested_parse_to_nodes( state: RSTState, @@ -79,23 +75,15 @@ def _fresh_title_style_context(state: RSTState) -> Iterator[None]: memo = state.memo surrounding_title_styles: list[str | tuple[str, str]] = memo.title_styles surrounding_section_level: int = memo.section_level - if DU_22_PLUS: - surrounding_section_parents = memo.section_parents - else: - surrounding_section_parents = [] # clear current title styles memo.title_styles = [] memo.section_level = 0 - if DU_22_PLUS: - memo.section_parents = [] try: yield finally: # reset title styles memo.title_styles = surrounding_title_styles memo.section_level = surrounding_section_level - if DU_22_PLUS: - memo.section_parents = surrounding_section_parents def _text_to_string_list( diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py index 00ea5bc3fb5..8c24a3c4a83 100644 --- a/tests/test_util/test_util_docutils_sphinx_directive.py +++ b/tests/test_util/test_util_docutils_sphinx_directive.py @@ -2,7 +2,6 @@ from types import SimpleNamespace -import docutils from docutils import nodes from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined] from docutils.parsers.rst.states import ( @@ -45,16 +44,10 @@ def make_directive_and_state( reporter=document.reporter, language=english, title_styles=[], - # section_parents=[], # Docutils 0.22+ section_level=0, section_bubble_up_kludge=False, inliner=inliner, ) - if docutils.__version_info__ >= (0, 22, 0, 'alpha', 0): - # https://github.com/sphinx-doc/sphinx/issues/13539 - # 
https://sourceforge.net/p/docutils/code/10093/ - # https://sourceforge.net/p/docutils/patches/213/ - state.memo.section_parents = [] directive = SphinxDirective( name='test_directive', arguments=[], From fb628ccc3dfe72f36baa76a220c4f414e657ff38 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 20 May 2025 01:14:14 +0100 Subject: [PATCH 074/435] Convert stopwords to a JavaScript set (#13575) --- sphinx/themes/basic/static/language_data.js.jinja | 7 ++++--- sphinx/themes/basic/static/searchtools.js | 7 ++----- tests/js/language_data.js | 5 +++-- 3 files changed, 9 insertions(+), 10 deletions(-) diff --git a/sphinx/themes/basic/static/language_data.js.jinja b/sphinx/themes/basic/static/language_data.js.jinja index 64aefa798c7..daefea7eb9e 100644 --- a/sphinx/themes/basic/static/language_data.js.jinja +++ b/sphinx/themes/basic/static/language_data.js.jinja @@ -1,12 +1,13 @@ /* * This script contains the language-specific data used by searchtools.js, - * namely the list of stopwords, stemmer, scorer and splitter. + * namely the set of stopwords, stemmer, scorer and splitter. 
*/ -var stopwords = {{ search_language_stop_words }}; +const stopwords = new Set({{ search_language_stop_words }}); +window.stopwords = stopwords; // Export to global scope {% if search_language_stemming_code %} -/* Non-minified version is copied as a separate JS file, if available */ +/* Non-minified versions are copied as separate JavaScript files, if available */ {{ search_language_stemming_code|safe }} {% endif -%} diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js index 91f4be57fc8..ba5e67aa75e 100644 --- a/sphinx/themes/basic/static/searchtools.js +++ b/sphinx/themes/basic/static/searchtools.js @@ -287,11 +287,8 @@ const Search = { const queryTermLower = queryTerm.toLowerCase(); // maybe skip this "word" - // stopwords array is from language_data.js - if ( - stopwords.indexOf(queryTermLower) !== -1 || - queryTerm.match(/^\d+$/) - ) + // stopwords set is from language_data.js + if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return; // stem the word diff --git a/tests/js/language_data.js b/tests/js/language_data.js index 15e4a8447f0..47c81f4a2a5 100644 --- a/tests/js/language_data.js +++ b/tests/js/language_data.js @@ -3,10 +3,11 @@ * namely the list of stopwords, stemmer, scorer and splitter. */ -var stopwords = []; +const stopwords = new Set([]); +window.stopwords = stopwords; // Export to global scope -/* Non-minified version is copied as a separate JS file, if available */ +/* Non-minified versions are copied as separate JavaScript files, if available */ /** * Dummy stemmer for languages without stemming rules. 
From d71d399d06aed77a01c18953d6192aab4b05eb89 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Tue, 20 May 2025 11:58:25 +1000 Subject: [PATCH 075/435] autodoc: Support ``typing_extensions.{final,overload}`` (#13509) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- CHANGES.rst | 3 ++ sphinx/pycode/parser.py | 42 +++++++++---------- tests/roots/test-ext-autodoc/target/final.py | 11 +++++ .../test-ext-autodoc/target/overload3.py | 18 ++++++++ tests/test_extensions/test_ext_autodoc.py | 34 +++++++++++++++ 5 files changed, 85 insertions(+), 23 deletions(-) create mode 100644 tests/roots/test-ext-autodoc/target/overload3.py diff --git a/CHANGES.rst b/CHANGES.rst index c0ed8089a60..9b86d2df25e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -22,6 +22,9 @@ Features added * #13497: Support C domain objects in the table of contents. * #13535: html search: Update to the latest version of Snowball (v3.0.1). Patch by Adam Turner. +* #13704: autodoc: Detect :py:func:`typing_extensions.overload ` + and :py:func:`~typing.final` decorators. + Patch by Spencer Brown. 
Bugs fixed ---------- diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py index 2390b19d4d3..43081c61f13 100644 --- a/sphinx/pycode/parser.py +++ b/sphinx/pycode/parser.py @@ -247,9 +247,9 @@ def __init__(self, buffers: list[str], encoding: str) -> None: self.deforders: dict[str, int] = {} self.finals: list[str] = [] self.overloads: dict[str, list[Signature]] = {} - self.typing: str | None = None - self.typing_final: str | None = None - self.typing_overload: str | None = None + self.typing_mods: set[str] = set() + self.typing_final_names: set[str] = set() + self.typing_overload_names: set[str] = set() super().__init__() def get_qualname_for(self, name: str) -> list[str] | None: @@ -295,11 +295,8 @@ def add_variable_annotation(self, name: str, annotation: ast.AST) -> None: self.annotations[basename, name] = ast_unparse(annotation) def is_final(self, decorators: list[ast.expr]) -> bool: - final = [] - if self.typing: - final.append('%s.final' % self.typing) - if self.typing_final: - final.append(self.typing_final) + final = {f'{modname}.final' for modname in self.typing_mods} + final |= self.typing_final_names for decorator in decorators: try: @@ -311,11 +308,8 @@ def is_final(self, decorators: list[ast.expr]) -> bool: return False def is_overload(self, decorators: list[ast.expr]) -> bool: - overload = [] - if self.typing: - overload.append('%s.overload' % self.typing) - if self.typing_overload: - overload.append(self.typing_overload) + overload = {f'{modname}.overload' for modname in self.typing_mods} + overload |= self.typing_overload_names for decorator in decorators: try: @@ -348,22 +342,24 @@ def visit_Import(self, node: ast.Import) -> None: for name in node.names: self.add_entry(name.asname or name.name) - if name.name == 'typing': - self.typing = name.asname or name.name - elif name.name == 'typing.final': - self.typing_final = name.asname or name.name - elif name.name == 'typing.overload': - self.typing_overload = name.asname or name.name + if 
name.name in {'typing', 'typing_extensions'}: + self.typing_mods.add(name.asname or name.name) + elif name.name in {'typing.final', 'typing_extensions.final'}: + self.typing_final_names.add(name.asname or name.name) + elif name.name in {'typing.overload', 'typing_extensions.overload'}: + self.typing_overload_names.add(name.asname or name.name) def visit_ImportFrom(self, node: ast.ImportFrom) -> None: """Handles Import node and record the order of definitions.""" for name in node.names: self.add_entry(name.asname or name.name) - if node.module == 'typing' and name.name == 'final': - self.typing_final = name.asname or name.name - elif node.module == 'typing' and name.name == 'overload': - self.typing_overload = name.asname or name.name + if node.module not in {'typing', 'typing_extensions'}: + continue + if name.name == 'final': + self.typing_final_names.add(name.asname or name.name) + elif name.name == 'overload': + self.typing_overload_names.add(name.asname or name.name) def visit_Assign(self, node: ast.Assign) -> None: """Handles Assign node and pick up a variable comment.""" diff --git a/tests/roots/test-ext-autodoc/target/final.py b/tests/roots/test-ext-autodoc/target/final.py index a8c3860e384..bd233abb580 100644 --- a/tests/roots/test-ext-autodoc/target/final.py +++ b/tests/roots/test-ext-autodoc/target/final.py @@ -3,6 +3,9 @@ import typing from typing import final +import typing_extensions +from typing_extensions import final as final_ext # noqa: UP035 + @typing.final class Class: @@ -14,3 +17,11 @@ def meth1(self): def meth2(self): """docstring""" + + @final_ext + def meth3(self): + """docstring""" + + @typing_extensions.final + def meth4(self): + """docstring""" diff --git a/tests/roots/test-ext-autodoc/target/overload3.py b/tests/roots/test-ext-autodoc/target/overload3.py new file mode 100644 index 00000000000..a3cc34a9f85 --- /dev/null +++ b/tests/roots/test-ext-autodoc/target/overload3.py @@ -0,0 +1,18 @@ +import typing +from typing import 
TYPE_CHECKING, overload + +import typing_extensions +from typing_extensions import overload as over_ext # noqa: UP035 + + +@overload +def test(x: int) -> int: ... +@typing.overload +def test(x: list[int]) -> list[int]: ... +@over_ext +def test(x: str) -> str: ... +@typing_extensions.overload +def test(x: float) -> float: ... +def test(x): + """Documentation.""" + return x diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_extensions/test_ext_autodoc.py index 7aa12db3c32..d7c41291e01 100644 --- a/tests/test_extensions/test_ext_autodoc.py +++ b/tests/test_extensions/test_ext_autodoc.py @@ -2823,6 +2823,20 @@ def test_final(app): '', ' docstring', '', + '', + ' .. py:method:: Class.meth3()', + ' :module: target.final', + ' :final:', + '', + ' docstring', + '', + '', + ' .. py:method:: Class.meth4()', + ' :module: target.final', + ' :final:', + '', + ' docstring', + '', ] @@ -2896,6 +2910,26 @@ def test_overload2(app): ] +@pytest.mark.sphinx('html', testroot='ext-autodoc') +def test_overload3(app): + options = {'members': None} + actual = do_autodoc(app, 'module', 'target.overload3', options) + assert list(actual) == [ + '', + '.. py:module:: target.overload3', + '', + '', + '.. 
py:function:: test(x: int) -> int', + ' test(x: list[int]) -> list[int]', + ' test(x: str) -> str', + ' test(x: float) -> float', + ' :module: target.overload3', + '', + ' Documentation.', + '', + ] + + @pytest.mark.sphinx('html', testroot='ext-autodoc') def test_pymodule_for_ModuleLevelDocumenter(app): app.env.ref_context['py:module'] = 'target.classes' From ee0e576aef3779dc330baa0f337e5d2c343d0c00 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Wed, 21 May 2025 00:32:00 +0100 Subject: [PATCH 076/435] Remove mypy overrides for ``test_transforms_move_module_targets`` (#13553) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- pyproject.toml | 1 - .../test_transforms_move_module_targets.py | 11 ++++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 39b18f23104..9e1c15f859b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -283,7 +283,6 @@ module = [ "tests.test_theming.test_templating", "tests.test_theming.test_theming", # tests/test_transforms - "tests.test_transforms.test_transforms_move_module_targets", "tests.test_transforms.test_transforms_post_transforms_images", "tests.test_transforms.test_transforms_reorder_nodes", # tests/test_util diff --git a/tests/test_transforms/test_transforms_move_module_targets.py b/tests/test_transforms/test_transforms_move_module_targets.py index f64b7d6a500..839eb615adc 100644 --- a/tests/test_transforms/test_transforms_move_module_targets.py +++ b/tests/test_transforms/test_transforms_move_module_targets.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import TYPE_CHECKING + import pytest from docutils import nodes @@ -7,6 +9,9 @@ from sphinx.testing.util import SphinxTestApp from sphinx.transforms import MoveModuleTargets +if TYPE_CHECKING: + from pathlib import Path + CONTENT_PY = """\ move-module-targets =================== @@ -29,7 +34,7 @@ ], ) @pytest.mark.usefixtures('rollback_sysmodules') -def 
test_move_module_targets(tmp_path, content): +def test_move_module_targets(tmp_path: Path, content: str) -> None: # Test for the MoveModuleTargets transform tmp_path.joinpath('conf.py').touch() tmp_path.joinpath('index.rst').write_text(content, encoding='utf-8') @@ -48,7 +53,7 @@ def test_move_module_targets(tmp_path, content): @pytest.mark.usefixtures('rollback_sysmodules') -def test_move_module_targets_no_section(tmp_path): +def test_move_module_targets_no_section(tmp_path: Path) -> None: # Test for the MoveModuleTargets transform tmp_path.joinpath('conf.py').touch() tmp_path.joinpath('index.rst').write_text( @@ -63,7 +68,7 @@ def test_move_module_targets_no_section(tmp_path): @pytest.mark.usefixtures('rollback_sysmodules') -def test_move_module_targets_disabled(tmp_path): +def test_move_module_targets_disabled(tmp_path: Path) -> None: # Test for the MoveModuleTargets transform tmp_path.joinpath('conf.py').touch() tmp_path.joinpath('index.rst').write_text(CONTENT_PY, encoding='utf-8') From 4dbdf802ce38a48a95e9648f190c00d0349f0443 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Wed, 21 May 2025 00:47:52 +0100 Subject: [PATCH 077/435] Remove mypy overrides for ``test_util_fileutil`` (#13552) --- pyproject.toml | 1 - tests/test_util/test_util_fileutil.py | 9 +++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9e1c15f859b..2a58ec2abe4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -338,7 +338,6 @@ module = [ # tests/test_transforms "tests.test_transforms.test_transforms_post_transforms", # tests/test_util - "tests.test_util.test_util_fileutil", "tests.test_util.test_util_i18n", "tests.test_util.test_util_inspect", "tests.test_util.test_util_logging", diff --git a/tests/test_util/test_util_fileutil.py b/tests/test_util/test_util_fileutil.py index 26b75d82e05..9311be58153 100644 --- a/tests/test_util/test_util_fileutil.py +++ b/tests/test_util/test_util_fileutil.py @@ -12,12 +12,13 @@ from 
sphinx._cli.util.errors import strip_escape_sequences from sphinx.jinja2glue import BuiltinTemplateLoader from sphinx.util.fileutil import _template_basename, copy_asset, copy_asset_file +from sphinx.util.template import BaseRenderer if TYPE_CHECKING: from sphinx.testing.util import SphinxTestApp -class DummyTemplateLoader(BuiltinTemplateLoader): +class DummyTemplateLoader(BuiltinTemplateLoader, BaseRenderer): def __init__(self) -> None: super().__init__() builder = mock.Mock() @@ -26,7 +27,7 @@ def __init__(self) -> None: self.init(builder) -def test_copy_asset_file(tmp_path): +def test_copy_asset_file(tmp_path: Path) -> None: renderer = DummyTemplateLoader() # copy normal file @@ -69,7 +70,7 @@ def test_copy_asset_file(tmp_path): assert (subdir2 / 'asset.txt.jinja').read_text(encoding='utf8') == '# {{var1}} data' -def test_copy_asset(tmp_path): +def test_copy_asset(tmp_path: Path) -> None: renderer = DummyTemplateLoader() # prepare source files @@ -113,7 +114,7 @@ def test_copy_asset(tmp_path): assert sidebar == 'sidebar: baz' # copy with exclusion - def excluded(path): + def excluded(path: str) -> bool: return 'sidebar.html' in path or 'basic.css' in path destdir = tmp_path / 'test3' From 6d151533bc8f1f50c2bc58226ebff3290c9d90ac Mon Sep 17 00:00:00 2001 From: Shengyu Zhang Date: Wed, 21 May 2025 09:32:21 +0800 Subject: [PATCH 078/435] Add tilde prefix support for the ``:py:deco:`` role (#13545) Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> --- CHANGES.rst | 2 ++ sphinx/domains/python/__init__.py | 26 ++++++++++---------------- tests/test_domains/test_domain_py.py | 15 +++++++++++++++ 3 files changed, 27 insertions(+), 16 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9b86d2df25e..57e8e0efdf9 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -31,6 +31,8 @@ Bugs fixed * #13369: Correctly parse and cross-reference unpacked type annotations. Patch by Alicia Garcia-Raboso. 
+* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`. + Patch by Shengyu Zhang and Adam Turner. Testing ------- diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index af923cae70e..fb030991464 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -29,7 +29,7 @@ from collections.abc import Iterable, Iterator, Sequence, Set from typing import Any, ClassVar - from docutils.nodes import Element, Node, TextElement + from docutils.nodes import Element, Node from sphinx.addnodes import desc_signature, pending_xref from sphinx.application import Sphinx @@ -594,23 +594,17 @@ def process_link( class _PyDecoXRefRole(PyXRefRole): - def __init__( + def process_link( self, - fix_parens: bool = False, - lowercase: bool = False, - nodeclass: type[Element] | None = None, - innernodeclass: type[TextElement] | None = None, - warn_dangling: bool = False, - ) -> None: - super().__init__( - fix_parens=True, - lowercase=lowercase, - nodeclass=nodeclass, - innernodeclass=innernodeclass, - warn_dangling=warn_dangling, + env: BuildEnvironment, + refnode: Element, + has_explicit_title: bool, + title: str, + target: str, + ) -> tuple[str, str]: + title, target = super().process_link( + env, refnode, has_explicit_title, title, target ) - - def update_title_and_target(self, title: str, target: str) -> tuple[str, str]: return f'@{title}', target diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py index 262773af35a..14346b0563a 100644 --- a/tests/test_domains/test_domain_py.py +++ b/tests/test_domains/test_domain_py.py @@ -1791,3 +1791,18 @@ def test_pep_695_and_pep_696_whitespaces_in_default(app, tp_list, tptext): text = f'.. py:function:: f{tp_list}() -> Annotated[T, Qux[int]()]' doctree = restructuredtext.parse(app, text) assert doctree.astext() == f'\n\nf{tptext}() -> Annotated[T, Qux[int]()]\n\n' + + +def test_deco_role(app): + text = """\ +.. 
py:decorator:: foo.bar + :no-contents-entry: + :no-index-entry: + :no-typesetting: +""" + + doctree = restructuredtext.parse(app, text + '\n:py:deco:`foo.bar`') + assert doctree.astext() == '\n\n\n\n@foo.bar' + + doctree = restructuredtext.parse(app, text + '\n:py:deco:`~foo.bar`') + assert doctree.astext() == '\n\n\n\n@bar' From ad360fd634c9c3378e1cc1693e6acd4cba29cf72 Mon Sep 17 00:00:00 2001 From: Victor Wheeler Date: Tue, 20 May 2025 19:34:39 -0600 Subject: [PATCH 079/435] Fix parameter lists for two event callback functions (#13516) --- doc/extdev/event_callbacks.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/extdev/event_callbacks.rst b/doc/extdev/event_callbacks.rst index 04eae51be1d..aec9a47e848 100644 --- a/doc/extdev/event_callbacks.rst +++ b/doc/extdev/event_callbacks.rst @@ -70,8 +70,8 @@ Below is an overview of the core event that happens during a build. 14. apply post-transforms (by priority): docutils.document -> docutils.document 15. event.doctree-resolved(app, doctree, docname) - In the event that any reference nodes fail to resolve, the following may emit: - - event.missing-reference(env, node, contnode) - - event.warn-missing-reference(domain, node) + - event.missing-reference(app, env, node, contnode) + - event.warn-missing-reference(app, domain, node) 16. Generate output files 17. 
event.build-finished(app, exception) From d742ddc123978adc96a28cdaf366b33cbc562cf7 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 22 May 2025 05:51:04 +0100 Subject: [PATCH 080/435] Remove mypy overrides for ``tests/test_pycode/test_pycode.py`` (#13585) --- pyproject.toml | 1 - tests/test_pycode/test_pycode.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2a58ec2abe4..cc26c4e7782 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -333,7 +333,6 @@ module = [ # tests/test_intl "tests.test_intl.test_intl", # tests/test_pycode - "tests.test_pycode.test_pycode", "tests.test_pycode.test_pycode_ast", # tests/test_transforms "tests.test_transforms.test_transforms_post_transforms", diff --git a/tests/test_pycode/test_pycode.py b/tests/test_pycode/test_pycode.py index 51b525f7b5b..4caf5019b94 100644 --- a/tests/test_pycode/test_pycode.py +++ b/tests/test_pycode/test_pycode.py @@ -41,7 +41,7 @@ def test_ModuleAnalyzer_for_file() -> None: assert analyzer.srcname == str(SPHINX_MODULE_PATH) -def test_ModuleAnalyzer_for_module(rootdir): +def test_ModuleAnalyzer_for_module(rootdir: Path) -> None: analyzer = ModuleAnalyzer.for_module('sphinx') assert analyzer.modname == 'sphinx' assert analyzer.srcname == str(SPHINX_MODULE_PATH) From 4451a0a18a5448f1a0e2acc68d5b76505fe39267 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 22 May 2025 05:54:49 +0100 Subject: [PATCH 081/435] Remove mypy overrides for ``tests/test_util/test_util.py`` (#13584) --- pyproject.toml | 1 - tests/test_util/test_util.py | 7 ++++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cc26c4e7782..31c1345c9da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -286,7 +286,6 @@ module = [ "tests.test_transforms.test_transforms_post_transforms_images", "tests.test_transforms.test_transforms_reorder_nodes", # tests/test_util - "tests.test_util.test_util", "tests.test_util.test_util_display", 
"tests.test_util.test_util_docutils", "tests.test_util.test_util_inventory", diff --git a/tests/test_util/test_util.py b/tests/test_util/test_util.py index e4881764680..ce403afd0ed 100644 --- a/tests/test_util/test_util.py +++ b/tests/test_util/test_util.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import TYPE_CHECKING + import pytest import sphinx.util @@ -29,8 +31,11 @@ relative_uri, ) +if TYPE_CHECKING: + from pathlib import Path + -def test_ensuredir(tmp_path): +def test_ensuredir(tmp_path: Path) -> None: # Does not raise an exception for an existing directory. ensuredir(tmp_path) From a94fcd9e6602e2f444f53b8aa59575f9a596d362 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 22 May 2025 05:55:52 +0100 Subject: [PATCH 082/435] Remove mypy overrides for ``tests/test_util/test_util_inventory.py`` (#13582) --- pyproject.toml | 1 - tests/test_util/test_util_inventory.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 31c1345c9da..0e7ba8a19b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -288,7 +288,6 @@ module = [ # tests/test_util "tests.test_util.test_util_display", "tests.test_util.test_util_docutils", - "tests.test_util.test_util_inventory", # tests/test_writers "tests.test_writers.test_docutilsconf", ] diff --git a/tests/test_util/test_util_inventory.py b/tests/test_util/test_util_inventory.py index 0cab37d7904..5432d8cc5ca 100644 --- a/tests/test_util/test_util_inventory.py +++ b/tests/test_util/test_util_inventory.py @@ -107,7 +107,7 @@ def _build_inventory(srcdir: Path) -> Path: return app.outdir / 'objects.inv' -def test_inventory_localization(tmp_path): +def test_inventory_localization(tmp_path: Path) -> None: # Build an app using Estonian (EE) locale srcdir_et = _write_appconfig(tmp_path, 'et') inventory_et = _build_inventory(srcdir_et) From 2992f31115e52e6a59b48471dbf26a3761b2dfea Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 22 May 2025 11:19:45 +0100 
Subject: [PATCH 083/435] Remove mypy overrides for ``tests/test_pycode/test_pycode_ast.py`` (#13586) --- pyproject.toml | 2 -- tests/test_pycode/test_pycode_ast.py | 7 ++++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0e7ba8a19b5..1185e9ac977 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -330,8 +330,6 @@ module = [ "tests.test_extensions.test_ext_napoleon_docstring", # tests/test_intl "tests.test_intl.test_intl", - # tests/test_pycode - "tests.test_pycode.test_pycode_ast", # tests/test_transforms "tests.test_transforms.test_transforms_post_transforms", # tests/test_util diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py index 409e5806d1b..9dd8c8f5d17 100644 --- a/tests/test_pycode/test_pycode_ast.py +++ b/tests/test_pycode/test_pycode_ast.py @@ -65,9 +65,10 @@ ('*tuple[str, int]', '*tuple[str, int]'), # Starred ], ) # fmt: skip -def test_unparse(source, expected): - module = ast.parse(source) - assert ast_unparse(module.body[0].value, source) == expected +def test_unparse(source: str, expected: str) -> None: + expr = ast.parse(source).body[0] + assert isinstance(expr, ast.Expr) + assert ast_unparse(expr.value, source) == expected def test_unparse_None() -> None: From 1a69059295e297f987b58918bd40a6c93771f1d8 Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Thu, 22 May 2025 11:20:37 +0100 Subject: [PATCH 084/435] Remove mypy overrides for ``tests/test_util/test_util_display.py`` (#13583) --- pyproject.toml | 1 - tests/test_util/test_util_display.py | 10 +++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1185e9ac977..c16f5f48f83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -286,7 +286,6 @@ module = [ "tests.test_transforms.test_transforms_post_transforms_images", "tests.test_transforms.test_transforms_reorder_nodes", # tests/test_util - "tests.test_util.test_util_display", 
"tests.test_util.test_util_docutils", # tests/test_writers "tests.test_writers.test_docutilsconf", diff --git a/tests/test_util/test_util_display.py b/tests/test_util/test_util_display.py index a3dda71b999..f4fa9c997ca 100644 --- a/tests/test_util/test_util_display.py +++ b/tests/test_util/test_util_display.py @@ -41,7 +41,9 @@ def test_status_iterator_length_0(app: SphinxTestApp) -> None: @pytest.mark.sphinx('dummy', testroot='root') -def test_status_iterator_verbosity_0(app, monkeypatch): +def test_status_iterator_verbosity_0( + app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch +) -> None: monkeypatch.setenv('FORCE_COLOR', '1') logging.setup(app, app.status, app.warning) @@ -59,7 +61,9 @@ def test_status_iterator_verbosity_0(app, monkeypatch): @pytest.mark.sphinx('dummy', testroot='root') -def test_status_iterator_verbosity_1(app, monkeypatch): +def test_status_iterator_verbosity_1( + app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch +) -> None: monkeypatch.setenv('FORCE_COLOR', '1') logging.setup(app, app.status, app.warning) @@ -107,7 +111,7 @@ def test_progress_message(app: SphinxTestApp) -> None: # decorator @progress_message('testing') - def func(): + def func() -> None: logger.info('in func ', nonl=True) func() From 2c2159fb9ef3520c75152532cad5c44c21986562 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 24 May 2025 02:15:04 +0100 Subject: [PATCH 085/435] Adopt Prettier for JavaScript formatting (#13581) --- .github/workflows/lint.yml | 18 ++ .prettierrc.toml | 2 + sphinx/search/__init__.py | 8 +- sphinx/themes/basic/static/doctools.js | 11 +- sphinx/themes/basic/static/searchtools.js | 154 ++++++++++------ .../themes/basic/static/sphinx_highlight.js | 59 +++--- sphinx/themes/scrolls/static/theme_extras.js | 22 +-- tests/js/jasmine-browser.mjs | 22 +-- tests/js/language_data.js | 12 +- tests/js/searchtools.spec.js | 171 ++++++++++-------- tests/js/sphinx_highlight.spec.js | 50 ++--- tox.ini | 6 + 12 
files changed, 314 insertions(+), 221 deletions(-) create mode 100644 .prettierrc.toml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d051e626886..010a6781aef 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -123,3 +123,21 @@ jobs: run: | python -m build . twine check dist/* + + prettier: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "20" + cache: "npm" + - run: > + npx prettier@3.5 + --check + "sphinx/themes/**/*.js" + "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js" + "tests/js/**/*.{js,mjs}" + "!tests/js/fixtures/**" diff --git a/.prettierrc.toml b/.prettierrc.toml new file mode 100644 index 00000000000..1799612bfdd --- /dev/null +++ b/.prettierrc.toml @@ -0,0 +1,2 @@ +# https://prettier.io/docs/options +experimentalOperatorPosition = "start" diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index cc997bf6456..187e6a2f37f 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -81,11 +81,11 @@ class SearchLanguage: /** * Dummy stemmer for languages without stemming rules. 
*/ -var Stemmer = function() { - this.stemWord = function(w) { +var Stemmer = function () { + this.stemWord = function (w) { return w; - } -} + }; +}; """ _word_re = re.compile(r'\w+') diff --git a/sphinx/themes/basic/static/doctools.js b/sphinx/themes/basic/static/doctools.js index 0398ebb9f03..807cdb176c0 100644 --- a/sphinx/themes/basic/static/doctools.js +++ b/sphinx/themes/basic/static/doctools.js @@ -59,7 +59,7 @@ const Documentation = { Object.assign(Documentation.TRANSLATIONS, catalog.messages); Documentation.PLURAL_EXPR = new Function( "n", - `return (${catalog.plural_expr})` + `return (${catalog.plural_expr})`, ); Documentation.LOCALE = catalog.locale; }, @@ -89,7 +89,7 @@ const Documentation = { const togglerElements = document.querySelectorAll("img.toggler"); togglerElements.forEach((el) => - el.addEventListener("click", (event) => toggler(event.currentTarget)) + el.addEventListener("click", (event) => toggler(event.currentTarget)), ); togglerElements.forEach((el) => (el.style.display = "")); if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); @@ -98,14 +98,15 @@ const Documentation = { initOnKeyListeners: () => { // only install a listener if it is really needed if ( - !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && - !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS + && !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS ) return; document.addEventListener("keydown", (event) => { // bail for input elements - if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) + return; // bail with special keys if (event.altKey || event.ctrlKey || event.metaKey) return; diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js index ba5e67aa75e..5a7628a18a2 100644 --- a/sphinx/themes/basic/static/searchtools.js +++ b/sphinx/themes/basic/static/searchtools.js @@ 
-41,11 +41,12 @@ if (typeof Scorer === "undefined") { } // Global search result kind enum, used by themes to style search results. +// prettier-ignore class SearchResultKind { - static get index() { return "index"; } - static get object() { return "object"; } - static get text() { return "text"; } - static get title() { return "title"; } + static get index() { return "index"; } + static get object() { return "object"; } + static get text() { return "text"; } + static get title() { return "title"; } } const _removeChildren = (element) => { @@ -95,20 +96,25 @@ const _displayItem = (item, searchTerms, highlightTerms) => { listItem.appendChild(document.createElement("span")).innerHTML = " (" + descr + ")"; // highlight search terms in the description - if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js - highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); - } - else if (showSearchSummary) + if (SPHINX_HIGHLIGHT_ENABLED) + // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js + highlightTerms.forEach((term) => + _highlightText(listItem, term, "highlighted"), + ); + } else if (showSearchSummary) fetch(requestUrl) .then((responseData) => responseData.text()) .then((data) => { if (data) listItem.appendChild( - Search.makeSearchSummary(data, searchTerms, anchor) + Search.makeSearchSummary(data, searchTerms, anchor), ); // highlight search terms in the summary - if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js - highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + if (SPHINX_HIGHLIGHT_ENABLED) + // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js + highlightTerms.forEach((term) => + _highlightText(listItem, term, "highlighted"), + ); }); Search.output.appendChild(listItem); }; @@ -117,14 +123,14 @@ const _finishSearch = (resultCount) => { Search.title.innerText = _("Search Results"); if (!resultCount) Search.status.innerText = Documentation.gettext( - "Your search did not match any 
documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.", ); else Search.status.innerText = Documentation.ngettext( "Search finished, found one page matching the search query.", "Search finished, found ${resultCount} pages matching the search query.", resultCount, - ).replace('${resultCount}', resultCount); + ).replace("${resultCount}", resultCount); }; const _displayNextItem = ( results, @@ -138,7 +144,7 @@ const _displayNextItem = ( _displayItem(results.pop(), searchTerms, highlightTerms); setTimeout( () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), - 5 + 5, ); } // search finished, update title and status message @@ -170,9 +176,10 @@ const _orderResultsByScoreThenName = (a, b) => { * This is the same as ``\W+`` in Python, preserving the surrogate pair area. */ if (typeof splitQuery === "undefined") { - var splitQuery = (query) => query + var splitQuery = (query) => + query .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) - .filter(term => term) // remove remaining empty strings + .filter((term) => term); // remove remaining empty strings } /** @@ -184,16 +191,23 @@ const Search = { _pulse_status: -1, htmlToText: (htmlString, anchor) => { - const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + const htmlElement = new DOMParser().parseFromString( + htmlString, + "text/html", + ); for (const removalQuery of [".headerlink", "script", "style"]) { - htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + htmlElement.querySelectorAll(removalQuery).forEach((el) => { + el.remove(); + }); } if (anchor) { - const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + const anchorContent = htmlElement.querySelector( + `[role="main"] ${anchor}`, + ); if (anchorContent) return 
anchorContent.textContent; console.warn( - `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`, ); } @@ -202,7 +216,7 @@ const Search = { if (docContent) return docContent.textContent; console.warn( - "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template.", ); return ""; }, @@ -288,8 +302,7 @@ const Search = { // maybe skip this "word" // stopwords set is from language_data.js - if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) - return; + if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return; // stem the word let word = stemmer.stemWord(queryTermLower); @@ -301,8 +314,12 @@ const Search = { } }); - if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js - localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + if (SPHINX_HIGHLIGHT_ENABLED) { + // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js + localStorage.setItem( + "sphinx_highlight_terms", + [...highlightTerms].join(" "), + ); } // console.debug("SEARCH: searching for:"); @@ -315,7 +332,13 @@ const Search = { /** * execute search (requires search index to be loaded) */ - _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + _performSearch: ( + query, + searchTerms, + excludedTerms, + highlightTerms, + objectTerms, + ) => { const filenames = Search._index.filenames; const docNames = Search._index.docnames; const titles = Search._index.titles; @@ -331,10 +354,15 @@ const Search = { const queryLower = query.toLowerCase().trim(); for (const [title, foundTitles] of Object.entries(allTitles)) { - if 
(title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + if ( + title.toLowerCase().trim().includes(queryLower) + && queryLower.length >= title.length / 2 + ) { for (const [file, id] of foundTitles) { - const score = Math.round(Scorer.title * queryLower.length / title.length); - const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + const score = Math.round( + (Scorer.title * queryLower.length) / title.length, + ); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles normalResults.push([ docNames[file], titles[file] !== title ? `${titles[file]} > ${title}` : title, @@ -350,9 +378,9 @@ const Search = { // search for explicit entries in index directives for (const [entry, foundEntries] of Object.entries(indexEntries)) { - if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + if (entry.includes(queryLower) && queryLower.length >= entry.length / 2) { for (const [file, id, isMain] of foundEntries) { - const score = Math.round(100 * queryLower.length / entry.length); + const score = Math.round((100 * queryLower.length) / entry.length); const result = [ docNames[file], titles[file], @@ -373,11 +401,13 @@ const Search = { // lookup as object objectTerms.forEach((term) => - normalResults.push(...Search.performObjectSearch(term, objectTerms)) + normalResults.push(...Search.performObjectSearch(term, objectTerms)), ); // lookup as search terms in fulltext - normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + normalResults.push( + ...Search.performTermsSearch(searchTerms, excludedTerms), + ); // let the scorer override scores with a custom scoring function if (Scorer.score) { @@ -398,7 +428,11 @@ const Search = { // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept let seen = new Set(); results = results.reverse().reduce((acc, result) => { - let resultStr = result.slice(0, 
4).concat([result[5]]).map(v => String(v)).join(','); + let resultStr = result + .slice(0, 4) + .concat([result[5]]) + .map((v) => String(v)) + .join(","); if (!seen.has(resultStr)) { acc.push(result); seen.add(resultStr); @@ -410,8 +444,20 @@ const Search = { }, query: (query) => { - const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); - const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + const [ + searchQuery, + searchTerms, + excludedTerms, + highlightTerms, + objectTerms, + ] = Search._parseQuery(query); + const results = Search._performSearch( + searchQuery, + searchTerms, + excludedTerms, + highlightTerms, + objectTerms, + ); // for debugging //Search.lastresults = results.slice(); // a copy @@ -434,7 +480,7 @@ const Search = { const results = []; const objectSearchCallback = (prefix, match) => { - const name = match[4] + const name = match[4]; const fullname = (prefix ? prefix + "." : "") + name; const fullnameLower = fullname.toLowerCase(); if (fullnameLower.indexOf(object) < 0) return; @@ -486,9 +532,7 @@ const Search = { ]); }; Object.keys(objects).forEach((prefix) => - objects[prefix].forEach((array) => - objectSearchCallback(prefix, array) - ) + objects[prefix].forEach((array) => objectSearchCallback(prefix, array)), ); return results; }, @@ -513,8 +557,14 @@ const Search = { // find documents, if any, containing the query word in their text/title term indices // use Object.hasOwnProperty to avoid mismatching against prototype properties const arr = [ - { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term }, - { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title }, + { + files: terms.hasOwnProperty(word) ? terms[word] : undefined, + score: Scorer.term, + }, + { + files: titleTerms.hasOwnProperty(word) ? 
titleTerms[word] : undefined, + score: Scorer.title, + }, ]; // add support for partial matches if (word.length > 2) { @@ -555,7 +605,8 @@ const Search = { // create the mapping files.forEach((file) => { if (!fileMap.has(file)) fileMap.set(file, [word]); - else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + else if (fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); }); }); @@ -566,11 +617,11 @@ const Search = { // as search terms with length < 3 are discarded const filteredTermCount = [...searchTerms].filter( - (term) => term.length > 2 + (term) => term.length > 2, ).length; if ( - wordList.length !== searchTerms.size && - wordList.length !== filteredTermCount + wordList.length !== searchTerms.size + && wordList.length !== filteredTermCount ) continue; @@ -578,10 +629,10 @@ const Search = { if ( [...excludedTerms].some( (term) => - terms[term] === file || - titleTerms[term] === file || - (terms[term] || []).includes(file) || - (titleTerms[term] || []).includes(file) + terms[term] === file + || titleTerms[term] === file + || (terms[term] || []).includes(file) + || (titleTerms[term] || []).includes(file), ) ) break; @@ -623,7 +674,8 @@ const Search = { let summary = document.createElement("p"); summary.classList.add("context"); - summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + summary.textContent = + top + text.substr(startWithContext, 240).trim() + tail; return summary; }, diff --git a/sphinx/themes/basic/static/sphinx_highlight.js b/sphinx/themes/basic/static/sphinx_highlight.js index 8a96c69a194..ce735d52ee4 100644 --- a/sphinx/themes/basic/static/sphinx_highlight.js +++ b/sphinx/themes/basic/static/sphinx_highlight.js @@ -1,7 +1,7 @@ /* Highlighting utilities for Sphinx HTML documentation. 
*/ "use strict"; -const SPHINX_HIGHLIGHT_ENABLED = true +const SPHINX_HIGHLIGHT_ENABLED = true; /** * highlight a given string on a node by wrapping it in @@ -13,9 +13,9 @@ const _highlight = (node, addItems, text, className) => { const parent = node.parentNode; const pos = val.toLowerCase().indexOf(text); if ( - pos >= 0 && - !parent.classList.contains(className) && - !parent.classList.contains("nohighlight") + pos >= 0 + && !parent.classList.contains(className) + && !parent.classList.contains("nohighlight") ) { let span; @@ -30,13 +30,7 @@ const _highlight = (node, addItems, text, className) => { span.appendChild(document.createTextNode(val.substr(pos, text.length))); const rest = document.createTextNode(val.substr(pos + text.length)); - parent.insertBefore( - span, - parent.insertBefore( - rest, - node.nextSibling - ) - ); + parent.insertBefore(span, parent.insertBefore(rest, node.nextSibling)); node.nodeValue = val.substr(0, pos); /* There may be more occurrences of search term in this node. So call this * function recursively on the remaining fragment. @@ -46,7 +40,7 @@ const _highlight = (node, addItems, text, className) => { if (isInSVG) { const rect = document.createElementNS( "http://www.w3.org/2000/svg", - "rect" + "rect", ); const bbox = parent.getBBox(); rect.x.baseVal.value = bbox.x; @@ -65,7 +59,7 @@ const _highlightText = (thisNode, text, className) => { let addItems = []; _highlight(thisNode, addItems, text, className); addItems.forEach((obj) => - obj.parent.insertAdjacentElement("beforebegin", obj.target) + obj.parent.insertAdjacentElement("beforebegin", obj.target), ); }; @@ -73,25 +67,27 @@ const _highlightText = (thisNode, text, className) => { * Small JavaScript module for the documentation. 
*/ const SphinxHighlight = { - /** * highlight the search words provided in localstorage in the text */ highlightSearchWords: () => { - if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight // get and clear terms from localstorage const url = new URL(window.location); const highlight = - localStorage.getItem("sphinx_highlight_terms") - || url.searchParams.get("highlight") - || ""; - localStorage.removeItem("sphinx_highlight_terms") + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms"); url.searchParams.delete("highlight"); window.history.replaceState({}, "", url); // get individual terms from highlight string - const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + const terms = highlight + .toLowerCase() + .split(/\s+/) + .filter((x) => x); if (terms.length === 0) return; // nothing to do // There should never be more than one element matching "div.body" @@ -107,11 +103,11 @@ const SphinxHighlight = { document .createRange() .createContextualFragment( - '" - ) + '", + ), ); }, @@ -125,7 +121,7 @@ const SphinxHighlight = { document .querySelectorAll("span.highlighted") .forEach((el) => el.classList.remove("highlighted")); - localStorage.removeItem("sphinx_highlight_terms") + localStorage.removeItem("sphinx_highlight_terms"); }, initEscapeListener: () => { @@ -134,10 +130,15 @@ const SphinxHighlight = { document.addEventListener("keydown", (event) => { // bail for input elements - if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) + return; // bail with special keys - if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; - if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + if (event.shiftKey || event.altKey || event.ctrlKey || 
event.metaKey) + return; + if ( + DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + && event.key === "Escape" + ) { SphinxHighlight.hideSearchWords(); event.preventDefault(); } diff --git a/sphinx/themes/scrolls/static/theme_extras.js b/sphinx/themes/scrolls/static/theme_extras.js index df2be407339..84cc1509808 100644 --- a/sphinx/themes/scrolls/static/theme_extras.js +++ b/sphinx/themes/scrolls/static/theme_extras.js @@ -1,12 +1,12 @@ const initialiseThemeExtras = () => { - const toc = document.getElementById("toc") - toc.style.display = "" - const items = toc.getElementsByTagName("ul")[0] - items.style.display = "none" - toc.getElementsByTagName("h3").addEventListener("click", () => { - if (items.style.display !== "none") toc.classList.remove("expandedtoc") - else toc.classList.add("expandedtoc"); - }) -} -if (document.readyState !== "loading") initialiseThemeExtras() -else document.addEventListener("DOMContentLoaded", initialiseThemeExtras) + const toc = document.getElementById("toc"); + toc.style.display = ""; + const items = toc.getElementsByTagName("ul")[0]; + items.style.display = "none"; + toc.getElementsByTagName("h3").addEventListener("click", () => { + if (items.style.display !== "none") toc.classList.remove("expandedtoc"); + else toc.classList.add("expandedtoc"); + }); +}; +if (document.readyState !== "loading") initialiseThemeExtras(); +else document.addEventListener("DOMContentLoaded", initialiseThemeExtras); diff --git a/tests/js/jasmine-browser.mjs b/tests/js/jasmine-browser.mjs index b84217fd8c5..f11c04b95b7 100644 --- a/tests/js/jasmine-browser.mjs +++ b/tests/js/jasmine-browser.mjs @@ -1,28 +1,26 @@ export default { srcDir: ".", srcFiles: [ - 'sphinx/themes/basic/static/doctools.js', - 'sphinx/themes/basic/static/searchtools.js', - 'sphinx/themes/basic/static/sphinx_highlight.js', - 'tests/js/fixtures/**/*.js', - 'tests/js/documentation_options.js', - 'tests/js/language_data.js', + "sphinx/themes/basic/static/doctools.js", + 
"sphinx/themes/basic/static/searchtools.js", + "sphinx/themes/basic/static/sphinx_highlight.js", + "tests/js/fixtures/**/*.js", + "tests/js/documentation_options.js", + "tests/js/language_data.js", ], specDir: "tests/js", - specFiles: [ - '**/*.spec.js', - ], + specFiles: ["**/*.spec.js"], helpers: [], env: { stopSpecOnExpectationFailure: false, stopOnSpecFailure: false, - random: true + random: true, }, listenAddress: "127.0.0.1", hostname: "127.0.0.1", browser: { - name: "headlessFirefox" - } + name: "headlessFirefox", + }, }; diff --git a/tests/js/language_data.js b/tests/js/language_data.js index 47c81f4a2a5..c98e91ff6e2 100644 --- a/tests/js/language_data.js +++ b/tests/js/language_data.js @@ -4,17 +4,15 @@ */ const stopwords = new Set([]); -window.stopwords = stopwords; // Export to global scope - +window.stopwords = stopwords; // Export to global scope /* Non-minified versions are copied as separate JavaScript files, if available */ /** * Dummy stemmer for languages without stemming rules. 
*/ -var Stemmer = function() { - this.stemWord = function(w) { +var Stemmer = function () { + this.stemWord = function (w) { return w; - } -} - + }; +}; diff --git a/tests/js/searchtools.spec.js b/tests/js/searchtools.spec.js index 809fd19d0f4..d00689c907c 100644 --- a/tests/js/searchtools.spec.js +++ b/tests/js/searchtools.spec.js @@ -1,10 +1,9 @@ -describe('Basic html theme search', function() { - +describe("Basic html theme search", function () { function loadFixture(name) { - req = new XMLHttpRequest(); - req.open("GET", `__src__/tests/js/fixtures/${name}`, false); - req.send(null); - return req.responseText; + req = new XMLHttpRequest(); + req.open("GET", `__src__/tests/js/fixtures/${name}`, false); + req.send(null); + return req.responseText; } function checkRanking(expectedRanking, results) { @@ -16,7 +15,11 @@ describe('Basic html theme search', function() { let [expectedPage, expectedTitle, expectedTarget] = nextExpected; let [page, title, target] = result; - if (page == expectedPage && title == expectedTitle && target == expectedTarget) { + if ( + page == expectedPage + && title == expectedTitle + && target == expectedTarget + ) { [nextExpected, ...remainingItems] = remainingItems; } } @@ -25,13 +28,14 @@ describe('Basic html theme search', function() { expect(nextExpected).toEqual(undefined); } - describe('terms search', function() { - - it('should find "C++" when in index', function() { + describe("terms search", function () { + it('should find "C++" when in index', function () { eval(loadFixture("cpp/searchindex.js")); - [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('C++'); + [_searchQuery, searchterms, excluded, ..._remainingItems] = + Search._parseQuery("C++"); + // prettier-ignore hits = [[ "index", "<no title>", @@ -44,10 +48,12 @@ describe('Basic html theme search', function() { expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits); }); - it('should be able to search for multiple terms', function() 
{ + it("should be able to search for multiple terms", function () { eval(loadFixture("multiterm/searchindex.js")); - [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('main page'); + [_searchQuery, searchterms, excluded, ..._remainingItems] = + Search._parseQuery("main page"); + // prettier-ignore hits = [[ 'index', 'Main Page', @@ -60,11 +66,13 @@ describe('Basic html theme search', function() { expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits); }); - it('should partially-match "sphinx" when in title index', function() { + it('should partially-match "sphinx" when in title index', function () { eval(loadFixture("partial/searchindex.js")); - [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('sphinx'); + [_searchQuery, searchterms, excluded, ..._remainingItems] = + Search._parseQuery("sphinx"); + // prettier-ignore hits = [[ "index", "sphinx_utils module", @@ -77,13 +85,15 @@ describe('Basic html theme search', function() { expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits); }); - it('should partially-match within "possible" when in term index', function() { + it('should partially-match within "possible" when in term index', function () { eval(loadFixture("partial/searchindex.js")); - [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('ossibl'); + [_searchQuery, searchterms, excluded, ..._remainingItems] = + Search._parseQuery("ossibl"); terms = Search._index.terms; titleterms = Search._index.titleterms; + // prettier-ignore hits = [[ "index", "sphinx_utils module", @@ -93,18 +103,19 @@ describe('Basic html theme search', function() { "index.rst", "text" ]]; - expect(Search.performTermsSearch(searchterms, excluded, terms, titleterms)).toEqual(hits); + expect( + Search.performTermsSearch(searchterms, excluded, terms, titleterms), + ).toEqual(hits); }); - }); - describe('aggregation of search results', function() { - - it('should combine 
document title and document term matches', function() { + describe("aggregation of search results", function () { + it("should combine document title and document term matches", function () { eval(loadFixture("multiterm/searchindex.js")); - searchParameters = Search._parseQuery('main page'); + searchParameters = Search._parseQuery("main page"); + // prettier-ignore hits = [ [ 'index', @@ -118,11 +129,9 @@ describe('Basic html theme search', function() { ]; expect(Search._performSearch(...searchParameters)).toEqual(hits); }); - }); - describe('search result ranking', function() { - + describe("search result ranking", function () { /* * These tests should not proscribe precise expected ordering of search * results; instead each test case should describe a single relevance rule @@ -137,95 +146,96 @@ describe('Basic html theme search', function() { * [1] - https://github.com/sphinx-doc/sphinx.git/ */ - it('should score a code module match above a page-title match', function() { + it("should score a code module match above a page-title match", function () { eval(loadFixture("titles/searchindex.js")); + // prettier-ignore expectedRanking = [ ['index', 'relevance', '#module-relevance'], /* py:module documentation */ ['relevance', 'Relevance', ''], /* main title */ ]; - searchParameters = Search._parseQuery('relevance'); + searchParameters = Search._parseQuery("relevance"); results = Search._performSearch(...searchParameters); checkRanking(expectedRanking, results); }); - it('should score a main-title match above an object member match', function() { + it("should score a main-title match above an object member match", function () { eval(loadFixture("titles/searchindex.js")); + // prettier-ignore expectedRanking = [ ['relevance', 'Relevance', ''], /* main title */ ['index', 'relevance.Example.relevance', '#relevance.Example.relevance'], /* py:class attribute */ ]; - searchParameters = Search._parseQuery('relevance'); + searchParameters = Search._parseQuery("relevance"); 
results = Search._performSearch(...searchParameters); checkRanking(expectedRanking, results); }); - it('should score a title match above a standard index entry match', function() { + it("should score a title match above a standard index entry match", function () { eval(loadFixture("titles/searchindex.js")); + // prettier-ignore expectedRanking = [ ['relevance', 'Relevance', ''], /* title */ ['index', 'Main Page', '#index-1'], /* index entry */ ]; - searchParameters = Search._parseQuery('relevance'); + searchParameters = Search._parseQuery("relevance"); results = Search._performSearch(...searchParameters); checkRanking(expectedRanking, results); }); - it('should score a priority index entry match above a title match', function() { + it("should score a priority index entry match above a title match", function () { eval(loadFixture("titles/searchindex.js")); + // prettier-ignore expectedRanking = [ ['index', 'Main Page', '#index-0'], /* index entry */ ['index', 'Main Page > Result Scoring', '#result-scoring'], /* title */ ]; - searchParameters = Search._parseQuery('scoring'); + searchParameters = Search._parseQuery("scoring"); results = Search._performSearch(...searchParameters); checkRanking(expectedRanking, results); }); - it('should score a main-title match above a subheading-title match', function() { + it("should score a main-title match above a subheading-title match", function () { eval(loadFixture("titles/searchindex.js")); + // prettier-ignore expectedRanking = [ ['relevance', 'Relevance', ''], /* main title */ ['index', 'Main Page > Relevance', '#relevance'], /* subsection heading title */ ]; - searchParameters = Search._parseQuery('relevance'); + searchParameters = Search._parseQuery("relevance"); results = Search._performSearch(...searchParameters); checkRanking(expectedRanking, results); }); - }); - describe('can handle edge-case search queries', function() { - - it('does not find the javascript prototype property in unrelated documents', function() { + 
describe("can handle edge-case search queries", function () { + it("does not find the javascript prototype property in unrelated documents", function () { eval(loadFixture("partial/searchindex.js")); - searchParameters = Search._parseQuery('__proto__'); + searchParameters = Search._parseQuery("__proto__"); + // prettier-ignore hits = []; expect(Search._performSearch(...searchParameters)).toEqual(hits); }); - }); - }); -describe("htmlToText", function() { - +describe("htmlToText", function () { const testHTML = ` @@ -257,44 +267,47 @@ describe("htmlToText", function() { `; it("basic case", () => { - expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual([ - 'Getting', 'Started', 'Some', 'text', - 'Other', 'Section', 'Other', 'text', - 'Yet', 'Another', 'Section', 'More', 'text' - ]); + expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual( + /* prettier-ignore */ [ + "Getting", "Started", "Some", "text", + "Other", "Section", "Other", "text", + "Yet", "Another", "Section", "More", "text" + ], + ); }); it("will start reading from the anchor", () => { - expect(Search.htmlToText(testHTML, '#other-section').trim().split(/\s+/)).toEqual(['Other', 'Section', 'Other', 'text']); + expect( + Search.htmlToText(testHTML, "#other-section").trim().split(/\s+/), + ).toEqual(["Other", "Section", "Other", "text"]); }); }); // Regression test for https://github.com/sphinx-doc/sphinx/issues/3150 -describe('splitQuery regression tests', () => { - - it('can split English words', () => { - const parts = splitQuery(' Hello World ') - expect(parts).toEqual(['Hello', 'World']) - }) - - it('can split special characters', () => { - const parts = splitQuery('Pin-Code') - expect(parts).toEqual(['Pin', 'Code']) - }) - - it('can split Chinese characters', () => { - const parts = splitQuery('Hello from 中国 上海') - expect(parts).toEqual(['Hello', 'from', '中国', '上海']) - }) - - it('can split Emoji (surrogate pair) characters. 
It should keep emojis.', () => { - const parts = splitQuery('😁😁') - expect(parts).toEqual(['😁😁']) - }) - - it('can split umlauts. It should keep umlauts.', () => { - const parts = splitQuery('Löschen Prüfung Abändern ærlig spørsmål') - expect(parts).toEqual(['Löschen', 'Prüfung', 'Abändern', 'ærlig', 'spørsmål']) - }) - -}) +describe("splitQuery regression tests", () => { + it("can split English words", () => { + const parts = splitQuery(" Hello World "); + expect(parts).toEqual(["Hello", "World"]); + }); + + it("can split special characters", () => { + const parts = splitQuery("Pin-Code"); + expect(parts).toEqual(["Pin", "Code"]); + }); + + it("can split Chinese characters", () => { + const parts = splitQuery("Hello from 中国 上海"); + expect(parts).toEqual(["Hello", "from", "中国", "上海"]); + }); + + it("can split Emoji (surrogate pair) characters. It should keep emojis.", () => { + const parts = splitQuery("😁😁"); + expect(parts).toEqual(["😁😁"]); + }); + + it("can split umlauts. It should keep umlauts.", () => { + const parts = splitQuery("Löschen Prüfung Abändern ærlig spørsmål"); + // prettier-ignore + expect(parts).toEqual(["Löschen", "Prüfung", "Abändern", "ærlig", "spørsmål"]) + }); +}); diff --git a/tests/js/sphinx_highlight.spec.js b/tests/js/sphinx_highlight.spec.js index 1f52eabb96f..4d57d867745 100644 --- a/tests/js/sphinx_highlight.spec.js +++ b/tests/js/sphinx_highlight.spec.js @@ -1,30 +1,33 @@ -describe('highlightText', function() { +describe("highlightText", function () { + const cyrillicTerm = "шеллы"; + const umlautTerm = "gänsefüßchen"; - const cyrillicTerm = 'шеллы'; - const umlautTerm = 'gänsefüßchen'; - - it('should highlight text incl. special characters correctly in HTML', function() { + it("should highlight text incl. 
special characters correctly in HTML", function () { const highlightTestSpan = new DOMParser().parseFromString( - 'This is the шеллы and Gänsefüßchen test!', 'text/html').body.firstChild - _highlightText(highlightTestSpan, cyrillicTerm, 'highlighted'); - _highlightText(highlightTestSpan, umlautTerm, 'highlighted'); + "This is the шеллы and Gänsefüßchen test!", + "text/html", + ).body.firstChild; + _highlightText(highlightTestSpan, cyrillicTerm, "highlighted"); + _highlightText(highlightTestSpan, umlautTerm, "highlighted"); const expectedHtmlString = - 'This is the шеллы and ' + - 'Gänsefüßchen test!'; + 'This is the шеллы and ' + + 'Gänsefüßchen test!'; expect(highlightTestSpan.innerHTML).toEqual(expectedHtmlString); }); - it('should highlight text incl. special characters correctly in SVG', function() { + it("should highlight text incl. special characters correctly in SVG", function () { const highlightTestSvg = new DOMParser().parseFromString( - '' + - '' + - '' + - 'This is the шеллы and Gänsefüßchen test!' + - '' + - '' + - '', 'text/html').body.firstChild - _highlightText(highlightTestSvg, cyrillicTerm, 'highlighted'); - _highlightText(highlightTestSvg, umlautTerm, 'highlighted'); + '' + + '' + + '' + + "This is the шеллы and Gänsefüßchen test!" 
+ + "" + + "" + + "", + "text/html", + ).body.firstChild; + _highlightText(highlightTestSvg, cyrillicTerm, "highlighted"); + _highlightText(highlightTestSvg, umlautTerm, "highlighted"); /* Note wild cards and ``toMatch``; allowing for some variability seems to be necessary, even between different FF versions */ const expectedSvgString = @@ -32,8 +35,9 @@ describe('highlightText', function() { + '' + '' + 'This is the шеллы and ' - + 'Gänsefüßchen test!'; - expect(new XMLSerializer().serializeToString(highlightTestSvg.firstChild)).toMatch(new RegExp(expectedSvgString)); + + "Gänsefüßchen test!"; + expect( + new XMLSerializer().serializeToString(highlightTestSvg.firstChild), + ).toMatch(new RegExp(expectedSvgString)); }); - }); diff --git a/tox.ini b/tox.ini index 23b239c7ffc..87b9d1b6316 100644 --- a/tox.ini +++ b/tox.ini @@ -84,3 +84,9 @@ dependency_groups = types commands = mypy {posargs} + +[testenv:prettier] +description = + Run the Prettier JavaScript formatter. +commands = + npx prettier@3.5 --write "sphinx/themes/**/*.js" "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js" "tests/js/**/*.{js,mjs}" "!tests/js/fixtures/**" From cffaf3d103ebfb455fd50df0a4ac8b850f95242f Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Sat, 24 May 2025 08:44:36 +0100 Subject: [PATCH 086/435] Remove unused fixture from ``test_config_pickle_protocol`` (#13590) --- tests/test_config/test_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index 5e68b4a9657..fc1ba4c7321 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -143,7 +143,7 @@ def test_config_not_found(tmp_path): @pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL))) -def test_config_pickle_protocol(tmp_path, protocol: int): +def test_config_pickle_protocol(protocol: int): config = Config() pickled_config = pickle.loads(pickle.dumps(config, protocol)) From 
7957429f26587da8e3432e62cb12414771143c98 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 24 May 2025 08:45:05 +0100 Subject: [PATCH 087/435] Add initial Pyrefly configuration file (#13579) --- pyproject.toml | 1 + pyrefly.toml | 27 +++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 pyrefly.toml diff --git a/pyproject.toml b/pyproject.toml index c16f5f48f83..16f3aaf7eb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -158,6 +158,7 @@ translations = [ ] types = [ "mypy==1.15.0", + "pyrefly", "pyright==1.1.400", { include-group = "type-stubs" }, ] diff --git a/pyrefly.toml b/pyrefly.toml new file mode 100644 index 00000000000..88ccae4d84c --- /dev/null +++ b/pyrefly.toml @@ -0,0 +1,27 @@ +# Configuration file for Pyrefly_. +# n.b. Pyrefly is early in development. +# Sphinx's current primary/reference type-checker is mypy. +# +# .. _Pyrefly: https://pyrefly.org/en/docs/configuration/ + +project_includes = [ + "doc/conf.py", + "doc/development/tutorials/examples/autodoc_intenum.py", + "doc/development/tutorials/examples/helloworld.py", + "sphinx", + "tests", + "utils", +] +project_excludes = [ + "**/tests/roots*", +] +python_version = "3.11" +replace_imports_with_any = [ + "imagesize", + "pyximport", + "snowballstemmer", +] + +# https://pyrefly.org/en/docs/error-kinds/ +[errors] +implicitly-defined-attribute = false # many false positives From a1b944488c70125b286dc1fc959f9a5c630d5c4f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 24 May 2025 09:32:59 +0100 Subject: [PATCH 088/435] Bump Ruff to 0.11.11 (#13589) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 16f3aaf7eb5..1e3b7158579 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.10", + "ruff==0.11.11", 
"mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.10", + "ruff==0.11.11", "sphinx-lint>=0.9", ] package = [ From 1f2821e20492cc4eefbe7157ddb82dae0b9db2a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B?= <2589111+jfbu@users.noreply.github.com> Date: Thu, 24 Apr 2025 11:54:42 +0200 Subject: [PATCH 089/435] LaTeX: add support for fontawesome6 package --- CHANGES.rst | 2 + doc/latex.rst | 48 ++++--- sphinx/texinputs/sphinx.sty | 148 ++++++++++++-------- sphinx/texinputs/sphinxlatexadmonitions.sty | 5 +- 4 files changed, 116 insertions(+), 87 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 57e8e0efdf9..48d9e689f35 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -20,6 +20,8 @@ Features added ``linkcheck_allowed_redirects = {}``. Patch by Adam Turner. * #13497: Support C domain objects in the table of contents. +* #13500: LaTeX: add support for ``fontawesome6`` package. + Patch by Jean-François B. * #13535: html search: Update to the latest version of Snowball (v3.0.1). Patch by Adam Turner. * #13704: autodoc: Detect :py:func:`typing_extensions.overload ` diff --git a/doc/latex.rst b/doc/latex.rst index 80762b1c2c1..bfc4de73938 100644 --- a/doc/latex.rst +++ b/doc/latex.rst @@ -1006,18 +1006,20 @@ The color used in the above example is available from having passed the ``iconpackage`` - The name of the LaTeX package used for icons in the admonition titles. It - defaults to ``fontawesome5`` or to fall-back ``fontawesome``. In case - neither one is available the option value will automatically default to - ``none``, which means that no attempt at loading a package is done. - Independently of this setting, arbitrary LaTeX code can be associated to - each admonition type via ``div._icon-title`` keys which are - described in the :ref:`additionalcss` section. 
If these keys are not - used, Sphinx will either apply its default choices of icons (if - ``fontawesome{5,}`` is available) or not draw the icon at all. Notice that - if fall-back ``fontawesome`` is used the common icon for :dudir:`caution` - and :dudir:`danger` will default to "bolt" not "radiation", which is only - found in ``fontawesome5``. + The name of the LaTeX package used for rendering icons in the admonition + titles. Its default is set dynamically to either ``fontawesome6``, + ``fontawesome5``, or ``fontawesome``, or ``none``, depending on whether + packages with those names exist in the used LaTeX installation. The LaTeX + code will use ``\faIcon`` command if with ``fontawesome6/fontawesome5``, + and ``\faicon`` if with ``fontawesome``. In the latter case the icon used + both for :dudir:`caution` and :dudir:`danger` will default to "bolt" not + "radiation", which is only found in ``fontawesome6`` and ``fontawesome5``. + If no "Font Awesome" related package is found (or if the option is set + forcefully to ``none``) the icons are silently dropped. User can set this + option to some specific package and must configure the + ``div.note_title-icon`` and similar keys to use then that LaTeX package + interface (see the :ref:`additionalcss` section for these extra + ``'sphinxsetup'`` keys). .. versionadded:: 7.4.0 @@ -1410,17 +1412,17 @@ The next keys, for admonitions, :dudir:`topic`, contents_, and (it applies only to the icon, not to the title of the admonition). - ``div._title-icon``: the LaTeX code responsible for producing the - icon. For example, the default for :dudir:`note` is - ``div.note_title-icon=\faIcon{info-circle}``. This uses a command from the - LaTeX ``fontawesome5`` package, which is loaded automatically if available. 
- - If neither ``fontawesome5`` nor fall-back ``fontawesome`` (for which the - associated command is :code-tex:`\\faicon`, not :code-tex:`\\faIcon`) are - found, or if the ``iconpackage`` key of :ref:`'sphinxsetup' - ` is set to load some other user-chosen package, or no - package at all, all the ``title-icons`` default to empty LaTeX code. It is - up to user to employ this interface to inject the icon (or anything else) - into the PDF output. + icon. If you want to modify the icons used by Sphinx, employ in these keys + the ``\faIcon`` LaTeX command (assuming either ``fontawesome6`` or + ``fontawesome5`` LaTeX package is available on your system). For example + the default for :dudir:`note` is + ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5`` and + ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome6`` (which is + used automatically if available). If your system only provides the + ``fontawesome`` package (automatically detected) use its command ``\faicon`` + rather in order to modify the choice of icons. The ``iconpackage`` key can + be used to use some other package providing icons, use then the commands + suitable to that package as values of the ``div._title-icon`` keys. .. note:: diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty index 8837485c5f7..7e06eff7de8 100644 --- a/sphinx/texinputs/sphinx.sty +++ b/sphinx/texinputs/sphinx.sty @@ -9,7 +9,7 @@ % by the Sphinx LaTeX writer. 
\NeedsTeXFormat{LaTeX2e}[1995/12/01] -\ProvidesPackage{sphinx}[2024/11/23 v8.2.0 Sphinx LaTeX package (sphinx-doc)] +\ProvidesPackage{sphinx}[2025/04/24 v8.3.0 Sphinx LaTeX package (sphinx-doc)] % provides \ltx@ifundefined % (many packages load ltxcmds: graphicx does for pdftex and lualatex but @@ -67,7 +67,7 @@ Footnote rendering may have had problems, due to extra package or document class; check latex log for instructions}% \@namedef{sphinx_buildwarning_badiconpackage}{% - You have set iconpackage=\spx@opt@iconpackage, but this LaTeX package + You have set iconpackage=\spx@usr@iconpackage, but this LaTeX package is not found}% %% OPTION HANDLING @@ -672,7 +672,7 @@ % defaults for them remain not to have specific colour. % % 7.4.0 adds keys for admonition titles: for background and foreground colors, -% and for icons (whose defaults are picked from Free Fontawesome 5). +% and for icons. \def\spx@tempa#1{% \expandafter\spx@tempb \csname if#1withshadowcolor\expandafter\endcsname @@ -869,80 +869,106 @@ } % 7.4.0 Support for icons in admonition titles -% We try to -% - get Sphinx PDF builds to process fine in absence of fontawesome5 -% - use fontawesome5 if present, but not if user prefers another package -% - provide an interface for using other LaTeX code for icons -% - provide an interface for using some other package than fontawesome5 -% Indeed we can't load fontawesome5 unconditionally even if available, -% as it proves incompatible with fontawesome package. -% We thus must delay its loading. -\IfFileExists{fontawesome5.sty}{% - \DeclareStringOption[fontawesome5]{iconpackage}% +% +% We let Sphinx use in order of priority: some user-specifid package, +% fontawesome6 (since 8.3.0), fontawesome5, fontawesome, or nothing (and then +% not draw any icon). To allow a user-specified package, an extra interface +% is provided for specifying the icon-drawing LaTeX code. 
+% +% We can't load fontawesome6 (or 5) unconditionally even if available, as it +% is incompatible with fontawesome package which may be preferred by user. We +% thus must delay loading the package to at begin document, and for now can +% only set the default value of iconpackage key.. +\IfFileExists{fontawesome6.sty}{% + \DeclareStringOption[fontawesome6]{iconpackage}% }% {% + \IfFileExists{fontawesome5.sty}{% + \DeclareStringOption[fontawesome5]{iconpackage}% + }% + {% \IfFileExists{fontawesome.sty} {\DeclareStringOption[fontawesome]{iconpackage}} {\DeclareStringOption[none]{iconpackage}}% + }% }% -\newcommand\spx@faIcon[2][]{}% -% The above \spx@faIcon which gobbles one mandatory and one optional -% argument is put into use only if both fontawesome5 and fontawesome -% LaTeX packages are not available, as part of the defaults for the -% div.*_title-icon keys (these keys can be redefined via the sphinxsetup -% interface). -% -\def\spxstring@fontawesome{fontawesome} -\def\spxstring@fontawesomev{fontawesome5} +% Unfortunately icon names differ between fontawesome, fontawesome5, and +% fontawesome6 LaTeX packages. At 8.3.0 we refactor the icon support code +% into something easier to maintain in future in case of a fontawesome7, +% etc... +% +% TODO: Handle spaces possibly caused by bad user usage of iconpackage key? +% This would need to check how LaTeX handle spaces in package name +% in \RequirePackage command. Things in this area may have changed +% recently (2025/04). \AtBeginDocument{% \ifx\spx@opt@iconpackage\spxstring@none \else \IfFileExists{\spx@opt@iconpackage.sty} - {\RequirePackage{\spx@opt@iconpackage}% - \ifx\spx@opt@iconpackage\spxstring@fontawesomev - \renewcommand\spx@faIcon{\faIcon}% - \else - \ifx\spx@opt@iconpackage\spxstring@fontawesome - \renewcommand\spx@faIcon[2][]{\faicon{##2}}% - % The \ifdefined's are a bit silly because we know that - % fontawesome.sty does not provide it, but perhaps - % there can be some new release of that package? 
- \ifdefined\faicon@lightbulb\else - \let\faicon@lightbulb\faLightbulbO - \fi - \ifdefined\faicon@radiation\else - \let\faicon@radiation\faBolt - \fi - \ifdefined\faicon@pen\else - \let\faicon@pen\faPencil - \fi - % if neither has been required, \spx@faIcon will simply swallow - % its argument and it is up to user - % to set the various div.*_title-icon keys appropriately. - \fi\fi % - }% + {\RequirePackage{\spx@opt@iconpackage}}% {% - \sphinxbuildwarning{badiconpackage}% - \PackageWarningNoLine{sphinx}{% - You have set iconpackage=\spx@opt@iconpackage\MessageBreak - But \spx@opt@iconpackage.sty is not found by LaTeX} + \let\spx@usr@iconpackage\spx@opt@iconpackage + \sphinxbuildwarning{badiconpackage}% + \PackageWarningNoLine{sphinx}{% + You have set iconpackage=\spx@usr@iconpackage\MessageBreak + But \spx@usr@iconpackage.sty is not found by LaTeX} + \let\spx@opt@iconpackage\spxstring@none }% \fi } +% Icon defaults depending on package used. +% Attention! no extra spaces for alignment when using \@namedef! 
+\@namedef{spx@fontawesome6@note}{\faIcon{circle-info}} +\@namedef{spx@fontawesome6@hint}{\faIcon[regular]{lightbulb}} +\@namedef{spx@fontawesome6@tip}{\faIcon[regular]{lightbulb}} +\@namedef{spx@fontawesome6@seealso}{\faIcon{share}} +\@namedef{spx@fontawesome6@todo}{\faIcon{pen}} +\@namedef{spx@fontawesome6@important}{\faIcon{circle-pause}} +\@namedef{spx@fontawesome6@caution}{\faIcon{radiation}} +\@namedef{spx@fontawesome6@warning}{\faIcon{triangle-exclamation}} +\@namedef{spx@fontawesome6@attention}{\faIcon{triangle-exclamation}} +\@namedef{spx@fontawesome6@danger}{\faIcon{radiation}} +\@namedef{spx@fontawesome6@error}{\faIcon{circle-xmark}} + +\@namedef{spx@fontawesome5@note}{\faIcon{info-circle}} +\@namedef{spx@fontawesome5@hint}{\faIcon[regular]{lightbulb}} +\@namedef{spx@fontawesome5@tip}{\faIcon[regular]{lightbulb}} +\@namedef{spx@fontawesome5@seealso}{\faIcon{share}} +\@namedef{spx@fontawesome5@todo}{\faIcon{pen}} +\@namedef{spx@fontawesome5@important}{\faIcon{pause-circle}} +\@namedef{spx@fontawesome5@caution}{\faIcon{radiation}} +\@namedef{spx@fontawesome5@warning}{\faIcon{exclamation-triangle}} +\@namedef{spx@fontawesome5@attention}{\faIcon{exclamation-triangle}} +\@namedef{spx@fontawesome5@danger}{\faIcon{radiation}} +\@namedef{spx@fontawesome5@error}{\faIcon{times-circle}} + +\def\spx@fontawesome@note {\faicon{info-circle}} +\def\spx@fontawesome@hint {\faicon{lightbulb-o}} +\def\spx@fontawesome@tip {\faicon{lightbulb-o}} +\def\spx@fontawesome@seealso {\faicon{share}} +\def\spx@fontawesome@todo {\faicon{pencil}} +\def\spx@fontawesome@important{\faicon{pause-circle}} +\def\spx@fontawesome@caution {\faicon{bolt}} +\def\spx@fontawesome@warning {\faicon{exclamation-triangle}} +\def\spx@fontawesome@attention{\faicon{exclamation-triangle}} +\def\spx@fontawesome@danger {\faicon{bolt}} +\def\spx@fontawesome@error {\faicon{times-circle}} + +% \spx@none@{note,hint,...} left undefined, the \@nameuse will be \relax 
+\def\spx@titleicon@default#1{\@nameuse{spx@\spx@opt@iconpackage @#1}} \setkeys{sphinx}{ -% Icon defaults. - div.note_title-icon = \spx@faIcon{info-circle}, - div.hint_title-icon = \spx@faIcon[regular]{lightbulb}, - div.tip_title-icon = \spx@faIcon[regular]{lightbulb}, - div.seealso_title-icon = \spx@faIcon{share}, - div.todo_title-icon = \spx@faIcon{pen}, - div.important_title-icon = \spx@faIcon{pause-circle}, - div.caution_title-icon = \spx@faIcon{radiation}, - div.warning_title-icon = \spx@faIcon{exclamation-triangle}, - div.attention_title-icon = \spx@faIcon{exclamation-triangle}, - div.danger_title-icon = \spx@faIcon{radiation}, - div.error_title-icon = \spx@faIcon{times-circle}, + div.note_title-icon = \spx@titleicon@default{note}, + div.hint_title-icon = \spx@titleicon@default{hint}, + div.tip_title-icon = \spx@titleicon@default{tip}, + div.seealso_title-icon = \spx@titleicon@default{seealso}, + div.todo_title-icon = \spx@titleicon@default{todo}, + div.important_title-icon = \spx@titleicon@default{important}, + div.caution_title-icon = \spx@titleicon@default{caution}, + div.warning_title-icon = \spx@titleicon@default{warning}, + div.attention_title-icon = \spx@titleicon@default{attention}, + div.danger_title-icon = \spx@titleicon@default{danger}, + div.error_title-icon = \spx@titleicon@default{error}, % MEMO: the new at 8.1.0 defaults for contents/topic/sidebar directives % use no icons, they use \sphinxdotitlerow which detects automatically % whether title-icon key has been set or not. 
diff --git a/sphinx/texinputs/sphinxlatexadmonitions.sty b/sphinx/texinputs/sphinxlatexadmonitions.sty index 0519903591b..627ee0d49ad 100644 --- a/sphinx/texinputs/sphinxlatexadmonitions.sty +++ b/sphinx/texinputs/sphinxlatexadmonitions.sty @@ -1,7 +1,7 @@ %% NOTICES AND ADMONITIONS % % change this info string if making any custom modification -\ProvidesPackage{sphinxlatexadmonitions}[2024/10/11 v8.1.1 admonitions] +\ProvidesPackage{sphinxlatexadmonitions}[2025/04/24 v8.3.0 admonitions] % Provides support for this output mark-up from Sphinx latex writer: % @@ -342,8 +342,7 @@ \textcolor{sphinx#1TtlFgColor}{% \@nameuse{sphinx#1TtlIcon}% % The next macro is located here for legacy reasons of earlier - % functioning of \spx@faIcon. When fontawesome{5,}.sty both - % are unavailable, it (formerly) gobbled this next macro. + % functioning of sphinx.sty now removed \spx@faIcon macro. % We leave it here now although it could be moved to after % the closing brace. \sphinxtitlerowaftericonspacecmd From 0d7ba3b2f4ebd4bb673e0c4c8b477f731f3f9131 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Sat, 24 May 2025 15:57:19 +0200 Subject: [PATCH 090/435] Add .auto/ to .gitignore (Emacs AUCTeX) --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 35fd23178f5..5a50535097e 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.so *.swp +.auto/ .dir-locals.el .cache/ .idea From 036db81dcdf0a172ece16b013d640cc7763f8faa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Sat, 24 May 2025 15:25:23 +0200 Subject: [PATCH 091/435] CI/LaTeX: run pdflatex twice when building documents --- tests/test_builders/test_build_latex.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py index f1c19a5ab7f..0d1c607462d 100644 --- 
a/tests/test_builders/test_build_latex.py +++ b/tests/test_builders/test_build_latex.py @@ -72,6 +72,17 @@ def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manu filename, ] subprocess.run(args, capture_output=True, check=True) + # Run a second time (if engine is pdflatex), to have a chance to + # detect problems caused on second LaTeX pass (for example, this + # is required for the TOC in PDF to show up, for internal + # hyperlinks to actually work). Of course, this increases + # duration of test, but also its usefulness. + # TODO: in theory the correct way is to run Latexmk with options + # as configured in the Makefile and in presence of latexmkrc + # or latexmkjarc and also sphinx.xdy and other xindy support. + # And two passes are not enough except for simplest documents. + if app.config.latex_engine == 'pdflatex': + subprocess.run(args, capture_output=True, check=True) except OSError as exc: # most likely the latex executable was not found raise pytest.skip.Exception from exc except CalledProcessError as exc: From 546170754f3f2f96c0d12176b2d2fb5688ca75ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B?= <2589111+jfbu@users.noreply.github.com> Date: Sun, 25 Aug 2024 12:09:42 +0200 Subject: [PATCH 092/435] LaTeX: render in PDF hyperlinks located inside titles Fix #12821 --- CHANGES.rst | 2 ++ sphinx/texinputs/sphinxlatexstyletext.sty | 8 ++++++-- sphinx/writers/latex.py | 2 +- tests/roots/test-root/markup.txt | 6 ++++++ 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 48d9e689f35..f575efab7c7 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -31,6 +31,8 @@ Features added Bugs fixed ---------- +* #12821: LaTeX: URLs/links in section titles should render in PDF. + Patch by Jean-François B. * #13369: Correctly parse and cross-reference unpacked type annotations. Patch by Alicia Garcia-Raboso. * #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`. 
diff --git a/sphinx/texinputs/sphinxlatexstyletext.sty b/sphinx/texinputs/sphinxlatexstyletext.sty index d083cd96a83..6c80ce64b43 100644 --- a/sphinx/texinputs/sphinxlatexstyletext.sty +++ b/sphinx/texinputs/sphinxlatexstyletext.sty @@ -1,7 +1,7 @@ %% TEXT STYLING % % change this info string if making any custom modification -\ProvidesPackage{sphinxlatexstyletext}[2024/07/28 v8.1.0 text styling] +\ProvidesPackage{sphinxlatexstyletext}[2025/05/24 v8.3.0 text styling] % 7.4.0 has moved all that is related to admonitions to sphinxlatexadmonitions.sty % 8.1.0 has moved topic/contents/sidebar to sphinxlatexshadowbox.sty @@ -57,7 +57,11 @@ % reduce hyperref "Token not allowed in a PDF string" warnings on PDF builds \AtBeginDocument{\pdfstringdefDisableCommands{% % all "protected" macros possibly ending up in section titles should be here -% TODO: examine if \sphinxhref, \sphinxurl, \sphinnolinkurl should be handled +% next four were added so that URLs and internal links in titles can be allowed + \let\sphinxurl \@firstofone + \let\sphinxnolinkurl\@firstofone + \let\sphinxhref \@secondoftwo + \def\hyperref[#1]#2{#2}% for PDF bookmark to ignore #1 \let\sphinxstyleemphasis \@firstofone \let\sphinxstyleliteralemphasis \@firstofone \let\sphinxstylestrong \@firstofone diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index a2a17855c18..5d9bb9bef9c 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -1962,7 +1962,7 @@ def visit_reference(self, node: Element) -> None: uri = node.get('refuri', '') if not uri and node.get('refid'): uri = '%' + self.curfilestack[-1] + '#' + node['refid'] - if self.in_title or not uri: + if not uri: self.context.append('') elif uri.startswith('#'): # references to labels in the same document diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt index 91f41946620..2e45ba33680 100644 --- a/tests/roots/test-root/markup.txt +++ b/tests/roots/test-root/markup.txt @@ -469,3 +469,9 @@ Smart quotes 
.. [#] Like footnotes. + +Link in a title: `Field lists `_ +--------------------------------------------------------------------------------------------------------------------- + +Again: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#field-lists +------------------------------------------------------------------------------------------ From 31e63d786aefcb54ba08fa6406a3579ec5ecdc8f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Jun 2025 16:49:24 +0100 Subject: [PATCH 093/435] Bump types-requests to 2.32.0.20250602 (#13605) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1e3b7158579..c676e699212 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ lint = [ "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", - "types-requests==2.32.0.20250515", # align with requests + "types-requests==2.32.0.20250602", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.400", "pytest>=8.0", @@ -169,7 +169,7 @@ type-stubs = [ "types-docutils==0.21.0.20250514", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", - "types-requests==2.32.0.20250515", + "types-requests==2.32.0.20250602", "types-urllib3==1.26.25.14", ] From 55092e794f756452d230320a06d231ad3fa60beb Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Mon, 2 Jun 2025 21:17:35 +0000 Subject: [PATCH 094/435] Tests: Undo patch for Python 3.14.0 alpha 7; no longer required for 3.14.0 beta 2 (#13606) --- tests/test_extensions/test_ext_autodoc_configs.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py index eb351442673..ab7539190e0 100644 --- 
a/tests/test_extensions/test_ext_autodoc_configs.py +++ b/tests/test_extensions/test_ext_autodoc_configs.py @@ -1348,11 +1348,6 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: # default options = {'members': None} actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) - attr2_typeinfo: tuple[str, ...] - if sys.version_info >= (3, 14, 0, 'alpha', 7): - attr2_typeinfo = () - else: - attr2_typeinfo = (' :type: int',) assert list(actual) == [ '', '.. py:module:: target.autodoc_type_aliases', @@ -1373,7 +1368,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: '', ' .. py:attribute:: Foo.attr2', ' :module: target.autodoc_type_aliases', - *attr2_typeinfo, + ' :type: int', '', ' docstring', '', @@ -1426,10 +1421,6 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: 'io.StringIO': 'my.module.StringIO', } actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options) - if sys.version_info >= (3, 14, 0, 'alpha', 7): - attr2_typeinfo = () - else: - attr2_typeinfo = (' :type: myint',) assert list(actual) == [ '', '.. py:module:: target.autodoc_type_aliases', @@ -1450,7 +1441,7 @@ def test_autodoc_type_aliases(app: SphinxTestApp) -> None: '', ' .. 
py:attribute:: Foo.attr2', ' :module: target.autodoc_type_aliases', - *attr2_typeinfo, + ' :type: myint', '', ' docstring', '', From 68d56109ff50dd81dd31d4a01e3dccbd006c50ee Mon Sep 17 00:00:00 2001 From: James Addison <55152140+jayaddison@users.noreply.github.com> Date: Mon, 2 Jun 2025 22:02:48 +0000 Subject: [PATCH 095/435] Tests: update LaTeX label test expectations from Docutils r10151 (#13610) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- tests/test_builders/test_build_latex.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py index 0d1c607462d..37e708a021e 100644 --- a/tests/test_builders/test_build_latex.py +++ b/tests/test_builders/test_build_latex.py @@ -12,6 +12,7 @@ from subprocess import CalledProcessError from typing import TYPE_CHECKING +import docutils import pygments import pytest @@ -1959,10 +1960,16 @@ def test_latex_labels(app: SphinxTestApp) -> None: result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') + # ref: docutils r10151 + if docutils.__version_info__[:2] < (0, 22): + figure_id, table_id = 'id1', 'id2' + else: + figure_id, table_id = 'id2', 'id3' + # figures assert ( r'\caption{labeled figure}' - r'\label{\detokenize{index:id1}}' + r'\label{\detokenize{index:' + figure_id + '}}' r'\label{\detokenize{index:figure2}}' r'\label{\detokenize{index:figure1}}' r'\end{figure}' @@ -1988,7 +1995,7 @@ def test_latex_labels(app: SphinxTestApp) -> None: # tables assert ( r'\sphinxcaption{table caption}' - r'\label{\detokenize{index:id2}}' + r'\label{\detokenize{index:' + table_id + '}}' r'\label{\detokenize{index:table2}}' r'\label{\detokenize{index:table1}}' ) in result From 03c2373c755281e5c3eab08f8a5e6e10e87abf0a Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Mon, 2 Jun 2025 23:23:57 +0100 Subject: [PATCH 096/435] Extract ``_is_typing()`` to 
module level --- sphinx/domains/python/__init__.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index fb030991464..914155cd31e 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -52,6 +52,8 @@ py_sig_re, ) +_TYPING_ALL = frozenset(typing.__all__) + logger = logging.getLogger(__name__) pairindextypes = { @@ -1076,13 +1078,6 @@ def builtin_resolver( app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: Element ) -> Element | None: """Do not emit nitpicky warnings for built-in types.""" - - def istyping(s: str) -> bool: - if s.startswith('typing.'): - s = s.split('.', 1)[1] - - return s in typing.__all__ - if node.get('refdomain') != 'py': return None elif node.get('reftype') in {'class', 'obj'} and node.get('reftarget') == 'None': @@ -1092,13 +1087,17 @@ def istyping(s: str) -> bool: if inspect.isclass(getattr(builtins, reftarget, None)): # built-in class return contnode - if istyping(reftarget): + if _is_typing(reftarget): # typing class return contnode return None +def _is_typing(s: str, /) -> bool: + return s.removeprefix('typing.') in _TYPING_ALL + + def setup(app: Sphinx) -> ExtensionMetadata: app.setup_extension('sphinx.directives') From 987ccb2a9706a344b4ebd347f841a172055e640b Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 3 Jun 2025 00:37:45 +0100 Subject: [PATCH 097/435] Prefer ``str.partition`` over ``str.split(..., maxsplit=1)`` --- sphinx/_cli/__init__.py | 2 +- sphinx/builders/latex/__init__.py | 6 +++--- sphinx/cmd/build.py | 4 ++-- sphinx/config.py | 2 +- sphinx/directives/__init__.py | 2 +- sphinx/domains/c/__init__.py | 2 +- sphinx/domains/cpp/__init__.py | 2 +- sphinx/domains/javascript.py | 2 +- sphinx/domains/python/__init__.py | 2 +- sphinx/domains/python/_object.py | 2 +- sphinx/domains/std/__init__.py | 4 ++--
sphinx/ext/autosummary/__init__.py | 2 +- sphinx/ext/doctest.py | 2 +- sphinx/ext/intersphinx/_resolve.py | 2 +- sphinx/ext/napoleon/docstring.py | 2 +- sphinx/roles.py | 26 ++++++++++++------------- sphinx/search/__init__.py | 2 +- sphinx/util/docfields.py | 2 +- sphinx/util/docstrings.py | 2 +- sphinx/util/i18n.py | 2 +- sphinx/util/images.py | 7 ++++++- sphinx/util/index_entries.py | 2 +- sphinx/util/inventory.py | 2 +- sphinx/util/osutil.py | 4 ++-- sphinx/util/requests.py | 2 +- sphinx/writers/texinfo.py | 4 ++-- tests/test_builders/test_build_latex.py | 6 +++--- tests/test_util/test_util_images.py | 5 +---- 28 files changed, 53 insertions(+), 51 deletions(-) diff --git a/sphinx/_cli/__init__.py b/sphinx/_cli/__init__.py index 87128b0a5a0..8c305442de3 100644 --- a/sphinx/_cli/__init__.py +++ b/sphinx/_cli/__init__.py @@ -64,7 +64,7 @@ def _load_subcommand_descriptions() -> Iterator[tuple[str, str]]: # log an error here, but don't fail the full enumeration print(f'Failed to load the description for {command}', file=sys.stderr) else: - yield command, description.split('\n\n', 1)[0] + yield command, description.partition('\n\n')[0] class _RootArgumentParser(argparse.ArgumentParser): diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py index 5aeafca8bfd..985620f2023 100644 --- a/sphinx/builders/latex/__init__.py +++ b/sphinx/builders/latex/__init__.py @@ -513,9 +513,9 @@ def write_message_catalog(self) -> None: formats = self.config.numfig_format context = { 'addtocaptions': r'\@iden', - 'figurename': formats.get('figure', '').split('%s', 1), - 'tablename': formats.get('table', '').split('%s', 1), - 'literalblockname': formats.get('code-block', '').split('%s', 1), + 'figurename': formats.get('figure', '').split('%s', maxsplit=1), + 'tablename': formats.get('table', '').split('%s', maxsplit=1), + 'literalblockname': formats.get('code-block', '').split('%s', maxsplit=1), } if self.context['babel'] or self.context['polyglossia']: diff 
--git a/sphinx/cmd/build.py b/sphinx/cmd/build.py index 11a70df0c6c..58f3ad26746 100644 --- a/sphinx/cmd/build.py +++ b/sphinx/cmd/build.py @@ -371,14 +371,14 @@ def _parse_confoverrides( val: Any for val in define: try: - key, val = val.split('=', 1) + key, _, val = val.partition('=') except ValueError: parser.error(__('-D option argument must be in the form name=value')) confoverrides[key] = val for val in htmldefine: try: - key, val = val.split('=') + key, _, val = val.partition('=') except ValueError: parser.error(__('-A option argument must be in the form name=value')) with contextlib.suppress(ValueError): diff --git a/sphinx/config.py b/sphinx/config.py index bedc69f2337..2498ada6c56 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -320,7 +320,7 @@ def __init__( for name in list(self._overrides.keys()): if '.' in name: - real_name, key = name.split('.', 1) + real_name, _, key = name.partition('.') raw_config.setdefault(real_name, {})[key] = self._overrides.pop(name) self.setup: _ExtensionSetupFunc | None = raw_config.get('setup') diff --git a/sphinx/directives/__init__.py b/sphinx/directives/__init__.py index b4fb7f76006..c442ea8e6c8 100644 --- a/sphinx/directives/__init__.py +++ b/sphinx/directives/__init__.py @@ -201,7 +201,7 @@ def run(self) -> list[Node]: * parse the content and handle doc fields in it """ if ':' in self.name: - self.domain, self.objtype = self.name.split(':', 1) + self.domain, _, self.objtype = self.name.partition(':') else: self.domain, self.objtype = '', self.name self.indexnode = addnodes.index(entries=[]) diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py index 7fa1822e4ac..56ce0d170f6 100644 --- a/sphinx/domains/c/__init__.py +++ b/sphinx/domains/c/__init__.py @@ -668,7 +668,7 @@ def run(self) -> list[Node]: The code is therefore based on the ObjectDescription version. 
""" if ':' in self.name: - self.domain, self.objtype = self.name.split(':', 1) + self.domain, _, self.objtype = self.name.partition(':') else: self.domain, self.objtype = '', self.name diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py index fc72e208791..554f4ebb17e 100644 --- a/sphinx/domains/cpp/__init__.py +++ b/sphinx/domains/cpp/__init__.py @@ -812,7 +812,7 @@ def run(self) -> list[Node]: The code is therefore based on the ObjectDescription version. """ if ':' in self.name: - self.domain, self.objtype = self.name.split(':', 1) + self.domain, _, self.objtype = self.name.partition(':') else: self.domain, self.objtype = '', self.name diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py index 51a93bcf802..eaa69094c78 100644 --- a/sphinx/domains/javascript.py +++ b/sphinx/domains/javascript.py @@ -70,7 +70,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str] """ sig = sig.strip() if '(' in sig and sig[-1:] == ')': - member, arglist = sig.split('(', 1) + member, _, arglist = sig.partition('(') member = member.strip() arglist = arglist[:-1].strip() else: diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index 914155cd31e..d70c232e725 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -671,7 +671,7 @@ def generate( entries = content.setdefault(modname[0].lower(), []) - package = modname.split('.', maxsplit=1)[0] + package = modname.partition('.')[0] if package != modname: # it's a submodule if prev_modname == package: diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py index fd4e62bbbe0..6cd19245aae 100644 --- a/sphinx/domains/python/_object.py +++ b/sphinx/domains/python/_object.py @@ -93,7 +93,7 @@ def make_xref( children = result.children result.clear() - shortname = target.split('.')[-1] + shortname = target.rpartition('.')[-1] textnode = innernode('', shortname) # type: ignore[call-arg] 
contnodes = [ pending_xref_condition('', '', textnode, condition='resolved'), diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py index e123ce85786..9397733c814 100644 --- a/sphinx/domains/std/__init__.py +++ b/sphinx/domains/std/__init__.py @@ -218,7 +218,7 @@ def run(self) -> list[Node]: ret.insert(0, inode) name = self.name if ':' in self.name: - _, name = self.name.split(':', 1) + name = self.name.partition(':')[-1] std = self.env.domains.standard_domain std.note_object(name, fullname, node_id, location=node) @@ -1235,7 +1235,7 @@ def _resolve_option_xref( if not docname: commands = [] while ws_re.search(target): - subcommand, target = ws_re.split(target, 1) + subcommand, target = ws_re.split(target, maxsplit=1) commands.append(subcommand) progname = '-'.join(commands) diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py index 733c936d8f0..fe7092c0a74 100644 --- a/sphinx/ext/autosummary/__init__.py +++ b/sphinx/ext/autosummary/__init__.py @@ -511,7 +511,7 @@ def append_row(*column_texts: str) -> None: def strip_arg_typehint(s: str) -> str: """Strip a type hint from argument definition.""" - return s.split(':')[0].strip() + return s.partition(':')[0].strip() def _cleanup_signature(s: str) -> str: diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py index 343534f10ce..65b9933785c 100644 --- a/sphinx/ext/doctest.py +++ b/sphinx/ext/doctest.py @@ -392,7 +392,7 @@ def get_filename_for_node(self, node: Node, docname: str) -> str: """ try: filename = relpath(node.source, self.env.srcdir) # type: ignore[arg-type] - return filename.rsplit(':docstring of ', maxsplit=1)[0] + return filename.partition(':docstring of ')[0] except Exception: return str(self.env.doc2path(docname, False)) diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py index 102c5d3ab07..2029a0ea971 100644 --- a/sphinx/ext/intersphinx/_resolve.py +++ b/sphinx/ext/intersphinx/_resolve.py @@ -493,7 +493,7 @@ def 
get_inventory_and_name_suffix(self, name: str) -> tuple[str | None, str]: assert name.startswith('external'), name suffix = name[9:] if name[8] == '+': - inv_name, suffix = suffix.split(':', 1) + inv_name, _, suffix = suffix.partition(':') return inv_name, suffix elif name[8] == ':': return None, suffix diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py index ea991f72301..26c7741ea60 100644 --- a/sphinx/ext/napoleon/docstring.py +++ b/sphinx/ext/napoleon/docstring.py @@ -1387,7 +1387,7 @@ def translate( if m and line[m.end() :].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[: m.end()], line[m.end() :] - rest = [line.split(':', 1)[1].strip()] + rest = [line.partition(':')[-1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): diff --git a/sphinx/roles.py b/sphinx/roles.py index 04469f45488..79ec70e90a3 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -103,7 +103,7 @@ def run(self) -> tuple[list[Node], list[system_message]]: self.refdomain, self.reftype = '', self.name self.classes = ['xref', self.reftype] else: - self.refdomain, self.reftype = self.name.split(':', 1) + self.refdomain, _, self.reftype = self.name.partition(':') self.classes = ['xref', self.refdomain, f'{self.refdomain}-{self.reftype}'] if self.disabled: @@ -234,9 +234,9 @@ def run(self) -> tuple[list[Node], list[system_message]]: return [index, target, reference], [] def build_uri(self) -> str: - ret = self.target.split('#', 1) - if len(ret) == 2: - return f'{CVE._BASE_URL}{ret[0]}#{ret[1]}' + ret = self.target.partition('#') + if ret[1]: + return f'{CVE._BASE_URL}{ret[0]}#{ret[2]}' return f'{CVE._BASE_URL}{ret[0]}' @@ -279,9 +279,9 @@ def run(self) -> tuple[list[Node], list[system_message]]: return [index, target, reference], [] def build_uri(self) -> str: - ret = self.target.split('#', 1) - if len(ret) == 2: - return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[1]}' + ret = self.target.partition('#') + if ret[1]: + 
return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[2]}' return f'{CWE._BASE_URL}{int(ret[0])}.html' @@ -323,9 +323,9 @@ def run(self) -> tuple[list[Node], list[system_message]]: def build_uri(self) -> str: base_url = self.inliner.document.settings.pep_base_url - ret = self.target.split('#', 1) - if len(ret) == 2: - return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[1]) + ret = self.target.partition('#') + if ret[1]: + return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[2]) else: return base_url + 'pep-%04d/' % int(ret[0]) @@ -361,9 +361,9 @@ def run(self) -> tuple[list[Node], list[system_message]]: def build_uri(self) -> str: base_url = self.inliner.document.settings.rfc_base_url - ret = self.target.split('#', 1) - if len(ret) == 2: - return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[1] + ret = self.target.partition('#') + if ret[1]: + return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[2] else: return base_url + self.inliner.rfc_url % int(ret[0]) diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py index 187e6a2f37f..b835b7b36db 100644 --- a/sphinx/search/__init__.py +++ b/sphinx/search/__init__.py @@ -300,7 +300,7 @@ def __init__( # fallback; try again with language-code if lang_class is None and '_' in lang: - lang_class = languages.get(lang.split('_')[0]) + lang_class = languages.get(lang.partition('_')[0]) if lang_class is None: self.lang: SearchLanguage = SearchEnglish(options) diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py index dcc24753862..ed04ec6ebd1 100644 --- a/sphinx/util/docfields.py +++ b/sphinx/util/docfields.py @@ -386,7 +386,7 @@ def _transform_step_1( field_body = cast('nodes.field_body', field[1]) try: # split into field type and argument - fieldtype_name, fieldarg = field_name.astext().split(None, 1) + fieldtype_name, fieldarg = field_name.astext().split(None, maxsplit=1) except ValueError: # maybe an argument-less field type? 
fieldtype_name, fieldarg = field_name.astext(), '' diff --git a/sphinx/util/docstrings.py b/sphinx/util/docstrings.py index 53e7620edc2..6f23096d92b 100644 --- a/sphinx/util/docstrings.py +++ b/sphinx/util/docstrings.py @@ -26,7 +26,7 @@ def separate_metadata(s: str | None) -> tuple[str | None, dict[str, str]]: else: matched = field_list_item_re.match(line) if matched and not in_other_element: - field_name = matched.group()[1:].split(':', 1)[0] + field_name = matched.group()[1:].partition(':')[0] if field_name.startswith('meta '): name = field_name[5:].strip() metadata[name] = line[matched.end() :].strip() diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py index 05542876fc3..dd1616a8f31 100644 --- a/sphinx/util/i18n.py +++ b/sphinx/util/i18n.py @@ -168,7 +168,7 @@ def docname_to_domain(docname: str, compaction: bool | str) -> str: if isinstance(compaction, str): return compaction if compaction: - return docname.split(SEP, 1)[0] + return docname.partition(SEP)[0] else: return docname diff --git a/sphinx/util/images.py b/sphinx/util/images.py index b43a0705d36..ca6cb66764b 100644 --- a/sphinx/util/images.py +++ b/sphinx/util/images.py @@ -90,12 +90,17 @@ def get_image_extension(mimetype: str) -> str | None: def parse_data_uri(uri: str) -> DataURI | None: if not uri.startswith('data:'): return None + uri = uri[5:] + + if ',' not in uri: + msg = 'malformed data URI' + raise ValueError(msg) # data:[][;charset=][;base64], mimetype = 'text/plain' charset = 'US-ASCII' - properties, data = uri[5:].split(',', 1) + properties, _, data = uri.partition(',') for prop in properties.split(';'): if prop == 'base64': pass # skip diff --git a/sphinx/util/index_entries.py b/sphinx/util/index_entries.py index 10046842976..36dba388146 100644 --- a/sphinx/util/index_entries.py +++ b/sphinx/util/index_entries.py @@ -20,7 +20,7 @@ def split_index_msg(entry_type: str, value: str) -> list[str]: def _split_into(n: int, type: str, value: str) -> list[str]: """Split an index entry into a 
given number of parts at semicolons.""" - parts = [x.strip() for x in value.split(';', n - 1)] + parts = [x.strip() for x in value.split(';', maxsplit=n - 1)] if len(list(filter(None, parts))) < n: msg = f'invalid {type} index entry {value!r}' raise ValueError(msg) diff --git a/sphinx/util/inventory.py b/sphinx/util/inventory.py index d37398a5562..6e4c648bd3f 100644 --- a/sphinx/util/inventory.py +++ b/sphinx/util/inventory.py @@ -75,7 +75,7 @@ def _loads_v1(cls, lines: Sequence[str], *, uri: str) -> _Inventory: projname = lines[0].rstrip()[11:] # Project name version = lines[1].rstrip()[11:] # Project version for line in lines[2:]: - name, item_type, location = line.rstrip().split(None, 2) + name, item_type, location = line.rstrip().split(None, maxsplit=2) location = posixpath.join(uri, location) # version 1 did not add anchors to the location if item_type == 'mod': diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py index 807db899af9..374c59ae904 100644 --- a/sphinx/util/osutil.py +++ b/sphinx/util/osutil.py @@ -47,8 +47,8 @@ def relative_uri(base: str, to: str) -> str: """Return a relative URL from ``base`` to ``to``.""" if to.startswith(SEP): return to - b2 = base.split('#')[0].split(SEP) - t2 = to.split('#')[0].split(SEP) + b2 = base.partition('#')[0].split(SEP) + t2 = to.partition('#')[0].split(SEP) # remove common segments (except the last segment) for x, y in zip(b2[:-1], t2[:-1], strict=False): if x != y: diff --git a/sphinx/util/requests.py b/sphinx/util/requests.py index b439ce437e8..f7d4ffdf4e9 100644 --- a/sphinx/util/requests.py +++ b/sphinx/util/requests.py @@ -40,7 +40,7 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool: else: hostname = urlsplit(url).netloc if '@' in hostname: - _, hostname = hostname.split('@', 1) + hostname = hostname.partition('@')[-1] return certs.get(hostname, True) diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py index eaa7dbdb6e7..c36b5d12f07 100644 --- 
a/sphinx/writers/texinfo.py +++ b/sphinx/writers/texinfo.py @@ -416,7 +416,7 @@ def add_menu_entries( name = self.node_names[entry] # special formatting for entries that are divided by an em-dash try: - parts = reg.split(name, 1) + parts = reg.split(name, maxsplit=1) except TypeError: # could be a gettext proxy parts = [name] @@ -748,7 +748,7 @@ def visit_reference(self, node: Element) -> None: uri = self.escape_arg(uri) id = 'Top' if '#' in uri: - uri, id = uri.split('#', 1) + uri, _, id = uri.partition('#') id = self.escape_id(id) name = self.escape_menu(name) if name == id: diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py index 37e708a021e..16f3437c154 100644 --- a/tests/test_builders/test_build_latex.py +++ b/tests/test_builders/test_build_latex.py @@ -1566,7 +1566,7 @@ def test_latex_table_tabulars(app: SphinxTestApp) -> None: result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') tables = {} for chap in re.split(r'\\(?:section|chapter){', result)[1:]: - sectname, content = chap.split('}', 1) + sectname, _, content = chap.partition('}') content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator tables[sectname] = content.strip() @@ -1644,7 +1644,7 @@ def test_latex_table_longtable(app: SphinxTestApp) -> None: result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') tables = {} for chap in re.split(r'\\(?:section|chapter){', result)[1:]: - sectname, content = chap.split('}', 1) + sectname, _, content = chap.partition('}') content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator tables[sectname] = content.strip() @@ -1712,7 +1712,7 @@ def test_latex_table_complex_tables(app: SphinxTestApp) -> None: result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8') tables = {} for chap in re.split(r'\\(?:section|renewcommand){', result)[1:]: - sectname, content = chap.split('}', 1) + sectname, _, content = chap.partition('}') tables[sectname] 
= content.strip() def get_expected(name): diff --git a/tests/test_util/test_util_images.py b/tests/test_util/test_util_images.py index 875fc0d98f4..d0b4f918afc 100644 --- a/tests/test_util/test_util_images.py +++ b/tests/test_util/test_util_images.py @@ -84,8 +84,5 @@ def test_parse_data_uri() -> None: 'data:iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4' '//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==' ) - with pytest.raises( - ValueError, - match=r'not enough values to unpack \(expected 2, got 1\)', - ): + with pytest.raises(ValueError, match=r'malformed data URI'): parse_data_uri(uri) From 2b7e3adf27c158305acca9b5e4d0d93d3e4c6f09 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Tue, 3 Jun 2025 00:56:19 +0100 Subject: [PATCH 098/435] Bump Ruff to 0.11.12 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c676e699212..17cb0463ae5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.11", + "ruff==0.11.12", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.11", + "ruff==0.11.12", "sphinx-lint>=0.9", ] package = [ From 6a860c8c2e9c586b6634fb597503867dd3e053d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Fri, 6 Jun 2025 16:11:34 +0200 Subject: [PATCH 099/435] Fix #13619 about possibly duplicated footnotes from signatures in PDF (#13623) --- CHANGES.rst | 3 +++ sphinx/texinputs/sphinxlatexobjects.sty | 30 +++++++++++++------------ 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index f575efab7c7..40d6984ca55 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -37,6 +37,9 @@ Bugs fixed Patch by Alicia Garcia-Raboso. 
* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`. Patch by Shengyu Zhang and Adam Turner. +* #13619: LaTeX: possible duplicated footnotes in PDF from object signatures + (typically if :confval:`latex_show_urls` ``= 'footnote'``). + Patch by Jean-François B. Testing ------- diff --git a/sphinx/texinputs/sphinxlatexobjects.sty b/sphinx/texinputs/sphinxlatexobjects.sty index 1147a016227..2a05dd6de8c 100644 --- a/sphinx/texinputs/sphinxlatexobjects.sty +++ b/sphinx/texinputs/sphinxlatexobjects.sty @@ -1,7 +1,7 @@ %% MODULE RELEASE DATA AND OBJECT DESCRIPTIONS % % change this info string if making any custom modification -\ProvidesPackage{sphinxlatexobjects}[2025/02/11 documentation environments] +\ProvidesPackage{sphinxlatexobjects}[2025/06/06 documentation environments] % Provides support for this output mark-up from Sphinx latex writer: % @@ -155,20 +155,23 @@ \pysigadjustitemsep } \newcommand{\pysiglinewithargsret}[3]{% - % as #1 may contain a footnote using \label we need to make \label - % a no-op here to avoid LaTeX complaining about duplicates -\let\spx@label\label\let\label\@gobble - \settowidth{\py@argswidth}{#1\pysigarglistopen}% -\let\label\spx@label + % #1 may contain a footnote (especially with latex_show_urls='footnote' + % and some intersphinx added hyperlinking). Here we want to measure + % a width but not typeset such a footnote (else #13619). + % Miraculously a sphinxpackagefootnote.sty tabulary compatibility + % layer employing the amsmath \ifmeasuring@ can be used here to let + % a footnote influence the actual width up to opening brace but not + % actually get typeset at this stage... + % MEMO: "argswidth" is misleading here, this code measures the name + % not the arguments. 
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigarglistopen}% \py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax \item[{#1\pysigarglistopen\py@sigparams{#2}{#3}\strut}] \pysigadjustitemsep } -\newcommand{\pysiglinewithargsretwithtypelist}[4]{ -% #1 = name, #2 = typelist, #3 = arglist, #4 = retann -\let\spx@label\label\let\label\@gobble - \settowidth{\py@argswidth}{#1\pysigtypelistopen}% -\let\label\spx@label +\newcommand{\pysiglinewithargsretwithtypelist}[4]{% + % same comment as in \pysiglinewithargsret + \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}% \py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax \item[{#1\pysigtypelistopen\py@sigparamswithtypelist{#2}{#3}{#4}\strut}] \pysigadjustitemsep @@ -244,9 +247,8 @@ \newcommand{\pysigwithonelineperargwithtypelist}[4]{ % #1 = name, #2 = typelist, #3 = arglist, #4 = retann % render the type parameters list on one line, but each argument is rendered on its own line -\let\spx@label\label\let\label\@gobble - \settowidth{\py@argswidth}{#1\pysigtypelistopen}% -\let\label\spx@label + % for \measuring@true see comment in \pysiglinewithargsret + \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}% \py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax \item[{#1\pysigtypelistopen\parbox[t]{\py@argswidth}{% \raggedright #2\pysigtypelistclose\pysigarglistopen\strut}\strut}] From 43c298b8e075dff7fa7e7ff72d7a845bfd4aac86 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Fri, 6 Jun 2025 20:40:05 +0100 Subject: [PATCH 100/435] Add ``_write_docname()`` (#13624) --- sphinx/builders/__init__.py | 23 +++++++++++++---------- sphinx/environment/__init__.py | 3 ++- sphinx/environment/adapters/toctree.py | 4 +--- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 21a1eb8b5c4..076fe218434 100644 --- a/sphinx/builders/__init__.py +++ 
b/sphinx/builders/__init__.py @@ -761,11 +761,7 @@ def _write_serial(self, docnames: Sequence[str]) -> None: len(docnames), self.app.verbosity, ): - self.app.phase = BuildPhase.RESOLVING - doctree = self.env.get_and_resolve_doctree(docname, self) - self.app.phase = BuildPhase.WRITING - self.write_doc_serialized(docname, doctree) - self.write_doc(docname, doctree) + _write_docname(docname, app=self.app, env=self.env, builder=self) def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None: def write_process(docs: list[tuple[str, nodes.document]]) -> None: @@ -775,11 +771,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None: # warm up caches/compile templates using the first document firstname, docnames = docnames[0], docnames[1:] - self.app.phase = BuildPhase.RESOLVING - doctree = self.env.get_and_resolve_doctree(firstname, self) - self.app.phase = BuildPhase.WRITING - self.write_doc_serialized(firstname, doctree) - self.write_doc(firstname, doctree) + _write_docname(firstname, app=self.app, env=self.env, builder=self) tasks = ParallelTasks(nproc) chunks = make_chunks(docnames, nproc) @@ -867,6 +859,17 @@ def get_builder_config(self, option: str, default: str) -> Any: return getattr(self.config, optname) +def _write_docname( + docname: str, /, *, app: Sphinx, env: BuildEnvironment, builder: Builder +) -> None: + """Write a single document.""" + app.phase = BuildPhase.RESOLVING + doctree = env.get_and_resolve_doctree(docname, builder=builder) + app.phase = BuildPhase.WRITING + builder.write_doc_serialized(docname, doctree) + builder.write_doc(docname, doctree) + + class _UnicodeDecodeErrorHandler: """Custom error handler for open() that warns and replaces.""" diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index 79fa6278549..2f3e25ac477 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -701,6 +701,7 @@ def get_and_resolve_doctree( self.apply_post_transforms(doctree, docname) 
# now, resolve all toctree nodes + tags = builder.tags for toctreenode in doctree.findall(addnodes.toctree): result = toctree_adapters._resolve_toctree( self, @@ -709,7 +710,7 @@ def get_and_resolve_doctree( toctreenode, prune=prune_toctrees, includehidden=includehidden, - tags=builder.tags, + tags=tags, ) if result is None: toctreenode.parent.replace(toctreenode, []) diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py index bedeca2f299..90344f185d7 100644 --- a/sphinx/environment/adapters/toctree.py +++ b/sphinx/environment/adapters/toctree.py @@ -191,9 +191,7 @@ def _resolve_toctree( # prune the tree to maxdepth, also set toc depth and current classes _toctree_add_classes(newnode, 1, docname) - newnode = _toctree_copy( - newnode, 1, maxdepth if prune else 0, collapse, builder.tags - ) + newnode = _toctree_copy(newnode, 1, maxdepth if prune else 0, collapse, tags) if ( isinstance(newnode[-1], nodes.Element) and len(newnode[-1]) == 0 From 77a0d6658b8e86b8e730e78d8f7bb839babe7567 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Fri, 6 Jun 2025 22:27:18 +0100 Subject: [PATCH 101/435] Extract nested function definitions (#13625) --- doc/conf.py | 10 +- sphinx/builders/_epub_base.py | 115 ++++--- sphinx/domains/c/_parser.py | 15 +- sphinx/domains/c/_symbol.py | 76 +++-- sphinx/domains/cpp/__init__.py | 33 +- sphinx/domains/cpp/_parser.py | 13 +- sphinx/domains/cpp/_symbol.py | 322 ++++++++++-------- sphinx/domains/python/_annotations.py | 26 +- sphinx/domains/std/__init__.py | 10 +- sphinx/ext/doctest.py | 27 +- sphinx/transforms/post_transforms/__init__.py | 41 +-- sphinx/writers/latex.py | 11 +- 12 files changed, 365 insertions(+), 334 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 9cf2f9b4856..28dbdb58c46 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -297,14 +297,12 @@ def linkify_issues_in_changelog( ) -> None: """Linkify issue references like #123 in changelog to 
GitHub.""" if docname == 'changes': + linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', _linkify, source[0]) + source[0] = linkified_changelog - def linkify(match: re.Match[str]) -> str: - url = 'https://github.com/sphinx-doc/sphinx/issues/' + match[1] - return f'`{match[0]} <{url}>`_' - - linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', linkify, source[0]) - source[0] = linkified_changelog +def _linkify(match: re.Match[str], /) -> str: + return f'`{match[0]} `__' REDIRECT_TEMPLATE = """ diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py index a9527c3c0e3..10ae0820c5b 100644 --- a/sphinx/builders/_epub_base.py +++ b/sphinx/builders/_epub_base.py @@ -279,16 +279,6 @@ def fix_ids(self, tree: nodes.document) -> None: Some readers crash because they interpret the part as a transport protocol specification. """ - - def update_node_id(node: Element) -> None: - """Update IDs of given *node*.""" - new_ids: list[str] = [] - for node_id in node['ids']: - new_id = self.fix_fragment('', node_id) - if new_id not in new_ids: - new_ids.append(new_id) - node['ids'] = new_ids - for reference in tree.findall(nodes.reference): if 'refuri' in reference: m = self.refuri_re.match(reference['refuri']) @@ -298,66 +288,75 @@ def update_node_id(node: Element) -> None: reference['refid'] = self.fix_fragment('', reference['refid']) for target in tree.findall(nodes.target): - update_node_id(target) + self._update_node_id(target) next_node: Node = target.next_node(ascend=True) if isinstance(next_node, nodes.Element): - update_node_id(next_node) + self._update_node_id(next_node) for desc_signature in tree.findall(addnodes.desc_signature): - update_node_id(desc_signature) + self._update_node_id(desc_signature) + + def _update_node_id(self, node: Element, /) -> None: + """Update IDs of given *node*.""" + new_ids: list[str] = [] + for node_id in node['ids']: + new_id = self.fix_fragment('', node_id) + if new_id not in new_ids: + new_ids.append(new_id) + node['ids'] = 
new_ids + + @staticmethod + def _make_footnote_ref(doc: nodes.document, label: str) -> nodes.footnote_reference: + """Create a footnote_reference node with children""" + footnote_ref = nodes.footnote_reference('[#]_') + footnote_ref.append(nodes.Text(label)) + doc.note_autofootnote_ref(footnote_ref) + return footnote_ref + + @staticmethod + def _make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote: + """Create a footnote node with children""" + footnote = nodes.footnote(uri) + para = nodes.paragraph() + para.append(nodes.Text(uri)) + footnote.append(para) + footnote.insert(0, nodes.label('', label)) + doc.note_autofootnote(footnote) + return footnote + + @staticmethod + def _footnote_spot(tree: nodes.document) -> tuple[Element, int]: + """Find or create a spot to place footnotes. + + The function returns the tuple (parent, index). + """ + # The code uses the following heuristic: + # a) place them after the last existing footnote + # b) place them after an (empty) Footnotes rubric + # c) create an empty Footnotes rubric at the end of the document + fns = list(tree.findall(nodes.footnote)) + if fns: + fn = fns[-1] + return fn.parent, fn.parent.index(fn) + 1 + for node in tree.findall(nodes.rubric): + if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME: + return node.parent, node.parent.index(node) + 1 + doc = next(tree.findall(nodes.document)) + rub = nodes.rubric() + rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME)) + doc.append(rub) + return doc, doc.index(rub) + 1 def add_visible_links( self, tree: nodes.document, show_urls: str = 'inline' ) -> None: """Add visible link targets for external links""" - - def make_footnote_ref( - doc: nodes.document, label: str - ) -> nodes.footnote_reference: - """Create a footnote_reference node with children""" - footnote_ref = nodes.footnote_reference('[#]_') - footnote_ref.append(nodes.Text(label)) - doc.note_autofootnote_ref(footnote_ref) - return footnote_ref - - def make_footnote(doc: 
nodes.document, label: str, uri: str) -> nodes.footnote: - """Create a footnote node with children""" - footnote = nodes.footnote(uri) - para = nodes.paragraph() - para.append(nodes.Text(uri)) - footnote.append(para) - footnote.insert(0, nodes.label('', label)) - doc.note_autofootnote(footnote) - return footnote - - def footnote_spot(tree: nodes.document) -> tuple[Element, int]: - """Find or create a spot to place footnotes. - - The function returns the tuple (parent, index). - """ - # The code uses the following heuristic: - # a) place them after the last existing footnote - # b) place them after an (empty) Footnotes rubric - # c) create an empty Footnotes rubric at the end of the document - fns = list(tree.findall(nodes.footnote)) - if fns: - fn = fns[-1] - return fn.parent, fn.parent.index(fn) + 1 - for node in tree.findall(nodes.rubric): - if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME: - return node.parent, node.parent.index(node) + 1 - doc = next(tree.findall(nodes.document)) - rub = nodes.rubric() - rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME)) - doc.append(rub) - return doc, doc.index(rub) + 1 - if show_urls == 'no': return if show_urls == 'footnote': doc = next(tree.findall(nodes.document)) - fn_spot, fn_idx = footnote_spot(tree) + fn_spot, fn_idx = self._footnote_spot(tree) nr = 1 for node in list(tree.findall(nodes.reference)): uri = node.get('refuri', '') @@ -371,9 +370,9 @@ def footnote_spot(tree: nodes.document) -> tuple[Element, int]: elif show_urls == 'footnote': label = FOOTNOTE_LABEL_TEMPLATE % nr nr += 1 - footnote_ref = make_footnote_ref(doc, label) + footnote_ref = self._make_footnote_ref(doc, label) node.parent.insert(idx, footnote_ref) - footnote = make_footnote(doc, label, uri) + footnote = self._make_footnote(doc, label, uri) fn_spot.insert(fn_idx, footnote) footnote_ref['refid'] = footnote['ids'][0] footnote.add_backref(footnote_ref['ids'][0]) diff --git a/sphinx/domains/c/_parser.py b/sphinx/domains/c/_parser.py index 
bd7ddbe2326..c59352b6ee2 100644 --- a/sphinx/domains/c/_parser.py +++ b/sphinx/domains/c/_parser.py @@ -369,10 +369,7 @@ def _parse_logical_or_expression(self) -> ASTExpression: # pm = cast .*, ->* def _parse_bin_op_expr(self: DefinitionParser, op_id: int) -> ASTExpression: if op_id + 1 == len(_expression_bin_ops): - - def parser() -> ASTExpression: - return self._parse_cast_expression() - + parser = self._parse_cast_expression else: def parser() -> ASTExpression: @@ -760,10 +757,7 @@ def _parse_declarator_name_suffix( if self.skip_string(']'): size = None else: - - def parser() -> ASTExpression: - return self._parse_expression() - + parser = self._parse_expression size = self._parse_expression_fallback([']'], parser) self.skip_ws() if not self.skip_string(']'): @@ -1025,10 +1019,7 @@ def _parse_enumerator(self) -> ASTEnumerator: init = None if self.skip_string('='): self.skip_ws() - - def parser() -> ASTExpression: - return self._parse_constant_expression() - + parser = self._parse_constant_expression init_val = self._parse_expression_fallback([], parser) init = ASTInitializer(init_val) return ASTEnumerator(name, init, attrs) diff --git a/sphinx/domains/c/_symbol.py b/sphinx/domains/c/_symbol.py index cb43910e7ab..7ac555415ac 100644 --- a/sphinx/domains/c/_symbol.py +++ b/sphinx/domains/c/_symbol.py @@ -445,43 +445,19 @@ def on_missing_qualified_symbol( # First check if one of those with a declaration matches. # If it's a function, we need to compare IDs, # otherwise there should be only one symbol with a declaration. 
- def make_cand_symbol() -> Symbol: - if Symbol.debug_lookup: - Symbol.debug_print('begin: creating candidate symbol') - symbol = Symbol( - parent=lookup_result.parent_symbol, - ident=lookup_result.ident, - declaration=declaration, - docname=docname, - line=line, - ) - if Symbol.debug_lookup: - Symbol.debug_print('end: creating candidate symbol') - return symbol if len(with_decl) == 0: cand_symbol = None else: - cand_symbol = make_cand_symbol() - - def handle_duplicate_declaration( - symbol: Symbol, cand_symbol: Symbol - ) -> None: - if Symbol.debug_lookup: - Symbol.debug_indent += 1 - Symbol.debug_print('redeclaration') - Symbol.debug_indent -= 1 - Symbol.debug_indent -= 2 - # Redeclaration of the same symbol. - # Let the new one be there, but raise an error to the client - # so it can use the real symbol as subscope. - # This will probably result in a duplicate id warning. - cand_symbol.isRedeclaration = True - raise _DuplicateSymbolError(symbol, declaration) + cand_symbol = self._make_cand_symbol( + lookup_result, declaration, docname, line + ) if declaration.objectType != 'function': assert len(with_decl) <= 1 - handle_duplicate_declaration(with_decl[0], cand_symbol) + self._handle_duplicate_declaration( + with_decl[0], cand_symbol, declaration + ) # (not reachable) # a function, so compare IDs @@ -493,7 +469,7 @@ def handle_duplicate_declaration( if Symbol.debug_lookup: Symbol.debug_print('old_id: ', old_id) if cand_id == old_id: - handle_duplicate_declaration(symbol, cand_symbol) + self._handle_duplicate_declaration(symbol, cand_symbol, declaration) # (not reachable) # no candidate symbol found with matching ID # if there is an empty symbol, fill that one @@ -507,7 +483,7 @@ def handle_duplicate_declaration( if cand_symbol is not None: return cand_symbol else: - return make_cand_symbol() + return self._make_cand_symbol(lookup_result, declaration, docname, line) else: if Symbol.debug_lookup: Symbol.debug_print( @@ -529,6 +505,42 @@ def 
handle_duplicate_declaration( symbol._fill_empty(declaration, docname, line) return symbol + @staticmethod + def _make_cand_symbol( + lookup_result: SymbolLookupResult, + declaration: ASTDeclaration | None, + docname: str | None, + line: int | None, + ) -> Symbol: + if Symbol.debug_lookup: + Symbol.debug_print('begin: creating candidate symbol') + symbol = Symbol( + parent=lookup_result.parent_symbol, + ident=lookup_result.ident, + declaration=declaration, + docname=docname, + line=line, + ) + if Symbol.debug_lookup: + Symbol.debug_print('end: creating candidate symbol') + return symbol + + @staticmethod + def _handle_duplicate_declaration( + symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration + ) -> None: + if Symbol.debug_lookup: + Symbol.debug_indent += 1 + Symbol.debug_print('redeclaration') + Symbol.debug_indent -= 1 + Symbol.debug_indent -= 2 + # Redeclaration of the same symbol. + # Let the new one be there, but raise an error to the client + # so it can use the real symbol as subscope. + # This will probably result in a duplicate id warning. 
+ cand_symbol.isRedeclaration = True + raise _DuplicateSymbolError(symbol, declaration) + def merge_with( self, other: Symbol, docnames: list[str], env: BuildEnvironment ) -> None: diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py index 554f4ebb17e..ef486897bc4 100644 --- a/sphinx/domains/cpp/__init__.py +++ b/sphinx/domains/cpp/__init__.py @@ -1056,6 +1056,15 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non logger.debug('\tresult end') logger.debug('merge_domaindata end') + def _check_type(self, typ: str, decl_typ: str) -> bool: + if typ == 'any': + return True + objtypes = self.objtypes_for_role(typ) + if objtypes: + return decl_typ in objtypes + logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004 + raise AssertionError + def _resolve_xref_inner( self, env: BuildEnvironment, @@ -1150,16 +1159,7 @@ def _resolve_xref_inner( typ = typ.removeprefix('cpp:') decl_typ = s.declaration.objectType - def check_type() -> bool: - if typ == 'any': - return True - objtypes = self.objtypes_for_role(typ) - if objtypes: - return decl_typ in objtypes - logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004 - raise AssertionError - - if not check_type(): + if not self._check_type(typ, decl_typ): logger.warning( 'cpp:%s targets a %s (%s).', typ, @@ -1299,6 +1299,12 @@ def get_full_qualified_name(self, node: Element) -> str | None: return f'{parent_name}::{target}' +def _init_stuff(app: Sphinx) -> None: + Symbol.debug_lookup = app.config.cpp_debug_lookup + Symbol.debug_show_tree = app.config.cpp_debug_show_tree + app.config.cpp_index_common_prefix.sort(reverse=True) + + def setup(app: Sphinx) -> ExtensionMetadata: app.add_domain(CPPDomain) app.add_config_value('cpp_index_common_prefix', [], 'env', types=frozenset({list})) @@ -1318,12 +1324,7 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.add_config_value('cpp_debug_lookup', False, '', types=frozenset({bool})) 
app.add_config_value('cpp_debug_show_tree', False, '', types=frozenset({bool})) - def init_stuff(app: Sphinx) -> None: - Symbol.debug_lookup = app.config.cpp_debug_lookup - Symbol.debug_show_tree = app.config.cpp_debug_show_tree - app.config.cpp_index_common_prefix.sort(reverse=True) - - app.connect('builder-inited', init_stuff) + app.connect('builder-inited', _init_stuff) return { 'version': 'builtin', diff --git a/sphinx/domains/cpp/_parser.py b/sphinx/domains/cpp/_parser.py index aa941260da9..2055a942c68 100644 --- a/sphinx/domains/cpp/_parser.py +++ b/sphinx/domains/cpp/_parser.py @@ -438,9 +438,7 @@ def _parse_postfix_expression(self) -> ASTPostfixExpr: if not self.skip_string('('): self.fail("Expected '(' in '%s'." % cast) - def parser() -> ASTExpression: - return self._parse_expression() - + parser = self._parse_expression expr = self._parse_expression_fallback([')'], parser) self.skip_ws() if not self.skip_string(')'): @@ -459,10 +457,7 @@ def parser() -> ASTExpression: except DefinitionError as e_type: self.pos = pos try: - - def parser() -> ASTExpression: - return self._parse_expression() - + parser = self._parse_expression expr = self._parse_expression_fallback([')'], parser) prefix = ASTTypeId(expr, isType=False) if not self.skip_string(')'): @@ -1423,9 +1418,7 @@ def _parse_declarator_name_suffix( array_ops.append(ASTArray(None)) continue - def parser() -> ASTExpression: - return self._parse_expression() - + parser = self._parse_expression value = self._parse_expression_fallback([']'], parser) if not self.skip_string(']'): self.fail("Expected ']' in end of array operator.") diff --git a/sphinx/domains/cpp/_symbol.py b/sphinx/domains/cpp/_symbol.py index 36b965e52ae..7449e616a03 100644 --- a/sphinx/domains/cpp/_symbol.py +++ b/sphinx/domains/cpp/_symbol.py @@ -38,6 +38,10 @@ def __str__(self) -> str: return 'Internal C++ duplicate symbol error:\n%s' % self.symbol.dump(0) +class _QualifiedSymbolIsTemplateParam(Exception): + pass + + class 
SymbolLookupResult: __slots__ = ( 'symbols', @@ -419,53 +423,19 @@ def _find_named_symbols( if not _is_specialization(template_params, template_args): template_args = None - def matches(s: Symbol) -> bool: - if s.identOrOp != ident_or_op: - return False - if (s.templateParams is None) != (template_params is None): - if template_params is not None: - # we query with params, they must match params - return False - if not template_shorthand: - # we don't query with params, and we do care about them - return False - if template_params: - # TODO: do better comparison - if str(s.templateParams) != str(template_params): - return False - if (s.templateArgs is None) != (template_args is None): - return False - if s.templateArgs: - # TODO: do better comparison - if str(s.templateArgs) != str(template_args): - return False - return True - - def candidates() -> Iterator[Symbol]: - s = self - if Symbol.debug_lookup: - Symbol.debug_print('searching in self:') - logger.debug(s.to_string(Symbol.debug_indent + 1), end='') - while True: - if match_self: - yield s - if recurse_in_anon: - yield from s.children_recurse_anon - else: - yield from s._children - - if s.siblingAbove is None: - break - s = s.siblingAbove - if Symbol.debug_lookup: - Symbol.debug_print('searching in sibling:') - logger.debug(s.to_string(Symbol.debug_indent + 1), end='') - - for s in candidates(): + for s in self._candidates( + match_self=match_self, recurse_in_anon=recurse_in_anon + ): if Symbol.debug_lookup: Symbol.debug_print('candidate:') logger.debug(s.to_string(Symbol.debug_indent + 1), end='') - if matches(s): + if self._matches( + s, + ident_or_op=ident_or_op, + template_params=template_params, + template_args=template_args, + template_shorthand=template_shorthand, + ): if Symbol.debug_lookup: Symbol.debug_indent += 1 Symbol.debug_print('matches') @@ -476,6 +446,59 @@ def candidates() -> Iterator[Symbol]: if Symbol.debug_lookup: Symbol.debug_indent -= 2 + @staticmethod + def _matches( + s: Symbol, + /, 
+ *, + ident_or_op: ASTIdentifier | ASTOperator, + template_params: ASTTemplateParams | ASTTemplateIntroduction, + template_args: ASTTemplateArgs, + template_shorthand: bool, + ) -> bool: + if s.identOrOp != ident_or_op: + return False + if (s.templateParams is None) != (template_params is None): + if template_params is not None: + # we query with params, they must match params + return False + if not template_shorthand: + # we don't query with params, and we do care about them + return False + if template_params: + # TODO: do better comparison + if str(s.templateParams) != str(template_params): + return False + if (s.templateArgs is None) != (template_args is None): + return False + if s.templateArgs: + # TODO: do better comparison + if str(s.templateArgs) != str(template_args): + return False + return True + + def _candidates( + self, *, match_self: bool, recurse_in_anon: bool + ) -> Iterator[Symbol]: + s = self + if Symbol.debug_lookup: + Symbol.debug_print('searching in self:') + logger.debug(s.to_string(Symbol.debug_indent + 1), end='') + while True: + if match_self: + yield s + if recurse_in_anon: + yield from s.children_recurse_anon + else: + yield from s._children + + if s.siblingAbove is None: + break + s = s.siblingAbove + if Symbol.debug_lookup: + Symbol.debug_print('searching in sibling:') + logger.debug(s.to_string(Symbol.debug_indent + 1), end='') + def _symbol_lookup( self, nested_name: ASTNestedName, @@ -661,34 +684,10 @@ def _add_symbols( Symbol.debug_print('decl: ', declaration) Symbol.debug_print(f'location: {docname}:{line}') - def on_missing_qualified_symbol( - parent_symbol: Symbol, - ident_or_op: ASTIdentifier | ASTOperator, - template_params: Any, - template_args: ASTTemplateArgs, - ) -> Symbol | None: - if Symbol.debug_lookup: - Symbol.debug_indent += 1 - Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:') - Symbol.debug_indent += 1 - Symbol.debug_print('template_params:', template_params) - Symbol.debug_print('ident_or_op: ', 
ident_or_op) - Symbol.debug_print('template_args: ', template_args) - Symbol.debug_indent -= 2 - return Symbol( - parent=parent_symbol, - identOrOp=ident_or_op, - templateParams=template_params, - templateArgs=template_args, - declaration=None, - docname=None, - line=None, - ) - lookup_result = self._symbol_lookup( nested_name, template_decls, - on_missing_qualified_symbol, + _on_missing_qualified_symbol_fresh, strict_template_param_arg_lists=True, ancestor_lookup_type=None, template_shorthand=False, @@ -759,45 +758,18 @@ def on_missing_qualified_symbol( # First check if one of those with a declaration matches. # If it's a function, we need to compare IDs, # otherwise there should be only one symbol with a declaration. - def make_cand_symbol() -> Symbol: - if Symbol.debug_lookup: - Symbol.debug_print('begin: creating candidate symbol') - symbol = Symbol( - parent=lookup_result.parent_symbol, - identOrOp=lookup_result.ident_or_op, - templateParams=lookup_result.template_params, - templateArgs=lookup_result.template_args, - declaration=declaration, - docname=docname, - line=line, - ) - if Symbol.debug_lookup: - Symbol.debug_print('end: creating candidate symbol') - return symbol - if len(with_decl) == 0: cand_symbol = None else: - cand_symbol = make_cand_symbol() - - def handle_duplicate_declaration( - symbol: Symbol, cand_symbol: Symbol - ) -> None: - if Symbol.debug_lookup: - Symbol.debug_indent += 1 - Symbol.debug_print('redeclaration') - Symbol.debug_indent -= 1 - Symbol.debug_indent -= 2 - # Redeclaration of the same symbol. - # Let the new one be there, but raise an error to the client - # so it can use the real symbol as subscope. - # This will probably result in a duplicate id warning. 
- cand_symbol.isRedeclaration = True - raise _DuplicateSymbolError(symbol, declaration) + cand_symbol = self._make_cand_symbol( + lookup_result, declaration, docname, line + ) if declaration.objectType != 'function': assert len(with_decl) <= 1 - handle_duplicate_declaration(with_decl[0], cand_symbol) + self._handle_duplicate_declaration( + with_decl[0], cand_symbol, declaration + ) # (not reachable) # a function, so compare IDs @@ -808,13 +780,13 @@ def handle_duplicate_declaration( # but all existing must be functions as well, # otherwise we declare it to be a duplicate if symbol.declaration.objectType != 'function': - handle_duplicate_declaration(symbol, cand_symbol) + self._handle_duplicate_declaration(symbol, cand_symbol, declaration) # (not reachable) old_id = symbol.declaration.get_newest_id() if Symbol.debug_lookup: Symbol.debug_print('old_id: ', old_id) if cand_id == old_id: - handle_duplicate_declaration(symbol, cand_symbol) + self._handle_duplicate_declaration(symbol, cand_symbol, declaration) # (not reachable) # no candidate symbol found with matching ID # if there is an empty symbol, fill that one @@ -824,12 +796,12 @@ def handle_duplicate_declaration( if cand_symbol is not None: Symbol.debug_print('result is already created cand_symbol') else: - Symbol.debug_print('result is make_cand_symbol()') + Symbol.debug_print('result is self._make_cand_symbol()') Symbol.debug_indent -= 2 if cand_symbol is not None: return cand_symbol else: - return make_cand_symbol() + return self._make_cand_symbol(lookup_result, declaration, docname, line) else: if Symbol.debug_lookup: Symbol.debug_print( @@ -851,6 +823,44 @@ def handle_duplicate_declaration( symbol._fill_empty(declaration, docname, line) return symbol + @staticmethod + def _make_cand_symbol( + lookup_result: SymbolLookupResult, + declaration: ASTDeclaration | None, + docname: str | None, + line: int | None, + ) -> Symbol: + if Symbol.debug_lookup: + Symbol.debug_print('begin: creating candidate symbol') + 
symbol = Symbol( + parent=lookup_result.parent_symbol, + identOrOp=lookup_result.ident_or_op, + templateParams=lookup_result.template_params, + templateArgs=lookup_result.template_args, + declaration=declaration, + docname=docname, + line=line, + ) + if Symbol.debug_lookup: + Symbol.debug_print('end: creating candidate symbol') + return symbol + + @staticmethod + def _handle_duplicate_declaration( + symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration + ) -> None: + if Symbol.debug_lookup: + Symbol.debug_indent += 1 + Symbol.debug_print('redeclaration') + Symbol.debug_indent -= 1 + Symbol.debug_indent -= 2 + # Redeclaration of the same symbol. + # Let the new one be there, but raise an error to the client + # so it can use the real symbol as subscope. + # This will probably result in a duplicate id warning. + cand_symbol.isRedeclaration = True + raise _DuplicateSymbolError(symbol, declaration) + def merge_with( self, other: Symbol, docnames: list[str], env: BuildEnvironment ) -> None: @@ -859,12 +869,6 @@ def merge_with( Symbol.debug_print('merge_with:') assert other is not None - def unconditional_add(self: Symbol, other_child: Symbol) -> None: - # TODO: hmm, should we prune by docnames? 
- self._children.append(other_child) - other_child.parent = self - other_child._assert_invariants() - if Symbol.debug_lookup: Symbol.debug_indent += 1 for other_child in other._children: @@ -874,7 +878,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None: ) Symbol.debug_indent += 1 if other_child.isRedeclaration: - unconditional_add(self, other_child) + self._unconditional_add(other_child) if Symbol.debug_lookup: Symbol.debug_print('is_redeclaration') Symbol.debug_indent -= 1 @@ -898,7 +902,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None: Symbol.debug_print('non-duplicate candidate symbols:', len(symbols)) if len(symbols) == 0: - unconditional_add(self, other_child) + self._unconditional_add(other_child) if Symbol.debug_lookup: Symbol.debug_indent -= 1 continue @@ -929,7 +933,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None: if Symbol.debug_lookup: Symbol.debug_indent -= 1 if our_child is None: - unconditional_add(self, other_child) + self._unconditional_add(other_child) continue if other_child.declaration and other_child.docname in docnames: if not our_child.declaration: @@ -978,6 +982,12 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None: if Symbol.debug_lookup: Symbol.debug_indent -= 2 + def _unconditional_add(self, other_child: Symbol) -> None: + # TODO: hmm, should we prune by docnames? + self._children.append(other_child) + other_child.parent = self + other_child._assert_invariants() + def add_name( self, nestedName: ASTNestedName, @@ -1125,29 +1135,11 @@ def find_name( Symbol.debug_print('recurseInAnon: ', recurseInAnon) Symbol.debug_print('searchInSiblings: ', searchInSiblings) - class QualifiedSymbolIsTemplateParam(Exception): - pass - - def on_missing_qualified_symbol( - parent_symbol: Symbol, - ident_or_op: ASTIdentifier | ASTOperator, - template_params: Any, - template_args: ASTTemplateArgs, - ) -> Symbol | None: - # TODO: Maybe search without template args? 
- # Though, the correct_primary_template_args does - # that for primary templates. - # Is there another case where it would be good? - if parent_symbol.declaration is not None: - if parent_symbol.declaration.objectType == 'templateParam': - raise QualifiedSymbolIsTemplateParam - return None - try: lookup_result = self._symbol_lookup( nestedName, templateDecls, - on_missing_qualified_symbol, + _on_missing_qualified_symbol_raise, strict_template_param_arg_lists=False, ancestor_lookup_type=typ, template_shorthand=templateShorthand, @@ -1156,7 +1148,7 @@ def on_missing_qualified_symbol( correct_primary_template_args=False, search_in_siblings=searchInSiblings, ) - except QualifiedSymbolIsTemplateParam: + except _QualifiedSymbolIsTemplateParam: return None, 'templateParamInQualified' if lookup_result is None: @@ -1210,18 +1202,10 @@ def find_declaration( else: template_decls = [] - def on_missing_qualified_symbol( - parent_symbol: Symbol, - ident_or_op: ASTIdentifier | ASTOperator, - template_params: Any, - template_args: ASTTemplateArgs, - ) -> Symbol | None: - return None - lookup_result = self._symbol_lookup( nested_name, template_decls, - on_missing_qualified_symbol, + _on_missing_qualified_symbol_none, strict_template_param_arg_lists=False, ancestor_lookup_type=typ, template_shorthand=templateShorthand, @@ -1296,3 +1280,53 @@ def dump(self, indent: int) -> str: self.to_string(indent), *(c.dump(indent + 1) for c in self._children), ]) + + +def _on_missing_qualified_symbol_fresh( + parent_symbol: Symbol, + ident_or_op: ASTIdentifier | ASTOperator, + template_params: Any, + template_args: ASTTemplateArgs, +) -> Symbol | None: + if Symbol.debug_lookup: + Symbol.debug_indent += 1 + Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:') + Symbol.debug_indent += 1 + Symbol.debug_print('template_params:', template_params) + Symbol.debug_print('ident_or_op: ', ident_or_op) + Symbol.debug_print('template_args: ', template_args) + Symbol.debug_indent -= 2 + return 
Symbol( + parent=parent_symbol, + identOrOp=ident_or_op, + templateParams=template_params, + templateArgs=template_args, + declaration=None, + docname=None, + line=None, + ) + + +def _on_missing_qualified_symbol_raise( + parent_symbol: Symbol, + ident_or_op: ASTIdentifier | ASTOperator, + template_params: Any, + template_args: ASTTemplateArgs, +) -> Symbol | None: + # TODO: Maybe search without template args? + # Though, the correct_primary_template_args does + # that for primary templates. + # Is there another case where it would be good? + if parent_symbol.declaration is not None: + if parent_symbol.declaration.objectType == 'templateParam': + raise _QualifiedSymbolIsTemplateParam + return None + + +def _on_missing_qualified_symbol_none( + parent_symbol: Symbol, + ident_or_op: ASTIdentifier | ASTOperator, + template_params: Any, + template_args: ASTTemplateArgs, +) -> Symbol | None: + return None diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py index 60def00a533..f476ff22fd4 100644 --- a/sphinx/domains/python/_annotations.py +++ b/sphinx/domains/python/_annotations.py @@ -6,6 +6,7 @@ import token from collections import deque from inspect import Parameter +from itertools import chain, islice from typing import TYPE_CHECKING from docutils import nodes @@ -316,18 +317,6 @@ def parse(self) -> None: self.type_params.append(type_param) def _build_identifier(self, tokens: list[Token]) -> str: - from itertools import chain, islice - - def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]: - # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG - it = iter(iterable) - window = deque(islice(it, 3), maxlen=3) - if len(window) == 3: - yield tuple(window) - for x in it: - window.append(x) - yield tuple(window) - idents: list[str] = [] tokens: Iterable[Token] = iter(tokens) # type: ignore[no-redef] # do not format opening brackets @@ -342,7 +331,7 @@ def triplewise(iterable: Iterable[Token]) -> 
Iterator[tuple[Token, ...]]: # check the remaining tokens stop = Token(token.ENDMARKER, '', (-1, -1), (-1, -1), '') is_unpack_operator = False - for tok, op, after in triplewise(chain(tokens, [stop, stop])): + for tok, op, after in _triplewise(chain(tokens, [stop, stop])): ident = self._pformat_token(tok, native=is_unpack_operator) idents.append(ident) # determine if the next token is an unpack operator depending @@ -628,3 +617,14 @@ def _pseudo_parse_arglist( signode += paramlist else: signode += paramlist + + +def _triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]: + # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG + it = iter(iterable) + window = deque(islice(it, 3), maxlen=3) + if len(window) == 3: + yield tuple(window) + for x in it: + window.append(x) + yield tuple(window) diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py index 9397733c814..f8f3f5513e3 100644 --- a/sphinx/domains/std/__init__.py +++ b/sphinx/domains/std/__init__.py @@ -1378,16 +1378,12 @@ def get_numfig_title(self, node: Node) -> str | None: def get_enumerable_node_type(self, node: Node) -> str | None: """Get type of enumerable nodes.""" - - def has_child(node: Element, cls: type) -> bool: - return any(isinstance(child, cls) for child in node) - if isinstance(node, nodes.section): return 'section' elif ( isinstance(node, nodes.container) and 'literal_block' in node - and has_child(node, nodes.literal_block) + and _has_child(node, nodes.literal_block) ): # given node is a code-block having caption return 'code-block' @@ -1440,6 +1436,10 @@ def get_full_qualified_name(self, node: Element) -> str | None: return None +def _has_child(node: Element, cls: type) -> bool: + return any(isinstance(child, cls) for child in node) + + def warn_missing_reference( app: Sphinx, domain: Domain, diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py index 65b9933785c..9610e24d58d 100644 --- a/sphinx/ext/doctest.py +++ b/sphinx/ext/doctest.py @@ -436,21 
+436,9 @@ def test_doc(self, docname: str, doctree: Node) -> bool: self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore[attr-defined] if self.config.doctest_test_doctest_blocks: - - def condition(node: Node) -> bool: - return ( - isinstance(node, nodes.literal_block | nodes.comment) - and 'testnodetype' in node - ) or isinstance(node, nodes.doctest_block) - + condition = _condition_with_doctest else: - - def condition(node: Node) -> bool: - return ( - isinstance(node, nodes.literal_block | nodes.comment) - and 'testnodetype' in node - ) - + condition = _condition_default for node in doctree.findall(condition): if self.skipped(node): # type: ignore[arg-type] continue @@ -663,3 +651,14 @@ def setup(app: Sphinx) -> ExtensionMetadata: 'version': sphinx.__display_version__, 'parallel_read_safe': True, } + + +def _condition_default(node: Node) -> bool: + return ( + isinstance(node, (nodes.literal_block, nodes.comment)) + and 'testnodetype' in node + ) + + +def _condition_with_doctest(node: Node) -> bool: + return _condition_default(node) or isinstance(node, nodes.doctest_block) diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py index d9c5031b31c..1a40c3d791a 100644 --- a/sphinx/transforms/post_transforms/__init__.py +++ b/sphinx/transforms/post_transforms/__init__.py @@ -227,12 +227,7 @@ def _resolve_pending_any_xref( if not results: return None if len(results) > 1: - - def stringify(name: str, node: Element) -> str: - reftitle = node.get('reftitle', node.astext()) - return f':{name}:`{reftitle}`' - - candidates = ' or '.join(starmap(stringify, results)) + candidates = ' or '.join(starmap(self._stringify, results)) msg = __( "more than one target found for 'any' cross-reference %r: could be %s" ) @@ -251,6 +246,11 @@ def stringify(name: str, node: Element) -> str: new_node[0]['classes'].extend((res_domain, res_role.replace(':', '-'))) return new_node + @staticmethod + def _stringify(name: str, 
node: Element) -> str: + reftitle = node.get('reftitle', node.astext()) + return f':{name}:`{reftitle}`' + def warn_missing_reference( self, refdoc: str, @@ -273,21 +273,12 @@ def warn_missing_reference( ): # fmt: skip warn = False if self.config.nitpick_ignore_regex: - - def matches_ignore(entry_type: str, entry_target: str) -> bool: - return any( - ( - re.fullmatch(ignore_type, entry_type) - and re.fullmatch(ignore_target, entry_target) - ) - for ignore_type, ignore_target in self.config.nitpick_ignore_regex - ) - - if matches_ignore(dtype, target): + if _matches_ignore(self.config.nitpick_ignore_regex, dtype, target): warn = False # for "std" types also try without domain name - if (not domain or domain.name == 'std') and matches_ignore(typ, target): - warn = False + if not domain or domain.name == 'std': + if _matches_ignore(self.config.nitpick_ignore_regex, typ, target): + warn = False if not warn: return @@ -317,6 +308,18 @@ def find_pending_xref_condition( return None +def _matches_ignore( + ignore_patterns: Sequence[tuple[str, str]], entry_type: str, entry_target: str +) -> bool: + return any( + ( + re.fullmatch(ignore_type, entry_type) + and re.fullmatch(ignore_target, entry_target) + ) + for ignore_type, ignore_target in ignore_patterns + ) + + class OnlyNodeTransform(SphinxPostTransform): default_priority = 50 diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 5d9bb9bef9c..823db1d875b 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -1852,13 +1852,10 @@ def add_target(id: str) -> None: if node.get('ismod', False): # Detect if the previous nodes are label targets. If so, remove # the refid thereof from node['ids'] to avoid duplicated ids. 
- def has_dup_label(sib: Node | None) -> bool: - return isinstance(sib, nodes.target) and sib.get('refid') in node['ids'] - prev = get_prev_node(node) - if has_dup_label(prev): + if self._has_dup_label(prev, node): ids = node['ids'][:] # copy to avoid side-effects - while has_dup_label(prev): + while self._has_dup_label(prev, node): ids.remove(prev['refid']) # type: ignore[index] prev = get_prev_node(prev) # type: ignore[arg-type] else: @@ -1872,6 +1869,10 @@ def has_dup_label(sib: Node | None) -> bool: def depart_target(self, node: Element) -> None: pass + @staticmethod + def _has_dup_label(sib: Node | None, node: Element) -> bool: + return isinstance(sib, nodes.target) and sib.get('refid') in node['ids'] + def visit_attribution(self, node: Element) -> None: self.body.append(CR + r'\begin{flushright}' + CR) self.body.append('---') From 50590b19ad0af43357a36289b6ffa4089782c691 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Fri, 6 Jun 2025 23:20:03 +0100 Subject: [PATCH 102/435] Mark class attributes as ``ClassVar`` (#13626) --- sphinx/builders/__init__.py | 24 +++--- sphinx/builders/manpage.py | 2 +- sphinx/domains/__init__.py | 30 +++---- sphinx/domains/_index.py | 7 +- sphinx/domains/c/__init__.py | 2 +- sphinx/domains/changeset.py | 2 +- sphinx/domains/javascript.py | 2 +- sphinx/domains/math.py | 4 +- sphinx/domains/python/__init__.py | 4 +- sphinx/domains/rst.py | 2 +- sphinx/domains/std/__init__.py | 19 ++--- sphinx/environment/adapters/indexentries.py | 1 - sphinx/ext/autodoc/__init__.py | 14 ++-- sphinx/ext/imgmath.py | 93 +++++++++------------ sphinx/registry.py | 8 +- 15 files changed, 99 insertions(+), 115 deletions(-) diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 076fe218434..88b321868ab 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -48,7 +48,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Sequence, Set from gettext import 
NullTranslations - from typing import Any, Literal + from typing import Any, ClassVar, Literal from docutils.nodes import Node @@ -70,37 +70,37 @@ class Builder: #: The builder's name. #: This is the value used to select builders on the command line. - name: str = '' + name: ClassVar[str] = '' #: The builder's output format, or '' if no document output is produced. #: This is commonly the file extension, e.g. "html", #: though any string value is accepted. #: The builder's format string can be used by various components #: such as :class:`.SphinxPostTransform` or extensions to determine #: their compatibility with the builder. - format: str = '' + format: ClassVar[str] = '' #: The message emitted upon successful build completion. #: This can be a printf-style template string #: with the following keys: ``outdir``, ``project`` - epilog: str = '' + epilog: ClassVar[str] = '' #: default translator class for the builder. This can be overridden by #: :py:meth:`~sphinx.application.Sphinx.set_translator`. - default_translator_class: type[nodes.NodeVisitor] + default_translator_class: ClassVar[type[nodes.NodeVisitor]] # doctree versioning method - versioning_method = 'none' - versioning_compare = False + versioning_method: ClassVar[str] = 'none' + versioning_compare: ClassVar[bool] = False #: Whether it is safe to make parallel :meth:`~.Builder.write_doc` calls. - allow_parallel: bool = False + allow_parallel: ClassVar[bool] = False # support translation - use_message_catalog = True + use_message_catalog: ClassVar[bool] = True #: The list of MIME types of image formats supported by the builder. #: Image files are searched in the order in which they appear here. - supported_image_types: list[str] = [] + supported_image_types: ClassVar[list[str]] = [] #: The builder can produce output documents that may fetch external images when opened. 
- supported_remote_images: bool = False + supported_remote_images: ClassVar[bool] = False #: The file format produced by the builder allows images to be embedded using data-URIs. - supported_data_uri_images: bool = False + supported_data_uri_images: ClassVar[bool] = False srcdir = _StrPathProperty() confdir = _StrPathProperty() diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py index 7b62b7dca5a..feeb35c1877 100644 --- a/sphinx/builders/manpage.py +++ b/sphinx/builders/manpage.py @@ -37,7 +37,7 @@ class ManualPageBuilder(Builder): epilog = __('The manual pages are in %(outdir)s.') default_translator_class = ManualPageTranslator - supported_image_types: list[str] = [] + supported_image_types = [] def init(self) -> None: if not self.config.man_pages: diff --git a/sphinx/domains/__init__.py b/sphinx/domains/__init__.py index 61be6049579..17aa7bdc453 100644 --- a/sphinx/domains/__init__.py +++ b/sphinx/domains/__init__.py @@ -14,7 +14,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Sequence, Set - from typing import Any + from typing import Any, ClassVar from docutils import nodes from docutils.nodes import Element, Node @@ -82,27 +82,27 @@ class Domain: """ #: domain name: should be short, but unique - name = '' + name: ClassVar[str] = '' #: domain label: longer, more descriptive (used in messages) - label = '' + label: ClassVar[str] = '' #: type (usually directive) name -> ObjType instance - object_types: dict[str, ObjType] = {} + object_types: ClassVar[dict[str, ObjType]] = {} #: directive name -> directive class - directives: dict[str, type[Directive]] = {} + directives: ClassVar[dict[str, type[Directive]]] = {} #: role name -> role callable - roles: dict[str, RoleFunction | XRefRole] = {} + roles: ClassVar[dict[str, RoleFunction | XRefRole]] = {} #: a list of Index subclasses - indices: list[type[Index]] = [] + indices: ClassVar[list[type[Index]]] = [] #: role name -> a warning message if reference is missing - dangling_warnings: 
dict[str, str] = {} + dangling_warnings: ClassVar[dict[str, str]] = {} #: node_class -> (enum_node_type, title_getter) - enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {} + enumerable_nodes: ClassVar[dict[type[Node], tuple[str, TitleGetter | None]]] = {} #: data value for a fresh environment - initial_data: dict[str, Any] = {} + initial_data: ClassVar[dict[str, Any]] = {} #: data value data: dict[str, Any] #: data version, bump this when the format of `self.data` changes - data_version = 0 + data_version: ClassVar[int] = 0 def __init__(self, env: BuildEnvironment) -> None: domain_data: dict[str, dict[str, Any]] = env.domaindata @@ -113,10 +113,10 @@ def __init__(self, env: BuildEnvironment) -> None: self._type2role: dict[str, str] = {} # convert class variables to instance one (to enhance through API) - self.object_types = dict(self.object_types) - self.directives = dict(self.directives) - self.roles = dict(self.roles) - self.indices = list(self.indices) + self.object_types = dict(self.object_types) # type: ignore[misc] + self.directives = dict(self.directives) # type: ignore[misc] + self.roles = dict(self.roles) # type: ignore[misc] + self.indices = list(self.indices) # type: ignore[misc] if self.name not in domain_data: assert isinstance(self.initial_data, dict) diff --git a/sphinx/domains/_index.py b/sphinx/domains/_index.py index afb5be4007b..3845a97ba7b 100644 --- a/sphinx/domains/_index.py +++ b/sphinx/domains/_index.py @@ -9,6 +9,7 @@ if TYPE_CHECKING: from collections.abc import Iterable + from typing import ClassVar from sphinx.domains import Domain @@ -73,9 +74,9 @@ class Index(ABC): :rst:role:`ref` role. 
""" - name: str - localname: str - shortname: str | None = None + name: ClassVar[str] + localname: ClassVar[str] + shortname: ClassVar[str | None] = None def __init__(self, domain: Domain) -> None: if not self.name or self.localname is None: diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py index 56ce0d170f6..80d24c1abe2 100644 --- a/sphinx/domains/c/__init__.py +++ b/sphinx/domains/c/__init__.py @@ -818,7 +818,7 @@ class CDomain(Domain): 'expr': CExprRole(asCode=True), 'texpr': CExprRole(asCode=False), } - initial_data: dict[str, Symbol | dict[str, tuple[str, str, str]]] = { + initial_data: ClassVar[dict[str, Symbol | dict[str, tuple[str, str, str]]]] = { 'root_symbol': Symbol(None, None, None, None, None), 'objects': {}, # fullname -> docname, node_id, objtype } diff --git a/sphinx/domains/changeset.py b/sphinx/domains/changeset.py index 2d520e6ff64..d2492dcccb2 100644 --- a/sphinx/domains/changeset.py +++ b/sphinx/domains/changeset.py @@ -121,7 +121,7 @@ class ChangeSetDomain(Domain): name = 'changeset' label = 'changeset' - initial_data: dict[str, dict[str, list[ChangeSet]]] = { + initial_data: ClassVar[dict[str, dict[str, list[ChangeSet]]]] = { 'changes': {}, # version -> list of ChangeSet } diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py index eaa69094c78..22673489d23 100644 --- a/sphinx/domains/javascript.py +++ b/sphinx/domains/javascript.py @@ -436,7 +436,7 @@ class JavaScriptDomain(Domain): 'attr': JSXRefRole(), 'mod': JSXRefRole(), } - initial_data: dict[str, dict[str, tuple[str, str]]] = { + initial_data: ClassVar[dict[str, dict[str, tuple[str, str]]]] = { 'objects': {}, # fullname -> docname, node_id, objtype 'modules': {}, # modname -> docname, node_id } diff --git a/sphinx/domains/math.py b/sphinx/domains/math.py index 56e543917ad..d4f2606531f 100644 --- a/sphinx/domains/math.py +++ b/sphinx/domains/math.py @@ -15,7 +15,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Set - from typing 
import Any + from typing import Any, ClassVar from docutils.nodes import Element, Node, system_message @@ -47,7 +47,7 @@ class MathDomain(Domain): name = 'math' label = 'mathematics' - initial_data: dict[str, Any] = { + initial_data: ClassVar[dict[str, Any]] = { 'objects': {}, # labelid -> (docname, eqno) # backwards compatibility 'has_equations': {}, # https://github.com/sphinx-doc/sphinx/issues/13346 diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index d70c232e725..1281b14ad58 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -732,7 +732,7 @@ class PythonDomain(Domain): name = 'py' label = 'Python' - object_types: dict[str, ObjType] = { + object_types = { 'function': ObjType(_('function'), 'func', 'obj'), 'data': ObjType(_('data'), 'data', 'obj'), 'class': ObjType(_('class'), 'class', 'exc', 'obj'), @@ -775,7 +775,7 @@ class PythonDomain(Domain): 'mod': PyXRefRole(), 'obj': PyXRefRole(), } - initial_data: dict[str, dict[str, tuple[Any]]] = { + initial_data: ClassVar[dict[str, dict[str, tuple[Any]]]] = { 'objects': {}, # fullname -> docname, objtype 'modules': {}, # modname -> docname, synopsis, platform, deprecated } diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py index 55aa3103d8a..2b486ea85ed 100644 --- a/sphinx/domains/rst.py +++ b/sphinx/domains/rst.py @@ -244,7 +244,7 @@ class ReSTDomain(Domain): 'dir': XRefRole(), 'role': XRefRole(), } - initial_data: dict[str, dict[tuple[str, str], str]] = { + initial_data: ClassVar[dict[str, dict[tuple[str, str], str]]] = { 'objects': {}, # fullname -> docname, objtype } diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py index f8f3f5513e3..04161736675 100644 --- a/sphinx/domains/std/__init__.py +++ b/sphinx/domains/std/__init__.py @@ -27,7 +27,6 @@ from typing import Any, ClassVar, Final from docutils.nodes import Element, Node, system_message - from docutils.parsers.rst import Directive from sphinx.addnodes 
import desc_signature from sphinx.application import Sphinx @@ -36,8 +35,6 @@ from sphinx.util.typing import ( ExtensionMetadata, OptionSpec, - RoleFunction, - TitleGetter, ) logger = logging.getLogger(__name__) @@ -725,7 +722,7 @@ class StandardDomain(Domain): name = 'std' label = 'Default' - object_types: dict[str, ObjType] = { + object_types = { 'term': ObjType(_('glossary term'), 'term', searchprio=-1), 'token': ObjType(_('grammar token'), 'token', searchprio=-1), 'label': ObjType(_('reference label'), 'ref', 'keyword', searchprio=-1), @@ -735,7 +732,7 @@ class StandardDomain(Domain): 'doc': ObjType(_('document'), 'doc', searchprio=-1), } - directives: dict[str, type[Directive]] = { + directives = { 'program': Program, 'cmdoption': Cmdoption, # old name for backwards compatibility 'option': Cmdoption, @@ -744,7 +741,7 @@ class StandardDomain(Domain): 'glossary': Glossary, 'productionlist': ProductionList, } - roles: dict[str, RoleFunction | XRefRole] = { + roles = { 'option': OptionXRefRole(warn_dangling=True), 'confval': XRefRole(warn_dangling=True), 'envvar': EnvVarXRefRole(), @@ -780,7 +777,7 @@ class StandardDomain(Domain): } # labelname -> docname, sectionname - _virtual_doc_names: dict[str, tuple[str, str]] = { + _virtual_doc_names: Final = { 'genindex': ('genindex', _('Index')), 'modindex': ('py-modindex', _('Module Index')), 'search': ('search', _('Search Page')), @@ -795,7 +792,7 @@ class StandardDomain(Domain): } # node_class -> (figtype, title_getter) - enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = { + enumerable_nodes = { nodes.figure: ('figure', None), nodes.table: ('table', None), nodes.container: ('code-block', None), @@ -805,9 +802,9 @@ def __init__(self, env: BuildEnvironment) -> None: super().__init__(env) # set up enumerable nodes - self.enumerable_nodes = copy( - self.enumerable_nodes - ) # create a copy for this instance + + # create a copy for this instance + self.enumerable_nodes = copy(self.enumerable_nodes) # 
type: ignore[misc] for node, settings in env._registry.enumerable_nodes.items(): self.enumerable_nodes[node] = settings diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py index e9e6e408b6c..0428e488308 100644 --- a/sphinx/environment/adapters/indexentries.py +++ b/sphinx/environment/adapters/indexentries.py @@ -50,7 +50,6 @@ class IndexEntries: def __init__(self, env: BuildEnvironment) -> None: self.env = env - self.builder: Builder def create_index( self, diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py index 560b6905208..6d20e4007b2 100644 --- a/sphinx/ext/autodoc/__init__.py +++ b/sphinx/ext/autodoc/__init__.py @@ -363,15 +363,15 @@ class Documenter: #: name by which the directive is called (auto...) and the default #: generated directive name - objtype = 'object' + objtype: ClassVar = 'object' #: indentation by which to indent the directive content - content_indent = ' ' + content_indent: ClassVar = ' ' #: priority if multiple documenters return True from can_document_member - priority = 0 + priority: ClassVar = 0 #: order if autodoc_member_order is set to 'groupwise' - member_order = 0 + member_order: ClassVar = 0 #: true if the generated content may contain titles - titles_allowed = True + titles_allowed: ClassVar = True option_spec: ClassVar[OptionSpec] = { 'no-index': bool_option, @@ -2407,11 +2407,11 @@ def import_object(self, raiseerror: bool = False) -> bool: obj = self.parent.__dict__.get(self.object_name, self.object) if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name): # document static members before regular methods - self.member_order -= 1 + self.member_order -= 1 # type: ignore[misc] elif inspect.isclassmethod(obj): # document class methods before static methods as # they usually behave as alternative constructors - self.member_order -= 2 + self.member_order -= 2 # type: ignore[misc] return ret def format_args(self, **kwargs: Any) -> str: diff --git 
a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py index 5b58db7b084..a8f88c62a1c 100644 --- a/sphinx/ext/imgmath.py +++ b/sphinx/ext/imgmath.py @@ -32,7 +32,6 @@ from docutils.nodes import Element from sphinx.application import Sphinx - from sphinx.builders import Builder from sphinx.config import Config from sphinx.util._pathlib import _StrPath from sphinx.util.typing import ExtensionMetadata @@ -116,36 +115,23 @@ def generate_latex_macro( return LaTeXRenderer([templates_path]).render(template_name + '.jinja', variables) -def ensure_tempdir(builder: Builder) -> Path: - """Create temporary directory. - - use only one tempdir per build -- the use of a directory is cleaner - than using temporary files, since we can clean up everything at once - just removing the whole directory (see cleanup_tempdir) - """ - if not hasattr(builder, '_imgmath_tempdir'): - builder._imgmath_tempdir = Path(tempfile.mkdtemp()) # type: ignore[attr-defined] - - return builder._imgmath_tempdir # type: ignore[attr-defined] - - -def compile_math(latex: str, builder: Builder) -> Path: +def compile_math(latex: str, *, config: Config) -> Path: """Compile LaTeX macros for math to DVI.""" - tempdir = ensure_tempdir(builder) + tempdir = Path(tempfile.mkdtemp(suffix='-sphinx-imgmath')) filename = tempdir / 'math.tex' with open(filename, 'w', encoding='utf-8') as f: f.write(latex) - imgmath_latex_name = os.path.basename(builder.config.imgmath_latex) + imgmath_latex_name = os.path.basename(config.imgmath_latex) # build latex command; old versions of latex don't have the # --output-directory option, so we have to manually chdir to the # temp dir to run it. 
- command = [builder.config.imgmath_latex] + command = [config.imgmath_latex] if imgmath_latex_name != 'tectonic': command.append('--interaction=nonstopmode') # add custom args from the config file - command.extend(builder.config.imgmath_latex_args) + command.extend(config.imgmath_latex_args) command.append('math.tex') try: @@ -162,7 +148,7 @@ def compile_math(latex: str, builder: Builder) -> Path: 'LaTeX command %r cannot be run (needed for math ' 'display), check the imgmath_latex setting' ), - builder.config.imgmath_latex, + config.imgmath_latex, ) raise InvokeError from exc except CalledProcessError as exc: @@ -191,19 +177,19 @@ def convert_dvi_to_image(command: list[str], name: str) -> tuple[str, str]: raise MathExtError(msg, exc.stderr, exc.stdout) from exc -def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int | None: +def convert_dvi_to_png(dvipath: Path, out_path: Path, *, config: Config) -> int | None: """Convert DVI file to PNG image.""" name = 'dvipng' - command = [builder.config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9'] - command.extend(builder.config.imgmath_dvipng_args) - if builder.config.imgmath_use_preview: + command = [config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9'] + command.extend(config.imgmath_dvipng_args) + if config.imgmath_use_preview: command.append('--depth') command.append(dvipath) stdout, _stderr = convert_dvi_to_image(command, name) depth = None - if builder.config.imgmath_use_preview: + if config.imgmath_use_preview: for line in stdout.splitlines(): matched = depth_re.match(line) if matched: @@ -214,17 +200,17 @@ def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int | return depth -def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int | None: +def convert_dvi_to_svg(dvipath: Path, out_path: Path, *, config: Config) -> int | None: """Convert DVI file to SVG image.""" name = 'dvisvgm' - command = [builder.config.imgmath_dvisvgm, '-o', 
out_path] - command.extend(builder.config.imgmath_dvisvgm_args) + command = [config.imgmath_dvisvgm, '-o', out_path] + command.extend(config.imgmath_dvisvgm_args) command.append(dvipath) _stdout, stderr = convert_dvi_to_image(command, name) depth = None - if builder.config.imgmath_use_preview: + if config.imgmath_use_preview: for line in stderr.splitlines(): # not stdout ! matched = depthsvg_re.match(line) if matched: @@ -236,8 +222,7 @@ def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int | def render_math( - self: HTML5Translator, - math: str, + self: HTML5Translator, math: str, *, config: Config ) -> tuple[_StrPath | None, int | None]: """Render the LaTeX math expression *math* using latex and dvipng or dvisvgm. @@ -252,14 +237,12 @@ def render_math( docs successfully). If the programs are there, however, they may not fail since that indicates a problem in the math source. """ - image_format = self.builder.config.imgmath_image_format.lower() + image_format = config.imgmath_image_format.lower() if image_format not in SUPPORT_FORMAT: unsupported_format_msg = 'imgmath_image_format must be either "png" or "svg"' raise MathExtError(unsupported_format_msg) - latex = generate_latex_macro( - image_format, math, self.builder.config, self.builder.confdir - ) + latex = generate_latex_macro(image_format, math, config, self.builder.confdir) filename = ( f'{sha1(latex.encode(), usedforsecurity=False).hexdigest()}.{image_format}' @@ -281,7 +264,7 @@ def render_math( # .tex -> .dvi try: - dvipath = compile_math(latex, self.builder) + dvipath = compile_math(latex, config=config) except InvokeError: self.builder._imgmath_warned_latex = True # type: ignore[attr-defined] return None, None @@ -289,9 +272,9 @@ def render_math( # .dvi -> .png/.svg try: if image_format == 'png': - depth = convert_dvi_to_png(dvipath, self.builder, generated_path) + depth = convert_dvi_to_png(dvipath, generated_path, config=config) elif image_format == 'svg': - depth = 
convert_dvi_to_svg(dvipath, self.builder, generated_path) + depth = convert_dvi_to_svg(dvipath, generated_path, config=config) except InvokeError: self.builder._imgmath_warned_image_translator = True # type: ignore[attr-defined] return None, None @@ -315,26 +298,25 @@ def clean_up_files(app: Sphinx, exc: Exception) -> None: if exc: return - if hasattr(app.builder, '_imgmath_tempdir'): - with contextlib.suppress(Exception): - shutil.rmtree(app.builder._imgmath_tempdir) - - if app.builder.config.imgmath_embed: + if app.config.imgmath_embed: # in embed mode, the images are still generated in the math output dir # to be shared across workers, but are not useful to the final document with contextlib.suppress(Exception): shutil.rmtree(app.builder.outdir / app.builder.imagedir / 'math') -def get_tooltip(self: HTML5Translator, node: Element) -> str: - if self.builder.config.imgmath_add_tooltips: +def get_tooltip(self: HTML5Translator, node: Element, *, config: Config) -> str: + if config.imgmath_add_tooltips: return f' alt="{self.encode(node.astext()).strip()}"' return '' def html_visit_math(self: HTML5Translator, node: nodes.math) -> None: + config = self.builder.config try: - rendered_path, depth = render_math(self, '$' + node.astext() + '$') + rendered_path, depth = render_math( + self, '$' + node.astext() + '$', config=config + ) except MathExtError as exc: msg = str(exc) sm = nodes.system_message( @@ -350,27 +332,27 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None: f'{self.encode(node.astext()).strip()}' ) else: - if self.builder.config.imgmath_embed: - image_format = self.builder.config.imgmath_image_format.lower() + if config.imgmath_embed: + image_format = config.imgmath_image_format.lower() img_src = render_maths_to_base64(image_format, rendered_path) else: bname = os.path.basename(rendered_path) relative_path = Path(self.builder.imgpath, 'math', bname) img_src = relative_path.as_posix() align = f' style="vertical-align: {-depth:d}px"' if 
depth is not None else '' - self.body.append( - f'' - ) + tooltip = get_tooltip(self, node, config=config) + self.body.append(f'') raise nodes.SkipNode def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> None: + config = self.builder.config if node.get('no-wrap', node.get('nowrap', False)): latex = node.astext() else: latex = wrap_displaymath(node.astext(), None, False) try: - rendered_path, _depth = render_math(self, latex) + rendered_path, _depth = render_math(self, latex, config=config) except MathExtError as exc: msg = str(exc) sm = nodes.system_message( @@ -393,14 +375,15 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non f'{self.encode(node.astext()).strip()}

\n
' ) else: - if self.builder.config.imgmath_embed: - image_format = self.builder.config.imgmath_image_format.lower() + if config.imgmath_embed: + image_format = config.imgmath_image_format.lower() img_src = render_maths_to_base64(image_format, rendered_path) else: bname = os.path.basename(rendered_path) relative_path = Path(self.builder.imgpath, 'math', bname) img_src = relative_path.as_posix() - self.body.append(f'

\n') + tooltip = get_tooltip(self, node, config=config) + self.body.append(f'

\n') raise nodes.SkipNode diff --git a/sphinx/registry.py b/sphinx/registry.py index ce52a03b323..973aa6dfed4 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -410,7 +410,9 @@ def add_translation_handlers( % (builder_name, handlers), ) from exc - def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]: + def get_translator_class( + self, builder: type[Builder] | Builder + ) -> type[nodes.NodeVisitor]: try: return self.translators[builder.name] except KeyError: @@ -420,7 +422,9 @@ def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]: msg = f'translator not found for {builder.name}' raise AttributeError(msg) from err - def create_translator(self, builder: Builder, *args: Any) -> nodes.NodeVisitor: + def create_translator( + self, builder: type[Builder] | Builder, *args: Any + ) -> nodes.NodeVisitor: translator_class = self.get_translator_class(builder) translator = translator_class(*args) From 0b0c039c02091300a616b8151299265f67aaf81c Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 7 Jun 2025 01:34:48 +0100 Subject: [PATCH 103/435] Deprecate remaining public ``app`` attributes (#13627) --- CHANGES.rst | 5 +++ doc/extdev/deprecated.rst | 25 +++++++++++++ sphinx/builders/__init__.py | 65 ++++++++++++++++++++-------------- sphinx/environment/__init__.py | 26 +++++++++++--- sphinx/events.py | 16 ++++++--- sphinx/transforms/__init__.py | 3 ++ sphinx/util/logging.py | 20 +++++------ tests/test_events.py | 2 +- tests/test_versioning.py | 2 +- 9 files changed, 116 insertions(+), 48 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 40d6984ca55..71ffa3c1b0d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,11 @@ Incompatible changes Deprecated ---------- +* 13627: Deprecate remaining public :py:attr:`!.app` attributes, + including ``builder.app``, ``env.app``, ``events.app``, + and ``SphinxTransform.`app``. + Patch by Adam Turner. 
+ Features added -------------- diff --git a/doc/extdev/deprecated.rst b/doc/extdev/deprecated.rst index ad05b054d99..898ec49c8fc 100644 --- a/doc/extdev/deprecated.rst +++ b/doc/extdev/deprecated.rst @@ -22,6 +22,31 @@ The following is a list of deprecated interfaces. - Removed - Alternatives + * - ``sphinx.builders.Builder.app`` + - 8.3 + - 10.0 + - N/A + + * - ``sphinx.environment.BuildEnvironment.app`` + - 8.3 + - 10.0 + - N/A + + * - ``sphinx.transforms.Transform.app`` + - 8.3 + - 10.0 + - N/A + + * - ``sphinx.transforms.post_transforms.SphinxPostTransform.app`` + - 8.3 + - 10.0 + - N/A + + * - ``sphinx.events.EventManager.app`` + - 8.3 + - 10.0 + - N/A + * - ``sphinx.builders.singlehtml.SingleFileHTMLBuilder.fix_refuris`` - 8.2 - 10.0 diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 88b321868ab..fb8bd757864 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -14,6 +14,7 @@ from docutils.utils import DependencyList from sphinx._cli.util.colour import bold +from sphinx.deprecation import _deprecation_warning from sphinx.environment import ( CONFIG_CHANGED_REASON, CONFIG_OK, @@ -114,7 +115,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None: self.doctreedir = app.doctreedir ensuredir(self.doctreedir) - self.app: Sphinx = app + self._app: Sphinx = app self.env: BuildEnvironment = env self.env.set_versioning_method(self.versioning_method, self.versioning_compare) self.events: EventManager = app.events @@ -136,9 +137,15 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None: self.parallel_ok = False self.finish_tasks: Any = None + @property + def app(self) -> Sphinx: + cls_name = self.__class__.__qualname__ + _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0)) + return self._app + @property def _translator(self) -> NullTranslations | None: - return self.app.translator + return self._app.translator def get_translator_class(self, *args: Any) -> type[nodes.NodeVisitor]: 
"""Return a class of translator.""" @@ -258,7 +265,7 @@ def cat2relpath(cat: CatalogInfo, srcdir: Path = self.srcdir) -> str: __('writing output... '), 'darkgreen', len(catalogs), - self.app.verbosity, + self._app.verbosity, stringify_func=cat2relpath, ): catalog.write_mo( @@ -397,14 +404,14 @@ def build( # while reading, collect all warnings from docutils with ( nullcontext() - if self.app._exception_on_warning + if self._app._exception_on_warning else logging.pending_warnings() ): updated_docnames = set(self.read()) doccount = len(updated_docnames) logger.info(bold(__('looking for now-outdated files... ')), nonl=True) - updated_docnames.update(self.env.check_dependents(self.app, updated_docnames)) + updated_docnames.update(self.env.check_dependents(self._app, updated_docnames)) outdated = len(updated_docnames) - doccount if outdated: logger.info(__('%d found'), outdated) @@ -422,14 +429,14 @@ def build( pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL) # global actions - self.app.phase = BuildPhase.CONSISTENCY_CHECK + self._app.phase = BuildPhase.CONSISTENCY_CHECK with progress_message(__('checking consistency')): self.env.check_consistency() else: if method == 'update' and not docnames: logger.info(bold(__('no targets are out of date.'))) - self.app.phase = BuildPhase.RESOLVING + self._app.phase = BuildPhase.RESOLVING # filter "docnames" (list of outdated files) by the updated # found_docs of the environment; this will remove docs that @@ -438,14 +445,14 @@ def build( docnames = set(docnames) & self.env.found_docs # determine if we can write in parallel - if parallel_available and self.app.parallel > 1 and self.allow_parallel: - self.parallel_ok = self.app.is_parallel_allowed('write') + if parallel_available and self._app.parallel > 1 and self.allow_parallel: + self.parallel_ok = self._app.is_parallel_allowed('write') else: self.parallel_ok = False # create a task executor to use for misc. 
"finish-up" tasks # if self.parallel_ok: - # self.finish_tasks = ParallelTasks(self.app.parallel) + # self.finish_tasks = ParallelTasks(self._app.parallel) # else: # for now, just execute them serially self.finish_tasks = SerialTasks() @@ -508,13 +515,13 @@ def read(self) -> list[str]: self.events.emit('env-before-read-docs', self.env, docnames) # check if we should do parallel or serial read - if parallel_available and self.app.parallel > 1: - par_ok = self.app.is_parallel_allowed('read') + if parallel_available and self._app.parallel > 1: + par_ok = self._app.is_parallel_allowed('read') else: par_ok = False if par_ok: - self._read_parallel(docnames, nproc=self.app.parallel) + self._read_parallel(docnames, nproc=self._app.parallel) else: self._read_serial(docnames) @@ -576,7 +583,7 @@ def _read_serial(self, docnames: list[str]) -> None: __('reading sources... '), 'purple', len(docnames), - self.app.verbosity, + self._app.verbosity, ): # remove all inventory entries for that file self.events.emit('env-purge-doc', self.env, docname) @@ -589,7 +596,11 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None: # create a status_iterator to step progressbar after reading a document # (see: ``merge()`` function) progress = status_iterator( - chunks, __('reading sources... '), 'purple', len(chunks), self.app.verbosity + chunks, + __('reading sources... 
'), + 'purple', + len(chunks), + self._app.verbosity, ) # clear all outdated docs at once @@ -598,7 +609,7 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None: self.env.clear_doc(docname) def read_process(docs: list[str]) -> bytes: - self.env.app = self.app + self.env._app = self._app for docname in docs: self.read_doc(docname, _cache=False) # allow pickling self to send it back @@ -606,7 +617,7 @@ def read_process(docs: list[str]) -> bytes: def merge(docs: list[str], otherenv: bytes) -> None: env = pickle.loads(otherenv) - self.env.merge_info_from(docs, env, self.app) + self.env.merge_info_from(docs, env, self._app) next(progress) @@ -630,8 +641,8 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None: env.note_dependency(docutils_conf) filename = str(env.doc2path(docname)) - filetype = get_filetype(self.app.config.source_suffix, filename) - publisher = self.env._registry.get_publisher(self.app, filetype) + filetype = get_filetype(self._app.config.source_suffix, filename) + publisher = self.env._registry.get_publisher(self._app, filetype) self.env.current_document._parser = publisher.parser # record_dependencies is mutable even though it is in settings, # explicitly re-initialise for each document @@ -744,14 +755,14 @@ def write_documents(self, docnames: Set[str]) -> None: if self.parallel_ok: # number of subprocesses is parallel-1 because the main process # is busy loading doctrees and doing write_doc_serialized() - self._write_parallel(sorted_docnames, nproc=self.app.parallel - 1) + self._write_parallel(sorted_docnames, nproc=self._app.parallel - 1) else: self._write_serial(sorted_docnames) def _write_serial(self, docnames: Sequence[str]) -> None: with ( nullcontext() - if self.app._exception_on_warning + if self._app._exception_on_warning else logging.pending_warnings() ): for docname in status_iterator( @@ -759,19 +770,19 @@ def _write_serial(self, docnames: Sequence[str]) -> None: __('writing output... 
'), 'darkgreen', len(docnames), - self.app.verbosity, + self._app.verbosity, ): - _write_docname(docname, app=self.app, env=self.env, builder=self) + _write_docname(docname, app=self._app, env=self.env, builder=self) def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None: def write_process(docs: list[tuple[str, nodes.document]]) -> None: - self.app.phase = BuildPhase.WRITING + self._app.phase = BuildPhase.WRITING for docname, doctree in docs: self.write_doc(docname, doctree) # warm up caches/compile templates using the first document firstname, docnames = docnames[0], docnames[1:] - _write_docname(firstname, app=self.app, env=self.env, builder=self) + _write_docname(firstname, app=self._app, env=self.env, builder=self) tasks = ParallelTasks(nproc) chunks = make_chunks(docnames, nproc) @@ -783,13 +794,13 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None: __('writing output... '), 'darkgreen', len(chunks), - self.app.verbosity, + self._app.verbosity, ) def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None: next(progress) - self.app.phase = BuildPhase.RESOLVING + self._app.phase = BuildPhase.RESOLVING for chunk in chunks: arg = [] for docname in chunk: diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index 2f3e25ac477..7ad7298c22b 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -11,6 +11,7 @@ from typing import TYPE_CHECKING from sphinx import addnodes +from sphinx.deprecation import _deprecation_warning from sphinx.domains._domains_container import _DomainsContainer from sphinx.environment.adapters import toctree as toctree_adapters from sphinx.errors import ( @@ -107,7 +108,7 @@ class BuildEnvironment: doctreedir = _StrPathProperty() def __init__(self, app: Sphinx) -> None: - self.app: Sphinx = app + self._app: Sphinx = app self.doctreedir = app.doctreedir self.srcdir = app.srcdir self.config: Config = None # type: ignore[assignment] @@ -237,7 
+238,7 @@ def __getstate__(self) -> dict[str, Any]: """Obtains serializable data for pickling.""" __dict__ = self.__dict__.copy() # clear unpickleable attributes - __dict__.update(app=None, domains=None, events=None) + __dict__.update(_app=None, domains=None, events=None) # clear in-memory doctree caches, to reduce memory consumption and # ensure that, upon restoring the state, the most recent pickled files # on the disk are used instead of those from a possibly outdated state @@ -257,7 +258,7 @@ def setup(self, app: Sphinx) -> None: if self.project: app.project.restore(self.project) - self.app = app + self._app = app self.doctreedir = app.doctreedir self.events = app.events self.srcdir = app.srcdir @@ -284,13 +285,28 @@ def setup(self, app: Sphinx) -> None: # initialize settings self._update_settings(app.config) + @property + def app(self) -> Sphinx: + _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0)) + return self._app + + @app.setter + def app(self, app: Sphinx) -> None: + _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0)) + self._app = app + + @app.deleter + def app(self) -> None: + _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0)) + del self._app + @property def _registry(self) -> SphinxComponentRegistry: - return self.app.registry + return self._app.registry @property def _tags(self) -> Tags: - return self.app.tags + return self._app.tags @staticmethod def _config_status( diff --git a/sphinx/events.py b/sphinx/events.py index 571ad143269..e408d80b796 100644 --- a/sphinx/events.py +++ b/sphinx/events.py @@ -9,6 +9,7 @@ from operator import attrgetter from typing import TYPE_CHECKING, NamedTuple, overload +from sphinx.deprecation import _deprecation_warning from sphinx.errors import ExtensionError, SphinxError from sphinx.locale import __ from sphinx.util import logging @@ -66,17 +67,25 @@ class EventManager: """Event manager for Sphinx.""" def __init__(self, app: Sphinx) -> None: - self.app = app + 
self._app = app self.events = core_events.copy() self.listeners: dict[str, list[EventListener]] = defaultdict(list) self.next_listener_id = 0 + # pass through errors for debugging. + self._reraise_errors: bool = app.pdb + def add(self, name: str) -> None: """Register a custom Sphinx event.""" if name in self.events: raise ExtensionError(__('Event %r already present') % name) self.events[name] = '' + @property + def app(self) -> Sphinx: + _deprecation_warning(__name__, 'EventManager.app', remove=(10, 0)) + return self._app + # ---- Core events ------------------------------------------------------- @overload @@ -401,15 +410,14 @@ def emit( listeners = sorted(self.listeners[name], key=attrgetter('priority')) for listener in listeners: try: - results.append(listener.handler(self.app, *args)) + results.append(listener.handler(self._app, *args)) except allowed_exceptions: # pass through the errors specified as *allowed_exceptions* raise except SphinxError: raise except Exception as exc: - if self.app.pdb: - # Just pass through the error, so that it can be debugged. 
+ if self._reraise_errors: raise modname = safe_getattr(listener.handler, '__module__', None) raise ExtensionError( diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py index c6620078e36..e76c9e98331 100644 --- a/sphinx/transforms/__init__.py +++ b/sphinx/transforms/__init__.py @@ -15,6 +15,7 @@ from docutils.utils.smartquotes import smartchars from sphinx import addnodes +from sphinx.deprecation import _deprecation_warning from sphinx.locale import _, __ from sphinx.util import logging from sphinx.util.docutils import new_document @@ -62,6 +63,8 @@ class SphinxTransform(Transform): @property def app(self) -> Sphinx: """Reference to the :class:`.Sphinx` object.""" + cls_name = self.__class__.__qualname__ + _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0)) return self.env.app @property diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py index fab8acc3b90..d5392936334 100644 --- a/sphinx/util/logging.py +++ b/sphinx/util/logging.py @@ -430,7 +430,7 @@ class WarningSuppressor(logging.Filter): """Filter logs by `suppress_warnings`.""" def __init__(self, app: Sphinx) -> None: - self.app = app + self._app = app super().__init__() def filter(self, record: logging.LogRecord) -> bool: @@ -438,7 +438,7 @@ def filter(self, record: logging.LogRecord) -> bool: subtype = getattr(record, 'subtype', '') try: - suppress_warnings = self.app.config.suppress_warnings + suppress_warnings = self._app.config.suppress_warnings except AttributeError: # config is not initialized yet (ex. 
in conf.py) suppress_warnings = () @@ -446,7 +446,7 @@ def filter(self, record: logging.LogRecord) -> bool: if is_suppressed_warning(type, subtype, suppress_warnings): return False else: - self.app._warncount += 1 + self._app._warncount += 1 return True @@ -496,7 +496,7 @@ class SphinxLogRecordTranslator(logging.Filter): LogRecordClass: type[logging.LogRecord] def __init__(self, app: Sphinx) -> None: - self.app = app + self._app = app super().__init__() def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[override] @@ -509,15 +509,15 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri docname, lineno = location if docname: if lineno: - record.location = f'{self.app.env.doc2path(docname)}:{lineno}' + record.location = f'{self._app.env.doc2path(docname)}:{lineno}' else: - record.location = f'{self.app.env.doc2path(docname)}' + record.location = f'{self._app.env.doc2path(docname)}' else: record.location = None elif isinstance(location, nodes.Node): record.location = get_node_location(location) elif location and ':' not in location: - record.location = f'{self.app.env.doc2path(location)}' + record.location = f'{self._app.env.doc2path(location)}' return True @@ -537,7 +537,7 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri ret = super().filter(record) try: - show_warning_types = self.app.config.show_warning_types + show_warning_types = self._app.config.show_warning_types except AttributeError: # config is not initialized yet (ex. 
in conf.py) show_warning_types = False @@ -602,10 +602,10 @@ class LastMessagesWriter: """Stream writer storing last 10 messages in memory to save trackback""" def __init__(self, app: Sphinx, stream: IO[str]) -> None: - self.app = app + self._app = app def write(self, data: str) -> None: - self.app.messagelog.append(data) + self._app.messagelog.append(data) def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None: diff --git a/tests/test_events.py b/tests/test_events.py index 412116c9f4b..50b7bb5fd76 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -18,7 +18,7 @@ def test_event_priority() -> None: result = [] - app = object() # pass a dummy object as an app + app = SimpleNamespace(pdb=False) # pass a dummy object as an app events = EventManager(app) # type: ignore[arg-type] events.connect('builder-inited', lambda app: result.append(1), priority=500) events.connect('builder-inited', lambda app: result.append(2), priority=500) diff --git a/tests/test_versioning.py b/tests/test_versioning.py index 58e3b224c58..7b27106b98e 100644 --- a/tests/test_versioning.py +++ b/tests/test_versioning.py @@ -20,7 +20,7 @@ def _setup_module(rootdir, sphinx_test_tempdir): if not srcdir.exists(): shutil.copytree(rootdir / 'test-versioning', srcdir) app = SphinxTestApp(srcdir=srcdir) - app.builder.env.app = app + app.builder.env._app = app app.connect('doctree-resolved', on_doctree_resolved) app.build() original = doctrees['original'] From 8f18b573d6cb6bbd1f39970a112b9d4c2ece292e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Sat, 7 Jun 2025 16:29:41 +0200 Subject: [PATCH 104/435] Close #13597 (LaTeX table in merged cell of parent table) (#13629) --- CHANGES.rst | 3 +++ sphinx/writers/latex.py | 12 ++++++------ tests/roots/test-root/markup.txt | 6 ++++++ 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 71ffa3c1b0d..9bd8abece0e 100644 --- a/CHANGES.rst 
+++ b/CHANGES.rst @@ -29,6 +29,9 @@ Features added Patch by Jean-François B. * #13535: html search: Update to the latest version of Snowball (v3.0.1). Patch by Adam Turner. +* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up + and PDF cannot be built. + Patch by Jean-François B. * #13704: autodoc: Detect :py:func:`typing_extensions.overload ` and :py:func:`~typing.final` decorators. Patch by Spencer Brown. diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 823db1d875b..0aa550a3b7e 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -134,6 +134,7 @@ def __init__(self, node: Element) -> None: self.has_problematic = False self.has_oldproblematic = False self.has_verbatim = False + self.entry_needs_linetrimming = 0 self.caption: list[str] = [] self.stubs: list[int] = [] @@ -327,7 +328,6 @@ def __init__( self.in_footnote = 0 self.in_caption = 0 self.in_term = 0 - self.needs_linetrimming = 0 self.in_minipage = 0 # only used by figure inside an admonition self.no_latex_floats = 0 @@ -1331,7 +1331,7 @@ def visit_entry(self, node: Element) -> None: r'\par' + CR + r'\vskip-\baselineskip' r'\vbox{\hbox{\strut}}\end{varwidth}%' + CR + context ) - self.needs_linetrimming = 1 + self.table.entry_needs_linetrimming = 1 if len(list(node.findall(nodes.paragraph))) >= 2: self.table.has_oldproblematic = True if ( @@ -1346,13 +1346,14 @@ def visit_entry(self, node: Element) -> None: pass else: self.body.append(r'\sphinxstyletheadfamily ') - if self.needs_linetrimming: + if self.table.entry_needs_linetrimming: self.pushbody([]) self.context.append(context) def depart_entry(self, node: Element) -> None: - if self.needs_linetrimming: - self.needs_linetrimming = 0 + assert self.table is not None + if self.table.entry_needs_linetrimming: + self.table.entry_needs_linetrimming = 0 body = self.popbody() # Remove empty lines from top of merged cell @@ -1362,7 +1363,6 @@ def depart_entry(self, node: Element) -> None: 
self.body.append(self.context.pop()) - assert self.table is not None cell = self.table.cell() assert cell is not None self.table.col += cell.width diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt index 2e45ba33680..a9d9132ed98 100644 --- a/tests/roots/test-root/markup.txt +++ b/tests/roots/test-root/markup.txt @@ -223,6 +223,12 @@ Tables with multirow and multicol: | | +----+ + +---+---+ + | +---+ | + | | h | | + | +---+ | + +---+---+ + .. list-table:: :header-rows: 0 From 21c8513e49d4827aef5602964c3d892bbdc06d0d Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 7 Jun 2025 16:12:47 +0100 Subject: [PATCH 105/435] Avoid ``self.app`` in transforms (#13628) --- sphinx/application.py | 2 + sphinx/builders/linkcheck.py | 4 +- sphinx/environment/__init__.py | 3 + sphinx/ext/extlinks.py | 2 +- sphinx/ext/viewcode.py | 12 ++-- sphinx/io.py | 35 +--------- sphinx/transforms/__init__.py | 7 +- sphinx/transforms/i18n.py | 66 +++++++++++++++---- sphinx/transforms/post_transforms/__init__.py | 17 ++--- sphinx/transforms/post_transforms/images.py | 18 ++--- .../test_transforms_post_transforms_images.py | 2 +- 11 files changed, 91 insertions(+), 77 deletions(-) diff --git a/sphinx/application.py b/sphinx/application.py index fe0e8bdf195..d5192eef0b6 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -399,6 +399,8 @@ def _post_init_env(self) -> None: if self._fresh_env_used: self.env.find_files(self.config, self.builder) + self.env._builder_cls = self.builder.__class__ + def preload_builder(self, name: str) -> None: self.registry.preload_builder(self, name) diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py index ff6878f2acb..de102873036 100644 --- a/sphinx/builders/linkcheck.py +++ b/sphinx/builders/linkcheck.py @@ -259,11 +259,11 @@ def _add_uri(self, uri: str, node: nodes.Element) -> None: :param uri: URI to add :param node: A node class where the URI was found 
""" - builder = cast('CheckExternalLinksBuilder', self.app.builder) + builder = cast('CheckExternalLinksBuilder', self.env._app.builder) hyperlinks = builder.hyperlinks docname = self.env.docname - if newuri := self.app.events.emit_firstresult('linkcheck-process-uri', uri): + if newuri := self.env.events.emit_firstresult('linkcheck-process-uri', uri): uri = newuri try: diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index 7ad7298c22b..846a1bbde98 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -107,6 +107,9 @@ class BuildEnvironment: srcdir = _StrPathProperty() doctreedir = _StrPathProperty() + # builder is created after the environment. + _builder_cls: type[Builder] + def __init__(self, app: Sphinx) -> None: self._app: Sphinx = app self.doctreedir = app.doctreedir diff --git a/sphinx/ext/extlinks.py b/sphinx/ext/extlinks.py index a5e213ac9f9..82a323bd4b8 100644 --- a/sphinx/ext/extlinks.py +++ b/sphinx/ext/extlinks.py @@ -68,7 +68,7 @@ def check_uri(self, refnode: nodes.reference) -> None: uri = refnode['refuri'] title = refnode.astext() - for alias, (base_uri, _caption) in self.app.config.extlinks.items(): + for alias, (base_uri, _caption) in self.config.extlinks.items(): uri_pattern = re.compile(re.escape(base_uri).replace('%s', '(?P.+)')) match = uri_pattern.match(uri) diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py index 39e4cf420b7..195ed95f961 100644 --- a/sphinx/ext/viewcode.py +++ b/sphinx/ext/viewcode.py @@ -103,11 +103,11 @@ def _get_full_modname(modname: str, attribute: str) -> str | None: return None -def is_supported_builder(builder: Builder) -> bool: +def is_supported_builder(builder: type[Builder], viewcode_enable_epub: bool) -> bool: return ( builder.format == 'html' and builder.name != 'singlehtml' - and (not builder.name.startswith('epub') or builder.config.viewcode_enable_epub) + and (not builder.name.startswith('epub') or viewcode_enable_epub) ) @@ -220,7 +220,9 @@ 
class ViewcodeAnchorTransform(SphinxPostTransform): default_priority = 100 def run(self, **kwargs: Any) -> None: - if is_supported_builder(self.app.builder): + if is_supported_builder( + self.env._builder_cls, self.config.viewcode_enable_epub + ): self.convert_viewcode_anchors() else: self.remove_viewcode_anchors() @@ -229,7 +231,7 @@ def convert_viewcode_anchors(self) -> None: for node in self.document.findall(viewcode_anchor): anchor = nodes.inline('', _('[source]'), classes=['viewcode-link']) refnode = make_refnode( - self.app.builder, + self.env._app.builder, node['refdoc'], node['reftarget'], node['refid'], @@ -281,7 +283,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: env = app.env if not hasattr(env, '_viewcode_modules'): return - if not is_supported_builder(app.builder): + if not is_supported_builder(env._builder_cls, env.config.viewcode_enable_epub): return highlighter = app.builder.highlighter # type: ignore[attr-defined] urito = app.builder.get_relative_uri diff --git a/sphinx/io.py b/sphinx/io.py index 009cd38bf68..26c8b756fab 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -10,16 +10,9 @@ from docutils.transforms.references import DanglingReferences from docutils.writers import UnfilteredWriter -from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransformer -from sphinx.transforms.i18n import ( - Locale, - PreserveTranslatableMessages, - RemoveTranslatableInline, -) -from sphinx.transforms.references import SphinxDomains +from sphinx.transforms import SphinxTransformer from sphinx.util import logging from sphinx.util.docutils import LoggingReporter -from sphinx.versioning import UIDTransform if TYPE_CHECKING: from typing import Any @@ -113,32 +106,6 @@ def read_source(self, env: BuildEnvironment) -> str: return arg[0] -class SphinxI18nReader(SphinxBaseReader): - """A document reader for i18n. 
- - This returns the source line number of original text as current source line number - to let users know where the error happened. - Because the translated texts are partial and they don't have correct line numbers. - """ - - def setup(self, app: Sphinx) -> None: - super().setup(app) - - self.transforms = self.transforms + app.registry.get_transforms() - unused = [ - PreserveTranslatableMessages, - Locale, - RemoveTranslatableInline, - AutoIndexUpgrader, - SphinxDomains, - DoctreeReadEvent, - UIDTransform, - ] - for transform in unused: - if transform in self.transforms: - self.transforms.remove(transform) - - class SphinxDummyWriter(UnfilteredWriter): # type: ignore[type-arg] """Dummy writer module used for generating doctree.""" diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py index e76c9e98331..6857e05fe58 100644 --- a/sphinx/transforms/__init__.py +++ b/sphinx/transforms/__init__.py @@ -63,8 +63,9 @@ class SphinxTransform(Transform): @property def app(self) -> Sphinx: """Reference to the :class:`.Sphinx` object.""" + cls_module = self.__class__.__module__ cls_name = self.__class__.__qualname__ - _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0)) + _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0)) return self.env.app @property @@ -382,7 +383,7 @@ def is_available(self) -> bool: if self.config.smartquotes is False: # disabled by confval smartquotes return False - if self.app.builder.name in builders: + if self.env._builder_cls.name in builders: # disabled by confval smartquotes_excludes['builders'] return False if self.config.language in languages: @@ -412,7 +413,7 @@ class DoctreeReadEvent(SphinxTransform): default_priority = 880 def apply(self, **kwargs: Any) -> None: - self.app.events.emit('doctree-read', self.document) + self.env.events.emit('doctree-read', self.document) class GlossarySorter(SphinxTransform): diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py index 
815ca606bce..bfacfcf1a96 100644 --- a/sphinx/transforms/i18n.py +++ b/sphinx/transforms/i18n.py @@ -5,6 +5,7 @@ import contextlib from re import DOTALL, match from textwrap import indent +from types import SimpleNamespace from typing import TYPE_CHECKING, Any, TypeVar from docutils import nodes @@ -13,9 +14,11 @@ from sphinx import addnodes from sphinx.domains.std import make_glossary_term, split_term_classifiers from sphinx.errors import ConfigError +from sphinx.io import SphinxBaseReader from sphinx.locale import __ from sphinx.locale import init as init_locale -from sphinx.transforms import SphinxTransform +from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransform +from sphinx.transforms.references import SphinxDomains from sphinx.util import get_filetype, logging from sphinx.util.i18n import docname_to_domain from sphinx.util.index_entries import split_index_msg @@ -26,12 +29,15 @@ extract_messages, traverse_translatable_index, ) +from sphinx.versioning import UIDTransform if TYPE_CHECKING: from collections.abc import Sequence from sphinx.application import Sphinx from sphinx.config import Config + from sphinx.environment import BuildEnvironment + from sphinx.registry import SphinxComponentRegistry from sphinx.util.typing import ExtensionMetadata @@ -47,17 +53,46 @@ N = TypeVar('N', bound=nodes.Node) +class _SphinxI18nReader(SphinxBaseReader): + """A document reader for internationalisation (i18n). + + This returns the source line number of the original text + as the current source line number to let users know where + the error happened, because the translated texts are + partial and they don't have correct line numbers. 
+ """ + + def __init__( + self, *args: Any, registry: SphinxComponentRegistry, **kwargs: Any + ) -> None: + super().__init__(*args, **kwargs) + unused = frozenset({ + PreserveTranslatableMessages, + Locale, + RemoveTranslatableInline, + AutoIndexUpgrader, + SphinxDomains, + DoctreeReadEvent, + UIDTransform, + }) + transforms = self.transforms + registry.get_transforms() + self.transforms = [ + transform for transform in transforms if transform not in unused + ] + + def publish_msgstr( - app: Sphinx, source: str, source_path: str, source_line: int, config: Config, settings: Any, + *, + env: BuildEnvironment, + registry: SphinxComponentRegistry, ) -> nodes.Element: """Publish msgstr (single line) into docutils document - :param sphinx.application.Sphinx app: sphinx application :param str source: source text :param str source_path: source path for warning indication :param source_line: source line for warning indication @@ -65,18 +100,18 @@ def publish_msgstr( :param docutils.frontend.Values settings: docutils settings :return: document :rtype: docutils.nodes.document + :param sphinx.environment.BuildEnvironment env: sphinx environment + :param sphinx.registry.SphinxComponentRegistry registry: sphinx registry """ try: # clear rst_prolog temporarily rst_prolog = config.rst_prolog config.rst_prolog = None - from sphinx.io import SphinxI18nReader - - reader = SphinxI18nReader() - reader.setup(app) + reader = _SphinxI18nReader(registry=registry) + app = SimpleNamespace(config=config, env=env, registry=registry) filetype = get_filetype(config.source_suffix, source_path) - parser = app.registry.create_source_parser(app, filetype) + parser = registry.create_source_parser(app, filetype) # type: ignore[arg-type] doc = reader.read( source=StringInput( source=source, source_path=f'{source_path}:{source_line}:' @@ -436,12 +471,13 @@ def apply(self, **kwargs: Any) -> None: msgstr = '::\n\n' + indent(msgstr, ' ' * 3) patch = publish_msgstr( - self.app, msgstr, source, node.line, # 
type: ignore[arg-type] self.config, settings, + env=self.env, + registry=self.env._registry, ) # FIXME: no warnings about inconsistent references in this part # XXX doctest and other block markup @@ -456,12 +492,13 @@ def apply(self, **kwargs: Any) -> None: for _id in node['ids']: term, first_classifier = split_term_classifiers(msgstr) patch = publish_msgstr( - self.app, term or '', source, node.line, # type: ignore[arg-type] self.config, settings, + env=self.env, + registry=self.env._registry, ) updater.patch = make_glossary_term( self.env, @@ -533,12 +570,13 @@ def apply(self, **kwargs: Any) -> None: msgstr = msgstr + '\n' + '=' * len(msgstr) * 2 patch = publish_msgstr( - self.app, msgstr, source, node.line, # type: ignore[arg-type] self.config, settings, + env=self.env, + registry=self.env._registry, ) # Structural Subelements phase2 if isinstance(node, nodes.title): @@ -612,7 +650,7 @@ class TranslationProgressTotaliser(SphinxTransform): def apply(self, **kwargs: Any) -> None: from sphinx.builders.gettext import MessageCatalogBuilder - if isinstance(self.app.builder, MessageCatalogBuilder): + if issubclass(self.env._builder_cls, MessageCatalogBuilder): return total = translated = 0 @@ -635,7 +673,7 @@ class AddTranslationClasses(SphinxTransform): def apply(self, **kwargs: Any) -> None: from sphinx.builders.gettext import MessageCatalogBuilder - if isinstance(self.app.builder, MessageCatalogBuilder): + if issubclass(self.env._builder_cls, MessageCatalogBuilder): return if not self.config.translation_progress_classes: @@ -673,7 +711,7 @@ class RemoveTranslatableInline(SphinxTransform): def apply(self, **kwargs: Any) -> None: from sphinx.builders.gettext import MessageCatalogBuilder - if isinstance(self.app.builder, MessageCatalogBuilder): + if issubclass(self.env._builder_cls, MessageCatalogBuilder): return matcher = NodeMatcher(nodes.inline, translatable=Any) diff --git a/sphinx/transforms/post_transforms/__init__.py 
b/sphinx/transforms/post_transforms/__init__.py index 1a40c3d791a..ae70ce195d9 100644 --- a/sphinx/transforms/post_transforms/__init__.py +++ b/sphinx/transforms/post_transforms/__init__.py @@ -47,9 +47,9 @@ def apply(self, **kwargs: Any) -> None: def is_supported(self) -> bool: """Check this transform working for current builder.""" - if self.builders and self.app.builder.name not in self.builders: + if self.builders and self.env._builder_cls.name not in self.builders: return False - return not self.formats or self.app.builder.format in self.formats + return not self.formats or self.env._builder_cls.format in self.formats def run(self, **kwargs: Any) -> None: """Main method of post transforms. @@ -125,7 +125,7 @@ def _resolve_pending_xref( try: # no new node found? try the missing-reference event - new_node = self.app.events.emit_firstresult( + new_node = self.env.events.emit_firstresult( 'missing-reference', self.env, node, @@ -169,10 +169,11 @@ def _resolve_pending_xref_in_domain( typ: str, target: str, ) -> nodes.reference | None: + builder = self.env._app.builder # let the domain try to resolve the reference if domain is not None: return domain.resolve_xref( - self.env, ref_doc, self.app.builder, typ, target, node, contnode + self.env, ref_doc, builder, typ, target, node, contnode ) # really hardwired reference types @@ -193,7 +194,7 @@ def _resolve_pending_any_xref( ) -> nodes.reference | None: """Resolve reference generated by the "any" role.""" env = self.env - builder = self.app.builder + builder = self.env._app.builder domains = env.domains results: list[tuple[str, nodes.reference]] = [] @@ -282,7 +283,7 @@ def warn_missing_reference( if not warn: return - if self.app.events.emit_firstresult('warn-missing-reference', domain, node): + if self.env.events.emit_firstresult('warn-missing-reference', domain, node): return elif domain and typ in domain.dangling_warnings: msg = domain.dangling_warnings[typ] % {'target': target} @@ -328,7 +329,7 @@ def run(self, 
**kwargs: Any) -> None: # result in a "Losing ids" exception if there is a target node before # the only node, so we make sure docutils can transfer the id to # something, even if it's just a comment and will lose the id anyway... - process_only_nodes(self.document, self.app.tags) + process_only_nodes(self.document, self.env._tags) class SigElementFallbackTransform(SphinxPostTransform): @@ -343,7 +344,7 @@ def has_visitor( return hasattr(translator, 'visit_%s' % node.__name__) try: - translator = self.app.builder.get_translator_class() + translator = self.env._registry.get_translator_class(self.env._builder_cls) except AttributeError: # do nothing if no translator class is specified (e.g., on a dummy builder) return diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py index d4c6262e529..97b585d9cf6 100644 --- a/sphinx/transforms/post_transforms/images.py +++ b/sphinx/transforms/post_transforms/images.py @@ -45,16 +45,16 @@ def handle(self, node: nodes.image) -> None: @property def imagedir(self) -> _StrPath: - return self.app.doctreedir / 'images' + return self.env.doctreedir / 'images' class ImageDownloader(BaseImageConverter): default_priority = 100 def match(self, node: nodes.image) -> bool: - if not self.app.builder.supported_image_types: + if not self.env._builder_cls.supported_image_types: return False - if self.app.builder.supported_remote_images: + if self.env._builder_cls.supported_remote_images: return False return '://' in node['uri'] @@ -130,7 +130,7 @@ class DataURIExtractor(BaseImageConverter): default_priority = 150 def match(self, node: nodes.image) -> bool: - if self.app.builder.supported_data_uri_images is True: + if self.env._builder_cls.supported_data_uri_images is True: return False # do not transform the image; data URIs are valid in the build output return node['uri'].startswith('data:') @@ -208,12 +208,12 @@ class ImageConverter(BaseImageConverter): conversion_rules: list[tuple[str, str]] = 
[] def match(self, node: nodes.image) -> bool: - if not self.app.builder.supported_image_types: + if not self.env._builder_cls.supported_image_types: return False if '?' in node['candidates']: return False node_mime_types = set(self.guess_mimetypes(node)) - supported_image_types = set(self.app.builder.supported_image_types) + supported_image_types = set(self.env._builder_cls.supported_image_types) if node_mime_types & supported_image_types: # builder supports the image; no need to convert return False @@ -233,7 +233,7 @@ def match(self, node: nodes.image) -> bool: def get_conversion_rule(self, node: nodes.image) -> tuple[str, str]: for candidate in self.guess_mimetypes(node): - for supported in self.app.builder.supported_image_types: + for supported in self.env._builder_cls.supported_image_types: rule = (candidate, supported) if rule in self.conversion_rules: return rule @@ -250,7 +250,7 @@ def guess_mimetypes(self, node: nodes.image) -> list[str]: if '?' in node['candidates']: return [] elif '*' in node['candidates']: - path = self.app.srcdir / node['uri'] + path = self.env.srcdir / node['uri'] guessed = guess_mimetype(path) return [guessed] if guessed is not None else [] else: @@ -269,7 +269,7 @@ def handle(self, node: nodes.image) -> None: ensuredir(self.imagedir) destpath = self.imagedir / filename - abs_srcpath = self.app.srcdir / srcpath + abs_srcpath = self.env.srcdir / srcpath if self.convert(abs_srcpath, destpath): if '*' in node['candidates']: node['candidates']['*'] = str(destpath) diff --git a/tests/test_transforms/test_transforms_post_transforms_images.py b/tests/test_transforms/test_transforms_post_transforms_images.py index 800fb3b986b..c6c80f59c62 100644 --- a/tests/test_transforms/test_transforms_post_transforms_images.py +++ b/tests/test_transforms/test_transforms_post_transforms_images.py @@ -35,7 +35,7 @@ def test_guess_mimetype_webp(tmp_path): document = new_document('') - document.settings.env = 
SimpleNamespace(app=SimpleNamespace(srcdir=tmp_path)) + document.settings.env = SimpleNamespace(srcdir=tmp_path) converter = ImageConverter(document) file_webp = 'webp-image.webp' From 0521d835149063085aa5579eff746ffac2ae1766 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jun 2025 16:16:14 +0100 Subject: [PATCH 106/435] Bump Ruff to 0.11.13 (#13622) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 17cb0463ae5..17ad29b7626 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,7 +92,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.12", + "ruff==0.11.13", "mypy==1.15.0", "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", @@ -135,7 +135,7 @@ docs = [ "sphinxcontrib-websupport", ] lint = [ - "ruff==0.11.12", + "ruff==0.11.13", "sphinx-lint>=0.9", ] package = [ From 3601161f0e95f0bb7e22682cdd5fe93f4ed11ed8 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 7 Jun 2025 16:36:42 +0100 Subject: [PATCH 107/435] Bump types-docutils to 0.21.0.20250525 (#13630) Co-authored-by: Adam Dangoor --- pyproject.toml | 4 +- sphinx/builders/_epub_base.py | 4 +- sphinx/util/rst.py | 4 +- sphinx/writers/html5.py | 42 +++++++------- sphinx/writers/manpage.py | 58 +++++++++---------- .../test_util_docutils_sphinx_directive.py | 2 +- 6 files changed, 57 insertions(+), 57 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 17ad29b7626..fd0cdce21bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ lint = [ "sphinx-lint>=0.9", "types-colorama==0.4.15.20240311", "types-defusedxml==0.7.0.20250516", - "types-docutils==0.21.0.20250514", + "types-docutils==0.21.0.20250525", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", "types-requests==2.32.0.20250602", # align with requests @@ -166,7 +166,7 @@ type-stubs = [ # align with versions used 
elsewhere "types-colorama==0.4.15.20240311", "types-defusedxml==0.7.0.20250516", - "types-docutils==0.21.0.20250514", + "types-docutils==0.21.0.20250525", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", "types-requests==2.32.0.20250602", diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py index 10ae0820c5b..28a7553da51 100644 --- a/sphinx/builders/_epub_base.py +++ b/sphinx/builders/_epub_base.py @@ -114,8 +114,8 @@ class NavPoint(NamedTuple): def sphinx_smarty_pants(t: str, language: str = 'en') -> str: t = t.replace('"', '"') - t = smartquotes.educateDashesOldSchool(t) # type: ignore[no-untyped-call] - t = smartquotes.educateQuotes(t, language) # type: ignore[no-untyped-call] + t = smartquotes.educateDashesOldSchool(t) + t = smartquotes.educateQuotes(t, language) t = t.replace('"', '"') return t diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py index c848a9b3657..7e6853a81ef 100644 --- a/sphinx/util/rst.py +++ b/sphinx/util/rst.py @@ -9,7 +9,7 @@ from unicodedata import east_asian_width from docutils.parsers.rst import roles -from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined] +from docutils.parsers.rst.languages import en as english from docutils.parsers.rst.states import Body from docutils.utils import Reporter from jinja2 import pass_environment @@ -66,7 +66,7 @@ def heading(env: Environment, text: str, level: int = 1) -> str: def default_role(docname: str, name: str) -> Iterator[None]: if name: dummy_reporter = Reporter('', 4, 4) - role_fn, _ = roles.role(name, english, 0, dummy_reporter) + role_fn, _ = roles.role(name, english, 0, dummy_reporter) # type: ignore[arg-type] if role_fn: docutils.register_role('', role_fn) # type: ignore[arg-type] else: diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py index b39b463d6db..39d7ecea680 100644 --- a/sphinx/writers/html5.py +++ b/sphinx/writers/html5.py @@ -357,7 +357,7 @@ def visit_reference(self, node: Element) -> None: def 
visit_number_reference(self, node: Element) -> None: self.visit_reference(node) - def depart_number_reference(self, node: Element) -> None: + def depart_number_reference(self, node: nodes.reference) -> None: self.depart_reference(node) # overwritten -- we don't want source comments to show up in the HTML @@ -451,7 +451,7 @@ def add_permalink_ref(self, node: Element, title: str) -> None: ) # overwritten - def visit_bullet_list(self, node: Element) -> None: + def visit_bullet_list(self, node: nodes.bullet_list) -> None: if len(node) == 1 and isinstance(node[0], addnodes.toctree): # avoid emitting empty
    raise nodes.SkipNode @@ -498,7 +498,7 @@ def depart_term(self, node: Element) -> None: self.body.append('') # overwritten - def visit_title(self, node: Element) -> None: + def visit_title(self, node: nodes.title) -> None: if ( isinstance(node.parent, addnodes.compact_paragraph) and node.parent.get('toctree') @@ -535,7 +535,7 @@ def visit_title(self, node: Element) -> None: self.body.pop() self.context[-1] = '

    \n' - def depart_title(self, node: Element) -> None: + def depart_title(self, node: nodes.title) -> None: close_tag = self.context[-1] if ( self.config.html_permalinks @@ -586,7 +586,7 @@ def depart_rubric(self, node: nodes.rubric) -> None: super().depart_rubric(node) # overwritten - def visit_literal_block(self, node: Element) -> None: + def visit_literal_block(self, node: nodes.literal_block) -> None: if node.rawsource != node.astext(): # most probably a parsed-literal block -- don't highlight return super().visit_literal_block(node) @@ -614,7 +614,7 @@ def visit_literal_block(self, node: Element) -> None: self.body.append(starttag + highlighted + '\n') raise nodes.SkipNode - def visit_caption(self, node: Element) -> None: + def visit_caption(self, node: nodes.caption) -> None: if ( isinstance(node.parent, nodes.container) and node.parent.get('literal_block') @@ -625,7 +625,7 @@ def visit_caption(self, node: Element) -> None: self.add_fignumber(node.parent) self.body.append(self.starttag(node, 'span', '', CLASS='caption-text')) - def depart_caption(self, node: Element) -> None: + def depart_caption(self, node: nodes.caption) -> None: self.body.append('') # append permalink if available @@ -648,7 +648,7 @@ def depart_caption(self, node: Element) -> None: super().depart_caption(node) def visit_doctest_block(self, node: Element) -> None: - self.visit_literal_block(node) + self.visit_literal_block(node) # type: ignore[arg-type] # overwritten to add the
    (for XHTML compliance) def visit_block_quote(self, node: Element) -> None: @@ -740,14 +740,14 @@ def depart_download_reference(self, node: Element) -> None: self.body.append(self.context.pop()) # overwritten - def visit_figure(self, node: Element) -> None: + def visit_figure(self, node: nodes.figure) -> None: # set align=default if align not specified to give a default style node.setdefault('align', 'default') return super().visit_figure(node) # overwritten - def visit_image(self, node: Element) -> None: + def visit_image(self, node: nodes.image) -> None: olduri = node['uri'] # rewrite the URI if the environment knows about it if olduri in self.builder.images: @@ -775,7 +775,7 @@ def visit_image(self, node: Element) -> None: super().visit_image(node) # overwritten - def depart_image(self, node: Element) -> None: + def depart_image(self, node: nodes.image) -> None: if node['uri'].lower().endswith(('svg', 'svgz')): pass else: @@ -892,16 +892,16 @@ def visit_tip(self, node: Element) -> None: def depart_tip(self, node: Element) -> None: self.depart_admonition(node) - def visit_literal_emphasis(self, node: Element) -> None: + def visit_literal_emphasis(self, node: nodes.emphasis) -> None: return self.visit_emphasis(node) - def depart_literal_emphasis(self, node: Element) -> None: + def depart_literal_emphasis(self, node: nodes.emphasis) -> None: return self.depart_emphasis(node) - def visit_literal_strong(self, node: Element) -> None: + def visit_literal_strong(self, node: nodes.strong) -> None: return self.visit_strong(node) - def depart_literal_strong(self, node: Element) -> None: + def depart_literal_strong(self, node: nodes.strong) -> None: return self.depart_strong(node) def visit_abbreviation(self, node: Element) -> None: @@ -913,15 +913,15 @@ def visit_abbreviation(self, node: Element) -> None: def depart_abbreviation(self, node: Element) -> None: self.body.append('') - def visit_manpage(self, node: Element) -> None: + def visit_manpage(self, node: 
nodes.emphasis) -> None: self.visit_literal_emphasis(node) - def depart_manpage(self, node: Element) -> None: + def depart_manpage(self, node: nodes.emphasis) -> None: self.depart_literal_emphasis(node) # overwritten to add even/odd classes - def visit_table(self, node: Element) -> None: + def visit_table(self, node: nodes.table) -> None: self._table_row_indices.append(0) atts = {} @@ -936,7 +936,7 @@ def visit_table(self, node: Element) -> None: tag = self.starttag(node, 'table', CLASS=' '.join(classes), **atts) self.body.append(tag) - def depart_table(self, node: Element) -> None: + def depart_table(self, node: nodes.table) -> None: self._table_row_indices.pop() super().depart_table(node) @@ -949,11 +949,11 @@ def visit_row(self, node: Element) -> None: self.body.append(self.starttag(node, 'tr', '')) node.column = 0 # type: ignore[attr-defined] - def visit_field_list(self, node: Element) -> None: + def visit_field_list(self, node: nodes.field_list) -> None: self._fieldlist_row_indices.append(0) return super().visit_field_list(node) - def depart_field_list(self, node: Element) -> None: + def depart_field_list(self, node: nodes.field_list) -> None: self._fieldlist_row_indices.pop() return super().depart_field_list(node) diff --git a/sphinx/writers/manpage.py b/sphinx/writers/manpage.py index 171761fa2b0..45ab340c4e3 100644 --- a/sphinx/writers/manpage.py +++ b/sphinx/writers/manpage.py @@ -71,7 +71,7 @@ def apply(self, **kwargs: Any) -> None: node.parent.remove(node) -class ManualPageTranslator(SphinxTranslator, BaseTranslator): # type: ignore[misc] +class ManualPageTranslator(SphinxTranslator, BaseTranslator): """Custom man page translator.""" _docinfo: dict[str, Any] = {} @@ -130,17 +130,17 @@ def depart_start_of_file(self, node: Element) -> None: # Top-level nodes for descriptions ################################## - def visit_desc(self, node: Element) -> None: + def visit_desc(self, node: nodes.definition_list) -> None: self.visit_definition_list(node) - def 
depart_desc(self, node: Element) -> None: + def depart_desc(self, node: nodes.definition_list) -> None: self.depart_definition_list(node) - def visit_desc_signature(self, node: Element) -> None: - self.visit_definition_list_item(node) + def visit_desc_signature(self, node: nodes.term) -> None: + self.visit_definition_list_item(node) # type: ignore[arg-type] self.visit_term(node) - def depart_desc_signature(self, node: Element) -> None: + def depart_desc_signature(self, node: nodes.term) -> None: self.depart_term(node) def visit_desc_signature_line(self, node: Element) -> None: @@ -149,10 +149,10 @@ def visit_desc_signature_line(self, node: Element) -> None: def depart_desc_signature_line(self, node: Element) -> None: self.body.append(' ') - def visit_desc_content(self, node: Element) -> None: + def visit_desc_content(self, node: nodes.definition) -> None: self.visit_definition(node) - def depart_desc_content(self, node: Element) -> None: + def depart_desc_content(self, node: nodes.definition) -> None: self.depart_definition(node) def visit_desc_inline(self, node: Element) -> None: @@ -231,25 +231,25 @@ def depart_desc_annotation(self, node: Element) -> None: ############################################## - def visit_versionmodified(self, node: Element) -> None: + def visit_versionmodified(self, node: nodes.paragraph) -> None: self.visit_paragraph(node) - def depart_versionmodified(self, node: Element) -> None: + def depart_versionmodified(self, node: nodes.paragraph) -> None: self.depart_paragraph(node) # overwritten -- don't make whole of term bold if it includes strong node - def visit_term(self, node: Element) -> None: + def visit_term(self, node: nodes.term) -> None: if any(node.findall(nodes.strong)): self.body.append('\n') else: super().visit_term(node) # overwritten -- we don't want source comments to show up - def visit_comment(self, node: Element) -> None: + def visit_comment(self, node: Element) -> None: # type: ignore[override] raise nodes.SkipNode # 
overwritten -- added ensure_eol() - def visit_footnote(self, node: Element) -> None: + def visit_footnote(self, node: nodes.footnote) -> None: self.ensure_eol() super().visit_footnote(node) @@ -264,10 +264,10 @@ def visit_rubric(self, node: Element) -> None: def depart_rubric(self, node: Element) -> None: self.body.append('\n') - def visit_seealso(self, node: Element) -> None: + def visit_seealso(self, node: nodes.admonition) -> None: self.visit_admonition(node, 'seealso') - def depart_seealso(self, node: Element) -> None: + def depart_seealso(self, node: nodes.admonition) -> None: self.depart_admonition(node) def visit_productionlist(self, node: Element) -> None: @@ -291,7 +291,7 @@ def visit_image(self, node: Element) -> None: raise nodes.SkipNode # overwritten -- don't visit inner marked up nodes - def visit_reference(self, node: Element) -> None: + def visit_reference(self, node: nodes.reference) -> None: uri = node.get('refuri', '') is_safe_to_click = uri.startswith(('mailto:', 'http:', 'https:', 'ftp:')) if is_safe_to_click: @@ -301,7 +301,7 @@ def visit_reference(self, node: Element) -> None: self.body.append(self.defs['reference'][0]) # avoid repeating escaping code... 
fine since # visit_Text calls astext() and only works on that afterwards - self.visit_Text(node) + self.visit_Text(node) # type: ignore[arg-type] self.body.append(self.defs['reference'][1]) if uri and not uri.startswith('#'): @@ -369,10 +369,10 @@ def visit_acks(self, node: Element) -> None: self.body.append('\n') raise nodes.SkipNode - def visit_hlist(self, node: Element) -> None: + def visit_hlist(self, node: nodes.bullet_list) -> None: self.visit_bullet_list(node) - def depart_hlist(self, node: Element) -> None: + def depart_hlist(self, node: nodes.bullet_list) -> None: self.depart_bullet_list(node) def visit_hlistcol(self, node: Element) -> None: @@ -381,16 +381,16 @@ def visit_hlistcol(self, node: Element) -> None: def depart_hlistcol(self, node: Element) -> None: pass - def visit_literal_emphasis(self, node: Element) -> None: + def visit_literal_emphasis(self, node: nodes.emphasis) -> None: return self.visit_emphasis(node) - def depart_literal_emphasis(self, node: Element) -> None: + def depart_literal_emphasis(self, node: nodes.emphasis) -> None: return self.depart_emphasis(node) - def visit_literal_strong(self, node: Element) -> None: + def visit_literal_strong(self, node: nodes.strong) -> None: return self.visit_strong(node) - def depart_literal_strong(self, node: Element) -> None: + def depart_literal_strong(self, node: nodes.strong) -> None: return self.depart_strong(node) def visit_abbreviation(self, node: Element) -> None: @@ -399,14 +399,14 @@ def visit_abbreviation(self, node: Element) -> None: def depart_abbreviation(self, node: Element) -> None: pass - def visit_manpage(self, node: Element) -> None: + def visit_manpage(self, node: nodes.strong) -> None: return self.visit_strong(node) - def depart_manpage(self, node: Element) -> None: + def depart_manpage(self, node: nodes.strong) -> None: return self.depart_strong(node) # overwritten: handle section titles better than in 0.6 release - def visit_caption(self, node: Element) -> None: + def 
visit_caption(self, node: nodes.caption) -> None: if ( isinstance(node.parent, nodes.container) and node.parent.get('literal_block') @@ -415,7 +415,7 @@ def visit_caption(self, node: Element) -> None: else: super().visit_caption(node) - def depart_caption(self, node: Element) -> None: + def depart_caption(self, node: nodes.caption) -> None: if ( isinstance(node.parent, nodes.container) and node.parent.get('literal_block') @@ -425,7 +425,7 @@ def depart_caption(self, node: Element) -> None: super().depart_caption(node) # overwritten: handle section titles better than in 0.6 release - def visit_title(self, node: Element) -> None: + def visit_title(self, node: nodes.title) -> None: if isinstance(node.parent, addnodes.seealso): self.body.append('.IP "') return None @@ -438,7 +438,7 @@ def visit_title(self, node: Element) -> None: raise nodes.SkipNode return super().visit_title(node) - def depart_title(self, node: Element) -> None: + def depart_title(self, node: nodes.title) -> None: if isinstance(node.parent, addnodes.seealso): self.body.append('"\n') return None diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py index 8c24a3c4a83..eb1e4aea16a 100644 --- a/tests/test_util/test_util_docutils_sphinx_directive.py +++ b/tests/test_util/test_util_docutils_sphinx_directive.py @@ -3,7 +3,7 @@ from types import SimpleNamespace from docutils import nodes -from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined] +from docutils.parsers.rst.languages import en as english from docutils.parsers.rst.states import ( Inliner, RSTState, From dc33f988f39e870d7195ab4714b5b55fc9ec2044 Mon Sep 17 00:00:00 2001 From: Marie Sacksick <79304610+MarieSacksick@users.noreply.github.com> Date: Sat, 7 Jun 2025 17:38:30 +0200 Subject: [PATCH 108/435] autosummary: add 'vs.' 
to the well-known abbreviations (#13591) --- sphinx/ext/autosummary/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py index fe7092c0a74..62c9427ecdb 100644 --- a/sphinx/ext/autosummary/__init__.py +++ b/sphinx/ext/autosummary/__init__.py @@ -107,7 +107,7 @@ periods_re = re.compile(r'\.(?:\s+)') literal_re = re.compile(r'::\s*$') -WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.') +WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.', 'vs.') # -- autosummary_toc node ------------------------------------------------------ From 74627b25b01d7adc5e90b3838e637336f31586a3 Mon Sep 17 00:00:00 2001 From: Tim Hoffmann <2836374+timhoffm@users.noreply.github.com> Date: Sat, 7 Jun 2025 18:07:52 +0200 Subject: [PATCH 109/435] Use anonymous references for links with embedded URLs (#13615) Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> --- doc/usage/restructuredtext/basics.rst | 36 ++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/doc/usage/restructuredtext/basics.rst b/doc/usage/restructuredtext/basics.rst index 5d60ea81de4..ea61b80fc85 100644 --- a/doc/usage/restructuredtext/basics.rst +++ b/doc/usage/restructuredtext/basics.rst @@ -208,11 +208,39 @@ Hyperlinks External links ~~~~~~~~~~~~~~ -Use ```Link text `_`` for inline web links. If the -link text should be the web address, you don't need special markup at all, the -parser finds links and mail addresses in ordinary text. +URLs and email addresses in text are automatically linked and do not need +explicit markup at all. +For example, https://domain.invalid/ is written with no special markup +in the source of this document, and is recognised as an external hyperlink. -.. important:: There must be a space between the link text and the opening \< for the URL. 
+To create text with a link, the best approach is generally to put the URL +below the paragraph as follows (:duref:`ref `):: + + This is a paragraph that contains `a link`_. + + .. _a link: https://domain.invalid/ + +This keeps the paragraph more readable in source code. + +Alternatively, you can embed the URL within the prose for an 'inline link'. +This can lead to longer lines, but has the benefit of keeping the link text +and the URL pointed to in the same place. +This uses the following syntax: ```Link text `__`` +(:duref:`ref `). + +.. important:: + + There must be a space between the link text + and the opening angle bracket ('``<``') for the URL. + +.. tip:: + + Use two trailing underscores when embedding the URL. + Technically, a single underscore works as well, + but that would create a named reference instead of an anonymous one. + Named references typically do not have a benefit when the URL is embedded. + Moreover, they have the disadvantage that you must make sure that you + do not use the same "Link text" for another link in your document. You can also separate the link and the target definition (:duref:`ref `), like this:: From 25ab3d73901c75231880733ee2a3acf966ddcc11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melissa=20Weber=20Mendon=C3=A7a?= Date: Sat, 7 Jun 2025 13:24:56 -0300 Subject: [PATCH 110/435] Document the ``autolink`` role for autosummary (#13596) Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com> --- doc/usage/extensions/autosummary.rst | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/doc/usage/extensions/autosummary.rst b/doc/usage/extensions/autosummary.rst index 456faee1830..c84dcb60eff 100644 --- a/doc/usage/extensions/autosummary.rst +++ b/doc/usage/extensions/autosummary.rst @@ -412,3 +412,27 @@ the title of a page. Stub pages are generated also based on these directives. .. 
_`escape filter`: https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.escape + +Autolink role +------------- + +.. rst:role:: autolink + + The ``:autolink:`` role functions as ``:py:obj:`` when the referenced *name* + can be resolved to a Python object, and otherwise it becomes simple emphasis. + + There are some known design flaws. + For example, in the case of multiple objects having the same name, + :rst:role:`!autolink` could resolve to the wrong object. + It will fail silently if the referenced object is not found, + for example due to a spelling mistake or renaming. + This is sometimes unwanted behaviour. + + Some users choose to configure their :confval:`default_role` to ``autolink`` + for 'smart' referencing using the default interpreted text role (```content```). + + .. seealso:: + + :rst:role:`any` + + :rst:role:`py:obj` From acdf4a86980efdb37146169fc09d435e54a4a47d Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sat, 7 Jun 2025 17:25:11 +0100 Subject: [PATCH 111/435] Use an explicit tags parameter for ``global_toctree_for_doc()`` (#13631) --- sphinx/builders/__init__.py | 22 ++++++++++++++----- sphinx/builders/_epub_base.py | 12 ++++++++-- sphinx/builders/epub3.py | 6 ++++- sphinx/builders/html/__init__.py | 2 +- sphinx/builders/singlehtml.py | 4 ++-- sphinx/environment/__init__.py | 4 +++- sphinx/environment/adapters/toctree.py | 17 ++++++++++++-- .../test_environment_toctree.py | 17 ++++++++++---- .../test_ext_inheritance_diagram.py | 2 +- tests/test_extensions/test_ext_math.py | 2 +- 10 files changed, 68 insertions(+), 20 deletions(-) diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index fb8bd757864..70602273747 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -772,7 +772,9 @@ def _write_serial(self, docnames: Sequence[str]) -> None: len(docnames), self._app.verbosity, ): - _write_docname(docname, app=self._app, env=self.env, builder=self) 
+ _write_docname( + docname, app=self._app, env=self.env, builder=self, tags=self.tags + ) def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None: def write_process(docs: list[tuple[str, nodes.document]]) -> None: @@ -782,7 +784,9 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None: # warm up caches/compile templates using the first document firstname, docnames = docnames[0], docnames[1:] - _write_docname(firstname, app=self._app, env=self.env, builder=self) + _write_docname( + firstname, app=self._app, env=self.env, builder=self, tags=self.tags + ) tasks = ParallelTasks(nproc) chunks = make_chunks(docnames, nproc) @@ -804,7 +808,9 @@ def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None: for chunk in chunks: arg = [] for docname in chunk: - doctree = self.env.get_and_resolve_doctree(docname, self) + doctree = self.env.get_and_resolve_doctree( + docname, self, tags=self.tags + ) self.write_doc_serialized(docname, doctree) arg.append((docname, doctree)) tasks.add_task(write_process, arg, on_chunk_done) @@ -871,11 +877,17 @@ def get_builder_config(self, option: str, default: str) -> Any: def _write_docname( - docname: str, /, *, app: Sphinx, env: BuildEnvironment, builder: Builder + docname: str, + /, + *, + app: Sphinx, + env: BuildEnvironment, + builder: Builder, + tags: Tags, ) -> None: """Write a single document.""" app.phase = BuildPhase.RESOLVING - doctree = env.get_and_resolve_doctree(docname, builder=builder) + doctree = env.get_and_resolve_doctree(docname, builder=builder, tags=tags) app.phase = BuildPhase.WRITING builder.write_doc_serialized(docname, doctree) builder.write_doc(docname, doctree) diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py index 28a7553da51..1bd4846bf02 100644 --- a/sphinx/builders/_epub_base.py +++ b/sphinx/builders/_epub_base.py @@ -233,7 +233,11 @@ def get_toc(self) -> None: and pre and post files not managed by Sphinx. 
""" doctree = self.env.get_and_resolve_doctree( - self.config.master_doc, self, prune_toctrees=False, includehidden=True + self.config.master_doc, + self, + tags=self.tags, + prune_toctrees=False, + includehidden=True, ) self.refnodes = self.get_refnodes(doctree, []) master_dir = Path(self.config.master_doc).parent @@ -765,7 +769,11 @@ def build_toc(self) -> None: if self.config.epub_tocscope == 'default': doctree = self.env.get_and_resolve_doctree( - self.config.root_doc, self, prune_toctrees=False, includehidden=False + self.config.root_doc, + self, + tags=self.tags, + prune_toctrees=False, + includehidden=False, ) refnodes = self.get_refnodes(doctree, []) self.toc_add_files(refnodes) diff --git a/sphinx/builders/epub3.py b/sphinx/builders/epub3.py index 2ea66c34b8b..c17be8bd4e9 100644 --- a/sphinx/builders/epub3.py +++ b/sphinx/builders/epub3.py @@ -190,7 +190,11 @@ def build_navigation_doc(self) -> None: if self.config.epub_tocscope == 'default': doctree = self.env.get_and_resolve_doctree( - self.config.root_doc, self, prune_toctrees=False, includehidden=False + self.config.root_doc, + self, + tags=self.tags, + prune_toctrees=False, + includehidden=False, ) refnodes = self.get_refnodes(doctree, []) self.toc_add_files(refnodes) diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index 1ba026a61d0..a5f725e2922 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -1029,7 +1029,7 @@ def _get_local_toctree( if kwargs.get('maxdepth') == '': # NoQA: PLC1901 kwargs.pop('maxdepth') toctree = global_toctree_for_doc( - self.env, docname, self, collapse=collapse, **kwargs + self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs ) return self.render_partial(toctree)['fragment'] diff --git a/sphinx/builders/singlehtml.py b/sphinx/builders/singlehtml.py index c95603927ce..1888f6679d1 100644 --- a/sphinx/builders/singlehtml.py +++ b/sphinx/builders/singlehtml.py @@ -84,7 +84,7 @@ def 
_get_local_toctree( if kwargs.get('maxdepth') == '': # NoQA: PLC1901 kwargs.pop('maxdepth') toctree = global_toctree_for_doc( - self.env, docname, self, collapse=collapse, **kwargs + self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs ) return self.render_partial(toctree)['fragment'] @@ -141,7 +141,7 @@ def assemble_toc_fignumbers( def get_doc_context(self, docname: str, body: str, metatags: str) -> dict[str, Any]: # no relation links... toctree = global_toctree_for_doc( - self.env, self.config.root_doc, self, collapse=False + self.env, self.config.root_doc, self, tags=self.tags, collapse=False ) # if there is no toctree, toc is None if toctree: diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index 846a1bbde98..fd611639e9c 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -701,6 +701,8 @@ def get_and_resolve_doctree( self, docname: str, builder: Builder, + *, + tags: Tags, doctree: nodes.document | None = None, prune_toctrees: bool = True, includehidden: bool = False, @@ -770,7 +772,7 @@ def resolve_toctree( titles_only=titles_only, collapse=collapse, includehidden=includehidden, - tags=builder.tags, + tags=self._tags, ) def resolve_references( diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py index 90344f185d7..4708383d64b 100644 --- a/sphinx/environment/adapters/toctree.py +++ b/sphinx/environment/adapters/toctree.py @@ -2,12 +2,14 @@ from __future__ import annotations +import warnings from typing import TYPE_CHECKING, TypeVar from docutils import nodes from docutils.nodes import Element from sphinx import addnodes +from sphinx.deprecation import RemovedInSphinx10Warning from sphinx.locale import __ from sphinx.util import logging, url_re from sphinx.util.matching import Matcher @@ -69,6 +71,8 @@ def global_toctree_for_doc( env: BuildEnvironment, docname: str, builder: Builder, + *, + tags: Tags = ..., # type: ignore[assignment] collapse: 
bool = False, includehidden: bool = True, maxdepth: int = 0, @@ -78,6 +82,15 @@ def global_toctree_for_doc( This gives the global ToC, with all ancestors and their siblings. """ + if tags is ...: + warnings.warn( + "'tags' will become a required keyword argument " + 'for global_toctree_for_doc() in Sphinx 10.0.', + RemovedInSphinx10Warning, + stacklevel=2, + ) + tags = builder.tags + resolved = ( _resolve_toctree( env, @@ -89,7 +102,7 @@ def global_toctree_for_doc( titles_only=titles_only, collapse=collapse, includehidden=includehidden, - tags=builder.tags, + tags=tags, ) for toctree_node in env.master_doctree.findall(addnodes.toctree) ) @@ -582,5 +595,5 @@ def get_toctree_for( **kwargs: Any, ) -> Element | None: return global_toctree_for_doc( - self.env, docname, builder, collapse=collapse, **kwargs + self.env, docname, builder, tags=builder.tags, collapse=collapse, **kwargs ) diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py index f6b849c5bec..22474daef55 100644 --- a/tests/test_environment/test_environment_toctree.py +++ b/tests/test_environment/test_environment_toctree.py @@ -614,7 +614,9 @@ def test_document_toc_tocdepth(app): @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_global_toctree_for_doc(app): app.build() - toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=False) + toctree = global_toctree_for_doc( + app.env, 'index', app.builder, tags=app.tags, collapse=False + ) assert_node( toctree, [ @@ -676,7 +678,9 @@ def test_global_toctree_for_doc(app): @pytest.mark.test_params(shared_result='test_environment_toctree_basic') def test_global_toctree_for_doc_collapse(app): app.build() - toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=True) + toctree = global_toctree_for_doc( + app.env, 'index', app.builder, tags=app.tags, collapse=True + ) assert_node( toctree, [ @@ -723,7 +727,7 @@ def 
test_global_toctree_for_doc_collapse(app): def test_global_toctree_for_doc_maxdepth(app): app.build() toctree = global_toctree_for_doc( - app.env, 'index', app.builder, collapse=False, maxdepth=3 + app.env, 'index', app.builder, tags=app.tags, collapse=False, maxdepth=3 ) assert_node( toctree, @@ -814,7 +818,12 @@ def test_global_toctree_for_doc_maxdepth(app): def test_global_toctree_for_doc_includehidden(app): app.build() toctree = global_toctree_for_doc( - app.env, 'index', app.builder, collapse=False, includehidden=False + app.env, + 'index', + app.builder, + tags=app.tags, + collapse=False, + includehidden=False, ) assert_node( toctree, diff --git a/tests/test_extensions/test_ext_inheritance_diagram.py b/tests/test_extensions/test_ext_inheritance_diagram.py index 4153113d12e..f98d424eda3 100644 --- a/tests/test_extensions/test_ext_inheritance_diagram.py +++ b/tests/test_extensions/test_ext_inheritance_diagram.py @@ -293,7 +293,7 @@ def test_inheritance_diagram_latex_alias(app): app.config.inheritance_alias = {'test.Foo': 'alias.Foo'} app.build(force_all=True) - doc = app.env.get_and_resolve_doctree('index', app) + doc = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags) aliased_graph = doc.children[0].children[3]['graph'].class_info assert len(aliased_graph) == 4 assert ( diff --git a/tests/test_extensions/test_ext_math.py b/tests/test_extensions/test_ext_math.py index 02d215bef01..2dbc93629fd 100644 --- a/tests/test_extensions/test_ext_math.py +++ b/tests/test_extensions/test_ext_math.py @@ -323,7 +323,7 @@ def test_imgmath_numfig_html(app: SphinxTestApp) -> None: def test_math_compat(app): with warnings.catch_warnings(record=True): app.build(force_all=True) - doctree = app.env.get_and_resolve_doctree('index', app.builder) + doctree = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags) assert_node( doctree, From 3044d67531f2a66f285912aa92929ccb3c9e3f8e Mon Sep 17 00:00:00 2001 From: Adam Turner 
<9087854+AA-Turner@users.noreply.github.com> Date: Sat, 7 Jun 2025 18:58:06 +0100 Subject: [PATCH 112/435] Avoid self.app in builder (#13632) --- sphinx/application.py | 1 + sphinx/builders/__init__.py | 3 ++- sphinx/builders/_epub_base.py | 2 +- sphinx/builders/changes.py | 7 +++++- sphinx/builders/gettext.py | 14 +++++++----- sphinx/builders/html/__init__.py | 25 ++++++++++++++-------- sphinx/builders/latex/__init__.py | 4 ++-- sphinx/builders/latex/theming.py | 9 ++++---- sphinx/builders/linkcheck.py | 6 +++--- sphinx/builders/texinfo.py | 2 +- sphinx/config.py | 6 ++++++ sphinx/ext/coverage.py | 8 +++---- sphinx/ext/doctest.py | 4 ++-- sphinx/theming.py | 20 ++++++++++++----- sphinx/transforms/__init__.py | 2 +- tests/test_extensions/test_ext_coverage.py | 2 +- tests/test_theming/test_theming.py | 8 +++---- 17 files changed, 78 insertions(+), 45 deletions(-) diff --git a/sphinx/application.py b/sphinx/application.py index d5192eef0b6..3874a6afa52 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -264,6 +264,7 @@ def __init__( else: self.confdir = _StrPath(confdir).resolve() self.config = Config.read(self.confdir, confoverrides or {}, self.tags) + self.config._verbosity = -1 if self.quiet else self.verbosity # set up translation infrastructure self._init_i18n() diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 70602273747..4e116732e7a 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -139,8 +139,9 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None: @property def app(self) -> Sphinx: + cls_module = self.__class__.__module__ cls_name = self.__class__.__qualname__ - _deprecation_warning(__name__, f'{cls_name}.app', remove=(10, 0)) + _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0)) return self._app @property diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py index 1bd4846bf02..3c7c93dfd1f 100644 --- a/sphinx/builders/_epub_base.py +++ 
b/sphinx/builders/_epub_base.py @@ -425,7 +425,7 @@ def copy_image_files_pil(self) -> None: __('copying images... '), 'brown', len(self.images), - self.app.verbosity, + self.config.verbosity, ): dest = self.images[src] try: diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py index aa926e0809c..059a7d1b055 100644 --- a/sphinx/builders/changes.py +++ b/sphinx/builders/changes.py @@ -30,7 +30,12 @@ class ChangesBuilder(Builder): def init(self) -> None: self.create_template_bridge() - theme_factory = HTMLThemeFactory(self.app) + theme_factory = HTMLThemeFactory( + confdir=self.confdir, + app=self._app, + config=self.config, + registry=self.env._registry, + ) self.theme = theme_factory.create('default') self.templates.init(self, self.theme) diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py index f5f26ffcc88..659bf218983 100644 --- a/sphinx/builders/gettext.py +++ b/sphinx/builders/gettext.py @@ -165,7 +165,7 @@ class I18nBuilder(Builder): def init(self) -> None: super().init() self.env.set_versioning_method(self.versioning_method, self.config.gettext_uuid) - self.tags = self.app.tags = I18nTags() + self.tags = self._app.tags = I18nTags() self.catalogs: defaultdict[str, Catalog] = defaultdict(Catalog) def get_target_uri(self, docname: str, typ: str | None = None) -> str: @@ -251,7 +251,7 @@ def init(self) -> None: def _collect_templates(self) -> set[str]: template_files = set() for template_path in self.config.templates_path: - tmpl_abs_path = self.app.srcdir / template_path + tmpl_abs_path = self.srcdir / template_path for dirpath, _dirs, files in walk(tmpl_abs_path): for fn in files: if fn.endswith('.html'): @@ -268,7 +268,11 @@ def _extract_from_template(self) -> None: extract_translations = self.templates.environment.extract_translations for template in status_iterator( - files, __('reading templates... '), 'purple', len(files), self.app.verbosity + files, + __('reading templates... 
'), + 'purple', + len(files), + self.config.verbosity, ): try: with codecs.open(template, encoding='utf-8') as f: @@ -307,7 +311,7 @@ def finish(self) -> None: __('writing message catalogs... '), 'darkgreen', len(self.catalogs), - self.app.verbosity, + self.config.verbosity, operator.itemgetter(0), ): # noop if config.gettext_compact is set @@ -315,7 +319,7 @@ def finish(self) -> None: context['messages'] = list(catalog) template_path = [ - self.app.srcdir / rel_path for rel_path in self.config.templates_path + self.srcdir / rel_path for rel_path in self.config.templates_path ] renderer = GettextRenderer(template_path, outdir=self.outdir) content = renderer.render('message.pot.jinja', context) diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index a5f725e2922..1195d08beb6 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -228,7 +228,12 @@ def get_theme_config(self) -> tuple[str, dict[str, str | int | bool]]: return self.config.html_theme, self.config.html_theme_options def init_templates(self) -> None: - theme_factory = HTMLThemeFactory(self.app) + theme_factory = HTMLThemeFactory( + confdir=self.confdir, + app=self._app, + config=self.config, + registry=self.env._registry, + ) theme_name, theme_options = self.get_theme_config() self.theme = theme_factory.create(theme_name) self.theme_options = theme_options @@ -255,11 +260,6 @@ def init_highlighter(self) -> None: self.dark_highlighter: PygmentsBridge | None if dark_style is not None: self.dark_highlighter = PygmentsBridge('html', dark_style) - self.app.add_css_file( - 'pygments_dark.css', - media='(prefers-color-scheme: dark)', - id='pygments_dark_css', - ) else: self.dark_highlighter = None @@ -273,6 +273,13 @@ def css_files(self) -> list[_CascadingStyleSheet]: def init_css_files(self) -> None: self._css_files = [] self.add_css_file('pygments.css', priority=200) + if self.dark_highlighter is not None: + self.add_css_file( + 'pygments_dark.css', 
+ priority=200, + media='(prefers-color-scheme: dark)', + id='pygments_dark_css', + ) for filename in self._get_style_filenames(): self.add_css_file(filename, priority=200) @@ -780,7 +787,7 @@ def copy_image_files(self) -> None: __('copying images... '), 'brown', len(self.images), - self.app.verbosity, + self.config.verbosity, stringify_func=stringify_func, ): dest = self.images[src] @@ -807,7 +814,7 @@ def to_relpath(f: str) -> str: __('copying downloadable files... '), 'brown', len(self.env.dlfiles), - self.app.verbosity, + self.config.verbosity, stringify_func=to_relpath, ): try: @@ -1128,7 +1135,7 @@ def hasdoc(name: str) -> bool: # 'blah.html' should have content_root = './' not ''. ctx['content_root'] = (f'..{SEP}' * default_baseuri.count(SEP)) or f'.{SEP}' - outdir = self.app.outdir + outdir = self.outdir def css_tag(css: _CascadingStyleSheet) -> str: attrs = [ diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py index 985620f2023..d5e4a779aa1 100644 --- a/sphinx/builders/latex/__init__.py +++ b/sphinx/builders/latex/__init__.py @@ -132,7 +132,7 @@ def init(self) -> None: self.context: dict[str, Any] = {} self.docnames: Iterable[str] = {} self.document_data: list[tuple[str, str, str, str, str, bool]] = [] - self.themes = ThemeFactory(self.app) + self.themes = ThemeFactory(srcdir=self.srcdir, config=self.config) texescape.init() self.init_context() @@ -481,7 +481,7 @@ def copy_image_files(self) -> None: __('copying images... 
'), 'brown', len(self.images), - self.app.verbosity, + self.config.verbosity, stringify_func=stringify_func, ): dest = self.images[src] diff --git a/sphinx/builders/latex/theming.py b/sphinx/builders/latex/theming.py index f55c077c9ca..df8eb48ec4f 100644 --- a/sphinx/builders/latex/theming.py +++ b/sphinx/builders/latex/theming.py @@ -12,7 +12,6 @@ if TYPE_CHECKING: from pathlib import Path - from sphinx.application import Sphinx from sphinx.config import Config logger = logging.getLogger(__name__) @@ -102,11 +101,11 @@ def __init__(self, name: str, filename: Path) -> None: class ThemeFactory: """A factory class for LaTeX Themes.""" - def __init__(self, app: Sphinx) -> None: + def __init__(self, *, srcdir: Path, config: Config) -> None: self.themes: dict[str, Theme] = {} - self.theme_paths = [app.srcdir / p for p in app.config.latex_theme_path] - self.config = app.config - self.load_builtin_themes(app.config) + self.theme_paths = [srcdir / p for p in config.latex_theme_path] + self.config = config + self.load_builtin_themes(config) def load_builtin_themes(self, config: Config) -> None: """Load built-in themes.""" diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py index de102873036..c1b199c5493 100644 --- a/sphinx/builders/linkcheck.py +++ b/sphinx/builders/linkcheck.py @@ -98,7 +98,7 @@ def finish(self) -> None: self.process_result(result) if self.broken_hyperlinks or self.timed_out_hyperlinks: - self.app.statuscode = 1 + self._app.statuscode = 1 def process_result(self, result: CheckResult) -> None: filename = self.env.doc2path(result.docname, False) @@ -130,7 +130,7 @@ def process_result(self, result: CheckResult) -> None: case _Status.WORKING: logger.info(darkgreen('ok ') + f'{res_uri}{result.message}') # NoQA: G003 case _Status.TIMEOUT: - if self.app.quiet: + if self.config.verbosity < 0: msg = 'timeout ' + f'{res_uri}{result.message}' logger.warning(msg, location=(result.docname, result.lineno)) else: @@ -145,7 +145,7 @@ def 
process_result(self, result: CheckResult) -> None: ) self.timed_out_hyperlinks += 1 case _Status.BROKEN: - if self.app.quiet: + if self.config.verbosity < 0: logger.warning( __('broken link: %s (%s)'), res_uri, diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py index 79afafab84d..6611be05465 100644 --- a/sphinx/builders/texinfo.py +++ b/sphinx/builders/texinfo.py @@ -198,7 +198,7 @@ def copy_image_files(self, targetname: str) -> None: __('copying images... '), 'brown', len(self.images), - self.app.verbosity, + self.config.verbosity, stringify_func=stringify_func, ): dest = self.images[src] diff --git a/sphinx/config.py b/sphinx/config.py index 2498ada6c56..3e16c151ebd 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -333,6 +333,8 @@ def __init__( raw_config['extensions'] = extensions self.extensions: list[str] = raw_config.get('extensions', []) + self._verbosity: int = 0 # updated in Sphinx.__init__() + @property def values(self) -> dict[str, _Opt]: return self._options @@ -341,6 +343,10 @@ def values(self) -> dict[str, _Opt]: def overrides(self) -> dict[str, Any]: return self._overrides + @property + def verbosity(self) -> int: + return self._verbosity + @classmethod def read( cls: type[Config], diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py index b2d08603f38..5c5a8d51ab3 100644 --- a/sphinx/ext/coverage.py +++ b/sphinx/ext/coverage.py @@ -255,7 +255,7 @@ def write_c_coverage(self) -> None: for typ, name in sorted(undoc): op.write(f' * {name:<50} [{typ:>9}]\n') if self.config.coverage_show_missing_items: - if self.app.quiet: + if self.config.verbosity < 0: logger.warning( __('undocumented c api: %s [%s] in file %s'), name, @@ -446,7 +446,7 @@ def write_py_coverage(self) -> None: op.write('Functions:\n') op.writelines(f' * {x}\n' for x in undoc['funcs']) if self.config.coverage_show_missing_items: - if self.app.quiet: + if self.config.verbosity < 0: for func in undoc['funcs']: logger.warning( __('undocumented python function: 
%s :: %s'), @@ -468,7 +468,7 @@ def write_py_coverage(self) -> None: if not methods: op.write(f' * {class_name}\n') if self.config.coverage_show_missing_items: - if self.app.quiet: + if self.config.verbosity < 0: logger.warning( __('undocumented python class: %s :: %s'), name, @@ -485,7 +485,7 @@ def write_py_coverage(self) -> None: op.write(f' * {class_name} -- missing methods:\n\n') op.writelines(f' - {x}\n' for x in methods) if self.config.coverage_show_missing_items: - if self.app.quiet: + if self.config.verbosity < 0: for meth in methods: logger.warning( __( diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py index 9610e24d58d..da40a63e781 100644 --- a/sphinx/ext/doctest.py +++ b/sphinx/ext/doctest.py @@ -341,7 +341,7 @@ def _out(self, text: str) -> None: self.outfile.write(text) def _warn_out(self, text: str) -> None: - if self.app.quiet: + if self.config.verbosity < 0: logger.warning(text) else: logger.info(text, nonl=True) @@ -360,7 +360,7 @@ def s(v: int) -> str: header = 'Doctest summary' if self.total_failures or self.setup_failures or self.cleanup_failures: - self.app.statuscode = 1 + self._app.statuscode = 1 if self.config.doctest_fail_fast: header = f'{header} (exiting after first failed test)' underline = '=' * len(header) diff --git a/sphinx/theming.py b/sphinx/theming.py index a27dbfe0973..9e06faaeffc 100644 --- a/sphinx/theming.py +++ b/sphinx/theming.py @@ -28,6 +28,8 @@ from typing import Any, Required, TypedDict from sphinx.application import Sphinx + from sphinx.config import Config + from sphinx.registry import SphinxComponentRegistry class _ThemeToml(TypedDict, total=False): theme: Required[_ThemeTomlTheme] @@ -148,13 +150,21 @@ def _cleanup(self) -> None: class HTMLThemeFactory: """A factory class for HTML Themes.""" - def __init__(self, app: Sphinx) -> None: + def __init__( + self, + *, + confdir: Path, + app: Sphinx, + config: Config, + registry: SphinxComponentRegistry, + ) -> None: self._app = app - self._themes = 
app.registry.html_themes + self._confdir = confdir + self._themes = registry.html_themes self._entry_point_themes: dict[str, Callable[[], None]] = {} self._load_builtin_themes() - if getattr(app.config, 'html_theme_path', None): - self._load_additional_themes(app.config.html_theme_path) + if html_theme_path := getattr(config, 'html_theme_path', None): + self._load_additional_themes(html_theme_path) self._load_entry_point_themes() def _load_builtin_themes(self) -> None: @@ -166,7 +176,7 @@ def _load_builtin_themes(self) -> None: def _load_additional_themes(self, theme_paths: list[str]) -> None: """Load additional themes placed at specified directories.""" for theme_path in theme_paths: - abs_theme_path = (self._app.confdir / theme_path).resolve() + abs_theme_path = (self._confdir / theme_path).resolve() themes = self._find_themes(abs_theme_path) for name, theme in themes.items(): self._themes[name] = _StrPath(theme) diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py index 6857e05fe58..7ba50aaa240 100644 --- a/sphinx/transforms/__init__.py +++ b/sphinx/transforms/__init__.py @@ -66,7 +66,7 @@ def app(self) -> Sphinx: cls_module = self.__class__.__module__ cls_name = self.__class__.__qualname__ _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0)) - return self.env.app + return self.env._app @property def env(self) -> BuildEnvironment: diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py index 7422cd3560f..32fc2dba2d7 100644 --- a/tests/test_extensions/test_ext_coverage.py +++ b/tests/test_extensions/test_ext_coverage.py @@ -117,7 +117,7 @@ def test_show_missing_items(app: SphinxTestApp) -> None: 'coverage', testroot='root', confoverrides={'coverage_show_missing_items': True} ) def test_show_missing_items_quiet(app: SphinxTestApp) -> None: - app.quiet = True + app.config._verbosity = -1 # mimics status=None / app.quiet = True app.build(force_all=True) assert ( diff --git 
a/tests/test_theming/test_theming.py b/tests/test_theming/test_theming.py index 173e0c9c64b..8ff3919c967 100644 --- a/tests/test_theming/test_theming.py +++ b/tests/test_theming/test_theming.py @@ -159,10 +159,10 @@ def test_dark_style(app, monkeypatch): app.build() assert (app.outdir / '_static' / 'pygments_dark.css').exists() - css_file, properties = app.registry.css_files[0] - assert css_file == 'pygments_dark.css' - assert 'media' in properties - assert properties['media'] == '(prefers-color-scheme: dark)' + css_file = app.builder._css_files[1] + assert css_file.filename == '_static/pygments_dark.css' + assert 'media' in css_file.attributes + assert css_file.attributes['media'] == '(prefers-color-scheme: dark)' assert sorted(f.filename for f in app.builder._css_files) == [ '_static/classic.css', From 5392f0f2ed4879949c7f8dc02e3ac43acadea8b3 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 8 Jun 2025 19:52:24 +0100 Subject: [PATCH 113/435] Extract ``_read_conf_py()`` from ``Config.read()`` (#13633) --- sphinx/application.py | 5 ++-- sphinx/config.py | 49 ++++++++++++++++---------------- tests/test_config/test_config.py | 17 +++++------ 3 files changed, 37 insertions(+), 34 deletions(-) diff --git a/sphinx/application.py b/sphinx/application.py index 3874a6afa52..8117eecf340 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -255,15 +255,16 @@ def __init__( self.statuscode = 0 # read config + overrides = confoverrides or {} self.tags = Tags(tags) if confdir is None: # set confdir to srcdir if -C given (!= no confdir); a few pieces # of code expect a confdir to be set self.confdir = self.srcdir - self.config = Config({}, confoverrides or {}) + self.config = Config({}, overrides) else: self.confdir = _StrPath(confdir).resolve() - self.config = Config.read(self.confdir, confoverrides or {}, self.tags) + self.config = Config.read(self.confdir, overrides=overrides, tags=self.tags) self.config._verbosity 
= -1 if self.quiet else self.verbosity # set up translation infrastructure diff --git a/sphinx/config.py b/sphinx/config.py index 3e16c151ebd..a43b6cc82d0 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -351,8 +351,9 @@ def verbosity(self) -> int: def read( cls: type[Config], confdir: str | os.PathLike[str], - overrides: dict[str, Any] | None = None, - tags: Tags | None = None, + *, + overrides: dict[str, Any], + tags: Tags, ) -> Config: """Create a Config object from configuration file.""" filename = Path(confdir, CONFIG_FILENAME) @@ -360,23 +361,7 @@ def read( raise ConfigError( __("config directory doesn't contain a conf.py file (%s)") % confdir ) - namespace = eval_config_file(filename, tags) - - # Note: Old sphinx projects have been configured as "language = None" because - # sphinx-quickstart previously generated this by default. - # To keep compatibility, they should be fallback to 'en' for a while - # (This conversion should not be removed before 2025-01-01). - if namespace.get('language', ...) is None: - logger.warning( - __( - "Invalid configuration value found: 'language = None'. " - 'Update your configuration to a valid language code. ' - "Falling back to 'en' (English)." 
- ) - ) - namespace['language'] = 'en' - - return cls(namespace, overrides) + return _read_conf_py(filename, overrides=overrides, tags=tags) def convert_overrides(self, name: str, value: str) -> Any: opt = self._options[name] @@ -589,12 +574,28 @@ def __setstate__(self, state: dict[str, Any]) -> None: self.__dict__.update(state) -def eval_config_file( - filename: str | os.PathLike[str], tags: Tags | None -) -> dict[str, Any]: - """Evaluate a config file.""" - filename = Path(filename) +def _read_conf_py(conf_path: Path, *, overrides: dict[str, Any], tags: Tags) -> Config: + """Create a Config object from a conf.py file.""" + namespace = eval_config_file(conf_path, tags) + # Note: Old sphinx projects have been configured as "language = None" because + # sphinx-quickstart previously generated this by default. + # To keep compatibility, they should be fallback to 'en' for a while + # (This conversion should not be removed before 2025-01-01). + if namespace.get('language', ...) is None: + logger.warning( + __( + "Invalid configuration value found: 'language = None'. " + 'Update your configuration to a valid language code. ' + "Falling back to 'en' (English)." 
+ ) + ) + namespace['language'] = 'en' + return Config(namespace, overrides) + + +def eval_config_file(filename: Path, tags: Tags) -> dict[str, Any]: + """Evaluate a config file.""" namespace: dict[str, Any] = { '__file__': str(filename), 'tags': tags, diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index fc1ba4c7321..b3392e654b2 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -19,6 +19,7 @@ ) from sphinx.deprecation import RemovedInSphinx90Warning from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError +from sphinx.util.tags import Tags if TYPE_CHECKING: from collections.abc import Iterable @@ -139,7 +140,7 @@ def test_core_config(app: SphinxTestApp) -> None: def test_config_not_found(tmp_path): with pytest.raises(ConfigError): - Config.read(tmp_path) + Config.read(tmp_path, overrides={}, tags=Tags()) @pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL))) @@ -394,12 +395,12 @@ def test_errors_warnings(logger, tmp_path): # test the error for syntax errors in the config file (tmp_path / 'conf.py').write_text('project = \n', encoding='ascii') with pytest.raises(ConfigError) as excinfo: - Config.read(tmp_path, {}, None) + Config.read(tmp_path, overrides={}, tags=Tags()) assert 'conf.py' in str(excinfo.value) # test the automatic conversion of 2.x only code in configs (tmp_path / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8') - cfg = Config.read(tmp_path, {}, None) + cfg = Config.read(tmp_path, overrides={}, tags=Tags()) assert cfg.project == 'Jägermeister' assert logger.called is False @@ -440,7 +441,7 @@ def test_config_eol(logger, tmp_path): configfile = tmp_path / 'conf.py' for eol in (b'\n', b'\r\n'): configfile.write_bytes(b'project = "spam"' + eol) - cfg = Config.read(tmp_path, {}, None) + cfg = Config.read(tmp_path, overrides={}, tags=Tags()) assert cfg.project == 'spam' assert logger.called is False @@ -678,7 +679,7 @@ def 
test_conf_py_language_none(tmp_path): (tmp_path / 'conf.py').write_text('language = None', encoding='utf-8') # When we load conf.py into a Config object - cfg = Config.read(tmp_path, {}, None) + cfg = Config.read(tmp_path, overrides={}, tags=Tags()) # Then the language is coerced to English assert cfg.language == 'en' @@ -691,7 +692,7 @@ def test_conf_py_language_none_warning(logger, tmp_path): (tmp_path / 'conf.py').write_text('language = None', encoding='utf-8') # When we load conf.py into a Config object - Config.read(tmp_path, {}, None) + Config.read(tmp_path, overrides={}, tags=Tags()) # Then a warning is raised assert logger.warning.called @@ -708,7 +709,7 @@ def test_conf_py_no_language(tmp_path): (tmp_path / 'conf.py').touch() # When we load conf.py into a Config object - cfg = Config.read(tmp_path, {}, None) + cfg = Config.read(tmp_path, overrides={}, tags=Tags()) # Then the language is coerced to English assert cfg.language == 'en' @@ -720,7 +721,7 @@ def test_conf_py_nitpick_ignore_list(tmp_path): (tmp_path / 'conf.py').touch() # When we load conf.py into a Config object - cfg = Config.read(tmp_path, {}, None) + cfg = Config.read(tmp_path, overrides={}, tags=Tags()) # Then the default nitpick_ignore[_regex] is an empty list assert cfg.nitpick_ignore == [] From 39c81254de1708426b929611e418c76fb15b39aa Mon Sep 17 00:00:00 2001 From: Victor Wheeler Date: Sun, 8 Jun 2025 13:35:36 -0600 Subject: [PATCH 114/435] Update ``extending_build.rst`` to match ``tutorials/example/todo.py`` (#13515) Co-authored-by: Adam Turner <9087854+aa-turner@users.noreply.github.com> --- doc/development/tutorials/extending_build.rst | 49 ++++++++++--------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/doc/development/tutorials/extending_build.rst b/doc/development/tutorials/extending_build.rst index 4d3606a0a33..9894d656fed 100644 --- a/doc/development/tutorials/extending_build.rst +++ b/doc/development/tutorials/extending_build.rst @@ -143,7 +143,7 @@ Looking 
first at the ``TodolistDirective`` directive: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 24-27 + :pyobject: TodolistDirective It's very simple, creating and returning an instance of our ``todolist`` node class. The ``TodolistDirective`` directive itself has neither content nor @@ -153,7 +153,7 @@ directive: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 30-53 + :pyobject: TodoDirective Several important things are covered here. First, as you can see, we're now subclassing the :class:`~sphinx.util.docutils.SphinxDirective` helper class @@ -168,16 +168,16 @@ new unique integer on each call and therefore leads to unique target names. The target node is instantiated without any text (the first two arguments). On creating admonition node, the content body of the directive are parsed using -``self.state.nested_parse``. The first argument gives the content body, and -the second one gives content offset. The third argument gives the parent node -of parsed result, in our case the ``todo`` node. Following this, the ``todo`` -node is added to the environment. This is needed to be able to create a list of -all todo entries throughout the documentation, in the place where the author -puts a ``todolist`` directive. For this case, the environment attribute -``todo_all_todos`` is used (again, the name should be unique, so it is prefixed -by the extension name). It does not exist when a new environment is created, so -the directive must check and create it if necessary. Various information about -the todo entry's location are stored along with a copy of the node. +``self.parse_content_to_nodes()``. +Following this, the ``todo`` node is added to the environment. +This is needed to be able to create a list of all todo entries throughout +the documentation, in the place where the author puts a ``todolist`` directive. 
+For this case, the environment attribute ``todo_all_todos`` is used +(again, the name should be unique, so it is prefixed by the extension name). +It does not exist when a new environment is created, so the directive must +check and create it if necessary. +Various information about the todo entry's location are stored along with +a copy of the node. In the last line, the nodes that should be put into the doctree are returned: the target node and the admonition node. @@ -211,7 +211,7 @@ the :event:`env-purge-doc` event: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 56-61 + :pyobject: purge_todos Since we store information from source files in the environment, which is persistent, it may become out of date when the source file changes. Therefore, @@ -229,7 +229,7 @@ to be merged: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 64-68 + :pyobject: merge_todos The other handler belongs to the :event:`doctree-resolved` event: @@ -237,12 +237,13 @@ The other handler belongs to the :event:`doctree-resolved` event: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 71-113 + :pyobject: process_todo_nodes -The :event:`doctree-resolved` event is emitted at the end of :ref:`phase 3 -(resolving) ` and allows custom resolving to be done. The handler -we have written for this event is a bit more involved. If the -``todo_include_todos`` config value (which we'll describe shortly) is false, +The :event:`doctree-resolved` event is emitted for each document that is +about to be written at the end of :ref:`phase 3 (resolving) ` +and allows custom resolving to be done on that document. +The handler we have written for this event is a bit more involved. +If the ``todo_include_todos`` config value (which we'll describe shortly) is false, all ``todo`` and ``todolist`` nodes are removed from the documents. If not, ``todo`` nodes just stay where and how they are. 
``todolist`` nodes are replaced by a list of todo entries, complete with backlinks to the location @@ -266,17 +267,17 @@ the other parts of our extension. Let's look at our ``setup`` function: .. literalinclude:: examples/todo.py :language: python :linenos: - :lines: 116- + :pyobject: setup The calls in this function refer to the classes and functions we added earlier. What the individual calls do is the following: * :meth:`~Sphinx.add_config_value` lets Sphinx know that it should recognize the - new *config value* ``todo_include_todos``, whose default value should be - ``False`` (this also tells Sphinx that it is a boolean value). + new *config value* ``todo_include_todos``, whose default value is ``False`` + (which also tells Sphinx that it is a boolean value). - If the third argument was ``'html'``, HTML documents would be full rebuild if the - config value changed its value. This is needed for config values that + If the third argument was ``'html'``, HTML documents would be fully rebuilt + if the config value changed its value. This is needed for config values that influence reading (build :ref:`phase 1 (reading) `). * :meth:`~Sphinx.add_node` adds a new *node class* to the build system. It also From 1580f5f7fda2f741c8052fca7239948612bf4463 Mon Sep 17 00:00:00 2001 From: Victor Wheeler Date: Sun, 8 Jun 2025 13:41:45 -0600 Subject: [PATCH 115/435] Note that the ``:doc:`` role is case-sensitive (#13587) --- doc/usage/referencing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/usage/referencing.rst b/doc/usage/referencing.rst index 2597c9ce597..571d3c798bc 100644 --- a/doc/usage/referencing.rst +++ b/doc/usage/referencing.rst @@ -136,8 +136,8 @@ There is also a way to directly link to documents: .. rst:role:: doc - Link to the specified document; the document name can be specified in - absolute or relative fashion. 
For example, if the reference + Link to the specified document; the document name can be a relative or absolute + path and is always case-sensitive, even on Windows. For example, if the reference ``:doc:`parrot``` occurs in the document ``sketches/index``, then the link refers to ``sketches/parrot``. If the reference is ``:doc:`/people``` or ``:doc:`../people```, the link refers to ``people``. From e4accf42fcaf2bb4ea4b3055719076d1000e03ba Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+aa-turner@users.noreply.github.com> Date: Sun, 8 Jun 2025 23:09:43 +0100 Subject: [PATCH 116/435] Add private reference to the registry for builders --- sphinx/builders/__init__.py | 7 ++++--- sphinx/builders/changes.py | 2 +- sphinx/builders/html/__init__.py | 12 ++++++------ sphinx/builders/latex/__init__.py | 2 +- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 4e116732e7a..b855168f817 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -125,6 +125,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None: self.tags.add(self.name) self.tags.add(f'format_{self.format}') self.tags.add(f'builder_{self.name}') + self._registry = app.registry # images that need to be copied over (source -> dest) self.images: dict[str, str] = {} @@ -150,7 +151,7 @@ def _translator(self) -> NullTranslations | None: def get_translator_class(self, *args: Any) -> type[nodes.NodeVisitor]: """Return a class of translator.""" - return self.env._registry.get_translator_class(self) + return self._registry.get_translator_class(self) def create_translator(self, *args: Any) -> nodes.NodeVisitor: """Return an instance of translator. @@ -158,7 +159,7 @@ def create_translator(self, *args: Any) -> nodes.NodeVisitor: This method returns an instance of ``default_translator_class`` by default. Users can replace the translator class with ``app.set_translator()`` API. 
""" - return self.env._registry.create_translator(self, *args) + return self._registry.create_translator(self, *args) # helper methods def init(self) -> None: @@ -643,7 +644,7 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None: filename = str(env.doc2path(docname)) filetype = get_filetype(self._app.config.source_suffix, filename) - publisher = self.env._registry.get_publisher(self._app, filetype) + publisher = self._registry.get_publisher(self._app, filetype) self.env.current_document._parser = publisher.parser # record_dependencies is mutable even though it is in settings, # explicitly re-initialise for each document diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py index 059a7d1b055..99d46fa0486 100644 --- a/sphinx/builders/changes.py +++ b/sphinx/builders/changes.py @@ -34,7 +34,7 @@ def init(self) -> None: confdir=self.confdir, app=self._app, config=self.config, - registry=self.env._registry, + registry=self._registry, ) self.theme = theme_factory.create('default') self.templates.init(self, self.theme) diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index 1195d08beb6..de49f89bbb4 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -232,7 +232,7 @@ def init_templates(self) -> None: confdir=self.confdir, app=self._app, config=self.config, - registry=self.env._registry, + registry=self._registry, ) theme_name, theme_options = self.get_theme_config() self.theme = theme_factory.create(theme_name) @@ -284,7 +284,7 @@ def init_css_files(self) -> None: for filename in self._get_style_filenames(): self.add_css_file(filename, priority=200) - for filename, attrs in self.env._registry.css_files: + for filename, attrs in self._registry.css_files: self.add_css_file(filename, **attrs) for filename, attrs in self.get_builder_config('css_files', 'html'): @@ -311,7 +311,7 @@ def init_js_files(self) -> None: self.add_js_file('doctools.js', priority=200) 
self.add_js_file('sphinx_highlight.js', priority=200) - for filename, attrs in self.env._registry.js_files: + for filename, attrs in self._registry.js_files: self.add_js_file(filename or '', **attrs) for filename, attrs in self.get_builder_config('js_files', 'html'): @@ -336,7 +336,7 @@ def math_renderer_name(self) -> str | None: return name else: # not given: choose a math_renderer from registered ones as possible - renderers = list(self.env._registry.html_inline_math_renderers) + renderers = list(self._registry.html_inline_math_renderers) if len(renderers) == 1: # only default math_renderer (mathjax) is registered return renderers[0] @@ -524,9 +524,9 @@ def prepare_writing(self, docnames: Set[str]) -> None: )) # add assets registered after ``Builder.init()``. - for css_filename, attrs in self.env._registry.css_files: + for css_filename, attrs in self._registry.css_files: self.add_css_file(css_filename, **attrs) - for js_filename, attrs in self.env._registry.js_files: + for js_filename, attrs in self._registry.js_files: self.add_js_file(js_filename or '', **attrs) # back up _css_files and _js_files to allow adding CSS/JS files to a specific page. 
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py index d5e4a779aa1..d22c959b276 100644 --- a/sphinx/builders/latex/__init__.py +++ b/sphinx/builders/latex/__init__.py @@ -211,7 +211,7 @@ def init_context(self) -> None: def update_context(self) -> None: """Update template variables for .tex file just before writing.""" # Apply extension settings to context - registry = self.env._registry + registry = self._registry self.context['packages'] = registry.latex_packages self.context['packages_after_hyperref'] = registry.latex_packages_after_hyperref From 4d860475d77f9cd6580df22356c3d94e4328d06d Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 9 Jun 2025 21:39:39 +0100 Subject: [PATCH 117/435] Deprecate ``Parser.set_application()`` (#13637) --- CHANGES.rst | 4 ++++ sphinx/parsers.py | 5 ++++- sphinx/registry.py | 3 ++- sphinx/testing/restructuredtext.py | 3 ++- tests/test_markup/test_parser.py | 3 ++- 5 files changed, 14 insertions(+), 4 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9bd8abece0e..9b6861dd14a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -14,6 +14,10 @@ Deprecated including ``builder.app``, ``env.app``, ``events.app``, and ``SphinxTransform.`app``. Patch by Adam Turner. +* #13637: Deprecate the :py:meth:`!set_application` method + of :py:class:`~sphinx.parsers.Parser` objects. + Sphinx now directly sets the :py:attr:`!config` and :py:attr:`!env` attributes. + Patch by Adam Turner. 
Features added -------------- diff --git a/sphinx/parsers.py b/sphinx/parsers.py index 70ff3eaae62..698cd12e76d 100644 --- a/sphinx/parsers.py +++ b/sphinx/parsers.py @@ -10,6 +10,7 @@ from docutils.statemachine import StringList from docutils.transforms.universal import SmartQuotes +from sphinx.deprecation import _deprecation_warning from sphinx.util.rst import append_epilog, prepend_prolog if TYPE_CHECKING: @@ -44,7 +45,9 @@ def set_application(self, app: Sphinx) -> None: :param sphinx.application.Sphinx app: Sphinx application object """ - self._app = app + cls_module = self.__class__.__module__ + cls_name = self.__class__.__qualname__ + _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(10, 0)) self.config = app.config self.env = app.env diff --git a/sphinx/registry.py b/sphinx/registry.py index 973aa6dfed4..529036c8f8e 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -379,7 +379,8 @@ def create_source_parser(self, app: Sphinx, filename: str) -> Parser: parser_class = self.get_source_parser(filename) parser = parser_class() if isinstance(parser, SphinxParser): - parser.set_application(app) + parser.config = app.config + parser.env = app.env return parser def add_translator( diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index b04b61a4021..b2ebcf23002 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -22,7 +22,8 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: reader = SphinxStandaloneReader() reader.setup(app) parser = RSTParser() - parser.set_application(app) + parser.config = app.config + parser.env = app.env with sphinx_domains(env): return publish_doctree( text, diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py index eb8ccf24f1d..dbaa5e8cb4e 100644 --- a/tests/test_markup/test_parser.py +++ b/tests/test_markup/test_parser.py @@ -16,7 +16,8 @@ def 
test_RSTParser_prolog_epilog(RSTStateMachine, app): document = new_document('dummy.rst') document.settings = Mock(tab_width=8, language_code='') parser = RSTParser() - parser.set_application(app) + parser.config = app.config + parser.env = app.env # normal case text = 'hello Sphinx world\nSphinx is a document generator' From b544cfca21ea2cba854d963beb774a6848edfba0 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 9 Jun 2025 21:56:20 +0100 Subject: [PATCH 118/435] Rename ``SphinxBaseReader.setup()`` to ``_setup_transforms()`` (#13638) --- sphinx/io.py | 22 ++++------------------ sphinx/testing/restructuredtext.py | 2 +- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/sphinx/io.py b/sphinx/io.py index 26c8b756fab..f61c9d16378 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -25,6 +25,7 @@ from sphinx.application import Sphinx from sphinx.environment import BuildEnvironment + from sphinx.registry import SphinxComponentRegistry logger = logging.getLogger(__name__) @@ -38,20 +39,6 @@ class SphinxBaseReader(standalone.Reader): # type: ignore[misc] transforms: list[type[Transform]] = [] - def __init__(self, *args: Any, **kwargs: Any) -> None: - from sphinx.application import Sphinx - - if len(args) > 0 and isinstance(args[0], Sphinx): - self._app = args[0] - self._env = self._app.env - args = args[1:] - - super().__init__(*args, **kwargs) - - def setup(self, app: Sphinx) -> None: - self._app = app # hold application object only for compatibility - self._env = app.env - def get_transforms(self) -> list[type[Transform]]: transforms = super().get_transforms() + self.transforms @@ -83,9 +70,8 @@ def new_document(self) -> nodes.document: class SphinxStandaloneReader(SphinxBaseReader): """A basic document reader for Sphinx.""" - def setup(self, app: Sphinx) -> None: - self.transforms = self.transforms + app.registry.get_transforms() - super().setup(app) + def _setup_transforms(self, *, registry: 
SphinxComponentRegistry) -> None: + self.transforms = self.transforms + registry.get_transforms() def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document: # type: ignore[type-arg] self.source = source @@ -130,7 +116,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: def create_publisher(app: Sphinx, filetype: str) -> Publisher: reader = SphinxStandaloneReader() - reader.setup(app) + reader._setup_transforms(registry=app.registry) parser = app.registry.create_source_parser(app, filetype) if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == (): diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index b2ebcf23002..4439b128cd5 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -20,7 +20,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: try: app.env.current_document.docname = docname reader = SphinxStandaloneReader() - reader.setup(app) + reader._setup_transforms(registry=app.registry) parser = RSTParser() parser.config = app.config parser.env = app.env From 8eaa0ab60f044cd15435b8f54d1e079adddaa899 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 9 Jun 2025 22:27:03 +0100 Subject: [PATCH 119/435] Stop taking ``app`` in ``create_source_parser()`` (#13639) --- CHANGES.rst | 4 ++++ sphinx/io.py | 2 +- sphinx/registry.py | 8 +++++--- sphinx/transforms/i18n.py | 4 +--- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 9b6861dd14a..28c9daaf09d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -7,6 +7,10 @@ Dependencies Incompatible changes -------------------- +* #13639: :py:meth:`!SphinxComponentRegistry.create_source_parser` no longer + has an *app* parameter, instead taking *config* and *env*. + Patch by Adam Turner. 
+ Deprecated ---------- diff --git a/sphinx/io.py b/sphinx/io.py index f61c9d16378..8124f3ddf38 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -118,7 +118,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher: reader = SphinxStandaloneReader() reader._setup_transforms(registry=app.registry) - parser = app.registry.create_source_parser(app, filetype) + parser = app.registry.create_source_parser(filetype, config=app.config, env=app.env) if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == (): # a workaround for recommonmark # If recommonmark.AutoStrictify is enabled, the parser invokes reST parser diff --git a/sphinx/registry.py b/sphinx/registry.py index 529036c8f8e..1f5fef1821a 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -375,12 +375,14 @@ def get_source_parser(self, filetype: str) -> type[Parser]: def get_source_parsers(self) -> dict[str, type[Parser]]: return self.source_parsers - def create_source_parser(self, app: Sphinx, filename: str) -> Parser: + def create_source_parser( + self, filename: str, *, config: Config, env: BuildEnvironment + ) -> Parser: parser_class = self.get_source_parser(filename) parser = parser_class() if isinstance(parser, SphinxParser): - parser.config = app.config - parser.env = app.env + parser.config = config + parser.env = env return parser def add_translator( diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py index bfacfcf1a96..27db99c542f 100644 --- a/sphinx/transforms/i18n.py +++ b/sphinx/transforms/i18n.py @@ -5,7 +5,6 @@ import contextlib from re import DOTALL, match from textwrap import indent -from types import SimpleNamespace from typing import TYPE_CHECKING, Any, TypeVar from docutils import nodes @@ -109,9 +108,8 @@ def publish_msgstr( config.rst_prolog = None reader = _SphinxI18nReader(registry=registry) - app = SimpleNamespace(config=config, env=env, registry=registry) filetype = get_filetype(config.source_suffix, source_path) - parser = 
registry.create_source_parser(app, filetype) # type: ignore[arg-type] + parser = registry.create_source_parser(filetype, config=config, env=env) doc = reader.read( source=StringInput( source=source, source_path=f'{source_path}:{source_line}:' From 82736e0cee14cf5503a7510b37e03291cba23a13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Mon, 9 Jun 2025 20:17:42 +0200 Subject: [PATCH 120/435] LaTeX: fix #13635 (cell containing table turns row colors off) --- sphinx/texinputs/sphinxlatextables.sty | 91 ++++++++++++++++++++++---- 1 file changed, 77 insertions(+), 14 deletions(-) diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty index 4114955e071..dda2711f2e3 100644 --- a/sphinx/texinputs/sphinxlatextables.sty +++ b/sphinx/texinputs/sphinxlatextables.sty @@ -1,7 +1,7 @@ %% TABLES (WITH SUPPORT FOR MERGED CELLS OF GENERAL CONTENTS) % % change this info string if making any custom modification -\ProvidesPackage{sphinxlatextables}[2024/07/01 v7.4.0 tables]% +\ProvidesPackage{sphinxlatextables}[2025/06/09 v8.3.0 tables]% % Provides support for this output mark-up from Sphinx latex writer % and table templates: @@ -42,6 +42,11 @@ % - \sphinxthistablewithnocolorrowsstyle % - \sphinxthistablewithvlinesstyle % - \sphinxthistablewithnovlinesstyle +% These conditionals added at 8.3.0 for nested tables not to break row colors +% (#13635). Nested tables are only partially supported by Sphinx LaTeX. +% The method here is with no changes to neither writer nor templates. +\newif\ifspx@intable +\newif\ifspx@thistableisnested % % Also provides user command (see docs) % - \sphixncolorblend @@ -62,8 +67,8 @@ \RequirePackage{tabulary} % tabulary has a bug with its re-definition of \multicolumn in its first pass % which is not \long. But now Sphinx does not use LaTeX's \multicolumn but its -% own macro. Hence we don't even need to patch tabulary. 
See -% sphinxpackagemulticell.sty +% own macro. Hence we don't even need to patch tabulary. +% % X or S (Sphinx) may have meanings if some table package is loaded hence % \X was chosen to avoid possibility of conflict \newcolumntype{\X}[2]{p{\dimexpr @@ -109,7 +114,8 @@ \LTpre\z@skip\LTpost\z@skip % set to zero longtable's own skips \edef\sphinxbaselineskip{\dimexpr\the\dimexpr\baselineskip\relax\relax}% \spx@inframedtrue % message to sphinxheavybox - }% + \spx@table@setnestedflags +} % Compatibility with caption package \def\sphinxthelongtablecaptionisattop{% \spx@ifcaptionpackage{\noalign{\vskip-\belowcaptionskip}}{}% @@ -124,7 +130,18 @@ % B. Table with tabular or tabulary \def\sphinxattablestart{\par\vskip\dimexpr\sphinxtablepre\relax \spx@inframedtrue % message to sphinxheavybox + \spx@table@setnestedflags }% +% MEMO: this happens inside a savenotes environment and hence flags +% are reset on exit of it. +\def\spx@table@setnestedflags{% Issue #13635 + \ifspx@intable + \let\spx@table@resetcolortbl\spx@nestedtable@resetcolortbl + \spx@thistableisnestedtrue + \else + \spx@intabletrue + \fi + }% \let\sphinxattableend\sphinxatlongtableend % This is used by tabular and tabulary templates \newcommand*\sphinxcapstartof[1]{% @@ -270,6 +287,9 @@ % cells (the code does inserts & tokens, see TN1b). It was decided to keep it % simple with \sphinxstartmulticolumn...\sphinxstopmulticolumn. % +% **** ATTENTION: Sphinx does generate at least some nested tables in LaTeX +% **** TODO: clarify if next paragraph means we must raise an +% **** if LaTeX writer detects a merged cell inside nested table. % MEMO about nesting: if sphinxmulticolumn is encountered in a nested tabular % inside a tabulary it will think to be at top level in the tabulary. 
But % Sphinx generates no nested tables, and if some LaTeX macro uses internally a @@ -857,7 +877,32 @@ }% \the\everycr }% - \global\rownum\@ne % is done from inside table so ok with tabulary two passes + \ifspx@thistableisnested + % Attention that tabulary does two passes so we need to push the + % initial rownum and, after the first pass, we must reset it! + % Fortunately Sphinx LaTeX writer makes parent table tabular or + % longtable if a nested table is a tabulary. So we don't need to + % worry about distinguishing if this or parent is tabulary. + \ifx\TY@final\@undefined % tabular + \spx@gpush@rownum + \else + \ifx\equation$% tabulary, first pass + \spx@gpush@rownum + \else % tabulary, second pass + \spx@gpop@rownum % reset \rownum + \spx@gpush@rownum% and push it again. + \fi + \fi + % To make nested tables stand out in a color row, we toggle the parity. + % TODO: Double-check if compatible with method for color of header + % row. + % TODO: Perhaps better to use specific colors for nested tables? + % This would mean though adding new sphinxsetup parameters + % and extending the documentation... + \ifodd\rownum\global\rownum\z@\else\global\rownum\@ne\fi + \else + \global\rownum\@ne + \fi \sphinxSwitchCaseRowColor\rownum % set up color for the first body row \sphinxrowcolorON % has been done from \sphinxtoprule location but let's do % it again in case \sphinxtabletoprulehook has been used @@ -883,20 +928,34 @@ \let\sphinxtabledecrementrownum\@empty % \sphinxtableafterendhook will be modified by colorrows class to execute -% this after the table +% this after the table. 
\def\spx@table@resetcolortbl{% \sphinxrowcolorOFF - \spx@table@reset@CTeverycr + \spx@table@reset@CT@everycr % this last bit is done in order for the \sphinxbottomrule from the "foot" -% longtable template to be able to use same code as the \sphinxbottomrule -% at end of table body; see \sphinxbooktabsspecialbottomrule code +% part of the longtable template to be able to use same code as the +% \sphinxbottomrule at end of table body; see \sphinxbooktabsspecialbottomrule. \global\rownum\z@ + \global\let\spx@rownum@stack\@empty +} +% Most of \spx@table@resetcolortbl must be avoided if the table is nested. +% Besides the sphinxTableRowColor must be reset because it has been +% redefined by the cells of the nested table. So this is the alternative +% macro which is executed on exit of nested table. +\def\spx@nestedtable@resetcolortbl{% + \spx@gpop@rownum + \sphinxSwitchCaseRowColor\rownum } -\def\spx@table@reset@CTeverycr{% +\def\spx@table@reset@CT@everycr{% % we should probably be more cautious and not hard-code here the colortbl -% set-up; so the macro is defined without @ to fac +% set-up. \global\CT@everycr{\noalign{\global\let\CT@row@color\relax}\the\everycr}% } +\let\spx@rownum@stack\@empty +\def\spx@gpush@rownum{\xdef\spx@rownum@stack{\the\rownum.\spx@rownum@stack}}% +\def\spx@gpop@rownum{\afterassignment\spx@gpop@rownum@i + \global\rownum=\spx@rownum@stack\relax} +\def\spx@gpop@rownum@i.#1\relax{\gdef\spx@rownum@stack{#1}} % At last the style macros \sphinxthistablewithstandardstyle etc... @@ -1047,10 +1106,13 @@ local use of booktabs table style}% % % this one is not set to \@empty by nocolorrows, because it looks harmless % to execute it always, as it simply resets to standard colortbl state after -% the table; so we don't need an @@ version for this one +% the table [^1]; so we don't need an @@ version for this one. +% .. [1]: which is bad if nested in another table. This is taken care of +% at level of \sphinxattablestart and \sphinxatlongtablestart. 
\spx@prepend\spx@table@resetcolortbl\to\sphinxtableafterendhook } \def\spx@prepend#1\to#2{% attention about using this only with #2 "storage macro" +% MEMO: #1 is prepended with no expansion, i.e. "as is". \toks@{#1}% \toks@\expandafter\expandafter\expandafter{\expandafter\the\expandafter\toks@#2}% \edef#2{\the\toks@}% @@ -1064,9 +1126,10 @@ local use of booktabs table style}% \let\spx@table@startbodycolorrows\@empty \let\sphinxtabledecrementrownum \@empty % we don't worry about \sphinxtableafterendhook as the \spx@table@resetcolortbl -% done at end can not do harm; and we could also have not bothered with the +% done at end can not do harm [^1]; and we could also have not bothered with the % \sphinxtabledecrementrownum as its \rownum decrement, if active, is harmless -% in non-colorrows context +% in non-colorrows context. +% .. [1]: if nested in another table it is modified to do no harm. } % (not so easy) implementation of the booktabscolorgaps option. This option From dfa7254cf4cfbbcc0a199b3f3665be63101d22a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Mon, 9 Jun 2025 23:43:48 +0200 Subject: [PATCH 121/435] Update CHANGES for fix of issues #13597 and #13635 --- CHANGES.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 28c9daaf09d..fefcd768f0f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -37,9 +37,6 @@ Features added Patch by Jean-François B. * #13535: html search: Update to the latest version of Snowball (v3.0.1). Patch by Adam Turner. -* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up - and PDF cannot be built. - Patch by Jean-François B. * #13704: autodoc: Detect :py:func:`typing_extensions.overload ` and :py:func:`~typing.final` decorators. Patch by Spencer Brown. @@ -53,9 +50,15 @@ Bugs fixed Patch by Alicia Garcia-Raboso. * #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`. 
Patch by Shengyu Zhang and Adam Turner. +* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up + and PDF cannot be built. + Patch by Jean-François B. * #13619: LaTeX: possible duplicated footnotes in PDF from object signatures (typically if :confval:`latex_show_urls` ``= 'footnote'``). Patch by Jean-François B. +* #13635: LaTeX: if a cell contains a table, row coloring is turned off for + the next table cells. + Patch by Jean-François B. Testing ------- From 9f5e80375e62723456e868362b5a218b0212a666 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Tue, 10 Jun 2025 00:46:48 +0200 Subject: [PATCH 122/435] LaTeX: protect the fix of #13635 against a particular configuration (#13641) --- sphinx/texinputs/sphinxlatextables.sty | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty index dda2711f2e3..08efac559c0 100644 --- a/sphinx/texinputs/sphinxlatextables.sty +++ b/sphinx/texinputs/sphinxlatextables.sty @@ -943,7 +943,12 @@ % redefined by the cells of the nested table. So this is the alternative % macro which is executed on exit of nested table. \def\spx@nestedtable@resetcolortbl{% - \spx@gpop@rownum + \ifx\spx@rownum@stack\@empty\else +% The stack can be empty if this is executed on exit of a nested table, +% and the parent table has received the "nocolorrows" class, but globally +% colorrows are activated (default). So we protected against that case. 
+ \spx@gpop@rownum + \fi \sphinxSwitchCaseRowColor\rownum } \def\spx@table@reset@CT@everycr{% From 97f946a598dfde8e9f84a15396e9805fef779f9a Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 10 Jun 2025 00:56:29 +0100 Subject: [PATCH 123/435] Refactor and simplify ``sphinx.io._create_publisher()`` (#13642) --- sphinx/io.py | 26 +++++-------------- sphinx/registry.py | 6 +++-- sphinx/testing/restructuredtext.py | 2 +- .../test_directive_object_description.py | 9 +++++-- 4 files changed, 19 insertions(+), 24 deletions(-) diff --git a/sphinx/io.py b/sphinx/io.py index 8124f3ddf38..e2d299f8ae2 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -23,9 +23,7 @@ from docutils.parsers import Parser from docutils.transforms import Transform - from sphinx.application import Sphinx from sphinx.environment import BuildEnvironment - from sphinx.registry import SphinxComponentRegistry logger = logging.getLogger(__name__) @@ -70,8 +68,8 @@ def new_document(self) -> nodes.document: class SphinxStandaloneReader(SphinxBaseReader): """A basic document reader for Sphinx.""" - def _setup_transforms(self, *, registry: SphinxComponentRegistry) -> None: - self.transforms = self.transforms + registry.get_transforms() + def _setup_transforms(self, transforms: list[type[Transform]], /) -> None: + self.transforms = self.transforms + transforms def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document: # type: ignore[type-arg] self.source = source @@ -114,21 +112,11 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -def create_publisher(app: Sphinx, filetype: str) -> Publisher: +def _create_publisher( + *, env: BuildEnvironment, parser: Parser, transforms: list[type[Transform]] +) -> Publisher: reader = SphinxStandaloneReader() - reader._setup_transforms(registry=app.registry) - - parser = app.registry.create_source_parser(filetype, config=app.config, env=app.env) - if 
parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == (): - # a workaround for recommonmark - # If recommonmark.AutoStrictify is enabled, the parser invokes reST parser - # internally. But recommonmark-0.4.0 does not provide settings_spec for reST - # parser. As a workaround, this copies settings_spec for RSTParser to the - # CommonMarkParser. - from docutils.parsers.rst import Parser as RSTParser - - parser.settings_spec = RSTParser.settings_spec # type: ignore[misc] - + reader._setup_transforms(transforms) pub = Publisher( reader=reader, parser=parser, @@ -137,7 +125,7 @@ def create_publisher(app: Sphinx, filetype: str) -> Publisher: destination=NullOutput(), ) # Propagate exceptions by default when used programmatically: - defaults = {'traceback': True, **app.env.settings} + defaults = {'traceback': True, **env.settings} # Set default settings pub.get_settings(**defaults) return pub diff --git a/sphinx/registry.py b/sphinx/registry.py index 1f5fef1821a..f8247296eb5 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -12,7 +12,7 @@ from sphinx.domains.std import GenericObject, Target from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError from sphinx.extension import Extension -from sphinx.io import create_publisher +from sphinx.io import _create_publisher from sphinx.locale import __ from sphinx.parsers import Parser as SphinxParser from sphinx.roles import XRefRole @@ -601,7 +601,9 @@ def get_publisher(self, app: Sphinx, filetype: str) -> Publisher: return self.publishers[filetype] except KeyError: pass - publisher = create_publisher(app, filetype) + parser = self.create_source_parser(filetype, config=app.config, env=app.env) + transforms = self.get_transforms() + publisher = _create_publisher(env=app.env, parser=parser, transforms=transforms) self.publishers[filetype] = publisher return publisher diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index 
4439b128cd5..b17fd387946 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -20,7 +20,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: try: app.env.current_document.docname = docname reader = SphinxStandaloneReader() - reader._setup_transforms(registry=app.registry) + reader._setup_transforms(app.registry.get_transforms()) parser = RSTParser() parser.config = app.config parser.env = app.env diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py index 210b9aac381..6b85c34d326 100644 --- a/tests/test_directives/test_directive_object_description.py +++ b/tests/test_directives/test_directive_object_description.py @@ -9,7 +9,7 @@ from docutils import nodes from sphinx import addnodes -from sphinx.io import create_publisher +from sphinx.io import _create_publisher from sphinx.testing import restructuredtext from sphinx.util.docutils import sphinx_domains @@ -22,8 +22,13 @@ def _doctree_for_test( app: Sphinx, env: BuildEnvironment, docname: str ) -> nodes.document: + config = app.config + registry = app.registry env.prepare_settings(docname) - publisher = create_publisher(app, 'restructuredtext') + parser = registry.create_source_parser('restructuredtext', config=config, env=env) + publisher = _create_publisher( + env=env, parser=parser, transforms=registry.get_transforms() + ) with sphinx_domains(env): publisher.set_source(source_path=str(env.doc2path(docname))) publisher.publish() From 58ebe2d3e9c3f335db23e945f0a7dd6fcc0d6877 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 10 Jun 2025 01:22:22 +0100 Subject: [PATCH 124/435] Avoid passing ``app`` to ``SphinxComponentRegistry._get_publisher()`` (#13643) --- sphinx/builders/__init__.py | 4 +++- sphinx/registry.py | 8 +++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/sphinx/builders/__init__.py 
b/sphinx/builders/__init__.py index b855168f817..18c2596b383 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -644,7 +644,9 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None: filename = str(env.doc2path(docname)) filetype = get_filetype(self._app.config.source_suffix, filename) - publisher = self._registry.get_publisher(self._app, filetype) + publisher = self._registry._get_publisher( + filetype, config=self.config, env=self.env + ) self.env.current_document._parser = publisher.parser # record_dependencies is mutable even though it is in settings, # explicitly re-initialise for each document diff --git a/sphinx/registry.py b/sphinx/registry.py index f8247296eb5..0d4151ca67b 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -596,14 +596,16 @@ def get_envversion(self, app: Sphinx) -> Mapping[str, int]: return _get_env_version(app.extensions) - def get_publisher(self, app: Sphinx, filetype: str) -> Publisher: + def _get_publisher( + self, filetype: str, *, config: Config, env: BuildEnvironment + ) -> Publisher: try: return self.publishers[filetype] except KeyError: pass - parser = self.create_source_parser(filetype, config=app.config, env=app.env) + parser = self.create_source_parser(filetype, config=config, env=env) transforms = self.get_transforms() - publisher = _create_publisher(env=app.env, parser=parser, transforms=transforms) + publisher = _create_publisher(env=env, parser=parser, transforms=transforms) self.publishers[filetype] = publisher return publisher From bb5e5454bcef531af75526b3b669e99bd3af411a Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 10 Jun 2025 01:50:48 +0100 Subject: [PATCH 125/435] Deprecate ``Parser.{config,env}`` (#13644) --- CHANGES.rst | 3 ++- sphinx/parsers.py | 38 +++++++++++++++++++----------- sphinx/registry.py | 4 ++-- sphinx/testing/restructuredtext.py | 4 ++-- tests/test_markup/test_parser.py | 4 ++-- 5 files changed, 32 
insertions(+), 21 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index fefcd768f0f..7098714e670 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -20,7 +20,8 @@ Deprecated Patch by Adam Turner. * #13637: Deprecate the :py:meth:`!set_application` method of :py:class:`~sphinx.parsers.Parser` objects. - Sphinx now directly sets the :py:attr:`!config` and :py:attr:`!env` attributes. + Patch by Adam Turner. +* #13644: Deprecate the :py:attr:`!Parser.config` and :py:attr:`!env` attributes. Patch by Adam Turner. Features added diff --git a/sphinx/parsers.py b/sphinx/parsers.py index 698cd12e76d..eb5e77d9387 100644 --- a/sphinx/parsers.py +++ b/sphinx/parsers.py @@ -24,21 +24,31 @@ class Parser(docutils.parsers.Parser): - """A base class of source parsers. + """A base class for source parsers. - The additional parsers should inherit this class - instead of ``docutils.parsers.Parser``. - Compared with ``docutils.parsers.Parser``, - this class improves accessibility to Sphinx APIs. - - The subclasses can access sphinx core runtime objects (app, config and env). + Additional parsers should inherit from this class instead of + ``docutils.parsers.Parser``. + This class provides access to core Sphinx objects; *config* and *env*. 
""" - #: The config object - config: Config + _config: Config + _env: BuildEnvironment + + @property + def config(self) -> Config: + """The config object.""" + cls_module = self.__class__.__module__ + cls_name = self.__class__.__qualname__ + _deprecation_warning(cls_module, f'{cls_name}.config', remove=(9, 0)) + return self._config - #: The environment object - env: BuildEnvironment + @property + def env(self) -> BuildEnvironment: + """The environment object.""" + cls_module = self.__class__.__module__ + cls_name = self.__class__.__qualname__ + _deprecation_warning(cls_module, f'{cls_name}.env', remove=(9, 0)) + return self._env def set_application(self, app: Sphinx) -> None: """set_application will be called from Sphinx to set app and other instance variables @@ -47,9 +57,9 @@ def set_application(self, app: Sphinx) -> None: """ cls_module = self.__class__.__module__ cls_name = self.__class__.__qualname__ - _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(10, 0)) - self.config = app.config - self.env = app.env + _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(9, 0)) + self._config = app.config + self._env = app.env class RSTParser(docutils.parsers.rst.Parser, Parser): diff --git a/sphinx/registry.py b/sphinx/registry.py index 0d4151ca67b..6f7d7c477fe 100644 --- a/sphinx/registry.py +++ b/sphinx/registry.py @@ -381,8 +381,8 @@ def create_source_parser( parser_class = self.get_source_parser(filename) parser = parser_class() if isinstance(parser, SphinxParser): - parser.config = config - parser.env = env + parser._config = config + parser._env = env return parser def add_translator( diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index b17fd387946..68c78199606 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -22,8 +22,8 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: reader = SphinxStandaloneReader() 
reader._setup_transforms(app.registry.get_transforms()) parser = RSTParser() - parser.config = app.config - parser.env = app.env + parser._config = app.config + parser._env = app.env with sphinx_domains(env): return publish_doctree( text, diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py index dbaa5e8cb4e..6a71fed9e49 100644 --- a/tests/test_markup/test_parser.py +++ b/tests/test_markup/test_parser.py @@ -16,8 +16,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): document = new_document('dummy.rst') document.settings = Mock(tab_width=8, language_code='') parser = RSTParser() - parser.config = app.config - parser.env = app.env + parser._config = app.config + parser._env = app.env # normal case text = 'hello Sphinx world\nSphinx is a document generator' From a5366394ae527712c4edfeb07a5fbeecd4ca72e1 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 10 Jun 2025 02:23:51 +0100 Subject: [PATCH 126/435] Move build phase to the builder (#13645) --- sphinx/application.py | 9 +++++++-- sphinx/builders/__init__.py | 23 ++++++++++------------- 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/sphinx/application.py b/sphinx/application.py index 8117eecf340..8e53770545f 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -196,7 +196,6 @@ def __init__( :param pdb: If true, enable the Python debugger on an exception. :param exception_on_warning: If true, raise an exception on warnings. 
""" - self.phase = BuildPhase.INITIALIZATION self.verbosity = verbosity self._fresh_env_used: bool | None = None self.extensions: dict[str, Extension] = {} @@ -340,6 +339,12 @@ def fresh_env_used(self) -> bool | None: """ return self._fresh_env_used + @property + def phase(self) -> BuildPhase: + if not hasattr(self, 'builder'): + return BuildPhase.INITIALIZATION + return self.builder.phase + def _init_i18n(self) -> None: """Load translated strings from the configured localedirs if enabled in the configuration. @@ -420,7 +425,7 @@ def _init_builder(self) -> None: # ---- main "build" method ------------------------------------------------- def build(self, force_all: bool = False, filenames: Sequence[Path] = ()) -> None: - self.phase = BuildPhase.READING + self.builder.phase = BuildPhase.READING try: if force_all: self.builder.build_all() diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 18c2596b383..73426c6dc81 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -103,6 +103,8 @@ class Builder: #: The file format produced by the builder allows images to be embedded using data-URIs. 
supported_data_uri_images: ClassVar[bool] = False + phase: BuildPhase = BuildPhase.INITIALIZATION + srcdir = _StrPathProperty() confdir = _StrPathProperty() outdir = _StrPathProperty() @@ -431,14 +433,14 @@ def build( pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL) # global actions - self._app.phase = BuildPhase.CONSISTENCY_CHECK + self.phase = BuildPhase.CONSISTENCY_CHECK with progress_message(__('checking consistency')): self.env.check_consistency() else: if method == 'update' and not docnames: logger.info(bold(__('no targets are out of date.'))) - self._app.phase = BuildPhase.RESOLVING + self.phase = BuildPhase.RESOLVING # filter "docnames" (list of outdated files) by the updated # found_docs of the environment; this will remove docs that @@ -776,21 +778,17 @@ def _write_serial(self, docnames: Sequence[str]) -> None: len(docnames), self._app.verbosity, ): - _write_docname( - docname, app=self._app, env=self.env, builder=self, tags=self.tags - ) + _write_docname(docname, env=self.env, builder=self, tags=self.tags) def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None: def write_process(docs: list[tuple[str, nodes.document]]) -> None: - self._app.phase = BuildPhase.WRITING + self.phase = BuildPhase.WRITING for docname, doctree in docs: self.write_doc(docname, doctree) # warm up caches/compile templates using the first document firstname, docnames = docnames[0], docnames[1:] - _write_docname( - firstname, app=self._app, env=self.env, builder=self, tags=self.tags - ) + _write_docname(firstname, env=self.env, builder=self, tags=self.tags) tasks = ParallelTasks(nproc) chunks = make_chunks(docnames, nproc) @@ -808,7 +806,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None: def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None: next(progress) - self._app.phase = BuildPhase.RESOLVING + self.phase = BuildPhase.RESOLVING for chunk in chunks: arg = [] for docname in chunk: @@ -884,15 +882,14 @@ def 
_write_docname( docname: str, /, *, - app: Sphinx, env: BuildEnvironment, builder: Builder, tags: Tags, ) -> None: """Write a single document.""" - app.phase = BuildPhase.RESOLVING + builder.phase = BuildPhase.RESOLVING doctree = env.get_and_resolve_doctree(docname, builder=builder, tags=tags) - app.phase = BuildPhase.WRITING + builder.phase = BuildPhase.WRITING builder.write_doc_serialized(docname, doctree) builder.write_doc(docname, doctree) From f1316bb1698d5f217cb273c84272c76cc7528979 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Tue, 10 Jun 2025 13:36:27 +0200 Subject: [PATCH 127/435] LaTeX: allow more cases of table nesting, fix #13646 Tables using longtable can now contain nested tables inclusive of those rendered by tabulary, up to the suppression of the latter horizontal lines due to an upstream LaTeX bug. A longtable can never itself be nested, and will fall-back to tabular. Formerly longtable would raise (in principle) an error if it contained any sort of nested table, but the detection of being a longtable was faulty if not specified as class option. Relates #6838. --- doc/usage/restructuredtext/directives.rst | 61 ++++++++++-------- sphinx/templates/latex/tabulary.tex.jinja | 3 + sphinx/texinputs/sphinxlatextables.sty | 23 ++++++- sphinx/writers/latex.py | 64 ++++++++++++++----- .../expects/tabularcolumn.tex | 4 +- tests/roots/test-root/markup.txt | 8 +++ 6 files changed, 116 insertions(+), 47 deletions(-) diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst index 33269b522a6..94526304cdf 100644 --- a/doc/usage/restructuredtext/directives.rst +++ b/doc/usage/restructuredtext/directives.rst @@ -1472,6 +1472,15 @@ Check the :confval:`latex_table_style`. complex contents such as multiple paragraphs, blockquotes, lists, literal blocks, will render correctly to LaTeX output. +.. 
versionchanged:: 8.3.0 + + The partial support for nesting a table in another has been extended. + Formerly Sphinx would raise an error if ``longtable`` class was specified + for a table containing a nested table, and some cases would not raise an + error at Sphinx level but fail at LaTeX level during PDF build. This is a + complex topic in LaTeX rendering and the output can sometimes be improved + via the :rst:dir:`tabularcolumns` directive. + .. rst:directive:: .. tabularcolumns:: column spec This directive influences only the LaTeX output for the next table in @@ -1489,40 +1498,38 @@ Check the :confval:`latex_table_style`. :rst:dir:`tabularcolumns` conflicts with ``:widths:`` option of table directives. If both are specified, ``:widths:`` option will be ignored. - Sphinx will render tables with more than 30 rows with ``longtable``. - Besides the ``l``, ``r``, ``c`` and ``p{width}`` column specifiers, one can - also use ``\X{a}{b}`` (new in version 1.5) which configures the column - width to be a fraction ``a/b`` of the total line width and ``\Y{f}`` (new - in version 1.6) where ``f`` is a decimal: for example ``\Y{0.2}`` means that - the column will occupy ``0.2`` times the line width. + Sphinx renders tables with at most 30 rows using ``tabulary``, and those + with more rows with ``longtable``. - When this directive is used for a table with at most 30 rows, Sphinx will - render it with ``tabulary``. One can then use specific column types ``L`` - (left), ``R`` (right), ``C`` (centered) and ``J`` (justified). They have - the effect of a ``p{width}`` (i.e. each cell is a LaTeX ``\parbox``) with - the specified internal text alignment and an automatically computed - ``width``. - - .. warning:: + ``tabulary`` tries to compute automatically (internally to LaTeX) suitable + column widths. However, cells are then not allowed to contain + "problematic" elements such as lists, object descriptions, + blockquotes... 
Sphinx will fall back to using ``tabular`` if such a cell is + encountered (or a nested ``tabulary``). In such a case the table will have + a tendency to try to fill the whole available line width. - - Cells that contain list-like elements such as object descriptions, - blockquotes or any kind of lists are not compatible with the ``LRCJ`` - column types. The column type must then be some ``p{width}`` with an - explicit ``width`` (or ``\X{a}{b}`` or ``\Y{f}``). + :rst:dir:`tabularcolumns` can help in coercing the usage of ``tabulary`` if + one is careful to not employ the ``tabulary`` column types (``L``, ``R``, + ``C`` or ``J``) for those columns with at least one "problematic" cell, but + only LaTeX's ``p{}`` or Sphinx ``\X`` and ``\Y`` (described next). - - Literal blocks do not work with ``tabulary`` at all. Sphinx will - fall back to ``tabular`` or ``longtable`` environments and generate a - suitable column specification. + Literal blocks do not work at all with ``tabulary``. Sphinx will fall back + to ``tabular`` or ``longtable`` environments depending on the number of + rows. It will employ the :rst:dir:`tabularcolumns` specification only if it + contains no usage of the ``tabulary`` specific types. -In absence of the :rst:dir:`tabularcolumns` directive, and for a table with at -most 30 rows and no problematic cells as described in the above warning, -Sphinx uses ``tabulary`` and the ``J`` column-type for every column. + Besides the LaTeX ``l``, ``r``, ``c`` and ``p{width}`` column specifiers, + one can also use ``\X{a}{b}`` which configures the column width to be a + fraction ``a/b`` of the total line width and ``\Y{f}`` where ``f`` is a + decimal: for example ``\Y{0.2}`` means that the column will occupy ``0.2`` + times the line width. .. versionchanged:: 1.6 - Formerly, the ``L`` column-type was used (text is flushed-left). 
To revert - to this, include ``\newcolumntype{T}{L}`` in the LaTeX preamble, as in fact - Sphinx uses ``T`` and sets it by default to be an alias of ``J``. + Use ``J`` (justified) by default with ``tabulary``, not ``L`` + (flushed-left). To revert, include ``\newcolumntype{T}{L}`` in the LaTeX + preamble, as in fact Sphinx uses ``T`` and sets it by default to be an + alias of ``J``. .. hint:: diff --git a/sphinx/templates/latex/tabulary.tex.jinja b/sphinx/templates/latex/tabulary.tex.jinja index 6ebcec6d264..7ba065ed1a7 100644 --- a/sphinx/templates/latex/tabulary.tex.jinja +++ b/sphinx/templates/latex/tabulary.tex.jinja @@ -21,6 +21,9 @@ <% if 'nocolorrows' in table.styles -%> \sphinxthistablewithnocolorrowsstyle <% endif -%> +<% if table.is_nested -%> +\sphinxthistabularywithnohlinesifinlongtable +<% endif -%> <% if table.align -%> <%- if table.align in ('center', 'default') -%> \centering diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty index 08efac559c0..96ce2d0c80f 100644 --- a/sphinx/texinputs/sphinxlatextables.sty +++ b/sphinx/texinputs/sphinxlatextables.sty @@ -47,6 +47,8 @@ % The method here is with no changes to neither writer nor templates. \newif\ifspx@intable \newif\ifspx@thistableisnested +% Try to allow nested tables in a longtable. But tabulary causes problems. +\newif\ifspx@longtable % % Also provides user command (see docs) % - \sphixncolorblend @@ -115,6 +117,7 @@ \edef\sphinxbaselineskip{\dimexpr\the\dimexpr\baselineskip\relax\relax}% \spx@inframedtrue % message to sphinxheavybox \spx@table@setnestedflags + \spx@longtabletrue } % Compatibility with caption package \def\sphinxthelongtablecaptionisattop{% @@ -128,7 +131,10 @@ \def\sphinxatlongtableend{\@nobreakfalse % latex3/latex2e#173 \prevdepth\z@\vskip\sphinxtablepost\relax}% % B. 
Table with tabular or tabulary -\def\sphinxattablestart{\par\vskip\dimexpr\sphinxtablepre\relax +\def\sphinxattablestart{\par + \ifvmode % guard agains being nested in a table cell + \vskip\dimexpr\sphinxtablepre\relax + \fi \spx@inframedtrue % message to sphinxheavybox \spx@table@setnestedflags }% @@ -142,7 +148,12 @@ \spx@intabletrue \fi }% -\let\sphinxattableend\sphinxatlongtableend +\def\sphinxattableend{% + \@nobreakfalse % <- probably unneeded as this is not a longtable + \ifvmode % guard against being nested in a table cell + \prevdepth\z@\vskip\sphinxtablepost\relax + \fi +}% % This is used by tabular and tabulary templates \newcommand*\sphinxcapstartof[1]{% \vskip\parskip @@ -1083,6 +1094,10 @@ local use of booktabs table style}% % borderless style \def\sphinxthistablewithborderlessstyle{% + \sphinxthistablewithnohlines + \def\spx@arrayrulewidth{\z@}% +}% +\def\sphinxthistablewithnohlines{% \let\sphinxhline \@empty \let\sphinxcline \@gobble \let\sphinxvlinecrossing\@gobble @@ -1090,7 +1105,9 @@ local use of booktabs table style}% \let\spx@toprule \@empty \let\sphinxmidrule \@empty \let\sphinxbottomrule \@empty - \def\spx@arrayrulewidth{\z@}% +}% +\def\sphinxthistabularywithnohlinesifinlongtable{% + \ifspx@longtable\sphinxthistablewithnohlines\fi }% % colorrows style diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 0aa550a3b7e..53fa2564f37 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -134,6 +134,8 @@ def __init__(self, node: Element) -> None: self.has_problematic = False self.has_oldproblematic = False self.has_verbatim = False + # cf https://github.com/sphinx-doc/sphinx/issues/13646#issuecomment-2958309632 + self.is_nested = False self.entry_needs_linetrimming = 0 self.caption: list[str] = [] self.stubs: list[int] = [] @@ -147,29 +149,47 @@ def __init__(self, node: Element) -> None: self.cell_id = 0 # last assigned cell_id def is_longtable(self) -> bool: - """True if and only if table uses longtable 
environment.""" + """True if and only if table uses longtable environment. + + In absence of longtable class can only be used trustfully on departing + the table, as the number of rows is not known until then. + """ return self.row > 30 or 'longtable' in self.classes def get_table_type(self) -> str: """Returns the LaTeX environment name for the table. + It is used at time of ``depart_table()`` and again via ``get_colspec()``. The class currently supports: * longtable * tabular * tabulary """ - if self.is_longtable(): + if self.is_longtable() and not self.is_nested: return 'longtable' elif self.has_verbatim: return 'tabular' elif self.colspec: - return 'tabulary' + if any(c in 'LRCJT' for c in self.colspec): + # tabulary would complain "no suitable columns" if none of its + # column type were used so we ensure at least one matches. + # It is responsability of user to make sure not to use tabulary + # column types for a column containing a problematic cell. + return 'tabulary' + else: + return 'tabular' elif self.has_problematic or ( self.colwidths and 'colwidths-given' in self.classes ): return 'tabular' else: + # A nested tabulary in a longtable can not use any \hline's, + # i.e. it can not use "booktabs" or "standard" styles (due to a + # LaTeX upstream bug we do not try to solve). But we can't know + # here if it ends up in a tabular or longtable. So it is via + # LaTeX macros inserted by the tabulary template that the problem + # will be solved. return 'tabulary' def get_colspec(self) -> str: @@ -179,6 +199,7 @@ def get_colspec(self) -> str: .. note:: + This is used by the template renderer at time of depart_table(). The ``\\X`` and ``T`` column type specifiers are defined in ``sphinxlatextables.sty``. 
""" @@ -1146,23 +1167,17 @@ def visit_tabular_col_spec(self, node: Element) -> None: raise nodes.SkipNode def visit_table(self, node: Element) -> None: - if len(self.tables) == 1: - assert self.table is not None - if self.table.get_table_type() == 'longtable': - raise UnsupportedError( - '%s:%s: longtable does not support nesting a table.' - % (self.curfilestack[-1], node.line or '') - ) - # change type of parent table to tabular - # see https://groups.google.com/d/msg/sphinx-users/7m3NeOBixeo/9LKP2B4WBQAJ - self.table.has_problematic = True - elif len(self.tables) > 2: + table = Table(node) + assert table is not None + if len(self.tables) >= 1: + table.is_nested = True + # TODO: do we want > 2, > 1, or actually nothing here? + if len(self.tables) > 2: raise UnsupportedError( '%s:%s: deeply nested tables are not implemented.' % (self.curfilestack[-1], node.line or '') ) - table = Table(node) self.tables.append(table) if table.colsep is None: table.colsep = '|' * ( @@ -1191,6 +1206,25 @@ def depart_table(self, node: Element) -> None: assert self.table is not None labels = self.hypertarget_to(node) table_type = self.table.get_table_type() + if table_type == 'tabulary': + if len(self.tables) > 1: + # tell parents to not be tabulary + for _ in self.tables[:-1]: + _.has_problematic = True + else: + if self.table.colspec: + if any(c in self.table.colspec for c in 'LRJCT'): + logger.warning( + __( + 'colspec %s was given which uses ' + 'tabulary syntax. But this table can not be ' + 'rendered as a tabulary; colspec will be ignored.' 
+ ), + self.table.colspec[:-1], + type='latex', + location=node, + ) + self.table.colspec = '' table = self.render( table_type + '.tex.jinja', {'table': self.table, 'labels': labels} ) diff --git a/tests/roots/test-latex-table/expects/tabularcolumn.tex b/tests/roots/test-latex-table/expects/tabularcolumn.tex index fcb01be3f50..c1f88421f1d 100644 --- a/tests/roots/test-latex-table/expects/tabularcolumn.tex +++ b/tests/roots/test-latex-table/expects/tabularcolumn.tex @@ -4,7 +4,7 @@ \sphinxthistablewithglobalstyle \sphinxthistablewithnovlinesstyle \centering -\begin{tabulary}{\linewidth}[t]{cc} +\begin{tabular}[t]{cc} \sphinxtoprule \sphinxstyletheadfamily \sphinxAtStartPar @@ -36,6 +36,6 @@ cell3\sphinxhyphen{}2 \\ \sphinxbottomrule -\end{tabulary} +\end{tabular} \sphinxtableafterendhook\par \sphinxattableend\end{savenotes} diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt index a9d9132ed98..ca32ce9ddc5 100644 --- a/tests/roots/test-root/markup.txt +++ b/tests/roots/test-root/markup.txt @@ -229,6 +229,14 @@ Tables with multirow and multicol: | +---+ | +---+---+ + .. rst-class:: longtable + + +---+---+ + | +---+ | + | | h | | + | +---+ | + +---+---+ + .. list-table:: :header-rows: 0 From d20d348fdb8ca0782c8ecd14d34ad8149c564b7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Tue, 10 Jun 2025 17:56:58 +0200 Subject: [PATCH 128/435] Fix and update CHANGES.rst --- CHANGES.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 7098714e670..4920b4c7736 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -36,11 +36,13 @@ Features added * #13497: Support C domain objects in the table of contents. * #13500: LaTeX: add support for ``fontawesome6`` package. Patch by Jean-François B. -* #13535: html search: Update to the latest version of Snowball (v3.0.1). - Patch by Adam Turner. 
-* #13704: autodoc: Detect :py:func:`typing_extensions.overload ` +* #13509: autodoc: Detect :py:func:`typing_extensions.overload ` and :py:func:`~typing.final` decorators. Patch by Spencer Brown. +* #13535: html search: Update to the latest version of Snowball (v3.0.1). + Patch by Adam Turner. +* #13647: LaTeX: allow more cases of table nesting. + Patch by Jean-François B. Bugs fixed ---------- From 62198d83c33ec43ec8c83c7d5d67c2a4f91dc8ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Tue, 10 Jun 2025 22:41:55 +0200 Subject: [PATCH 129/435] LaTeX: test better if tabularcolumns colspec is tabulary-specific (#13648) --- sphinx/writers/latex.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 53fa2564f37..95eb35dda36 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -171,7 +171,9 @@ def get_table_type(self) -> str: elif self.has_verbatim: return 'tabular' elif self.colspec: - if any(c in 'LRCJT' for c in self.colspec): + assert len(self.colspec) > 2 + _colspec = re.sub(r'\{.*?\}', '', self.colspec[1:-2]) + if any(c in 'LRCJT' for c in _colspec): # tabulary would complain "no suitable columns" if none of its # column type were used so we ensure at least one matches. # It is responsability of user to make sure not to use tabulary @@ -1212,8 +1214,13 @@ def depart_table(self, node: Element) -> None: for _ in self.tables[:-1]: _.has_problematic = True else: + # We try to catch a tabularcolumns using L, R, J, C, or T. + # We can not simply test for presence in the colspec of + # one of those letters due to syntax such as >{\RaggedRight}. 
if self.table.colspec: - if any(c in self.table.colspec for c in 'LRJCT'): + assert len(self.table.colspec) > 2 + _colspec = re.sub(r'\{.*?\}', '', self.table.colspec[1:-2]) + if any(c in _colspec for c in 'LRJCT'): logger.warning( __( 'colspec %s was given which uses ' From 4564486af459d9f862aeea79c9ebbb7405a54618 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Wed, 11 Jun 2025 10:02:50 +0200 Subject: [PATCH 130/435] LaTeX: clarify that latest docs addition refers to LaTeX --- doc/usage/restructuredtext/directives.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst index 94526304cdf..f882f33ba3e 100644 --- a/doc/usage/restructuredtext/directives.rst +++ b/doc/usage/restructuredtext/directives.rst @@ -1473,8 +1473,8 @@ Check the :confval:`latex_table_style`. blocks, will render correctly to LaTeX output. .. versionchanged:: 8.3.0 - - The partial support for nesting a table in another has been extended. + The partial support of the LaTeX builder for nesting a table in another + has been extended. Formerly Sphinx would raise an error if ``longtable`` class was specified for a table containing a nested table, and some cases would not raise an error at Sphinx level but fail at LaTeX level during PDF build. 
This is a From 118f4a13a78f861983a68dd9b249ecd34b399d3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Wed, 11 Jun 2025 11:44:09 +0200 Subject: [PATCH 131/435] LaTeX: make sure tabulary is used if colspec requires it (#13653) --- sphinx/writers/latex.py | 29 +++++++++++++++++------------ tests/roots/test-root/markup.txt | 2 +- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py index 95eb35dda36..39aef55ddfe 100644 --- a/sphinx/writers/latex.py +++ b/sphinx/writers/latex.py @@ -171,13 +171,13 @@ def get_table_type(self) -> str: elif self.has_verbatim: return 'tabular' elif self.colspec: - assert len(self.colspec) > 2 - _colspec = re.sub(r'\{.*?\}', '', self.colspec[1:-2]) - if any(c in 'LRCJT' for c in _colspec): - # tabulary would complain "no suitable columns" if none of its - # column type were used so we ensure at least one matches. - # It is responsability of user to make sure not to use tabulary - # column types for a column containing a problematic cell. + # tabulary complains (only a LaTeX warning) if none of its column + # types is used. The next test will have false positive from + # syntax such as >{\RaggedRight} but it will catch *{3}{J} which + # does require tabulary and would crash tabular + # It is user responsability not to use a tabulary column type for + # a column having a problematic cell. + if any(c in 'LRCJT' for c in self.colspec): return 'tabulary' else: return 'tabular' @@ -1217,17 +1217,22 @@ def depart_table(self, node: Element) -> None: # We try to catch a tabularcolumns using L, R, J, C, or T. # We can not simply test for presence in the colspec of # one of those letters due to syntax such as >{\RaggedRight}. + # The test will not catch *{3}{J} syntax, but it would be + # overkill to try to implement LaTeX preamble mini-language. 
if self.table.colspec: assert len(self.table.colspec) > 2 - _colspec = re.sub(r'\{.*?\}', '', self.table.colspec[1:-2]) - if any(c in _colspec for c in 'LRJCT'): + # cf how self.table.colspec got set in visit_table(). + _colspec_as_given = self.table.colspec[1:-2] + _colspec_stripped = re.sub(r'\{.*?\}', '', _colspec_as_given) + if any(c in _colspec_stripped for c in 'LRJCT'): logger.warning( __( - 'colspec %s was given which uses ' + 'colspec %s was given which appears to use ' 'tabulary syntax. But this table can not be ' - 'rendered as a tabulary; colspec will be ignored.' + 'rendered as a tabulary; the given colspec will ' + 'be ignored.' ), - self.table.colspec[:-1], + _colspec_as_given, type='latex', location=node, ) diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt index ca32ce9ddc5..0a7b6cb2c92 100644 --- a/tests/roots/test-root/markup.txt +++ b/tests/roots/test-root/markup.txt @@ -182,7 +182,7 @@ With Tables ------ -.. tabularcolumns:: |L|p{5cm}|R| +.. tabularcolumns:: |*{1}{L|}p{5cm}|*{1}{R}| .. 
_my-table: From fa6f8c87b79ab9bc13f97503bc9f92ca79af955f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Jun 2025 15:58:40 +0100 Subject: [PATCH 132/435] Bump types-requests to 2.32.4.20250611 (#13651) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fd0cdce21bc..70962dd7adf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -100,7 +100,7 @@ lint = [ "types-docutils==0.21.0.20250525", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", - "types-requests==2.32.0.20250602", # align with requests + "types-requests==2.32.4.20250611", # align with requests "types-urllib3==1.26.25.14", "pyright==1.1.400", "pytest>=8.0", @@ -169,7 +169,7 @@ type-stubs = [ "types-docutils==0.21.0.20250525", "types-Pillow==10.2.0.20240822", "types-Pygments==2.19.0.20250516", - "types-requests==2.32.0.20250602", + "types-requests==2.32.4.20250611", "types-urllib3==1.26.25.14", ] From 40b0f8048940865325063b68f087a16860a66c72 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 11 Jun 2025 15:58:53 +0100 Subject: [PATCH 133/435] Use ``config.verbosity`` (#13650) --- sphinx/application.py | 2 +- sphinx/builders/__init__.py | 10 +++++----- sphinx/environment/__init__.py | 4 +++- sphinx/ext/viewcode.py | 2 +- sphinx/util/logging.py | 10 +++++++--- tests/test_util/test_util_logging.py | 17 ++++++----------- 6 files changed, 23 insertions(+), 22 deletions(-) diff --git a/sphinx/application.py b/sphinx/application.py index 8e53770545f..e8da1e4d058 100644 --- a/sphinx/application.py +++ b/sphinx/application.py @@ -239,7 +239,7 @@ def __init__( self._fail_on_warnings = bool(warningiserror) self.pdb = pdb self._exception_on_warning = exception_on_warning - logging.setup(self, self._status, self._warning) + logging.setup(self, self._status, self._warning, verbosity=verbosity) self.events = 
EventManager(self) diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py index 73426c6dc81..184a27f2cd2 100644 --- a/sphinx/builders/__init__.py +++ b/sphinx/builders/__init__.py @@ -269,7 +269,7 @@ def cat2relpath(cat: CatalogInfo, srcdir: Path = self.srcdir) -> str: __('writing output... '), 'darkgreen', len(catalogs), - self._app.verbosity, + self.config.verbosity, stringify_func=cat2relpath, ): catalog.write_mo( @@ -587,7 +587,7 @@ def _read_serial(self, docnames: list[str]) -> None: __('reading sources... '), 'purple', len(docnames), - self._app.verbosity, + self.config.verbosity, ): # remove all inventory entries for that file self.events.emit('env-purge-doc', self.env, docname) @@ -604,7 +604,7 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None: __('reading sources... '), 'purple', len(chunks), - self._app.verbosity, + self.config.verbosity, ) # clear all outdated docs at once @@ -776,7 +776,7 @@ def _write_serial(self, docnames: Sequence[str]) -> None: __('writing output... '), 'darkgreen', len(docnames), - self._app.verbosity, + self.config.verbosity, ): _write_docname(docname, env=self.env, builder=self, tags=self.tags) @@ -800,7 +800,7 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None: __('writing output... '), 'darkgreen', len(chunks), - self._app.verbosity, + self.config.verbosity, ) def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None: diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index fd611639e9c..a09978ba279 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -281,7 +281,9 @@ def setup(self, app: Sphinx) -> None: # The old config is self.config, restored from the pickled environment. 
# The new config is app.config, always recreated from ``conf.py`` self.config_status, self.config_status_extra = self._config_status( - old_config=self.config, new_config=app.config, verbosity=app.verbosity + old_config=self.config, + new_config=app.config, + verbosity=app.config.verbosity, ) self.config = app.config diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py index 195ed95f961..2b9b479e0a1 100644 --- a/sphinx/ext/viewcode.py +++ b/sphinx/ext/viewcode.py @@ -295,7 +295,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]: __('highlighting module code... '), 'blue', len(env._viewcode_modules), - app.verbosity, + app.config.verbosity, operator.itemgetter(0), ): if not entry: diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py index d5392936334..9ad035c49af 100644 --- a/sphinx/util/logging.py +++ b/sphinx/util/logging.py @@ -608,8 +608,12 @@ def write(self, data: str) -> None: self._app.messagelog.append(data) -def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None: +def setup( + app: Sphinx, status: IO[str], warning: IO[str], *, verbosity: int = 0 +) -> None: """Setup root logger for Sphinx""" + log_level = VERBOSITY_MAP[max(verbosity, 0)] + logger = logging.getLogger(NAMESPACE) logger.setLevel(logging.DEBUG) logger.propagate = False @@ -621,7 +625,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None: info_handler = NewLineStreamHandler(SafeEncodingWriter(status)) info_handler.addFilter(InfoFilter()) info_handler.addFilter(InfoLogRecordTranslator(app)) - info_handler.setLevel(VERBOSITY_MAP[app.verbosity]) + info_handler.setLevel(log_level) info_handler.setFormatter(ColorizeFormatter()) warning_handler = WarningStreamHandler(SafeEncodingWriter(warning)) @@ -635,7 +639,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None: messagelog_handler = logging.StreamHandler(LastMessagesWriter(app, status)) messagelog_handler.addFilter(InfoFilter()) - 
messagelog_handler.setLevel(VERBOSITY_MAP[app.verbosity]) + messagelog_handler.setLevel(log_level) logger.addHandler(info_handler) logger.addHandler(warning_handler) diff --git a/tests/test_util/test_util_logging.py b/tests/test_util/test_util_logging.py index a9ef7f6c4c7..c21434a8414 100644 --- a/tests/test_util/test_util_logging.py +++ b/tests/test_util/test_util_logging.py @@ -26,8 +26,7 @@ @pytest.mark.sphinx('html', testroot='root') def test_info_and_warning(app: SphinxTestApp) -> None: - app.verbosity = 2 - logging.setup(app, app.status, app.warning) + logging.setup(app, app.status, app.warning, verbosity=2) logger = logging.getLogger(__name__) logger.debug('message1') @@ -61,8 +60,7 @@ def test_Exception(app: SphinxTestApp) -> None: @pytest.mark.sphinx('html', testroot='root') def test_verbosity_filter(app: SphinxTestApp) -> None: # verbosity = 0: INFO - app.verbosity = 0 - logging.setup(app, app.status, app.warning) + logging.setup(app, app.status, app.warning, verbosity=0) logger = logging.getLogger(__name__) logger.info('message1') @@ -75,8 +73,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None: assert 'message4' not in app.status.getvalue() # verbosity = 1: VERBOSE - app.verbosity = 1 - logging.setup(app, app.status, app.warning) + logging.setup(app, app.status, app.warning, verbosity=1) logger = logging.getLogger(__name__) logger.info('message1') @@ -89,8 +86,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None: assert 'message4' not in app.status.getvalue() # verbosity = 2: DEBUG - app.verbosity = 2 - logging.setup(app, app.status, app.warning) + logging.setup(app, app.status, app.warning, verbosity=2) logger = logging.getLogger(__name__) logger.info('message1') @@ -312,8 +308,7 @@ def test_log_no_ansi_colors(tmp_path): @pytest.mark.sphinx('html', testroot='root') def test_colored_logs(app: SphinxTestApp) -> None: - app.verbosity = 2 - logging.setup(app, app.status, app.warning) + logging.setup(app, app.status, app.warning, verbosity=2) 
logger = logging.getLogger(__name__) # default colors @@ -363,7 +358,7 @@ class StreamWriter(codecs.StreamWriter): def write(self, object): self.stream.write(object.encode('cp1252').decode('cp1252')) - logging.setup(app, StreamWriter(app.status), app.warning) + logging.setup(app, StreamWriter(app.status), app.warning, verbosity=0) logger = logging.getLogger(__name__) # info with UnicodeEncodeError From a2f7b41806bffea6f9528435effa700b3df35ed9 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 11 Jun 2025 16:16:10 +0100 Subject: [PATCH 134/435] Test with Python 3.15 alpha releases (#13654) --- .github/workflows/main.yml | 2 ++ pyproject.toml | 1 + tox.ini | 4 ++-- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cf40554e6f4..ca71534d3a6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -89,6 +89,7 @@ jobs: matrix: python: - "3.14" + - "3.15" docutils: - "0.20" - "0.21" @@ -125,6 +126,7 @@ jobs: matrix: python: - "3.14" + - "3.15" steps: - uses: actions/checkout@v4 diff --git a/pyproject.toml b/pyproject.toml index 70962dd7adf..3d0e01deb0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ classifiers = [ "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", + "Programming Language :: Python :: 3.15", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Framework :: Sphinx", diff --git a/tox.ini b/tox.ini index 87b9d1b6316..58c2b140351 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] minversion = 4.2.0 -envlist = py{311,312,313,314} +envlist = py{311,312,313,314,315} [testenv] usedevelop = True @@ -19,7 +19,7 @@ passenv = BUILDER READTHEDOCS description = - py{311,312,313,314}: Run unit tests against {envname}. 
+ py{311,312,313,314,315}: Run unit tests against {envname}. dependency_groups = test setenv = From 6b136f9b9162acdfc9eb0331fe05a8d464c78992 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 11 Jun 2025 19:01:08 +0100 Subject: [PATCH 135/435] Replace ``codecs.open()`` with ``open()`` (#13655) --- sphinx/builders/gettext.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py index 659bf218983..fc659d744d5 100644 --- a/sphinx/builders/gettext.py +++ b/sphinx/builders/gettext.py @@ -2,7 +2,6 @@ from __future__ import annotations -import codecs import operator import os import os.path @@ -212,7 +211,7 @@ def should_write(filepath: Path, new_content: str) -> bool: if not filepath.exists(): return True try: - with codecs.open(str(filepath), encoding='utf-8') as oldpot: + with open(filepath, encoding='utf-8') as oldpot: old_content = oldpot.read() old_header_index = old_content.index('"POT-Creation-Date:') new_header_index = new_content.index('"POT-Creation-Date:') @@ -275,7 +274,7 @@ def _extract_from_template(self) -> None: self.config.verbosity, ): try: - with codecs.open(template, encoding='utf-8') as f: + with open(template, encoding='utf-8') as f: context = f.read() for line, _meth, msg in extract_translations(context): origin = MsgOrigin(source=template, line=line) @@ -326,7 +325,7 @@ def finish(self) -> None: pofn = self.outdir / f'{textdomain}.pot' if should_write(pofn, content): - with codecs.open(str(pofn), 'w', encoding='utf-8') as pofile: + with open(pofn, 'w', encoding='utf-8') as pofile: pofile.write(content) From e1bd9cb3863cd1dfeaec9729dc6c842ef0f7a1f7 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 11 Jun 2025 19:43:47 +0100 Subject: [PATCH 136/435] Make ``_prepend_prologue()`` and ``_append_epilogue()`` private (#13658) --- sphinx/parsers.py | 8 ++-- sphinx/util/rst.py | 68 
+++++++++++++++---------------- tests/test_markup/test_parser.py | 8 ++-- tests/test_util/test_util_rst.py | 70 +++++++++++++++++--------------- 4 files changed, 80 insertions(+), 74 deletions(-) diff --git a/sphinx/parsers.py b/sphinx/parsers.py index eb5e77d9387..26437654cc5 100644 --- a/sphinx/parsers.py +++ b/sphinx/parsers.py @@ -11,7 +11,7 @@ from docutils.transforms.universal import SmartQuotes from sphinx.deprecation import _deprecation_warning -from sphinx.util.rst import append_epilog, prepend_prolog +from sphinx.util.rst import _append_epilogue, _prepend_prologue if TYPE_CHECKING: from docutils import nodes @@ -100,9 +100,9 @@ def parse(self, inputstring: str | StringList, document: nodes.document) -> None self.finish_parse() def decorate(self, content: StringList) -> None: - """Preprocess reST content before parsing.""" - prepend_prolog(content, self.config.rst_prolog) - append_epilog(content, self.config.rst_epilog) + """Preprocess reStructuredText content before parsing.""" + _prepend_prologue(content, self._config.rst_prolog) + _append_epilogue(content, self._config.rst_epilog) def setup(app: Sphinx) -> ExtensionMetadata: diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py index 7e6853a81ef..485f369766e 100644 --- a/sphinx/util/rst.py +++ b/sphinx/util/rst.py @@ -1,11 +1,11 @@ -"""reST helper functions.""" +"""reStructuredText helper functions.""" from __future__ import annotations import re from collections import defaultdict from contextlib import contextmanager -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING from unicodedata import east_asian_width from docutils.parsers.rst import roles @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) -FIELD_NAME_RE = re.compile(Body.patterns['field_marker']) +_FIELD_NAME_RE = re.compile(Body.patterns['field_marker']) symbols_re = re.compile(r'([!-\-/:-@\[-`{-~])') # symbols without dot(0x2e) SECTIONING_CHARS = ['=', '-', '~'] @@ -77,39 +77,39 @@ def default_role(docname: str, 
name: str) -> Iterator[None]: docutils.unregister_role('') -def prepend_prolog(content: StringList, prolog: str) -> None: - """Prepend a string to content body as prolog.""" - if prolog: - pos = 0 - for line in content: - if FIELD_NAME_RE.match(line): - pos += 1 - else: - break - - if pos > 0: - # insert a blank line after docinfo - content.insert(pos, '', '', 0) +def _prepend_prologue(content: StringList, prologue: str) -> None: + """Prepend a string to content body as a prologue.""" + if not prologue: + return + pos = 0 + for line in content: + if _FIELD_NAME_RE.match(line): pos += 1 + else: + break - # insert prolog (after docinfo if exists) - lineno = 0 - for lineno, line in enumerate(prolog.splitlines()): - content.insert(pos + lineno, line, '', lineno) + if pos > 0: + # insert a blank line after docinfo + content.insert(pos, '', '', 0) + pos += 1 - content.insert(pos + lineno + 1, '', '', 0) + # insert prologue (after docinfo if exists) + lineno = 0 + for lineno, line in enumerate(prologue.splitlines()): + content.insert(pos + lineno, line, '', lineno) + content.insert(pos + lineno + 1, '', '', 0) -def append_epilog(content: StringList, epilog: str) -> None: - """Append a string to content body as epilog.""" - if epilog: - if len(content) > 0: - source, lineno = content.info(-1) - # lineno will never be None, since len(content) > 0 - lineno = cast('int', lineno) - else: - source = '' - lineno = 0 - content.append('', source, lineno + 1) - for lineno, line in enumerate(epilog.splitlines()): - content.append(line, '', lineno) + +def _append_epilogue(content: StringList, epilogue: str) -> None: + """Append a string to content body as an epilogue.""" + if not epilogue: + return + if len(content) > 0: + source, lineno = content.items[-1] + else: + source = '' + lineno = 0 + content.append('', source, lineno + 1) + for lineno, line in enumerate(epilogue.splitlines()): + content.append(line, '', lineno) diff --git a/tests/test_markup/test_parser.py 
b/tests/test_markup/test_parser.py index 6a71fed9e49..9a79373e6b7 100644 --- a/tests/test_markup/test_parser.py +++ b/tests/test_markup/test_parser.py @@ -34,8 +34,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): parser.parse(text, document) (content, _), _ = RSTStateMachine().run.call_args assert list(content.xitems()) == [ - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 0, 'hello Sphinx world'), ('dummy.rst', 1, 'Sphinx is a document generator'), @@ -50,8 +50,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app): ('dummy.rst', 0, 'hello Sphinx world'), ('dummy.rst', 1, 'Sphinx is a document generator'), ('dummy.rst', 2, ''), - ('', 0, 'this is rst_epilog'), - ('', 1, 'good-bye reST!'), + ('', 0, 'this is rst_epilog'), + ('', 1, 'good-bye reST!'), ] # expandtabs / convert whitespaces diff --git a/tests/test_util/test_util_rst.py b/tests/test_util/test_util_rst.py index d8fcf8d12d0..41854c2aecc 100644 --- a/tests/test_util/test_util_rst.py +++ b/tests/test_util/test_util_rst.py @@ -5,7 +5,13 @@ from docutils.statemachine import StringList from jinja2 import Environment -from sphinx.util.rst import append_epilog, escape, heading, prepend_prolog, textwidth +from sphinx.util.rst import ( + _append_epilogue, + _prepend_prologue, + escape, + heading, + textwidth, +) def test_escape() -> None: @@ -15,25 +21,25 @@ def test_escape() -> None: assert escape('.. toctree::') == r'\.. toctree\:\:' -def test_append_epilog() -> None: +def test_append_epilogue() -> None: epilog = 'this is rst_epilog\ngood-bye reST!' 
content = StringList( ['hello Sphinx world', 'Sphinx is a document generator'], 'dummy.rst', ) - append_epilog(content, epilog) + _append_epilogue(content, epilog) assert list(content.xitems()) == [ ('dummy.rst', 0, 'hello Sphinx world'), ('dummy.rst', 1, 'Sphinx is a document generator'), ('dummy.rst', 2, ''), - ('', 0, 'this is rst_epilog'), - ('', 1, 'good-bye reST!'), + ('', 0, 'this is rst_epilog'), + ('', 1, 'good-bye reST!'), ] -def test_prepend_prolog() -> None: - prolog = 'this is rst_prolog\nhello reST!' +def test_prepend_prologue() -> None: + prologue = 'this is rst_prolog\nhello reST!' content = StringList( [ ':title: test of SphinxFileInput', @@ -44,14 +50,14 @@ def test_prepend_prolog() -> None: ], 'dummy.rst', ) - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ ('dummy.rst', 0, ':title: test of SphinxFileInput'), ('dummy.rst', 1, ':author: Sphinx team'), ('', 0, ''), - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 2, ''), ('dummy.rst', 3, 'hello Sphinx world'), @@ -60,17 +66,17 @@ def test_prepend_prolog() -> None: def test_prepend_prolog_with_CR() -> None: - # prolog having CR at tail - prolog = 'this is rst_prolog\nhello reST!\n' + # prologue having CR at tail + prologue = 'this is rst_prolog\nhello reST!\n' content = StringList( ['hello Sphinx world', 'Sphinx is a document generator'], 'dummy.rst', ) - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 0, 'hello Sphinx world'), ('dummy.rst', 1, 'Sphinx is a document generator'), @@ -78,17 +84,17 @@ def test_prepend_prolog_with_CR() -> None: def test_prepend_prolog_without_CR() -> None: - # prolog not having CR at tail - prolog = 'this is 
rst_prolog\nhello reST!' + # prologue not having CR at tail + prologue = 'this is rst_prolog\nhello reST!' content = StringList( ['hello Sphinx world', 'Sphinx is a document generator'], 'dummy.rst', ) - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 0, 'hello Sphinx world'), ('dummy.rst', 1, 'Sphinx is a document generator'), @@ -96,7 +102,7 @@ def test_prepend_prolog_without_CR() -> None: def test_prepend_prolog_with_roles_in_sections() -> None: - prolog = 'this is rst_prolog\nhello reST!' + prologue = 'this is rst_prolog\nhello reST!' content = StringList( [ ':title: test of SphinxFileInput', @@ -109,14 +115,14 @@ def test_prepend_prolog_with_roles_in_sections() -> None: ], 'dummy.rst', ) - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ ('dummy.rst', 0, ':title: test of SphinxFileInput'), ('dummy.rst', 1, ':author: Sphinx team'), ('', 0, ''), - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 2, ''), ('dummy.rst', 3, ':mod:`foo`'), @@ -128,13 +134,13 @@ def test_prepend_prolog_with_roles_in_sections() -> None: def test_prepend_prolog_with_roles_in_sections_with_newline() -> None: # prologue with trailing line break - prolog = 'this is rst_prolog\nhello reST!\n' + prologue = 'this is rst_prolog\nhello reST!\n' content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst') - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 0, ':mod:`foo`'), ('dummy.rst', 1, '----------'), @@ -145,13 +151,13 @@ def 
test_prepend_prolog_with_roles_in_sections_with_newline() -> None: def test_prepend_prolog_with_roles_in_sections_without_newline() -> None: # prologue with no trailing line break - prolog = 'this is rst_prolog\nhello reST!' + prologue = 'this is rst_prolog\nhello reST!' content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst') - prepend_prolog(content, prolog) + _prepend_prologue(content, prologue) assert list(content.xitems()) == [ - ('', 0, 'this is rst_prolog'), - ('', 1, 'hello reST!'), + ('', 0, 'this is rst_prolog'), + ('', 1, 'hello reST!'), ('', 0, ''), ('dummy.rst', 0, ':mod:`foo`'), ('dummy.rst', 1, '----------'), From a1639a7cc5bdd6a05f1bbdc54528cd49ef0d06cc Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Sun, 15 Jun 2025 03:43:37 +0100 Subject: [PATCH 137/435] Refactor ``render_partial()`` to do less work (#13664) --- sphinx/builders/html/__init__.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index de49f89bbb4..b6a67f5f453 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -17,6 +17,7 @@ from urllib.parse import quote import docutils.readers.doctree +import docutils.utils import jinja2.exceptions from docutils import nodes from docutils.core import Publisher @@ -429,12 +430,18 @@ def render_partial(self, node: Node | None) -> dict[str, str]: """Utility: Render a lone doctree node.""" if node is None: return {'fragment': ''} - - doc = new_document('') + pub = self._publisher + doc = docutils.utils.new_document('', pub.settings) doc.append(node) - self._publisher.set_source(doc) - self._publisher.publish() - return self._publisher.writer.parts + doc.transformer.populate_from_components((pub.reader, pub.parser, pub.writer)) + doc.transformer.apply_transforms() + visitor: HTML5Translator = self.create_translator(doc, self) # type: ignore[assignment] + 
doc.walkabout(visitor) + parts = { + 'fragment': ''.join(visitor.fragment), + 'title': ''.join(visitor.title), + } + return parts def prepare_writing(self, docnames: Set[str]) -> None: # create the search indexer From 076774224a978e5b7a9e679f30fe84816e50e8f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20B=2E?= <2589111+jfbu@users.noreply.github.com> Date: Sun, 15 Jun 2025 21:25:21 +0200 Subject: [PATCH 138/435] Docs: clarify wording regarding default style for LaTeX tables (#13667) --- doc/usage/configuration.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst index 7cdf462c4ba..3ec5818f49b 100644 --- a/doc/usage/configuration.rst +++ b/doc/usage/configuration.rst @@ -3083,7 +3083,7 @@ These options influence LaTeX output. the :code-tex:`\\rowcolors` LaTeX command becomes a no-op (this command has limitations and has never correctly supported all types of tables Sphinx produces in LaTeX). - Please update your project to use the + Please use the :ref:`latex table color configuration ` keys instead. To customise the styles for a table, @@ -3096,7 +3096,7 @@ These options influence LaTeX output. The latter two can be combined with any of the first three. The ``standard`` class produces tables with both horizontal and vertical lines - (as has been the default so far with Sphinx). + (as had been the default prior to Sphinx 6.0.0). A single-row multi-column merged cell will obey the row colour, if it is set. 
From 551b8428980cab65d7e5c1b759e5aa95ce0cd5a7 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 16 Jun 2025 20:32:31 +0100 Subject: [PATCH 139/435] Deprecate support for source encodings other than UTF-8 (#13666) --- CHANGES.rst | 3 +++ doc/usage/configuration.rst | 3 +++ doc/usage/restructuredtext/basics.rst | 9 +++++---- doc/usage/restructuredtext/directives.rst | 2 +- sphinx/config.py | 14 ++++++++++++++ tests/test_config/test_config.py | 15 +++++++++++++-- 6 files changed, 39 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 4920b4c7736..47eb18fc8a6 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -23,6 +23,9 @@ Deprecated Patch by Adam Turner. * #13644: Deprecate the :py:attr:`!Parser.config` and :py:attr:`!env` attributes. Patch by Adam Turner. +* #13665: Deprecate support for non-UTF 8 source encodings, + scheduled for removal in Sphinx 10. + Patch by Adam Turner. Features added -------------- diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst index 3ec5818f49b..5babd7e2915 100644 --- a/doc/usage/configuration.rst +++ b/doc/usage/configuration.rst @@ -1157,6 +1157,9 @@ Options for source files The recommended encoding is ``'utf-8-sig'``. .. versionadded:: 0.5 + .. deprecated:: 8.3 + Support for source encodings other than UTF-8 is deprecated. + Sphinx 10 will only support UTF-8 files. .. confval:: source_suffix :type: :code-py:`dict[str, str] | Sequence[str] | str` diff --git a/doc/usage/restructuredtext/basics.rst b/doc/usage/restructuredtext/basics.rst index ea61b80fc85..8f408f45e38 100644 --- a/doc/usage/restructuredtext/basics.rst +++ b/doc/usage/restructuredtext/basics.rst @@ -646,10 +646,11 @@ configurations: Source encoding --------------- -Since the easiest way to include special characters like em dashes or copyright -signs in reStructuredText is to directly write them as Unicode characters, one has to -specify an encoding. 
Sphinx assumes source files to be encoded in UTF-8 by -default; you can change this with the :confval:`source_encoding` config value. +Sphinx supports source files that are encoded in UTF-8. +This means that the full range of Unicode__ characters may be used +directly in reStructuredText. + +__ https://www.unicode.org/ Gotchas diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst index f882f33ba3e..5845a6ab717 100644 --- a/doc/usage/restructuredtext/directives.rst +++ b/doc/usage/restructuredtext/directives.rst @@ -971,7 +971,7 @@ __ https://pygments.org/docs/lexers :type: text Explicitly specify the encoding of the file. - This overwrites the default encoding (:confval:`source_encoding`). + This overwrites the default encoding (UTF-8). For example: .. code-block:: rst diff --git a/sphinx/config.py b/sphinx/config.py index a43b6cc82d0..e878cd7a834 100644 --- a/sphinx/config.py +++ b/sphinx/config.py @@ -895,7 +895,21 @@ def check_master_doc( return changed +def deprecate_source_encoding(_app: Sphinx, config: Config) -> None: + """Warn on non-UTF 8 source_encoding.""" + # RemovedInSphinx10Warning + if config.source_encoding.lower() not in {'utf-8', 'utf-8-sig', 'utf8'}: + msg = _( + 'Support for source encodings other than UTF-8 ' + 'is deprecated and will be removed in Sphinx 10. ' + 'Please comment at https://github.com/sphinx-doc/sphinx/issues/13665 ' + 'if this causes a problem.' 
+ ) + logger.warning(msg) + + def setup(app: Sphinx) -> ExtensionMetadata: + app.connect('config-inited', deprecate_source_encoding, priority=790) app.connect('config-inited', convert_source_suffix, priority=800) app.connect('config-inited', convert_highlight_options, priority=800) app.connect('config-inited', init_numfig_format, priority=800) diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py index b3392e654b2..857e1c28e34 100644 --- a/tests/test_config/test_config.py +++ b/tests/test_config/test_config.py @@ -19,14 +19,14 @@ ) from sphinx.deprecation import RemovedInSphinx90Warning from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError +from sphinx.testing.util import SphinxTestApp from sphinx.util.tags import Tags if TYPE_CHECKING: from collections.abc import Iterable + from pathlib import Path from typing import TypeAlias - from sphinx.testing.util import SphinxTestApp - CircularList: TypeAlias = list[int | 'CircularList'] CircularDict: TypeAlias = dict[str, int | 'CircularDict'] @@ -811,3 +811,14 @@ def test_root_doc_and_master_doc_are_synchronized() -> None: c.root_doc = '1234' assert c.master_doc == '1234' assert c.root_doc == c.master_doc + + +def test_source_encoding_deprecation(tmp_path: Path) -> None: + (tmp_path / 'conf.py').touch() + app = SphinxTestApp( + buildername='dummy', + srcdir=tmp_path, + confoverrides={'source_encoding': 'latin-1'}, + ) + expected = 'Support for source encodings other than UTF-8 is deprecated and will be removed' + assert expected in app.warning.getvalue() From be6593b0de9c273eba311a58c15a7fbdc972c5bf Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 16 Jun 2025 20:37:08 +0100 Subject: [PATCH 140/435] Prefer ``current_document.docname`` (#13669) --- doc/development/tutorials/examples/recipe.py | 2 +- doc/development/tutorials/examples/todo.py | 2 +- sphinx/builders/latex/transforms.py | 2 +- sphinx/builders/linkcheck.py | 2 
+- sphinx/directives/other.py | 10 +++++----- sphinx/directives/patches.py | 12 +++++++----- sphinx/domains/c/__init__.py | 6 ++++-- sphinx/domains/changeset.py | 2 +- sphinx/domains/citation.py | 4 ++-- sphinx/domains/cpp/__init__.py | 6 ++++-- sphinx/domains/index.py | 2 +- sphinx/domains/javascript.py | 9 ++++++--- sphinx/domains/python/__init__.py | 6 ++++-- sphinx/domains/rst.py | 2 +- sphinx/domains/std/__init__.py | 9 ++++++--- sphinx/environment/__init__.py | 2 +- sphinx/environment/collectors/asset.py | 8 +++++--- sphinx/environment/collectors/metadata.py | 2 +- sphinx/environment/collectors/title.py | 4 ++-- sphinx/environment/collectors/toctree.py | 2 +- sphinx/ext/autodoc/directive.py | 2 +- sphinx/ext/autosectionlabel.py | 2 +- sphinx/ext/autosummary/__init__.py | 2 +- sphinx/ext/duration.py | 2 +- sphinx/ext/graphviz.py | 4 ++-- sphinx/ext/intersphinx/_resolve.py | 2 +- sphinx/ext/todo.py | 2 +- sphinx/ext/viewcode.py | 4 ++-- sphinx/io.py | 2 +- sphinx/roles.py | 2 +- sphinx/transforms/i18n.py | 4 +++- sphinx/transforms/post_transforms/__init__.py | 2 +- sphinx/transforms/post_transforms/images.py | 6 +++--- sphinx/transforms/references.py | 4 +++- sphinx/util/i18n.py | 2 +- sphinx/versioning.py | 2 +- 36 files changed, 79 insertions(+), 59 deletions(-) diff --git a/doc/development/tutorials/examples/recipe.py b/doc/development/tutorials/examples/recipe.py index 9848629216a..da52fa2df67 100644 --- a/doc/development/tutorials/examples/recipe.py +++ b/doc/development/tutorials/examples/recipe.py @@ -165,7 +165,7 @@ def add_recipe(self, signature, ingredients): name, signature, 'Recipe', - self.env.docname, + self.env.current_document.docname, anchor, 0, )) diff --git a/doc/development/tutorials/examples/todo.py b/doc/development/tutorials/examples/todo.py index a8aa1ec4a1d..c9993eda198 100644 --- a/doc/development/tutorials/examples/todo.py +++ b/doc/development/tutorials/examples/todo.py @@ -44,7 +44,7 @@ def run(self): self.env.todo_all_todos = [] 
self.env.todo_all_todos.append({ - 'docname': self.env.docname, + 'docname': self.env.current_document.docname, 'lineno': self.lineno, 'todo': todo_node.deepcopy(), 'target': targetnode, diff --git a/sphinx/builders/latex/transforms.py b/sphinx/builders/latex/transforms.py index 9fa180a7dd9..759a084cd00 100644 --- a/sphinx/builders/latex/transforms.py +++ b/sphinx/builders/latex/transforms.py @@ -40,7 +40,7 @@ class FootnoteDocnameUpdater(SphinxTransform): def apply(self, **kwargs: Any) -> None: matcher = NodeMatcher(*self.TARGET_NODES) for node in matcher.findall(self.document): - node['docname'] = self.env.docname + node['docname'] = self.env.current_document.docname class SubstitutionDefinitionsRemover(SphinxPostTransform): diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py index c1b199c5493..b80c9d515b2 100644 --- a/sphinx/builders/linkcheck.py +++ b/sphinx/builders/linkcheck.py @@ -261,7 +261,7 @@ def _add_uri(self, uri: str, node: nodes.Element) -> None: """ builder = cast('CheckExternalLinksBuilder', self.env._app.builder) hyperlinks = builder.hyperlinks - docname = self.env.docname + docname = self.env.current_document.docname if newuri := self.env.events.emit_firstresult('linkcheck-process-uri', uri): uri = newuri diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py index 8c66ed383b5..090e58a4cf0 100644 --- a/sphinx/directives/other.py +++ b/sphinx/directives/other.py @@ -63,7 +63,7 @@ class TocTree(SphinxDirective): def run(self) -> list[Node]: subnode = addnodes.toctree() - subnode['parent'] = self.env.docname + subnode['parent'] = self.env.current_document.docname # (title, ref) pairs, where ref may be a document, or an external link, # and title may be None if the document's title is to be used @@ -90,7 +90,7 @@ def parse_content(self, toctree: addnodes.toctree) -> None: """Populate ``toctree['entries']`` and ``toctree['includefiles']`` from content.""" generated_docnames = 
frozenset(StandardDomain._virtual_doc_names) suffixes = self.config.source_suffix - current_docname = self.env.docname + current_docname = self.env.current_document.docname glob = toctree['glob'] # glob target documents @@ -267,7 +267,7 @@ def run(self) -> list[Node]: if len(children) != 1 or not isinstance(children[0], nodes.bullet_list): logger.warning( __('.. acks content is not a list'), - location=(self.env.docname, self.lineno), + location=(self.env.current_document.docname, self.lineno), ) return [] return [addnodes.acks('', *children)] @@ -290,7 +290,7 @@ def run(self) -> list[Node]: if len(children) != 1 or not isinstance(children[0], nodes.bullet_list): logger.warning( __('.. hlist content is not a list'), - location=(self.env.docname, self.lineno), + location=(self.env.current_document.docname, self.lineno), ) return [] fulllist = children[0] @@ -388,7 +388,7 @@ def _insert_input(include_lines: list[str], source: str) -> None: text = '\n'.join(include_lines[:-2]) path = Path(relpath(Path(source).resolve(), start=self.env.srcdir)) - docname = self.env.docname + docname = self.env.current_document.docname # Emit the "include-read" event arg = [text] diff --git a/sphinx/directives/patches.py b/sphinx/directives/patches.py index 94184de502c..0a7419ed563 100644 --- a/sphinx/directives/patches.py +++ b/sphinx/directives/patches.py @@ -72,11 +72,11 @@ def run(self) -> list[Node]: 'an absolute path as a relative path from source directory. ' 'Please update your document.' 
), - location=(env.docname, self.lineno), + location=(env.current_document.docname, self.lineno), ) else: abspath = env.srcdir / self.options['file'][1:] - doc_dir = env.doc2path(env.docname).parent + doc_dir = env.doc2path(env.current_document.docname).parent self.options['file'] = relpath(abspath, doc_dir) return super().run() @@ -162,7 +162,7 @@ def run(self) -> list[Node]: latex, latex, classes=self.options.get('class', []), - docname=self.env.docname, + docname=self.env.current_document.docname, number=None, label=label, ) @@ -180,7 +180,7 @@ def add_target(self, ret: list[Node]) -> None: # assign label automatically if math_number_all enabled if node['label'] == '' or (self.config.math_number_all and not node['label']): # NoQA: PLC1901 seq = self.env.new_serialno('sphinx.ext.math#equations') - node['label'] = f'{self.env.docname}:{seq}' + node['label'] = f'{self.env.current_document.docname}:{seq}' # no targets and numbers are needed if not node['label']: @@ -188,7 +188,9 @@ def add_target(self, ret: list[Node]) -> None: # register label to domain domain = self.env.domains.math_domain - domain.note_equation(self.env.docname, node['label'], location=node) + domain.note_equation( + self.env.current_document.docname, node['label'], location=node + ) node['number'] = domain.get_equation_number_for(node['label']) # add target node diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py index 80d24c1abe2..194916122cd 100644 --- a/sphinx/domains/c/__init__.py +++ b/sphinx/domains/c/__init__.py @@ -156,7 +156,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None: parent=target_symbol, ident=symbol.ident, declaration=decl_clone, - docname=self.env.docname, + docname=self.env.current_document.docname, line=self.get_source_info()[1], ) @@ -259,7 +259,9 @@ def handle_signature(self, sig: str, signode: TextElement) -> ASTDeclaration: try: symbol = parent_symbol.add_declaration( - ast, docname=self.env.docname, 
line=self.get_source_info()[1] + ast, + docname=self.env.current_document.docname, + line=self.get_source_info()[1], ) # append the new declaration to the sibling list assert symbol.siblingAbove is None diff --git a/sphinx/domains/changeset.py b/sphinx/domains/changeset.py index d2492dcccb2..e2657ad63ed 100644 --- a/sphinx/domains/changeset.py +++ b/sphinx/domains/changeset.py @@ -135,7 +135,7 @@ def note_changeset(self, node: addnodes.versionmodified) -> None: objname = self.env.current_document.obj_desc_name changeset = ChangeSet( node['type'], - self.env.docname, + self.env.current_document.docname, node.line, # type: ignore[arg-type] module, objname, diff --git a/sphinx/domains/citation.py b/sphinx/domains/citation.py index 348888c2d50..da7fc6a3fdd 100644 --- a/sphinx/domains/citation.py +++ b/sphinx/domains/citation.py @@ -83,7 +83,7 @@ def note_citation(self, node: nodes.citation) -> None: def note_citation_reference(self, node: pending_xref) -> None: docnames = self.citation_refs.setdefault(node['reftarget'], set()) - docnames.add(self.env.docname) + docnames.add(self.env.current_document.docname) def check_consistency(self) -> None: for name, (docname, _labelid, lineno) in self.citations.items(): @@ -139,7 +139,7 @@ def apply(self, **kwargs: Any) -> None: domain = self.env.domains.citation_domain for node in self.document.findall(nodes.citation): # register citation node to domain - node['docname'] = self.env.docname + node['docname'] = self.env.current_document.docname domain.note_citation(node) # mark citation labels as not smartquoted diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py index ef486897bc4..0ccdc106c44 100644 --- a/sphinx/domains/cpp/__init__.py +++ b/sphinx/domains/cpp/__init__.py @@ -219,7 +219,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None: templateParams=None, templateArgs=None, declaration=decl_clone, - docname=self.env.docname, + docname=self.env.current_document.docname, 
line=self.get_source_info()[1], ) @@ -374,7 +374,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> ASTDeclaration: try: symbol = parent_symbol.add_declaration( - ast, docname=self.env.docname, line=self.get_source_info()[1] + ast, + docname=self.env.current_document.docname, + line=self.get_source_info()[1], ) # append the new declaration to the sibling list assert symbol.siblingAbove is None diff --git a/sphinx/domains/index.py b/sphinx/domains/index.py index 09a18d0180e..cefa64a8d5f 100644 --- a/sphinx/domains/index.py +++ b/sphinx/domains/index.py @@ -47,7 +47,7 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non def process_doc(self, env: BuildEnvironment, docname: str, document: Node) -> None: """Process a document after it is read by the environment.""" - entries = self.entries.setdefault(env.docname, []) + entries = self.entries.setdefault(env.current_document.docname, []) for node in list(document.findall(addnodes.index)): node_entries = node['entries'] try: diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py index 22673489d23..e620e04b401 100644 --- a/sphinx/domains/javascript.py +++ b/sphinx/domains/javascript.py @@ -363,7 +363,10 @@ def run(self) -> list[Node]: # Make a duplicate entry in 'objects' to facilitate searching for # the module in JavaScriptDomain.find_obj() domain.note_object( - mod_name, 'module', node_id, location=(self.env.docname, self.lineno) + mod_name, + 'module', + node_id, + location=(self.env.current_document.docname, self.lineno), ) # The node order is: index node first, then target node @@ -459,14 +462,14 @@ def note_object( docname, location=location, ) - self.objects[fullname] = (self.env.docname, node_id, objtype) + self.objects[fullname] = (self.env.current_document.docname, node_id, objtype) @property def modules(self) -> dict[str, tuple[str, str]]: return self.data.setdefault('modules', {}) # modname -> docname, node_id def note_module(self, modname: 
str, node_id: str) -> None: - self.modules[modname] = (self.env.docname, node_id) + self.modules[modname] = (self.env.current_document.docname, node_id) def clear_doc(self, docname: str) -> None: for fullname, (pkg_docname, _node_id, _l) in list(self.objects.items()): diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py index 1281b14ad58..a0a0571f069 100644 --- a/sphinx/domains/python/__init__.py +++ b/sphinx/domains/python/__init__.py @@ -818,7 +818,9 @@ def note_object( other.docname, location=location, ) - self.objects[name] = ObjectEntry(self.env.docname, node_id, objtype, aliased) + self.objects[name] = ObjectEntry( + self.env.current_document.docname, node_id, objtype, aliased + ) @property def modules(self) -> dict[str, ModuleEntry]: @@ -832,7 +834,7 @@ def note_module( .. versionadded:: 2.1 """ self.modules[name] = ModuleEntry( - docname=self.env.docname, + docname=self.env.current_document.docname, node_id=node_id, synopsis=synopsis, platform=platform, diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py index 2b486ea85ed..64aff25a015 100644 --- a/sphinx/domains/rst.py +++ b/sphinx/domains/rst.py @@ -266,7 +266,7 @@ def note_object( location=location, ) - self.objects[objtype, name] = (self.env.docname, node_id) + self.objects[objtype, name] = (self.env.current_document.docname, node_id) def clear_doc(self, docname: str) -> None: for (typ, name), (doc, _node_id) in list(self.objects.items()): diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py index 04161736675..556cb5c5d40 100644 --- a/sphinx/domains/std/__init__.py +++ b/sphinx/domains/std/__init__.py @@ -308,7 +308,10 @@ def add_target_and_index( domain = self.env.domains.standard_domain for optname in signode.get('allnames', ()): domain.add_program_option( - currprogram, optname, self.env.docname, signode['ids'][0] + currprogram, + optname, + self.env.current_document.docname, + signode['ids'][0], ) # create an index entry @@ -857,7 +860,7 @@ 
def note_object( docname, location=location, ) - self.objects[objtype, name] = (self.env.docname, labelid) + self.objects[objtype, name] = (self.env.current_document.docname, labelid) @property def _terms(self) -> dict[str, tuple[str, str]]: @@ -871,7 +874,7 @@ def _note_term(self, term: str, labelid: str, location: Any = None) -> None: """ self.note_object('term', term, labelid, location) - self._terms[term.lower()] = (self.env.docname, labelid) + self._terms[term.lower()] = (self.env.current_document.docname, labelid) @property def progoptions(self) -> dict[tuple[str | None, str], tuple[str, str]]: diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py index a09978ba279..36b364f5c3d 100644 --- a/sphinx/environment/__init__.py +++ b/sphinx/environment/__init__.py @@ -788,7 +788,7 @@ def apply_post_transforms(self, doctree: nodes.document, docname: str) -> None: new = deepcopy(backup) new.docname = docname try: - # set env.docname during applying post-transforms + # set env.current_document.docname during applying post-transforms self.current_document = new transformer = SphinxTransformer(doctree) diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py index e199fc90124..a1af7c33474 100644 --- a/sphinx/environment/collectors/asset.py +++ b/sphinx/environment/collectors/asset.py @@ -47,7 +47,7 @@ def merge_other( def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: """Process and rewrite image URIs.""" - docname = app.env.docname + docname = app.env.current_document.docname for node in doctree.findall(nodes.image): # Map the mimetype to the corresponding image. 
The writer may @@ -156,7 +156,9 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: if '://' in targetname: node['refuri'] = targetname else: - rel_filename, filename = app.env.relfn2path(targetname, app.env.docname) + rel_filename, filename = app.env.relfn2path( + targetname, app.env.current_document.docname + ) app.env.note_dependency(rel_filename) if not os.access(filename, os.R_OK): logger.warning( @@ -168,7 +170,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: ) continue node['filename'] = app.env.dlfiles.add_file( - app.env.docname, rel_filename + app.env.current_document.docname, rel_filename ).as_posix() diff --git a/sphinx/environment/collectors/metadata.py b/sphinx/environment/collectors/metadata.py index 2cda65beec2..8936341a919 100644 --- a/sphinx/environment/collectors/metadata.py +++ b/sphinx/environment/collectors/metadata.py @@ -41,7 +41,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: if index is None: return elif isinstance(doctree[index], nodes.docinfo): - md = app.env.metadata[app.env.docname] + md = app.env.metadata[app.env.current_document.docname] for node in doctree[index]: # type: ignore[attr-defined] # nodes are multiply inherited... 
if isinstance(node, nodes.authors): diff --git a/sphinx/environment/collectors/title.py b/sphinx/environment/collectors/title.py index 4bd3ed7e146..50dfa2bdc54 100644 --- a/sphinx/environment/collectors/title.py +++ b/sphinx/environment/collectors/title.py @@ -55,8 +55,8 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: else: # document has no title titlenode += nodes.Text(doctree.get('title', '')) - app.env.titles[app.env.docname] = titlenode - app.env.longtitles[app.env.docname] = longtitlenode + app.env.titles[app.env.current_document.docname] = titlenode + app.env.longtitles[app.env.current_document.docname] = longtitlenode def setup(app: Sphinx) -> ExtensionMetadata: diff --git a/sphinx/environment/collectors/toctree.py b/sphinx/environment/collectors/toctree.py index fddd269e1b7..5c3d5c97f8c 100644 --- a/sphinx/environment/collectors/toctree.py +++ b/sphinx/environment/collectors/toctree.py @@ -65,7 +65,7 @@ def merge_other( def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: """Build a TOC from the doctree and store it in the inventory.""" - docname = app.env.docname + docname = app.env.current_document.docname numentries = [0] # nonlocal again... 
def build_toc( diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py index 03d6383e0e1..fd0553047a9 100644 --- a/sphinx/ext/autodoc/directive.py +++ b/sphinx/ext/autodoc/directive.py @@ -163,7 +163,7 @@ def run(self) -> list[Node]: 'An option to %s is either unknown or has an invalid value: %s', self.name, exc, - location=(self.env.docname, lineno), + location=(self.env.current_document.docname, lineno), ) return [] diff --git a/sphinx/ext/autosectionlabel.py b/sphinx/ext/autosectionlabel.py index b1eaa0ceac9..7c5304ad83d 100644 --- a/sphinx/ext/autosectionlabel.py +++ b/sphinx/ext/autosectionlabel.py @@ -39,7 +39,7 @@ def register_sections_as_label(app: Sphinx, document: Node) -> None: ): continue labelid = node['ids'][0] - docname = app.env.docname + docname = app.env.current_document.docname title = cast('nodes.title', node[0]) ref_name = getattr(title, 'rawsource', title.astext()) if app.config.autosectionlabel_prefix_document: diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py index 62c9427ecdb..a0ae7af16b1 100644 --- a/sphinx/ext/autosummary/__init__.py +++ b/sphinx/ext/autosummary/__init__.py @@ -268,7 +268,7 @@ def run(self) -> list[Node]: nodes = self.get_table(items) if 'toctree' in self.options: - dirname = posixpath.dirname(self.env.docname) + dirname = posixpath.dirname(self.env.current_document.docname) tree_prefix = self.options['toctree'].strip() docnames = [] diff --git a/sphinx/ext/duration.py b/sphinx/ext/duration.py index 1cf3f7b58d4..3f7f64c2875 100644 --- a/sphinx/ext/duration.py +++ b/sphinx/ext/duration.py @@ -37,7 +37,7 @@ def reading_durations(self) -> dict[str, float]: return self.data.setdefault('reading_durations', {}) def note_reading_duration(self, duration: float) -> None: - self.reading_durations[self.env.docname] = duration + self.reading_durations[self.env.current_document.docname] = duration def clear(self) -> None: self.reading_durations.clear() diff --git 
a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py index b973c1f5870..8ba99cc24ad 100644 --- a/sphinx/ext/graphviz.py +++ b/sphinx/ext/graphviz.py @@ -167,7 +167,7 @@ def run(self) -> list[Node]: ] node = graphviz() node['code'] = dotcode - node['options'] = {'docname': self.env.docname} + node['options'] = {'docname': self.env.current_document.docname} if 'graphviz_dot' in self.options: node['options']['graphviz_dot'] = self.options['graphviz_dot'] @@ -212,7 +212,7 @@ def run(self) -> list[Node]: node = graphviz() dot_code = '\n'.join(self.content) node['code'] = f'{self.name} {self.arguments[0]} {{\n{dot_code}\n}}\n' - node['options'] = {'docname': self.env.docname} + node['options'] = {'docname': self.env.current_document.docname} if 'graphviz_dot' in self.options: node['options']['graphviz_dot'] = self.options['graphviz_dot'] if 'layout' in self.options: diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py index 2029a0ea971..b68222645bb 100644 --- a/sphinx/ext/intersphinx/_resolve.py +++ b/sphinx/ext/intersphinx/_resolve.py @@ -522,7 +522,7 @@ def _emit_warning(self, msg: str, /, *args: Any) -> None: *args, type='intersphinx', subtype='external', - location=(self.env.docname, self.lineno), + location=(self.env.current_document.docname, self.lineno), ) def _concat_strings(self, strings: Iterable[str]) -> str: diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py index 53c4d57b4f1..4b2e32bc9c4 100644 --- a/sphinx/ext/todo.py +++ b/sphinx/ext/todo.py @@ -59,7 +59,7 @@ def run(self) -> list[Node]: return [todo] todo.insert(0, nodes.title(text=_('Todo'))) - todo['docname'] = self.env.docname + todo['docname'] = self.env.current_document.docname self.add_name(todo) self.set_source_info(todo) self.state.document.note_explicit_target(todo) diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py index 2b9b479e0a1..af352eaaab6 100644 --- a/sphinx/ext/viewcode.py +++ b/sphinx/ext/viewcode.py @@ -166,7 +166,7 @@ def has_tag(modname: str, 
fullname: str, docname: str, refname: str) -> bool: if not modname: continue fullname = signode.get('fullname') - if not has_tag(modname, fullname, env.docname, refname): + if not has_tag(modname, fullname, env.current_document.docname, refname): continue if fullname in names: # only one link per name, please @@ -174,7 +174,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool: names.add(fullname) pagename = posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/')) signode += viewcode_anchor( - reftarget=pagename, refid=fullname, refdoc=env.docname + reftarget=pagename, refid=fullname, refdoc=env.current_document.docname ) diff --git a/sphinx/io.py b/sphinx/io.py index e2d299f8ae2..2d7b41beda5 100644 --- a/sphinx/io.py +++ b/sphinx/io.py @@ -86,7 +86,7 @@ def read_source(self, env: BuildEnvironment) -> str: # emit "source-read" event arg = [content] - env.events.emit('source-read', env.docname, arg) + env.events.emit('source-read', env.current_document.docname, arg) return arg[0] diff --git a/sphinx/roles.py b/sphinx/roles.py index 79ec70e90a3..cadfb5a027b 100644 --- a/sphinx/roles.py +++ b/sphinx/roles.py @@ -130,7 +130,7 @@ def create_xref_node(self) -> tuple[list[Node], list[system_message]]: # create the reference node options = { - 'refdoc': self.env.docname, + 'refdoc': self.env.current_document.docname, 'refdomain': self.refdomain, 'reftype': self.reftype, 'refexplicit': self.has_explicit_title, diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py index 27db99c542f..81182f01718 100644 --- a/sphinx/transforms/i18n.py +++ b/sphinx/transforms/i18n.py @@ -419,7 +419,9 @@ def apply(self, **kwargs: Any) -> None: settings, source = self.document.settings, self.document['source'] msgstr = '' - textdomain = docname_to_domain(self.env.docname, self.config.gettext_compact) + textdomain = docname_to_domain( + self.env.current_document.docname, self.config.gettext_compact + ) # fetch translations srcdir = self.env.srcdir diff 
--git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py index ae70ce195d9..ac95f56102a 100644 --- a/sphinx/transforms/post_transforms/__init__.py +++ b/sphinx/transforms/post_transforms/__init__.py @@ -98,7 +98,7 @@ def _resolve_pending_xref( new_node: nodes.reference | None typ = node['reftype'] target = node['reftarget'] - ref_doc = node.setdefault('refdoc', self.env.docname) + ref_doc = node.setdefault('refdoc', self.env.current_document.docname) ref_domain = node.get('refdomain', '') domain: Domain | None if ref_domain: diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py index 97b585d9cf6..6e6e9becb20 100644 --- a/sphinx/transforms/post_transforms/images.py +++ b/sphinx/transforms/post_transforms/images.py @@ -123,7 +123,7 @@ def _process_image(self, node: nodes.image, path: Path) -> None: node['candidates'].pop('?') node['candidates'][mimetype] = path_str node['uri'] = path_str - self.env.images.add_file(self.env.docname, path_str) + self.env.images.add_file(self.env.current_document.docname, path_str) class DataURIExtractor(BaseImageConverter): @@ -156,7 +156,7 @@ def handle(self, node: nodes.image) -> None: node['candidates'].pop('?') node['candidates'][image.mimetype] = path_str node['uri'] = path_str - self.env.images.add_file(self.env.docname, path_str) + self.env.images.add_file(self.env.current_document.docname, path_str) def get_filename_for(filename: str, mimetype: str) -> str: @@ -278,7 +278,7 @@ def handle(self, node: nodes.image) -> None: node['uri'] = str(destpath) self.env.original_image_uri[destpath] = srcpath - self.env.images.add_file(self.env.docname, destpath) + self.env.images.add_file(self.env.current_document.docname, destpath) def convert( self, _from: str | os.PathLike[str], _to: str | os.PathLike[str] diff --git a/sphinx/transforms/references.py b/sphinx/transforms/references.py index 447e9ded568..17380777997 100644 --- 
a/sphinx/transforms/references.py +++ b/sphinx/transforms/references.py @@ -36,7 +36,9 @@ class SphinxDomains(SphinxTransform): default_priority = 850 def apply(self, **kwargs: Any) -> None: - self.env.domains._process_doc(self.env, self.env.docname, self.document) + self.env.domains._process_doc( + self.env, self.env.current_document.docname, self.document + ) def setup(app: Sphinx) -> ExtensionMetadata: diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py index dd1616a8f31..7553119334b 100644 --- a/sphinx/util/i18n.py +++ b/sphinx/util/i18n.py @@ -319,7 +319,7 @@ def get_image_filename_for_language( ) -> str: root, ext = os.path.splitext(filename) dirname = os.path.dirname(root) - docpath = os.path.dirname(env.docname) + docpath = os.path.dirname(env.current_document.docname) try: return env.config.figure_language_filename.format( root=root, diff --git a/sphinx/versioning.py b/sphinx/versioning.py index 3de5a17ec9c..02bc6edd055 100644 --- a/sphinx/versioning.py +++ b/sphinx/versioning.py @@ -160,7 +160,7 @@ def apply(self, **kwargs: Any) -> None: if env.versioning_compare: # get old doctree - filename = env.doctreedir / f'{env.docname}.doctree' + filename = env.doctreedir / f'{env.current_document.docname}.doctree' try: with open(filename, 'rb') as f: old_doctree = pickle.load(f) From 616faf830e748d91ba2de4122b0fa04e633f6fba Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 16 Jun 2025 21:32:22 +0100 Subject: [PATCH 141/435] Simplify ``_publish_msgstr()`` (#13670) --- sphinx/transforms/i18n.py | 101 ++++++++++++++------------------------ 1 file changed, 36 insertions(+), 65 deletions(-) diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py index 81182f01718..570154185e9 100644 --- a/sphinx/transforms/i18n.py +++ b/sphinx/transforms/i18n.py @@ -2,23 +2,21 @@ from __future__ import annotations -import contextlib from re import DOTALL, match from textwrap import indent from typing import TYPE_CHECKING, 
Any, TypeVar +import docutils.utils from docutils import nodes -from docutils.io import StringInput from sphinx import addnodes from sphinx.domains.std import make_glossary_term, split_term_classifiers from sphinx.errors import ConfigError -from sphinx.io import SphinxBaseReader from sphinx.locale import __ from sphinx.locale import init as init_locale -from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransform -from sphinx.transforms.references import SphinxDomains +from sphinx.transforms import SphinxTransform from sphinx.util import get_filetype, logging +from sphinx.util.docutils import LoggingReporter from sphinx.util.i18n import docname_to_domain from sphinx.util.index_entries import split_index_msg from sphinx.util.nodes import ( @@ -28,11 +26,12 @@ extract_messages, traverse_translatable_index, ) -from sphinx.versioning import UIDTransform if TYPE_CHECKING: from collections.abc import Sequence + from docutils.frontend import Values + from sphinx.application import Sphinx from sphinx.config import Config from sphinx.environment import BuildEnvironment @@ -52,77 +51,49 @@ N = TypeVar('N', bound=nodes.Node) -class _SphinxI18nReader(SphinxBaseReader): - """A document reader for internationalisation (i18n). - - This returns the source line number of the original text - as the current source line number to let users know where - the error happened, because the translated texts are - partial and they don't have correct line numbers. 
- """ - - def __init__( - self, *args: Any, registry: SphinxComponentRegistry, **kwargs: Any - ) -> None: - super().__init__(*args, **kwargs) - unused = frozenset({ - PreserveTranslatableMessages, - Locale, - RemoveTranslatableInline, - AutoIndexUpgrader, - SphinxDomains, - DoctreeReadEvent, - UIDTransform, - }) - transforms = self.transforms + registry.get_transforms() - self.transforms = [ - transform for transform in transforms if transform not in unused - ] - - -def publish_msgstr( +def _publish_msgstr( source: str, source_path: str, source_line: int, - config: Config, - settings: Any, *, + config: Config, env: BuildEnvironment, registry: SphinxComponentRegistry, + settings: Values, ) -> nodes.Element: """Publish msgstr (single line) into docutils document :param str source: source text :param str source_path: source path for warning indication :param source_line: source line for warning indication - :param sphinx.config.Config config: sphinx config :param docutils.frontend.Values settings: docutils settings - :return: document - :rtype: docutils.nodes.document + :param sphinx.config.Config config: sphinx config :param sphinx.environment.BuildEnvironment env: sphinx environment :param sphinx.registry.SphinxComponentRegistry registry: sphinx registry + :return: document + :rtype: docutils.nodes.document """ + filetype = get_filetype(config.source_suffix, source_path) + doc = docutils.utils.new_document( + f'{source_path}:{source_line}:', settings + ) + doc.reporter = LoggingReporter.from_reporter(doc.reporter) + + # clear rst_prolog temporarily + rst_prolog = config.rst_prolog + config.rst_prolog = None try: - # clear rst_prolog temporarily - rst_prolog = config.rst_prolog - config.rst_prolog = None - - reader = _SphinxI18nReader(registry=registry) - filetype = get_filetype(config.source_suffix, source_path) parser = registry.create_source_parser(filetype, config=config, env=env) - doc = reader.read( - source=StringInput( - source=source, 
source_path=f'{source_path}:{source_line}:' - ), - parser=parser, - settings=settings, - ) - with contextlib.suppress(IndexError): # empty node - return doc[0] - return doc + parser.parse(source, doc) + doc.current_source = doc.current_line = None finally: config.rst_prolog = rst_prolog + try: + return doc[0] # type: ignore[return-value] + except IndexError: # empty node + return doc + def parse_noqa(source: str) -> tuple[str, bool]: m = match(r'(.*)(? None: if isinstance(node, LITERAL_TYPE_NODES): msgstr = '::\n\n' + indent(msgstr, ' ' * 3) - patch = publish_msgstr( + patch = _publish_msgstr( msgstr, source, node.line, # type: ignore[arg-type] - self.config, - settings, + config=self.config, env=self.env, registry=self.env._registry, + settings=settings, ) # FIXME: no warnings about inconsistent references in this part # XXX doctest and other block markup @@ -491,14 +462,14 @@ def apply(self, **kwargs: Any) -> None: if isinstance(node, nodes.term): for _id in node['ids']: term, first_classifier = split_term_classifiers(msgstr) - patch = publish_msgstr( + patch = _publish_msgstr( term or '', source, node.line, # type: ignore[arg-type] - self.config, - settings, + config=self.config, env=self.env, registry=self.env._registry, + settings=settings, ) updater.patch = make_glossary_term( self.env, @@ -569,14 +540,14 @@ def apply(self, **kwargs: Any) -> None: # This generates:
    msgstr
    msgstr = msgstr + '\n' + '=' * len(msgstr) * 2 - patch = publish_msgstr( + patch = _publish_msgstr( msgstr, source, node.line, # type: ignore[arg-type] - self.config, - settings, + config=self.config, env=self.env, registry=self.env._registry, + settings=settings, ) # Structural Subelements phase2 if isinstance(node, nodes.title): From 2b8f6dab31d5fef15f034b84be8e7e946b9fe961 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Mon, 16 Jun 2025 23:09:54 +0100 Subject: [PATCH 142/435] Make parsing more explicit in ``sphinx.testing.restructuredtext`` (#13671) --- sphinx/testing/restructuredtext.py | 83 +++++++++++++++++++++--------- 1 file changed, 59 insertions(+), 24 deletions(-) diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index 68c78199606..c8fcd597aa5 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -1,42 +1,77 @@ from __future__ import annotations +import warnings from typing import TYPE_CHECKING -from docutils.core import publish_doctree +from docutils import nodes +from docutils.frontend import OptionParser -from sphinx.io import SphinxStandaloneReader +from sphinx.io import SphinxBaseReader from sphinx.parsers import RSTParser -from sphinx.util.docutils import sphinx_domains +from sphinx.transforms import SphinxTransformer +from sphinx.util.docutils import LoggingReporter, sphinx_domains if TYPE_CHECKING: - from docutils import nodes + from docutils.frontend import Values from sphinx.application import Sphinx def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: - """Parse a string as reStructuredText with Sphinx application.""" + """Parse a string as reStructuredText with Sphinx.""" + config = app.config env = app.env + registry = app.registry + srcdir = app.srcdir + + source_path = str(srcdir / f'{docname}.rst') + + # Get settings + settings_overrides = { + 'gettext_compact': True, + 'input_encoding': 
'utf-8', + 'output_encoding': 'unicode', + 'traceback': True, + } + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning) + option_parser = OptionParser( + components=(RSTParser, SphinxBaseReader), defaults=settings_overrides + ) + settings: Values = option_parser.get_default_values() # type: ignore[assignment] + settings._source = source_path + settings.env = env + + # Create parser + parser = RSTParser() + parser._config = config + parser._env = env + + # Create root document node + reporter = LoggingReporter( + source_path, + settings.report_level, + settings.halt_level, + settings.debug, + settings.error_encoding_error_handler, + ) + document = nodes.document(settings, reporter, source=source_path) + document.note_source(source_path, -1) + + # substitute transformer + document.transformer = transformer = SphinxTransformer(document) + transformer.add_transforms(SphinxBaseReader().get_transforms()) + transformer.add_transforms(registry.get_transforms()) + transformer.add_transforms(parser.get_transforms()) + + env.current_document.docname = docname try: - app.env.current_document.docname = docname - reader = SphinxStandaloneReader() - reader._setup_transforms(app.registry.get_transforms()) - parser = RSTParser() - parser._config = app.config - parser._env = app.env with sphinx_domains(env): - return publish_doctree( - text, - str(app.srcdir / f'{docname}.rst'), - reader=reader, - parser=parser, - settings_overrides={ - 'env': env, - 'gettext_compact': True, - 'input_encoding': 'utf-8', - 'output_encoding': 'unicode', - 'traceback': True, - }, - ) + parser.parse(text, document) + document.current_source = document.current_line = None + + transformer.apply_transforms() finally: env.current_document.docname = '' + + return document From 97f2fb2e13cb78f3d1788bee8f0bc24b21986e5c Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Tue, 17 Jun 2025 01:27:14 +0100 Subject: [PATCH 
143/435] Add ``_get_settings()`` helper function (#13672) --- sphinx/__init__.py | 10 ---------- sphinx/builders/html/__init__.py | 16 ++++------------ sphinx/builders/latex/__init__.py | 17 ++++------------- sphinx/builders/manpage.py | 16 ++++------------ sphinx/builders/texinfo.py | 17 ++++------------- sphinx/testing/restructuredtext.py | 13 ++----------- sphinx/util/docutils.py | 23 ++++++++++++++++++++++- tests/test_markup/test_markup.py | 17 +++++------------ tests/test_search.py | 8 +------- tests/test_util/test_util_nodes.py | 7 +------ 10 files changed, 47 insertions(+), 97 deletions(-) diff --git a/sphinx/__init__.py b/sphinx/__init__.py index 6ddfdba271f..79df3e09df3 100644 --- a/sphinx/__init__.py +++ b/sphinx/__init__.py @@ -5,22 +5,12 @@ from __future__ import annotations -import warnings - from sphinx.util._pathlib import _StrPath TYPE_CHECKING = False if TYPE_CHECKING: from typing import Final -warnings.filterwarnings( - 'ignore', - 'The frontend.Option class .*', - DeprecationWarning, - module='docutils.frontend', -) -del warnings - __version__: Final = '8.3.0' __display_version__: Final = __version__ # used for command line version diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py index b6a67f5f453..e72dffc2b33 100644 --- a/sphinx/builders/html/__init__.py +++ b/sphinx/builders/html/__init__.py @@ -10,7 +10,6 @@ import re import shutil import sys -import warnings from pathlib import Path from types import NoneType from typing import TYPE_CHECKING @@ -21,7 +20,6 @@ import jinja2.exceptions from docutils import nodes from docutils.core import Publisher -from docutils.frontend import OptionParser from docutils.io import DocTreeInput, StringOutput from sphinx import __display_version__, package_dir @@ -50,7 +48,7 @@ from sphinx.util._timestamps import _format_rfc3339_microseconds from sphinx.util._uri import is_url from sphinx.util.display import progress_message, status_iterator -from sphinx.util.docutils import 
new_document +from sphinx.util.docutils import _get_settings, new_document from sphinx.util.fileutil import copy_asset from sphinx.util.i18n import format_date from sphinx.util.inventory import InventoryFile @@ -459,15 +457,9 @@ def prepare_writing(self, docnames: Set[str]) -> None: self.load_indexer(docnames) self.docwriter = HTMLWriter(self) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. - self.docsettings: Any = OptionParser( - defaults=self.env.settings, - components=(self.docwriter,), - read_config_files=True, - ).get_default_values() + self.docsettings = _get_settings( + HTMLWriter, defaults=self.env.settings, read_config_files=True + ) self.docsettings.compact_lists = bool(self.config.html_compact_lists) # determine the additional indices to include diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py index d22c959b276..feaa8e021cb 100644 --- a/sphinx/builders/latex/__init__.py +++ b/sphinx/builders/latex/__init__.py @@ -4,12 +4,9 @@ import os import os.path -import warnings from pathlib import Path from typing import TYPE_CHECKING -from docutils.frontend import OptionParser - import sphinx.builders.latex.nodes # NoQA: F401 # Workaround: import this before writer to avoid ImportError from sphinx import addnodes, highlighting, package_dir from sphinx._cli.util.colour import darkgreen @@ -27,7 +24,7 @@ from sphinx.locale import _, __ from sphinx.util import logging, texescape from sphinx.util.display import progress_message, status_iterator -from sphinx.util.docutils import SphinxFileOutput, new_document +from sphinx.util.docutils import SphinxFileOutput, _get_settings, new_document from sphinx.util.fileutil import copy_asset_file from sphinx.util.i18n import format_date from sphinx.util.nodes import inline_all_toctrees @@ -301,15 +298,9 
@@ def copy_assets(self) -> None: def write_documents(self, _docnames: Set[str]) -> None: docwriter = LaTeXWriter(self) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. - docsettings: Any = OptionParser( - defaults=self.env.settings, - components=(docwriter,), - read_config_files=True, - ).get_default_values() + docsettings = _get_settings( + LaTeXWriter, defaults=self.env.settings, read_config_files=True + ) for entry in self.document_data: docname, targetname, title, author, themename = entry[:5] diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py index feeb35c1877..799f2a64f54 100644 --- a/sphinx/builders/manpage.py +++ b/sphinx/builders/manpage.py @@ -2,10 +2,8 @@ from __future__ import annotations -import warnings from typing import TYPE_CHECKING -from docutils.frontend import OptionParser from docutils.io import FileOutput from sphinx import addnodes @@ -14,13 +12,13 @@ from sphinx.locale import __ from sphinx.util import logging from sphinx.util.display import progress_message +from sphinx.util.docutils import _get_settings from sphinx.util.nodes import inline_all_toctrees from sphinx.util.osutil import ensuredir, make_filename_from_project from sphinx.writers.manpage import ManualPageTranslator, ManualPageWriter if TYPE_CHECKING: from collections.abc import Set - from typing import Any from sphinx.application import Sphinx from sphinx.config import Config @@ -54,15 +52,9 @@ def get_target_uri(self, docname: str, typ: str | None = None) -> str: @progress_message(__('writing')) def write_documents(self, _docnames: Set[str]) -> None: docwriter = ManualPageWriter(self) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a 
subclass of argparse.ArgumentParser in Docutils 0.21 or later. - docsettings: Any = OptionParser( - defaults=self.env.settings, - components=(docwriter,), - read_config_files=True, - ).get_default_values() + docsettings = _get_settings( + ManualPageWriter, defaults=self.env.settings, read_config_files=True + ) for info in self.config.man_pages: docname, name, description, authors, section = info diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py index 6611be05465..a0a8a9f8dea 100644 --- a/sphinx/builders/texinfo.py +++ b/sphinx/builders/texinfo.py @@ -3,11 +3,9 @@ from __future__ import annotations import os.path -import warnings from typing import TYPE_CHECKING from docutils import nodes -from docutils.frontend import OptionParser from docutils.io import FileOutput from sphinx import addnodes, package_dir @@ -18,14 +16,13 @@ from sphinx.locale import _, __ from sphinx.util import logging from sphinx.util.display import progress_message, status_iterator -from sphinx.util.docutils import new_document +from sphinx.util.docutils import _get_settings, new_document from sphinx.util.nodes import inline_all_toctrees from sphinx.util.osutil import SEP, copyfile, ensuredir, make_filename_from_project from sphinx.writers.texinfo import TexinfoTranslator, TexinfoWriter if TYPE_CHECKING: from collections.abc import Iterable, Set - from typing import Any from docutils.nodes import Node @@ -119,15 +116,9 @@ def write_documents(self, _docnames: Set[str]) -> None: with progress_message(__('writing')): self.post_process_images(doctree) docwriter = TexinfoWriter(self) - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. 
- settings: Any = OptionParser( - defaults=self.env.settings, - components=(docwriter,), - read_config_files=True, - ).get_default_values() + settings = _get_settings( + TexinfoWriter, defaults=self.env.settings, read_config_files=True + ) settings.author = author settings.title = title settings.texinfo_filename = targetname[:-5] + '.info' diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index c8fcd597aa5..e5f32cf695d 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -1,19 +1,15 @@ from __future__ import annotations -import warnings from typing import TYPE_CHECKING from docutils import nodes -from docutils.frontend import OptionParser from sphinx.io import SphinxBaseReader from sphinx.parsers import RSTParser from sphinx.transforms import SphinxTransformer -from sphinx.util.docutils import LoggingReporter, sphinx_domains +from sphinx.util.docutils import LoggingReporter, _get_settings, sphinx_domains if TYPE_CHECKING: - from docutils.frontend import Values - from sphinx.application import Sphinx @@ -33,12 +29,7 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: 'output_encoding': 'unicode', 'traceback': True, } - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - option_parser = OptionParser( - components=(RSTParser, SphinxBaseReader), defaults=settings_overrides - ) - settings: Values = option_parser.get_default_values() # type: ignore[assignment] + settings = _get_settings(SphinxBaseReader, RSTParser, defaults=settings_overrides) settings._source = source_path settings.env = env diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py index b53774aa26f..f75acb1cb5b 100644 --- a/sphinx/util/docutils.py +++ b/sphinx/util/docutils.py @@ -4,6 +4,7 @@ import os import re +import warnings from contextlib import contextmanager from copy import copy from pathlib import Path @@ -11,6 +12,7 @@ import docutils from 
docutils import nodes +from docutils.frontend import OptionParser from docutils.io import FileOutput from docutils.parsers.rst import Directive, directives, roles from docutils.statemachine import StateMachine @@ -27,10 +29,11 @@ ) if TYPE_CHECKING: - from collections.abc import Iterator, Sequence + from collections.abc import Iterator, Mapping, Sequence from types import ModuleType, TracebackType from typing import Any, Protocol + from docutils import Component from docutils.frontend import Values from docutils.nodes import Element, Node, system_message from docutils.parsers.rst.states import Inliner @@ -816,3 +819,21 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document: document = nodes.document(settings, reporter, source=source_path) document.note_source(source_path, -1) return document + + +def _get_settings( + *components: Component | type[Component], + defaults: Mapping[str, Any], + read_config_files: bool = False, +) -> Values: + with warnings.catch_warnings(action='ignore', category=DeprecationWarning): + # DeprecationWarning: The frontend.OptionParser class will be replaced + # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. + # DeprecationWarning: The frontend.Option class will be removed + # in Docutils 0.21 or later. 
+ option_parser = OptionParser( + components=components, + defaults=defaults, + read_config_files=read_config_files, + ) + return option_parser.get_default_values() # type: ignore[return-value] diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py index 3a370ee46ad..f9da6038c7f 100644 --- a/tests/test_markup/test_markup.py +++ b/tests/test_markup/test_markup.py @@ -3,11 +3,10 @@ from __future__ import annotations import re -import warnings from types import SimpleNamespace import pytest -from docutils import frontend, nodes, utils +from docutils import nodes, utils from docutils.parsers.rst import Parser as RstParser from sphinx import addnodes @@ -17,7 +16,7 @@ from sphinx.testing.util import assert_node from sphinx.transforms import SphinxSmartQuotes from sphinx.util import texescape -from sphinx.util.docutils import sphinx_domains +from sphinx.util.docutils import _get_settings, sphinx_domains from sphinx.writers.html import HTMLWriter from sphinx.writers.html5 import HTML5Translator from sphinx.writers.latex import LaTeXTranslator, LaTeXWriter @@ -27,15 +26,9 @@ def settings(app): env = app.env texescape.init() # otherwise done by the latex builder - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. 
- optparser = frontend.OptionParser( - components=(RstParser, HTMLWriter, LaTeXWriter), - defaults=default_settings, - ) - settings = optparser.get_default_values() + settings = _get_settings( + RstParser, HTMLWriter, LaTeXWriter, defaults=default_settings + ) settings.smart_quotes = True settings.env = env settings.env.current_document.docname = 'dummy' diff --git a/tests/test_search.py b/tests/test_search.py index a8ad186a533..5ed753a2ea1 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -3,7 +3,6 @@ from __future__ import annotations import json -import warnings from io import BytesIO from typing import TYPE_CHECKING @@ -169,12 +168,7 @@ def test_term_in_raw_directive(app: SphinxTestApp) -> None: def test_IndexBuilder(): - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. - optparser = frontend.OptionParser(components=(rst.Parser,)) - settings = optparser.get_default_values() + settings = frontend.get_default_settings(rst.Parser) parser = rst.Parser() domain1 = DummyDomain( diff --git a/tests/test_util/test_util_nodes.py b/tests/test_util/test_util_nodes.py index 61342efdb1b..39c43d6e88a 100644 --- a/tests/test_util/test_util_nodes.py +++ b/tests/test_util/test_util_nodes.py @@ -2,7 +2,6 @@ from __future__ import annotations -import warnings from textwrap import dedent from typing import TYPE_CHECKING, Any @@ -30,11 +29,7 @@ def _transform(doctree) -> None: def create_new_document() -> document: - with warnings.catch_warnings(): - warnings.filterwarnings('ignore', category=DeprecationWarning) - # DeprecationWarning: The frontend.OptionParser class will be replaced - # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later. 
- settings = frontend.OptionParser(components=(rst.Parser,)).get_default_values() + settings = frontend.get_default_settings(rst.Parser) settings.id_prefix = 'id' document = new_document('dummy.txt', settings) return document From 88f7fa95fee90382bdc65ccf80b7add980372c63 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 18 Jun 2025 03:33:07 +0100 Subject: [PATCH 144/435] Add ``_parse_str_to_doctree()`` helper method (#13673) --- sphinx/testing/restructuredtext.py | 46 ++++--------- sphinx/transforms/__init__.py | 3 +- sphinx/util/docutils.py | 65 ++++++++++++++++++- .../test_directive_object_description.py | 20 +++--- 4 files changed, 89 insertions(+), 45 deletions(-) diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py index e5f32cf695d..548ef5f27b4 100644 --- a/sphinx/testing/restructuredtext.py +++ b/sphinx/testing/restructuredtext.py @@ -2,14 +2,12 @@ from typing import TYPE_CHECKING -from docutils import nodes - -from sphinx.io import SphinxBaseReader from sphinx.parsers import RSTParser -from sphinx.transforms import SphinxTransformer -from sphinx.util.docutils import LoggingReporter, _get_settings, sphinx_domains +from sphinx.util.docutils import _parse_str_to_doctree if TYPE_CHECKING: + from docutils import nodes + from sphinx.application import Sphinx @@ -20,49 +18,29 @@ def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document: registry = app.registry srcdir = app.srcdir - source_path = str(srcdir / f'{docname}.rst') - # Get settings settings_overrides = { + 'env': env, 'gettext_compact': True, 'input_encoding': 'utf-8', 'output_encoding': 'unicode', 'traceback': True, } - settings = _get_settings(SphinxBaseReader, RSTParser, defaults=settings_overrides) - settings._source = source_path - settings.env = env # Create parser parser = RSTParser() parser._config = config parser._env = env - # Create root document node - reporter = LoggingReporter( - source_path, 
- settings.report_level, - settings.halt_level, - settings.debug, - settings.error_encoding_error_handler, - ) - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - - # substitute transformer - document.transformer = transformer = SphinxTransformer(document) - transformer.add_transforms(SphinxBaseReader().get_transforms()) - transformer.add_transforms(registry.get_transforms()) - transformer.add_transforms(parser.get_transforms()) - env.current_document.docname = docname try: - with sphinx_domains(env): - parser.parse(text, document) - document.current_source = document.current_line = None - - transformer.apply_transforms() + return _parse_str_to_doctree( + text, + filename=srcdir / f'{docname}.rst', + default_settings=settings_overrides, + env=env, + parser=parser, + transforms=registry.get_transforms(), + ) finally: env.current_document.docname = '' - - return document diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py index 7ba50aaa240..760a5e6a67d 100644 --- a/sphinx/transforms/__init__.py +++ b/sphinx/transforms/__init__.py @@ -18,7 +18,6 @@ from sphinx.deprecation import _deprecation_warning from sphinx.locale import _, __ from sphinx.util import logging -from sphinx.util.docutils import new_document from sphinx.util.i18n import format_date from sphinx.util.nodes import apply_source_workaround, is_smartquotable @@ -97,6 +96,8 @@ def apply_transforms(self) -> None: else: # wrap the target node by document node during transforming try: + from sphinx.util.docutils import new_document + document = new_document('') if self.env: document.settings.env = self.env diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py index f75acb1cb5b..5d709ff0434 100644 --- a/sphinx/util/docutils.py +++ b/sphinx/util/docutils.py @@ -5,7 +5,7 @@ import os import re import warnings -from contextlib import contextmanager +from contextlib import contextmanager, nullcontext from copy import copy from 
pathlib import Path from typing import TYPE_CHECKING @@ -15,12 +15,15 @@ from docutils.frontend import OptionParser from docutils.io import FileOutput from docutils.parsers.rst import Directive, directives, roles +from docutils.readers import standalone from docutils.statemachine import StateMachine +from docutils.transforms.references import DanglingReferences from docutils.utils import Reporter, unescape from sphinx.errors import SphinxError from sphinx.locale import __ -from sphinx.util import logging +from sphinx.transforms import SphinxTransformer +from sphinx.util import logging, rst from sphinx.util.parsing import nested_parse_to_nodes logger = logging.getLogger(__name__) @@ -36,8 +39,10 @@ from docutils import Component from docutils.frontend import Values from docutils.nodes import Element, Node, system_message + from docutils.parsers import Parser from docutils.parsers.rst.states import Inliner from docutils.statemachine import State, StringList + from docutils.transforms import Transform from sphinx.builders import Builder from sphinx.config import Config @@ -69,6 +74,13 @@ def __call__( ) -> tuple[RoleFunction | None, list[system_message]]: ... 
+_READER_TRANSFORMS = [ + transform + for transform in standalone.Reader().get_transforms() + if transform is not DanglingReferences +] + + additional_nodes: set[type[Element]] = set() @@ -821,6 +833,55 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document: return document +def _parse_str_to_doctree( + content: str, + *, + filename: Path, + default_role: str = '', + default_settings: Mapping[str, Any], + env: BuildEnvironment, + parser: Parser, + transforms: Sequence[type[Transform]] = (), +) -> nodes.document: + env.current_document._parser = parser + + # Propagate exceptions by default when used programmatically: + defaults = {'traceback': True, **default_settings} + settings = _get_settings(standalone.Reader, parser, defaults=defaults) + settings._source = str(filename) + + # Create root document node + reporter = LoggingReporter( + source=str(filename), + report_level=settings.report_level, + halt_level=settings.halt_level, + debug=settings.debug, + error_handler=settings.error_encoding_error_handler, + ) + document = nodes.document(settings, reporter, source=str(filename)) + document.note_source(str(filename), -1) + + # substitute transformer + document.transformer = transformer = SphinxTransformer(document) + transformer.add_transforms(_READER_TRANSFORMS) + transformer.add_transforms(transforms) + transformer.add_transforms(parser.get_transforms()) + + if default_role: + default_role_cm = rst.default_role(env.current_document.docname, default_role) + else: + default_role_cm = nullcontext() # type: ignore[assignment] + with sphinx_domains(env), default_role_cm: + # parse content to abstract syntax tree + parser.parse(content, document) + document.current_source = document.current_line = None + + # run transforms + transformer.apply_transforms() + + return document + + def _get_settings( *components: Component | type[Component], defaults: Mapping[str, Any], diff --git a/tests/test_directives/test_directive_object_description.py 
b/tests/test_directives/test_directive_object_description.py index 6b85c34d326..6759271bbf0 100644 --- a/tests/test_directives/test_directive_object_description.py +++ b/tests/test_directives/test_directive_object_description.py @@ -9,9 +9,8 @@ from docutils import nodes from sphinx import addnodes -from sphinx.io import _create_publisher from sphinx.testing import restructuredtext -from sphinx.util.docutils import sphinx_domains +from sphinx.util.docutils import _parse_str_to_doctree if TYPE_CHECKING: from sphinx.application import Sphinx @@ -24,15 +23,20 @@ def _doctree_for_test( ) -> nodes.document: config = app.config registry = app.registry + + filename = env.doc2path(docname) + content = filename.read_text(encoding='utf-8') + env.prepare_settings(docname) parser = registry.create_source_parser('restructuredtext', config=config, env=env) - publisher = _create_publisher( - env=env, parser=parser, transforms=registry.get_transforms() + return _parse_str_to_doctree( + content, + filename=env.doc2path(docname), + default_settings={'env': env}, + env=env, + parser=parser, + transforms=registry.get_transforms(), ) - with sphinx_domains(env): - publisher.set_source(source_path=str(env.doc2path(docname))) - publisher.publish() - return publisher.document @pytest.mark.sphinx('text', testroot='object-description-sections') From c188e3f24374c2277345e75b342c2e9f09445df3 Mon Sep 17 00:00:00 2001 From: Adam Turner <9087854+AA-Turner@users.noreply.github.com> Date: Wed, 18 Jun 2025 03:33:34 +0100 Subject: [PATCH 145/435] Restore support for nested ``only`` nodes in toctrees (#13663) --- sphinx/environment/adapters/toctree.py | 118 +++++++++++------- sphinx/util/tags.py | 4 +- tests/roots/test-toctree-only/conf.py | 0 tests/roots/test-toctree-only/index.rst | 26 ++++ .../test_environment_toctree.py | 92 +++++++++++++- 5 files changed, 192 insertions(+), 48 deletions(-) create mode 100644 tests/roots/test-toctree-only/conf.py create mode 100644 
tests/roots/test-toctree-only/index.rst diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py index 4708383d64b..670ac786629 100644 --- a/sphinx/environment/adapters/toctree.py +++ b/sphinx/environment/adapters/toctree.py @@ -482,56 +482,84 @@ def _toctree_add_classes(node: Element, depth: int, docname: str) -> None: subnode = subnode.parent -ET = TypeVar('ET', bound=Element) +_ET = TypeVar('_ET', bound=Element) def _toctree_copy( - node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tags -) -> ET: + node: _ET, depth: int, maxdepth: int, collapse: bool, tags: Tags +) -> _ET: """Utility: Cut and deep-copy a TOC at a specified depth.""" - keep_bullet_list_sub_nodes = depth <= 1 or ( - (depth <= maxdepth or maxdepth <= 0) and (not collapse or 'iscurrent' in node) - ) + assert not isinstance(node, addnodes.only) + depth = max(depth - 1, 1) + copied = _toctree_copy_seq(node, depth, maxdepth, collapse, tags, initial_call=True) + assert len(copied) == 1 + return copied[0] # type: ignore[return-value] - copy = node.copy() - for subnode in node.children: - if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item): - # for

    and

  • , just recurse - copy.append(_toctree_copy(subnode, depth, maxdepth, collapse, tags)) - elif isinstance(subnode, nodes.bullet_list): - # for