diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 2dd61db..267683f 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -1,12 +1,9 @@ name: Black Formatting -on: - push: - branches: - - "!docs" +on: push jobs: - formatting: + formatting-black: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -16,6 +13,5 @@ jobs: - name: Code Formatting (App) run: | - pip install black - pip install -r requirements.txt + pip install -r requirements-dev.txt black --check . diff --git a/.github/workflows/isort.yml b/.github/workflows/isort.yml index 2c85199..3257dc9 100644 --- a/.github/workflows/isort.yml +++ b/.github/workflows/isort.yml @@ -1,11 +1,9 @@ name: Isort Formatting -on: - push: - branches: - - "!docs" +on: push + jobs: - formatting: + formatting-isort: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -15,6 +13,5 @@ jobs: - name: Code Formatting (App) run: | - pip install isort - pip install -r requirements.txt + pip install -r requirements-dev.txt isort --check . diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 25b8f06..17e9931 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -17,7 +17,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install setuptools wheel twine build + pip install -r requirements-dev.txt - name: Build and publish to Pypi env: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} diff --git a/.github/workflows/ruff-format.yml b/.github/workflows/ruff-format.yml index 7231f69..16adc33 100644 --- a/.github/workflows/ruff-format.yml +++ b/.github/workflows/ruff-format.yml @@ -1,12 +1,9 @@ name: Ruff Formatting -on: - push: - branches: - - "!docs" +on: push jobs: - formatting: + formatting-ruff: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -16,6 +13,5 @@ jobs: - name: Code Formatting (App) run: | - pip install ruff - pip install -r requirements.txt + pip install -r requirements-dev.txt ruff format --check . diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml index 5da112a..98f68b8 100644 --- a/.github/workflows/ruff.yml +++ b/.github/workflows/ruff.yml @@ -1,11 +1,9 @@ name: Ruff Check -on: - push: - branches: - - "!docs" +on: push + jobs: - formatting: + ruff-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -15,6 +13,5 @@ jobs: - name: Code Formatting (App) run: | - pip install ruff - pip install -r requirements.txt + pip install -r requirements-dev.txt ruff check . diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 120f180..afc9074 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,11 +1,9 @@ name: Code Tests -on: - push: - branches: - - "!docs" +on: push + jobs: - formatting: + testing: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -16,5 +14,5 @@ jobs: - name: Run tests run: | pip install . - pip install pytest + pip install -r requirements-dev.txt python3 -m pytest . diff --git a/.github/workflows/windows-tests.yml b/.github/workflows/windows-tests.yml index 6cd25ae..25e1368 100644 --- a/.github/workflows/windows-tests.yml +++ b/.github/workflows/windows-tests.yml @@ -1,11 +1,9 @@ name: Code Tests (On Windows) -on: - push: - branches: - - "!docs" +on: push + jobs: - formatting: + testing-windows: runs-on: windows-latest steps: - uses: actions/checkout@v2 @@ -16,5 +14,5 @@ jobs: - name: Run tests run: | pip install . - pip install pytest + pip install -r requirements-dev.txt python3 -m pytest . 
-s diff --git a/.gitignore b/.gitignore index e68931d..0c660a6 100644 --- a/.gitignore +++ b/.gitignore @@ -5,5 +5,9 @@ __pycache__/ dist/ build/ +.pytest_cache/ .ruff_cache/ salve.egg-info/ + +# Pycharm +.idea diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 02d96da..f1ed774 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,7 +13,7 @@ build: commands: - python3 ./docs/source/create_example_docs.py - python3 -m pip install -r docs/requirements.txt - - python3 -m sphinx docs/source $READTHEDOCS_OUTPUT/html + - python3 -m sphinx --builder dirhtml docs/source $READTHEDOCS_OUTPUT/html # Build documentation in the docs/ directory with Sphinx sphinx: diff --git a/README.md b/README.md index cdd4441..2a37c7d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -

Salve v0.7.2
+Salve v1.0.0
# Installation @@ -9,7 +9,7 @@ In the Command Line, paste the following: `pip install salve` Salve is an IPC library that can be used by code editors to easily get autocompletions, replacements, editorconfig suggestions, definitions, and syntax highlighting. > **Notes:** -> - Due to the way Windows handles chars the hidden character highlighter may not work properly. See [#57](https://github.com/Moosems/salve/pull/57). If anyone knows how to fix this, I would greatly appreciate you opening a PR :) +> - Due to the way Windows handles chars the hidden character highlighter may not work properly. See [#57](https://github.com/salve-org/salve/pull/57). If anyone knows how to fix this, I would greatly appreciate you opening a PR :) ## Documentation diff --git a/docs/source/command-sheet.rst b/docs/source/command-sheet.rst index f642e02..6b8718f 100644 --- a/docs/source/command-sheet.rst +++ b/docs/source/command-sheet.rst @@ -13,7 +13,7 @@ Below is a chart outlining the different request commands you can give and the d * - ``AUTOCOMPLETE`` - file: ``str``, - current_word: ``str`` (optional), + current_word: ``str`` (the portion of the word being typed), expected_keywords: ``list[str]`` (any special keywords for the language (optional)) * - ``REPLACEMENTS`` @@ -36,5 +36,9 @@ Below is a chart outlining the different request commands you can give and the d current_word: ``str`` (the word being searched for), definition_starters: ``list[tuple[str, str]]`` (list of regexes to search for and a string associated (see :doc:`examples/simple_definitions_example`)) + * - ``LINKS_AND_CHARS`` + - file: ``str``, + + text_range: ``tuple[int, int]`` (the lower and upper line bounds (inclusively) of what text to highlight (optional)) -To see how to use any given one of these in more detail, visit the :doc:`examples` page! To continue on your journey through ``Salve`` instead, move on to the :doc:`special-classes` page instead. +To see how to use any given one of these in more detail, visit the :doc:`examples` page! Otherwise move on to the :doc:`special-classes` page instead. diff --git a/docs/source/conf.py b/docs/source/conf.py index a8cb16c..9996395 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,7 +14,7 @@ project = "Salve" copyright = "2024, Moosems" author = "Moosems" -release = "v0.7.2" +release = "v1.0.0" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration diff --git a/docs/source/create_example_docs.py b/docs/source/create_example_docs.py index 2830b46..c2bf679 100644 --- a/docs/source/create_example_docs.py +++ b/docs/source/create_example_docs.py @@ -27,5 +27,5 @@ for line in example_file: rst_file.write(f" {line}") rst_file.write( - f"\nSee the file example file `here <{'https://github.com/Moosems/salve/blob/master/examples/' + filename}>`_." + f"\nSee the file example file `here <{'https://github.com/salve-org/salve/blob/master/examples/' + filename}>`_." 
) diff --git a/docs/source/example-usage.rst b/docs/source/example-usage.rst index 4ac0708..d023a9d 100644 --- a/docs/source/example-usage.rst +++ b/docs/source/example-usage.rst @@ -9,59 +9,47 @@ Now that you have ``Salve`` installed, let's try running a simple example that p from time import sleep # We import the IPC class, the HIGHLIGHT command, and the Response TypedDict - # NOTE: The HIGHLIGHT is actually just a string but it makes it easier to get - # spelling errors from your code editor + # NOTE: The HIGHLIGHT variable is actually just a string that makes it easier to get + # spell checking from your code editor from salve import HIGHLIGHT, IPC, Response - # Because this module is made with multiprocessing we need any usage to - # initially be called from an if __name__ == "__main__" clause def main(): - # Here we define the context (create the other process) nothing more. IPC - # doesn't take any arguments, it just sets itself up for you! + # Here we define the context (which creates the other process). context = IPC() - # In order to actually use a file, we need to update the internal system - # which is done by IPC.update_file(). As promised, this system is also highly - # powerful and flexible and because of this nothign prevents you, the user, - # from using arbitrary code snippets instead of real files. + # In order to actually use a file, we need to update the internal file system + # which is done by IPC.update_file(). Note that nothing prevents you, the + # user, from using arbitrary code snippets instead of real files. context.update_file( "test", # Name the file for the system open(__file__, "r+").read(), # Give the contents as a string ) - # Next, we request the system to make highlights (using the HIGHLIGHT) - # command, tell it to use the file "test", that it should be highlighting - # for a python file, and to only return the code in lines 1-30. If you - # want it to highlight the whole file given , just leave the argument - # empty (unspecified) + # Next, we request the system to make syntax highlights. If you want it to + # highlight the whole file, just leave the text_range argument unspecified context.request( HIGHLIGHT, file="test", language="python", text_range=(1, 30) ) - # It doesn't actually take a full second to get highlights but its simpler - # for the example to just do this. + # Letting the other process spin up and create syntax highlights sleep(1) - # The reason you need a union of Response and None is because we choose to - # give None when yo ask for the highlight of something that hasn't had a server - # response rather than raise an exception. + # If there is no response we give None instead of raising an Exception output: Response | None = context.get_response(HIGHLIGHT) print(output) # Finally, if you are done with the IPC before the rest of the program, you can - # kill it like so. It automatically dies when the main thread dies but this lets - # you save a bit of CPU usage + # kill it to save a bit of CPU time. context.kill_IPC() + # Because this module is made with multiprocessing we need an if __name__ == "__main__" clause if __name__ == "__main__": main() -Now I know thats a lot of comments, it is, but it also explains everything in a lot of detail. - -In a more realistic case, you would probably do something more like the following: +Now that's all fine and dandy, but what if I need it to work for [insert random use case here]?
Well, ``Salve`` is actually meant to be super flexible so if you use it properly, it can elegantly fit nearly any use case. For example, what if you need it to fit a tkinter application that uses an event loop? Well, take a look: .. code-block:: python @@ -77,6 +65,8 @@ In a more realistic case, you would probably do something more like the followin self.context = IPC() # At some point something calls the request_highlight function + # In a real application we would also define a callback to send the + # output to self.after(500, lambda: self.request_highlight(open(__file__, "r+").read())) self.output: Response | None = None @@ -99,10 +89,10 @@ In a more realistic case, you would probably do something more like the followin if __name__ == "__main__": App() -Befor you leave, here are some quick reminders to remember as you use ``Salve``: +Some quick notes to remember as you use ``Salve``: -- The first time that the system is loaded or a new server needs to be started it will take a fair bit longer. -- Any usage of ``IPC ``needs to originally have been called from an ``if __name__ == "__main__":`` block to prevent a multiproccesing error. +- The first time that the system is loaded or a new server needs to be started it will take a fair bit longer as a new interpreter needs to be created. +- Any usage of ``IPC`` needs to have been called from an ``if __name__ == "__main__":`` block to prevent a multiprocessing error. .. |br| raw:: html @@ -110,4 +100,4 @@ Befor you leave, here are some quick reminders to remember as you use ``Salve``: |br| -Now that is one beautiful application! Of course, there is far more that you can do beyond just getting highlights. For example: what if you want to get autocompletions? Well, there is of course the the handy :doc:`command-sheet` page. +Of course, you can do far more with ``Salve`` than just getting syntax highlights. For example: what if you want to get autocompletions? Well, there is of course the :doc:`command-sheet`. diff --git a/docs/source/examples/example_usage.rst b/docs/source/examples/example_usage.rst index 333ecaf..0083dc7 100644 --- a/docs/source/examples/example_usage.rst +++ b/docs/source/examples/example_usage.rst @@ -58,4 +58,4 @@ Example Usage if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/gui_client.rst b/docs/source/examples/gui_client.rst index 555a512..6cbd424 100644 --- a/docs/source/examples/gui_client.rst +++ b/docs/source/examples/gui_client.rst @@ -51,4 +51,4 @@ Gui Client if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/highlight_logging_example.rst b/docs/source/examples/highlight_logging_example.rst new file mode 100644 index 0000000..3fe764e --- /dev/null +++ b/docs/source/examples/highlight_logging_example.rst @@ -0,0 +1,43 @@ +========================= +Highlight Logging Example +========================= + +..
code-block:: python + + from logging import INFO, Logger, basicConfig, getLogger + from time import sleep + + from salve import HIGHLIGHT, IPC, Response + + basicConfig( + level=INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + logger: Logger = getLogger("Main") + + + def main(): + context = IPC() + + context.update_file( + "test", + open(__file__, "r+").read(), + ) + + context.request( + HIGHLIGHT, file="test", language="python", text_range=(1, 30) + ) + + sleep(1) + output: Response | None = context.get_response(HIGHLIGHT) + if output is None: + logger.info("Output is None") + + logger.info(f"Output: {output}") + + context.kill_IPC() + + + if __name__ == "__main__": + main() + +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_autocomplete_example.rst b/docs/source/examples/simple_autocomplete_example.rst index 07099a6..9863736 100644 --- a/docs/source/examples/simple_autocomplete_example.rst +++ b/docs/source/examples/simple_autocomplete_example.rst @@ -34,4 +34,4 @@ Simple Autocomplete Example if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_definitions_example.rst b/docs/source/examples/simple_definitions_example.rst index 1529207..5bcc96e 100644 --- a/docs/source/examples/simple_definitions_example.rst +++ b/docs/source/examples/simple_definitions_example.rst @@ -36,4 +36,4 @@ Simple Definitions Example if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_editorconfig_example.rst b/docs/source/examples/simple_editorconfig_example.rst index b43d7a7..c94987c 100644 --- a/docs/source/examples/simple_editorconfig_example.rst +++ b/docs/source/examples/simple_editorconfig_example.rst @@ -23,4 +23,4 @@ Simple Editorconfig Example if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_highlight_example.rst b/docs/source/examples/simple_highlight_example.rst index 3fcb9ff..e3d5190 100644 --- a/docs/source/examples/simple_highlight_example.rst +++ b/docs/source/examples/simple_highlight_example.rst @@ -14,7 +14,7 @@ Simple Highlight Example context.update_file( "test", - open(__file__, "r+").read() * 20, + open(__file__, "r+").read(), ) context.request( @@ -30,4 +30,4 @@ Simple Highlight Example if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_links_and_hidden_chars_example.rst b/docs/source/examples/simple_links_and_hidden_chars_example.rst new file mode 100644 index 0000000..af744ca --- /dev/null +++ b/docs/source/examples/simple_links_and_hidden_chars_example.rst @@ -0,0 +1,31 @@ +===================================== +Simple Links And Hidden Chars Example +===================================== + +.. 
code-block:: python + + from time import sleep + + from salve import IPC, LINKS_AND_CHARS, Response + + + def main(): + context = IPC() + + context.update_file( + "test", + open(__file__, "r+").read(), + ) + + context.request(LINKS_AND_CHARS, file="test", text_range=(1, 30)) + + sleep(1) + output: Response | None = context.get_response(LINKS_AND_CHARS) + print(output) + context.kill_IPC() + + + if __name__ == "__main__": + main() + +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/examples/simple_replacements_example.rst b/docs/source/examples/simple_replacements_example.rst index d01b3b6..100ac75 100644 --- a/docs/source/examples/simple_replacements_example.rst +++ b/docs/source/examples/simple_replacements_example.rst @@ -33,4 +33,4 @@ Simple Replacements Example if __name__ == "__main__": main() -See the file example file `here `_. \ No newline at end of file +See the file example file `here `_. \ No newline at end of file diff --git a/docs/source/extra-tools.rst b/docs/source/extra-tools.rst deleted file mode 100644 index 98e76d3..0000000 --- a/docs/source/extra-tools.rst +++ /dev/null @@ -1,13 +0,0 @@ -=========== -Extra Tools -=========== - -Currently ``Salve`` does not feature any extra tools you can use but there will be some when the ``v1.0.0`` release comes out. - -.. |br| raw:: html - -
- -|br| - -Now that you've finished up here, would you like to take a gander at the :doc:`examples`? diff --git a/docs/source/functions.rst b/docs/source/functions.rst index fb2ec12..c7613f6 100644 --- a/docs/source/functions.rst +++ b/docs/source/functions.rst @@ -15,4 +15,4 @@ This function lets you give a ``str`` as input (should only be one char long) an |br| -Next up is :doc:`extra-tools` and then to finish off the tour will be the :doc:`examples`! +Next up to finish off the tour will be the :doc:`examples`! diff --git a/docs/source/index.rst b/docs/source/index.rst index 48b66fb..e900448 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -18,5 +18,4 @@ Welcome to ``Salve``'s Documentation! ``Salve`` is a library that can be used by special-classes variables functions - extra-tools examples diff --git a/docs/source/installation.rst b/docs/source/installation.rst index 77a1ed8..aa9ee5a 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -8,6 +8,6 @@ To start using ``Salve``, first install it using pip: $ pip install salve -And it's installed! Congratulations on giving your code editors the tools they need to work and allowing youself to not pull out hair in the process! +And it's installed! Congratulations on giving your code editors the tools they need to work and allowing yourself to not pull out hair in the process! Let's move on to the :doc:`example-usage` page to give ``Salve`` a try! diff --git a/docs/source/special-classes.rst b/docs/source/special-classes.rst index bbf21ae..bc45899 100644 --- a/docs/source/special-classes.rst +++ b/docs/source/special-classes.rst @@ -20,7 +20,7 @@ The ``IPC`` class has the following methods available for use: ``Response`` ************ -The ``Response`` TypedDict classs allows for type checking when handling output from ``Salve``. +The ``Response`` TypedDict class allows for type checking when handling output from ``Salve``. To access the result of the command simply use ``some_response["result"]``. .. |br| raw:: html diff --git a/docs/source/variables.rst b/docs/source/variables.rst index 216e525..125a529 100644 --- a/docs/source/variables.rst +++ b/docs/source/variables.rst @@ -2,21 +2,12 @@ Special Variables and Types =========================== -.. _Token Overview: +.. _Token and GENERIC_TOKENS Overview: -``Token`` -********* +``Token`` and ``GENERIC_TOKENS`` +******************************** -The ``Token`` type is, in reality, just a type alias of `tuple[tuple[int, int], int, str]``. Despite simply being a ``tuple``, the ``Token`` is likely the most used data type in ``Salve`` and is the most frequently returned data type to the user. That being said, let me explain what it really is. - -The ``Token`` type contains three parts: the start index, its length, and its type. The start index is that ``tuple`` at the beginning of the main ``tuple`` and the first index is the line the ``Token`` takes place on and the second is the column. ``Token``'s start at 1, 0 so you may need to do a -1 or a +1 depending on how you use this data. The second ``int`` is the length of the ``Token` and the ``str`` is the type. You will use these very often so its never a bad idea to get very familiar with them. - -.. _Generic Tokens Overview: - -``generic_tokens`` -****************** - -The ``generic_tokens`` ``list`` provides all the generic ``Token`` types you will encounter when using ``Salve``.
Simply print this out to get a good idea of what ``Token`` types you will be working with when using ``Salve`` and yoiu will never need to be worried about being surprised by random ``Token`` types. +See `Token Tools Documentation `_ for details. Note that ``Token`` is a very common return type used by ``Salve``. .. _Commands Overview: @@ -30,20 +21,21 @@ The ``generic_tokens`` ``list`` provides all the generic ``Token`` types you wil ``COMMAND``'s ************* -The ``COMMAND`` variable is a ``str`` type alias used to hopefully prevent spelling mistakes. You see it is not hard to mistype things like ``"autocomplete"`` as `"autocopmlete"` and this can cause issues when working on something like this. Sadly most code editors don't give spelling errors to you but what they do give is autocomplete for variables and errors for misspelled variables. These variables, therefore, can be used to spell check in a way and that is exactly what ``Salve`` uses them for. The full list of them is as follows: +The ``COMMAND`` variable is a ``str`` type alias used to prevent spelling mistakes. It is not hard to mistype things like ``"autocomplete"`` as `"autocopmlete"` and this can cause issues when working on something like this. Sadly most code editors don't give spelling errors to you but what they do give is autocomplete for variables and errors for misspelled variables. These variables, therefore, can be used for spell checking in a way and that is exactly what ``Salve`` uses them for. The full list of them is as follows: -- AUTOCOMPLETE -- REPLACEMENTS -- HIGHLIGHT -- EDITORCONFIG -- DEFINITION +- ``AUTOCOMPLETE`` +- ``REPLACEMENTS`` +- ``HIGHLIGHT`` +- ``EDITORCONFIG`` +- ``DEFINITION`` +- ``LINKS_AND_CHARS`` .. _Hidden Chars Overview: ``hidden_chars`` **************** -The ``hidden_chars`` variable is a ``dict`` used to cross reference the char that the ``Token`` of type ``"Hidden_Char"`` points to and get the name of the char. +The ``hidden_chars`` variable is a ``dict`` used to cross reference the char that the ``Token`` of type ``"Hidden_Char"`` points to for the name of the char. ..
|br| raw:: html diff --git a/examples/highlight_logging_example.py b/examples/highlight_logging_example.py new file mode 100644 index 0000000..93e60e7 --- /dev/null +++ b/examples/highlight_logging_example.py @@ -0,0 +1,35 @@ +from logging import INFO, Logger, basicConfig, getLogger +from time import sleep + +from salve import HIGHLIGHT, IPC, Response + +basicConfig( + level=INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) +logger: Logger = getLogger("Main") + + +def main(): + context = IPC() + + context.update_file( + "test", + open(__file__, "r+").read(), + ) + + context.request( + HIGHLIGHT, file="test", language="python", text_range=(1, 30) + ) + + sleep(1) + output: Response | None = context.get_response(HIGHLIGHT) + if output is None: + logger.info("Output is None") + + logger.info(f"Output: {output}") + + context.kill_IPC() + + +if __name__ == "__main__": + main() diff --git a/examples/simple_highlight_example.py b/examples/simple_highlight_example.py index 9b462e2..4993745 100644 --- a/examples/simple_highlight_example.py +++ b/examples/simple_highlight_example.py @@ -8,7 +8,7 @@ def main(): context.update_file( "test", - open(__file__, "r+").read() * 20, + open(__file__, "r+").read(), ) context.request( diff --git a/examples/simple_links_and_hidden_chars_example.py b/examples/simple_links_and_hidden_chars_example.py new file mode 100644 index 0000000..5bb9e13 --- /dev/null +++ b/examples/simple_links_and_hidden_chars_example.py @@ -0,0 +1,23 @@ +from time import sleep + +from salve import IPC, LINKS_AND_CHARS, Response + + +def main(): + context = IPC() + + context.update_file( + "test", + open(__file__, "r+").read(), + ) + + context.request(LINKS_AND_CHARS, file="test", text_range=(1, 30)) + + sleep(1) + output: Response | None = context.get_response(LINKS_AND_CHARS) + print(output) + context.kill_IPC() + + +if __name__ == "__main__": + main() diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..bca0199 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,19 @@ +# Normal requirements +pygments +pyeditorconfig +beartype +token_tools + +# Testing +pytest + +# Formatting +ruff +isort +black + +# Builds +setuptools +wheel +twine +build diff --git a/requirements.txt b/requirements.txt index ce9eab7..fcb8d86 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ pygments pyeditorconfig beartype +token_tools diff --git a/salve/__init__.py b/salve/__init__.py index fb7fde5..b8c1652 100644 --- a/salve/__init__.py +++ b/salve/__init__.py @@ -9,11 +9,8 @@ DEFINITION, EDITORCONFIG, HIGHLIGHT, + LINKS_AND_CHARS, REPLACEMENTS, Response, ) -from .server_functions import ( # noqa: F401, E402 - Token, - generic_tokens, - is_unicode_letter, -) +from .server_functions import is_unicode_letter # noqa: F401, E402 diff --git a/salve/ipc.py b/salve/ipc.py index 70a2587..b408a6b 100644 --- a/salve/ipc.py +++ b/salve/ipc.py @@ -1,8 +1,7 @@ -from multiprocessing import Pipe, Process, Queue, freeze_support -from multiprocessing.connection import Connection +from logging import Logger, getLogger +from multiprocessing import Process, Queue, freeze_support from pathlib import Path from random import randint -from sys import platform from .misc import ( COMMAND, @@ -16,12 +15,6 @@ ) from .server import Server -# Deal with Windows weirdness -if platform == "win32": - from multiprocessing.connection import ( - PipeConnection as Connection, # type: ignore - ) - class IPC: """The IPC class is used to talk to the server and run commands. 
The public API includes the following methods: @@ -43,40 +36,51 @@ def __init__(self, id_max: int = 15_000) -> None: self.files: dict[str, str] = {} + self.logger: Logger = getLogger("IPC") + self.logger.info("Creating server") self.response_queue: ResponseQueueType = Queue() self.requests_queue: RequestQueueType = Queue() - self.client_end: Connection self.main_server: Process self.create_server() + self.logger.info("Initialization is complete") def create_server(self) -> None: """Creates the main_server through a subprocess - internal API""" - self.client_end, server_end = Pipe() freeze_support() + server_logger = getLogger("Server") self.main_server = Process( target=Server, - args=(server_end, self.response_queue, self.requests_queue), + args=(self.response_queue, self.requests_queue, server_logger), daemon=True, ) self.main_server.start() + self.logger.info("Server created") + self.logger.info("Copying files to server") files_copy = self.files.copy() self.files = {} for file, data in files_copy.items(): self.update_file(file, data) + self.logger.debug("Finished copying files to server") def create_message(self, type: str, **kwargs) -> None: """Creates a Message based on the args and kwawrgs provided. Highly flexible. - internal API""" + self.logger.info("Creating message for server") id = randint(1, self.id_max) # 0 is reserved for the empty case while id in self.all_ids: id = randint(1, self.id_max) self.all_ids.append(id) + self.logger.debug("ID for message created") if not self.main_server.is_alive(): + self.logger.critical( + "Server was killed at some point, creating server" + ) self.create_server() match type: case "request": + self.logger.info("Creating request for server") command = kwargs.get("command", "") self.current_ids[command] = id request: Request = { @@ -86,9 +90,11 @@ def create_message(self, type: str, **kwargs) -> None: "file": "", } request.update(**kwargs) - # print(request) + self.logger.debug(f"Request created: {request}") self.requests_queue.put(request) + self.logger.info("Message sent") case "notification": + self.logger.info("Creating notification for server") notification: Notification = { "id": id, "type": type, @@ -97,7 +103,9 @@ def create_message(self, type: str, **kwargs) -> None: "contents": "", } notification.update(**kwargs) + self.logger.debug(f"Notification created: {notification}") self.requests_queue.put(notification) + self.logger.info("Message sent") def request( self, @@ -111,16 +119,20 @@ def request( definition_starters: list[tuple[str, str]] = [("", "before")], ) -> None: """Sends the main_server a request of type command with given kwargs - external API""" + self.logger.debug("Beginning request") if command not in COMMANDS: - self.kill_IPC() + self.logger.exception( + f"Command {command} not in builtin commands. Those are {COMMANDS}!" + ) raise Exception( f"Command {command} not in builtin commands. Those are {COMMANDS}!" 
) if file not in self.files and command != EDITORCONFIG: - self.kill_IPC() + self.logger.exception(f"File {file} does not exist in system!") raise Exception(f"File {file} does not exist in system!") + self.logger.debug("Sending info to create_message()") self.create_message( type="request", command=command, @@ -133,40 +145,51 @@ def request( definition_starters=definition_starters, ) - def cancel_request(self, command: str): + def cancel_request(self, command: str) -> None: """Cancels a request of type command - external API""" if command not in COMMANDS: - self.kill_IPC() + self.logger.exception( + f"Cannot cancel command {command}, valid commands are {COMMANDS}" + ) raise Exception( f"Cannot cancel command {command}, valid commands are {COMMANDS}" ) + self.logger.info(f"Cancelled command: {command}") self.current_ids[command] = 0 def parse_response(self, res: Response) -> None: """Parses main_server output line and discards useless responses - internal API""" + self.logger.debug("Parsing server response") id = res["id"] self.all_ids.remove(id) if "command" not in res: + self.logger.info("Response was notification response") return command = res["command"] if id != self.current_ids[command]: + self.logger.info("Response is from old request") return + self.logger.info(f"Response is useful for command type: {command}") self.current_ids[command] = 0 self.newest_responses[command] = res def check_responses(self) -> None: """Checks all main_server output by calling IPC.parse_line() on each response - internal API""" + self.logger.debug("Checking responses") while not self.response_queue.empty(): self.parse_response(self.response_queue.get()) def get_response(self, command: str) -> Response | None: - """Runs IPC.check_responses() and returns the current response of type command if it has been returned - external API""" + """Checks responses and returns the current response of type command if it has been returned - external API""" + self.logger.info(f"Getting response for type: {command}") if command not in COMMANDS: - self.kill_IPC() + self.logger.exception( + f"Cannot get response of command {command}, valid commands are {COMMANDS}" + ) raise Exception( f"Cannot get response of command {command}, valid commands are {COMMANDS}" ) @@ -174,25 +197,32 @@ def get_response(self, command: str) -> Response | None: self.check_responses() response: Response | None = self.newest_responses[command] self.newest_responses[command] = None + self.logger.info("Response retrieved") return response def update_file(self, file: str, current_state: str) -> None: """Updates files in the system - external API""" + self.logger.info(f"Updating file: {file}") self.files[file] = current_state + self.logger.debug("Notifying server of file update") self.create_message("notification", file=file, contents=current_state) def remove_file(self, file: str) -> None: """Removes a file from the main_server - external API""" if file not in list(self.files.keys()): - self.kill_IPC() + self.logger.exception( + f"Cannot remove file {file} as file is not in file database!" + ) raise Exception( f"Cannot remove file {file} as file is not in file database!" 
) + self.logger.info("Notifying server of file deletion") self.create_message("notification", remove=True, file=file) def kill_IPC(self) -> None: """Kills the main_server when salve_ipc's services are no longer required - external API""" + self.logger.info("Killing server") self.main_server.kill() diff --git a/salve/misc.py b/salve/misc.py index 44ed476..d70b792 100644 --- a/salve/misc.py +++ b/salve/misc.py @@ -2,12 +2,15 @@ from pathlib import Path from typing import TYPE_CHECKING, NotRequired, TypedDict +from token_tools import Token + COMMANDS: list[str] = [ "autocomplete", "replacements", "highlight", "editorconfig", "definition", + "links_and_chars", ] COMMAND = str @@ -16,6 +19,7 @@ HIGHLIGHT: COMMAND = COMMANDS[2] EDITORCONFIG: COMMAND = COMMANDS[3] DEFINITION: COMMAND = COMMANDS[4] +LINKS_AND_CHARS: COMMAND = COMMANDS[5] class Message(TypedDict): @@ -33,7 +37,7 @@ class Request(Message): expected_keywords: NotRequired[list[str]] # autocomplete, replacements current_word: NotRequired[str] # autocomplete, replacements, definition language: NotRequired[str] # highlight - text_range: NotRequired[tuple[int, int]] # highlight + text_range: NotRequired[tuple[int, int]] # highlight, links_and_chars file_path: NotRequired[Path | str] # editorconfig definition_starters: NotRequired[ list[tuple[str, str]] @@ -53,9 +57,7 @@ class Response(Message): cancelled: bool command: NotRequired[str] - result: NotRequired[ - list[str | tuple[tuple[int, int], int, str]] | dict[str, str] - ] + result: NotRequired[list[str | Token] | dict[str, str] | Token] if TYPE_CHECKING: diff --git a/salve/server.py b/salve/server.py index f120414..5a83b2c 100644 --- a/salve/server.py +++ b/salve/server.py @@ -1,9 +1,9 @@ -from multiprocessing.connection import Connection +from logging import Logger from multiprocessing.queues import Queue as GenericClassQueue -from sys import platform from time import sleep from pyeditorconfig import get_config +from token_tools import Token, normal_text_range from .misc import ( COMMANDS, @@ -14,30 +14,25 @@ ResponseQueueType, ) from .server_functions import ( - Token, find_autocompletions, get_definition, get_highlights, get_replacements, + get_special_tokens, ) -# Deal with Windows weirdness -if platform == "win32": - from multiprocessing.connection import ( - PipeConnection as Connection, # type: ignore - ) - class Server: - """Handles input from the user and returns output from special functions designed to make the job easy. Not an external API.""" + """Handles input from the user and returns output from special functions. 
Not an external API.""" def __init__( self, - server_end: Connection, response_queue: GenericClassQueue, requests_queue: GenericClassQueue, + logger: Logger, ) -> None: - self.server_end: Connection = server_end + self.logger: Logger = logger + self.logger.info("Starting server setup") self.response_queue: ResponseQueueType = response_queue self.requests_queue: RequestQueueType = requests_queue self.all_ids: list[int] = [] @@ -49,38 +44,60 @@ def __init__( self.files: dict[str, str] = {} + self.logger.info("Server setup complete") + while True: self.run_tasks() sleep(0.0025) def simple_id_response(self, id: int, cancelled: bool = True) -> None: + self.logger.debug(f"Creating simple response for id {id}") response: Response = { "id": id, "type": "response", "cancelled": cancelled, } + self.logger.debug(f"Sending simple response for id {id}") self.response_queue.put(response) + self.logger.info(f"Simple response for id {id} sent") def parse_line(self, message: Request | Notification) -> None: + self.logger.debug("Parsing Message from user") id: int = message["id"] match message["type"]: case "notification": + self.logger.debug("Mesage is of type notification") file: str = message["file"] # type: ignore if message["remove"]: # type: ignore + self.logger.info(f"File {file} was requested for removal") self.files.pop(file) + self.logger.info(f"File {file} has been removed") return contents: str = message["contents"] # type: ignore self.files[file] = contents + self.logger.info( + f"File {file} has been updated with new contents" + ) self.simple_id_response(id, False) + self.logger.debug( + f"Notification response for id {id} has been sent" + ) case "request": + self.logger.info(f"Mesage with id {id} is of type request") self.all_ids.append(id) command: str = message["command"] # type: ignore self.newest_ids[command] = id self.newest_requests[command] = message # type: ignore + self.logger.debug("Request stored for parsing") case _: + self.logger.warning( + f"Unknown type {type}. 
Sending simple response" + ) self.simple_id_response(id) + self.logger.debug(f"Simple response for id {id} sent") def cancel_all_ids_except_newest(self) -> None: + self.logger.info("Cancelling all old id's") ids = [ id["id"] for id in list(self.newest_requests.values()) @@ -88,49 +105,66 @@ def cancel_all_ids_except_newest(self) -> None: ] for id in self.all_ids: if id in ids: + self.logger.debug(f"Id {id} is newest of its command") continue + self.logger.debug( + f"Id {id} is an old request, sending simple respone" + ) self.simple_id_response(id) self.all_ids = [] + self.logger.debug("All ids list reset") def handle_request(self, request: Request) -> None: command: str = request["command"] id: int = self.newest_ids[command] file: str = request["file"] - result: ( - list[str | tuple[tuple[int, int], int, str]] | dict[str, str] - ) = [] + result: list[str | Token] | dict[str, str] = [] cancelled: bool = False match request["command"]: case "autocomplete": + self.logger.info("Finding completions for request") result = find_autocompletions( full_text=self.files[file], expected_keywords=request["expected_keywords"], # type: ignore current_word=request["current_word"], # type: ignore ) case "replacements": + self.logger.info("Getting replacements for request") result = get_replacements( full_text=self.files[file], expected_keywords=request["expected_keywords"], # type: ignore replaceable_word=request["current_word"], # type: ignore ) case "highlight": + self.logger.info("Getting normal highlights for request") pre_refined_result: list[Token] = get_highlights( full_text=self.files[file], language=request["language"], # type: ignore text_range=request["text_range"], # type: ignore ) - result += [token for token in pre_refined_result] # type: ignore + result.extend([token for token in pre_refined_result]) case "editorconfig": + self.logger.info("Getting editorconfig info for request") result = get_config(request["file_path"]) # type: ignore case "definition": + self.logger.info("Getting definition for request") result = get_definition( self.files[file], request["definition_starters"], # type: ignore request["current_word"], # type: ignore ) + case "links_and_chars": + self.logger.info("Searching for Links and chars") + result = get_special_tokens( + self.files[file], + normal_text_range(self.files[file], request["text_range"])[ # type: ignore + 1 + ], + ) case _: + self.logger.warning(f"Command {command} not recognized") cancelled = True response: Response = { @@ -140,24 +174,32 @@ def handle_request(self, request: Request) -> None: "command": command, "result": result, } + self.logger.debug("Response created") self.response_queue.put(response) self.newest_ids[command] = 0 + self.logger.info(f"Response sent for request of command {command}") def run_tasks(self) -> None: + if self.requests_queue.empty(): + return + + self.logger.debug("New request in queue") while not self.requests_queue.empty(): + self.logger.debug("Parsing request") self.parse_line(self.requests_queue.get()) - self.cancel_all_ids_except_newest() + if not self.all_ids: + self.logger.debug("All requests were notifications") - if not list( - self.newest_requests.values() - ): # There may have only been refreshes - return + self.logger.debug("Cancelling all old id's") + self.cancel_all_ids_except_newest() # Actual work for request in list(self.newest_requests.values()): if request is None: continue - self.handle_request(request) command: str = request["command"] + self.logger.info(f"Handling request of command {command}") + 
self.handle_request(request) self.newest_requests[command] = None + self.logger.debug("Request completed") diff --git a/salve/server_functions/__init__.py b/salve/server_functions/__init__.py index e9b00e6..40cd416 100644 --- a/salve/server_functions/__init__.py +++ b/salve/server_functions/__init__.py @@ -1,6 +1,6 @@ from .autocompletions import find_autocompletions # noqa: F401 from .definitions import get_definition # noqa: F401 from .highlight import get_highlights # noqa: F401 -from .highlight import Token, generic_tokens # noqa: F401 +from .links_and_hidden_chars import get_special_tokens # noqa: F401 from .misc import is_unicode_letter # noqa: F401 from .replacements import get_replacements # noqa: F401 diff --git a/salve/server_functions/autocompletions.py b/salve/server_functions/autocompletions.py index 9df366c..4e618fd 100644 --- a/salve/server_functions/autocompletions.py +++ b/salve/server_functions/autocompletions.py @@ -20,7 +20,9 @@ def find_autocompletions( no_usable_words_in_text: bool = not relevant_words if no_usable_words_in_text: - relevant_words += expected_keywords + relevant_words.extend( + expected_keywords * 3 + ) # We add a multiplier of three to boost the score of keywords relevant_words = [ word for word in relevant_words if word.startswith(current_word) diff --git a/salve/server_functions/definitions.py b/salve/server_functions/definitions.py index 3a23642..5447327 100644 --- a/salve/server_functions/definitions.py +++ b/salve/server_functions/definitions.py @@ -1,6 +1,7 @@ from re import Match, Pattern, compile -from .highlight import Token +from token_tools import Token + from .misc import find_words diff --git a/salve/server_functions/highlight/__init__.py b/salve/server_functions/highlight/__init__.py index 126f3b4..2a63a5d 100644 --- a/salve/server_functions/highlight/__init__.py +++ b/salve/server_functions/highlight/__init__.py @@ -1,2 +1 @@ from .highlight import get_highlights # noqa: F401 -from .tokens import Token, generic_tokens # noqa: F401 diff --git a/salve/server_functions/highlight/docstring_highlight.py b/salve/server_functions/highlight/docstring_highlight.py index e770b2a..0e945fb 100644 --- a/salve/server_functions/highlight/docstring_highlight.py +++ b/salve/server_functions/highlight/docstring_highlight.py @@ -1,14 +1,15 @@ from functools import cache -from re import MULTILINE, Match, compile +from re import DOTALL, MULTILINE, Match, compile from beartype.typing import Callable from pygments.lexer import RegexLexer, default from pygments.token import Comment as CommentToken -from pygments.token import String as StringToken +from pygments.token import String as StringToken # noqa: F811 +from token_tools import Token -from .tokens import Token, get_new_token_type +from .misc import get_new_token_type -useful_toks = { +useful_tokens = { StringToken.Doc, StringToken.Heredoc, CommentToken, @@ -26,19 +27,20 @@ @cache def get_pygments_comment_regexes(lexer: RegexLexer) -> _TokenTupleReturnType: """ - Steals the regexes that pgments uses to give docstring, heredoc, comment, and multiline comment highlights - (css comments, though multine, aren't called multiline comments) + Steals the regexes that pygments uses to give docstring, heredoc, comment, and multiline comment highlights + (css comments, though multiline, aren't called multiline comments) """ regexes: _TokenTupleReturnType = [] for path in lexer.tokens: - # This should have a better type definition but I didn't have the mental capacity to - # write each possibility so I'm waiting for 
beartype to implement the functionality for me like the bum I am + # This should have a better type definition, but I didn't have the mental capacity to + # write each possibility, so I'm waiting for beartype to implement the functionality for me like the bum I am path_tokens: list = lexer.tokens[path] if isinstance(path_tokens[0], str): - # This means that the path is redirecting to another path in its place but we check them all anyway so just exit this path + # This means that the path is redirecting to another path in its place, + # but we check them all anyway so just exit this path continue for token_tuple in path_tokens: @@ -46,7 +48,7 @@ def get_pygments_comment_regexes(lexer: RegexLexer) -> _TokenTupleReturnType: if isinstance(token_tuple, default): continue - if token_tuple[1] in useful_toks: + if token_tuple[1] in useful_tokens: regexes.append((token_tuple[0], token_tuple[1])) continue @@ -57,7 +59,7 @@ def get_pygments_comment_regexes(lexer: RegexLexer) -> _TokenTupleReturnType: pygments_func: Callable = token_tuple[1] if pygments_func.__closure__ is None: - # Will always evaluate to False but its for the static type checkers appeasement + # Will always evaluate to False, but it's for the static type checkers appeasement continue tokens: _TokenTupleInternalType = [ @@ -67,8 +69,9 @@ def get_pygments_comment_regexes(lexer: RegexLexer) -> _TokenTupleReturnType: ] # Sometimes pygments hides these types in functional programming for token in tokens: - if token in useful_toks: - # We know if its in the useful tokens list that its a token type but the static type checker doesn't + if token in useful_tokens: + # We know if it's in the useful tokens list that + # it's a token type but the static type checker doesn't regexes.append((token_tuple[0], token)) # type: ignore continue @@ -85,9 +88,9 @@ def proper_docstring_tokens(lexer: RegexLexer, full_text: str) -> list[Token]: for regex, token_type in proper_highlight_regexes: current_text = full_text - match: Match[str] | None = compile(regex, flags=MULTILINE).search( - full_text - ) + match: Match[str] | None = compile( + regex, flags=MULTILINE | DOTALL + ).search(full_text) if match is None: # Onwards to the next regex! 
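A quick aside on the MULTILINE | DOTALL change above: the snippet below is a minimal sketch, not part of the patch, and the pattern and source string are invented for illustration (the real regexes are pulled out of the pygments lexers at runtime). It shows why DOTALL is needed for docstrings that span lines.

from re import DOTALL, MULTILINE, compile

# Invented pattern and source, purely to illustrate the flag change
docstring_pattern = r'""".*?"""'
source = 'def f():\n    """A docstring\n    spanning lines"""\n'

# Without DOTALL, '.' stops at newlines, so the multi-line docstring is missed
print(compile(docstring_pattern, flags=MULTILINE).search(source))  # None

# With DOTALL, '.' also matches newlines, so the whole docstring is matched
print(compile(docstring_pattern, flags=MULTILINE | DOTALL).search(source))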
diff --git a/salve/server_functions/highlight/highlight.py b/salve/server_functions/highlight/highlight.py index 1510170..a000958 100644 --- a/salve/server_functions/highlight/highlight.py +++ b/salve/server_functions/highlight/highlight.py @@ -3,16 +3,16 @@ from pygments import lex from pygments.lexer import Lexer, RegexLexer from pygments.lexers import get_lexer_by_name - -from .docstring_highlight import _LexReturnTokens, proper_docstring_tokens -from .links_and_hidden_chars import find_hidden_chars, get_urls, hidden_chars -from .tokens import ( +from token_tools import ( Token, - get_new_token_type, + normal_text_range, only_tokens_in_text_range, overwrite_and_merge_tokens, ) +from .docstring_highlight import _LexReturnTokens, proper_docstring_tokens +from .misc import get_new_token_type + @cache def lexer_by_name_cached(language: str) -> Lexer: @@ -28,16 +28,11 @@ def get_highlights( # Create some variables used all throughout the function lexer: Lexer = lexer_by_name_cached(language) - split_text: list[str] = full_text.splitlines() new_tokens: list[Token] = [] - if text_range[1] == -1: - # This indicates that the text range should span the length of the entire code - text_range = (text_range[0], len(split_text)) + split_text, text_range = normal_text_range(full_text, text_range) start_index: tuple[int, int] = (text_range[0], 0) - # We want only the lines in the text range because this list is iterated - split_text: list[str] = split_text[text_range[0] - 1 : text_range[1]] for line in split_text: og_tokens: _LexReturnTokens = list(lex(line, lexer)) @@ -51,7 +46,7 @@ def get_highlights( continue if not token_str.strip() or new_type == "Text": - # If the token is empty or is plain Text we simply skip it because thats ultimately useless info + # If the token is empty or is plain Text we simply skip it because that's ultimately useless info start_index = (start_index[0], start_index[1] + token_len) continue @@ -67,10 +62,5 @@ def get_highlights( new_tokens, proper_docstring_tokens(lexer, full_text) ) - new_tokens += get_urls(split_text, text_range[0]) - if [char for char in hidden_chars if char in full_text]: - # if there are not hidden chars we don't want to needlessly compute this - new_tokens += find_hidden_chars(split_text, text_range[0]) - new_tokens = only_tokens_in_text_range(new_tokens, text_range) return new_tokens diff --git a/salve/server_functions/highlight/misc.py b/salve/server_functions/highlight/misc.py new file mode 100644 index 0000000..a5f9ad3 --- /dev/null +++ b/salve/server_functions/highlight/misc.py @@ -0,0 +1,44 @@ +from functools import cache + +from token_tools import GENERIC_TOKENS + +default_tokens: list[str] = [ + "Token.Text.Whitespace", + "Token.Text", + "Token.Error", + "Token.Keyword", + "Token.Name", + "Token.Literal.String", + "Token.Literal.Number", + "Token.Literal", + "Token.Operator", + "Token.Punctuation", + "Token.Comment", + "Token.Generic", +] + + +def normal_text_range( + full_text: str, text_range: tuple[int, int] = (1, -1) +) -> tuple[list[str], tuple[int, int]]: + split_text: list[str] = full_text.splitlines() + + if text_range[1] == -1: + # This indicates that the text range should span the length of the entire code + text_range = (text_range[0], len(split_text)) + + # We want only the lines in the text range because this list is iterated + split_text = split_text[text_range[0] - 1 : text_range[1]] + + return (split_text, text_range) + + +@cache +def get_new_token_type(old_token: str) -> str: + """Turns pygments token types into a generic 
predefined Token""" + new_type: str = GENERIC_TOKENS[0] + for index, token in enumerate(default_tokens): + if old_token.startswith(token): + new_type = GENERIC_TOKENS[index] + break + return new_type diff --git a/salve/server_functions/highlight/tokens.py b/salve/server_functions/highlight/tokens.py deleted file mode 100644 index 3e72ac4..0000000 --- a/salve/server_functions/highlight/tokens.py +++ /dev/null @@ -1,212 +0,0 @@ -from functools import cache - -Token = tuple[tuple[int, int], int, str] - -generic_tokens: list[str] = [ - "Whitespace", - "Text", - "Error", - "Keyword", - "Name", - "String", - "Number", - "Literal", - "Operator", - "Punctuation", - "Comment", - "Generic", - "Link", # Website link (Not given by pygments) - "Hidden_Char", # Hidden chars (no width space kind of stuff) - "Definition", # Definitions -] - -default_tokens: list[str] = [ - "Token.Text.Whitespace", - "Token.Text", - "Token.Error", - "Token.Keyword", - "Token.Name", - "Token.Literal.String", - "Token.Literal.Number", - "Token.Literal", - "Token.Operator", - "Token.Punctuation", - "Token.Comment", - "Token.Generic", -] - - -@cache -def get_new_token_type(old_token: str) -> str: - """Turns pygments token types into a generic predefined Token""" - new_type: str = generic_tokens[0] - for index, token in enumerate(default_tokens): - if old_token.startswith(token): - new_type = generic_tokens[index] - break - return new_type - - -def only_tokens_in_text_range( - tokens: list[Token], text_range: tuple[int, int] -) -> list[Token]: - # We create a new list becase lists are pass by reference - output_tokens: list[Token] = [] - - for token in tokens: - token_lineno: int = token[0][0] - minimum_line: int = text_range[0] - maximum_line: int = text_range[1] - - if token_lineno < minimum_line or token_lineno > maximum_line: - continue - - output_tokens.append(token) - - output_tokens = merge_tokens(output_tokens) - return output_tokens - - -def merge_tokens(tokens: list[Token]) -> list[Token]: - output_tokens: list[Token] = [] - depth: int = 0 - for token in tokens: - # Deal with basic edge case - if depth == 0: - output_tokens.append(token) - depth += 1 - continue - - previous_token = output_tokens[-1] - - # Get our boolean checks - same_token_type: bool = previous_token[2] == token[2] - same_line: bool = previous_token[0][0] == token[0][0] - neighboring_tokens: bool = ( - previous_token[0][1] + previous_token[1] == token[0][1] - ) - - # Determine if tokens should be merged - if not (same_token_type and same_line and neighboring_tokens): - output_tokens.append(token) - depth += 1 - continue - - # Replace previous token with new token (we don't increase depth because we are substituting, not adding) - new_token: Token = ( - (token[0][0], previous_token[0][1]), - previous_token[1] + token[1], - token[2], - ) - output_tokens[-1] = new_token - return output_tokens - - -def overwrite_tokens(old_tokens: list[Token], new_tokens: list[Token]): - if not new_tokens: - return old_tokens - - output_tokens: list[Token] = [] - dont_add_tokens: list[Token] = [] - for new_token in new_tokens: - for old_token in old_tokens: - same_token: bool = old_token == new_token - if same_token: - continue - - same_line: bool = old_token[0][0] == new_token[0][0] - can_add_token: bool = old_token not in dont_add_tokens - if not same_line: - if can_add_token: - output_tokens.append(old_token) - continue - - # Check if the ranges overlap and if so either (remove the old_token and add to don't add list) or, - # if part of the token is out of the 
new_token_range, remove the part in the new tokens range - - old_token_end: int = old_token[0][1] + old_token[1] - new_token_end: int = new_token[0][1] + new_token[1] - - partial_front_overlap: bool = ( - new_token[0][1] <= old_token_end - and not old_token_end > new_token_end - ) - partial_end_overlap: bool = new_token_end >= old_token[0][1] - fully_contained: bool = ( - old_token_end <= new_token_end - and old_token[0][1] >= new_token[0][1] - ) - - if not ( - partial_front_overlap or partial_end_overlap or fully_contained - ): - continue - - dont_add_tokens.append(old_token) - - while old_token in output_tokens: - output_tokens.remove(old_token) - - if fully_contained: - continue - - # If we are here if means its a partial overlap - if partial_front_overlap: - created_token: Token = ( - (new_token[0][0], old_token[0][1]), - new_token[0][1] - old_token[0][1], - old_token[2], - ) - while created_token in output_tokens: - output_tokens.remove(created_token) - output_tokens.append(created_token) - dont_add_tokens.append(created_token) - continue - - if old_token[0][1] < new_token[0][1]: - created_token_1: Token = ( - (new_token[0][0], old_token[0][1]), - new_token[0][1] - old_token[0][1], - old_token[2], - ) - created_token_2: Token = ( - (new_token[0][0], new_token_end), - old_token_end - new_token_end, - old_token[2], - ) - while created_token_1 in output_tokens: - output_tokens.remove(created_token_1) - output_tokens.append(created_token_1) - while created_token_2 in output_tokens: - output_tokens.remove(created_token_2) - output_tokens.append(created_token_2) - dont_add_tokens.append(created_token_1) - dont_add_tokens.append(created_token_2) - - created_token: Token = ( - (new_token[0][0], new_token_end), - old_token_end - new_token_end, - old_token[2], - ) - while created_token in output_tokens: - output_tokens.remove(created_token) - output_tokens.append(created_token) - dont_add_tokens.append(created_token) - - output_tokens.append(new_token) - - output_tokens = sorted(set(output_tokens)) - return output_tokens - - -def overwrite_and_merge_tokens( - old_tokens: list[Token], new_tokens: list[Token] -) -> list[Token]: - merged_old_tokens: list[Token] = merge_tokens(sorted(set(old_tokens))) - merged_new_tokens: list[Token] = merge_tokens(sorted(set(new_tokens))) - output_tokens: list[Token] = overwrite_tokens( - merged_old_tokens, merged_new_tokens - ) - - output_tokens = sorted(set(merge_tokens(output_tokens))) - return output_tokens diff --git a/salve/server_functions/highlight/links_and_hidden_chars.py b/salve/server_functions/links_and_hidden_chars.py similarity index 76% rename from salve/server_functions/highlight/links_and_hidden_chars.py rename to salve/server_functions/links_and_hidden_chars.py index 3d5e5e4..e1f609a 100644 --- a/salve/server_functions/highlight/links_and_hidden_chars.py +++ b/salve/server_functions/links_and_hidden_chars.py @@ -1,17 +1,18 @@ from re import Match, Pattern, compile -from .tokens import Token +from token_tools import Token -url_regex: Pattern = compile(r"(ftp|http|https):\/\/[a-zA-Z0-9_-]") +url_regex: Pattern = compile(r"(ftp|http|https)://[a-zA-Z0-9_-]") -def get_urls(lines: list[str], start_line: int = 1) -> list[Token]: - start_pos: tuple[int, int] = (start_line, 0) +def get_urls(whole_text: str, text_range: tuple[int, int]) -> list[Token]: + lines: list[str] = whole_text.splitlines() + start_pos: tuple[int, int] = (text_range[0], 0) url_toks: list[Token] = [] while True: - if start_pos[0] >= len(lines) + start_line: + if start_pos[0] > 
text_range[1]: break - line: str = lines[start_pos[0] - start_line][start_pos[1] :] + line: str = lines[start_pos[0] - text_range[0]][start_pos[1] :] match_start: Match[str] | None = url_regex.search(line) if match_start is None: start_pos = (start_pos[0] + 1, 0) @@ -39,7 +40,6 @@ def get_urls(lines: list[str], start_line: int = 1) -> list[Token]: hidden_chars: dict[str, str] = { - "\u0009": "CHARACTER TABULATION", "\u00a0": "NO-BREAK SPACE", "\u00ad": "SOFT HYPHEN", "\u034f": "COMBINING GRAPHEME JOINER", @@ -96,9 +96,12 @@ def get_urls(lines: list[str], start_line: int = 1) -> list[Token]: } -def find_hidden_chars(lines: list[str], start_line: int = 1) -> list[Token]: +def find_hidden_chars( + whole_text: str, text_range: tuple[int, int] +) -> list[Token]: + lines: list[str] = whole_text.splitlines() hidden_char_indexes: list[tuple[tuple[int, int], str]] = [ - ((line_index + start_line, char_index), char) + ((line_index + text_range[0], char_index), char) for line_index, line in enumerate(lines) for char_index, char in enumerate(line) if char in list(hidden_chars.keys()) @@ -107,3 +110,14 @@ def find_hidden_chars(lines: list[str], start_line: int = 1) -> list[Token]: (char[0], len(char[1]), "Hidden_Char") for char in hidden_char_indexes ] return tok_list + + +def get_special_tokens( + whole_text: str, text_range: tuple[int, int] +) -> list[Token]: + return_tokens: list[Token] = [] + return_tokens.extend(get_urls(whole_text, text_range)) + if [char for char in hidden_chars if char in whole_text]: + # If there are no hidden chars we don't want to needlessly compute this + return_tokens.extend(find_hidden_chars(whole_text, text_range)) + return return_tokens diff --git a/salve/server_functions/replacements.py b/salve/server_functions/replacements.py index 9d09055..be6fa81 100644 --- a/salve/server_functions/replacements.py +++ b/salve/server_functions/replacements.py @@ -8,8 +8,8 @@ def get_replacements( ) -> list[str]: """Returns a list of possible and plausible replacements for a given word""" # Get all words in file - starter_words = find_words(full_text) - starter_words += ( + starter_words: list[str] = find_words(full_text) + starter_words.extend( expected_keywords * 3 ) # We add a multiplier of three to boost the score of keywords while replaceable_word in starter_words: diff --git a/setup.py b/setup.py index 2c94e02..8f67330 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -# pip install -r requirements.txt --break-system-packages; pip uninstall salve -y --break-system-packages; pip install . --break-system-packages --no-build-isolation; python3 -m pytest . +# pip install -U -r requirements-dev.txt --break-system-packages; pip uninstall salve -y --break-system-packages; pip install . --break-system-packages --no-build-isolation; python3 -m pytest . 
from setuptools import setup with open("README.md", "r") as file: @@ -7,14 +7,16 @@ setup( name="salve", - version="0.7.2", + version="1.0.0", description="Salve is an IPC library that can be used by code editors to easily get autocompletions, replacements, editorconfig suggestions, definitions, and syntax highlighting.", author="Moosems", author_email="moosems.j@gmail.com", - url="https://github.com/Moosems/salve", + url="https://github.com/salve-org/salve", long_description=long_description, long_description_content_type="text/markdown", - install_requires=["pygments", "pyeditorconfig", "beartype"], + install_requires=open("requirements.txt", "r+") + .read() + .splitlines(keepends=False), python_requires=">=3.11", license="MIT license", classifiers=[ @@ -25,5 +27,9 @@ "License :: OSI Approved :: MIT License", "Typing :: Typed", ], - packages=["salve"], + packages=[ + "salve", + "salve.server_functions", + "salve.server_functions.highlight", + ], ) diff --git a/tests/test_ipc.py b/tests/test_ipc.py index a340325..2082b80 100644 --- a/tests/test_ipc.py +++ b/tests/test_ipc.py @@ -8,6 +8,7 @@ EDITORCONFIG, HIGHLIGHT, IPC, + LINKS_AND_CHARS, REPLACEMENTS, Response, ) @@ -48,7 +49,7 @@ def test_IPC(): (r":?.*=.*", "before"), ], ) - + context.request(LINKS_AND_CHARS, file="test", text_range=(1, 18)) sleep(1) # Check output @@ -118,11 +119,8 @@ def test_IPC(): ((17, 0), 3, "Name"), ((17, 3), 2, "Punctuation"), ((18, 0), 24, "Comment"), - ((18, 2), 22, "Link"), - ((5, 7), 1, "Hidden_Char"), ], } - # Deal with Windows weirdness if platform == "win32": expected_output = { @@ -162,12 +160,34 @@ def test_IPC(): ((17, 0), 3, "Name"), ((17, 3), 2, "Punctuation"), ((18, 0), 24, "Comment"), - ((18, 2), 22, "Link"), ], } assert highlight_output == expected_output + links_and_hidden_chars_result: Response | None = context.get_response( + LINKS_AND_CHARS + ) + if links_and_hidden_chars_result is None: + raise AssertionError("links_and_hidden_chars_result output is None") + links_and_hidden_chars_result["id"] = 0 + expected_output = { + "id": 0, + "type": "response", + "cancelled": False, + "command": LINKS_AND_CHARS, + "result": [((18, 2), 22, "Link"), ((5, 7), 1, "Hidden_Char")], + } + if platform == "win32": + expected_output = { + "id": 0, + "type": "response", + "cancelled": False, + "command": LINKS_AND_CHARS, + "result": [((18, 2), 22, "Link")], + } + assert links_and_hidden_chars_result == expected_output + context.update_file( "foo", open(Path("tests/testing_file2.py"), "r+").read() )
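For reference, here is a rough client-side sketch of consuming the new LINKS_AND_CHARS command introduced by this patch. The file name, snippet contents, and printed messages are illustrative only; the calls mirror examples/simple_links_and_hidden_chars_example.py and the Token shape asserted in tests/test_ipc.py.

from time import sleep

from salve import IPC, LINKS_AND_CHARS, Response


def main():
    context = IPC()

    # Any string can be registered as a "file"; this snippet is illustrative
    context.update_file(
        "demo",
        "# docs: https://example.com/docs\nprint('hello')\n",
    )

    context.request(LINKS_AND_CHARS, file="demo", text_range=(1, 2))

    sleep(1)  # give the server process time to respond
    output: Response | None = context.get_response(LINKS_AND_CHARS)
    if output is not None:
        # Each result entry is a Token: ((line, column), length, token type)
        for (line, column), length, token_type in output.get("result", []):
            if token_type == "Link":
                print(f"Link at line {line}, column {column} ({length} chars)")
            elif token_type == "Hidden_Char":
                print(f"Hidden character at line {line}, column {column}")

    context.kill_IPC()


if __name__ == "__main__":
    main()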