Merge branch 'st3'
deathaxe committed Jan 29, 2025
2 parents d6955fd + 8cece69 commit cfd468b
Showing 3 changed files with 64 additions and 58 deletions.
20 changes: 12 additions & 8 deletions latextools/latextools_cache_listener.py
@@ -47,36 +47,40 @@ def update_cache(cache, doc, bib):
def worker():
with ActivityIndicator("Updating LaTeXTools cache") as activity:
try:
cache.invalidate("bib_files")
cache.invalidate()
if doc:
logger.debug("Updating analysis cache for %s", cache.tex_root)
cache.set("analysis", analysis.analyze_document(cache.tex_root))
if bib:
logger.debug("Updating bibliography cache for %s", cache.tex_root)
run_plugin_command("get_entries", *(find_bib_files(cache.tex_root) or []))
except Exception:
activity.finish("LaTeXTools cache update failed")
traceback.print_exc()
else:
activity.finish("LaTeXTools cache updated")

if cache and (doc or bib):
if cache:
threading.Thread(target=worker).start()


class LatextoolsCacheUpdateListener(sublime_plugin.EventListener):
def on_load(self, view):
if not view.match_selector(0, "text.tex.latex"):
if not view.is_primary():
return

update_doc = get_setting("cache.analysis.update_on_load", True, view)
update_bib = get_setting("cache.bibliography.update_on_load", True, view)
if not update_doc and not update_bib:
if not view.match_selector(0, "text.tex.latex"):
return

cache = get_cache(view)
if not cache:
return

update_doc = get_setting("cache.analysis.update_on_load", True, view)
update_bib = get_setting("cache.bibliography.update_on_load", True, view)
if not update_doc and not update_bib:
return

# because cache state is shared amongst all documents sharing a tex
# root, this ensures we only load the analysis ONCE in the on_load
# event
@@ -90,10 +94,10 @@ def on_close(self, view):
remove_cache(view)

def on_post_save(self, view):
if not view.match_selector(0, "text.tex.latex"):
if not view.is_primary():
return

if not view.is_primary():
if not view.match_selector(0, "text.tex.latex"):
return

update_doc = get_setting("cache.analysis.update_on_save", True, view)
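
Note on the listener changes above: the primary-view and syntax guards swap order (the cheap is_primary check now runs first), the cache is fetched before the settings are read, and update_cache now spawns its worker whenever a cache exists. A rough sketch of the resulting on_load flow, assembled only from names visible in the hunks above and not meant as a drop-in replacement for the listener class:

    def refresh_on_load(view):
        # cheap checks first: skip cloned views and non-LaTeX buffers
        if not view.is_primary():
            return
        if not view.match_selector(0, "text.tex.latex"):
            return
        cache = get_cache(view)  # shared among all views on the same tex root
        if not cache:
            return
        update_doc = get_setting("cache.analysis.update_on_load", True, view)
        update_bib = get_setting("cache.bibliography.update_on_load", True, view)
        if not update_doc and not update_bib:
            return
        update_cache(cache, update_doc, update_bib)  # worker thread invalidates and rebuilds
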
25 changes: 20 additions & 5 deletions latextools/utils/analysis.py
@@ -425,17 +425,14 @@ def _analyze_tex_file(
logger.error("File appears cyclic: %s\n%s", file_name, process_file_stack)
return ana

if not import_path:
base_path, _ = os.path.split(tex_root)
else:
base_path = import_path
base_path = import_path if import_path else os.path.dirname(tex_root)

# store import path at the base path, such that it can be accessed
if import_path:
if file_name in ana._import_base_paths:
if ana._import_base_paths[file_name] != import_path:
logger.warning(
"'%s' is imported twice. " "Cannot handle this correctly in the analysis.",
"'%s' is imported twice. Cannot handle this correctly in the analysis.",
file_name,
)
else:
@@ -473,6 +470,7 @@ def _analyze_tex_file(
# check that we still need to analyze
if only_preamble and ana._state.get("preamble_finished", False):
return ana

elif g("command") in _import_commands and g("args") is not None and g("args2") is not None:
if g("command").startswith("sub"):
next_import_path = os.path.join(base_path, g("args").strip('"'))
@@ -495,6 +493,7 @@ def _analyze_tex_file(
# check that we still need to analyze
if only_preamble and ana._state.get("preamble_finished", False):
return ana

# subfile support:
# if we are not in the root file (i.e. not called from included files)
# and have the command \documentclass[main.tex]{subfiles}
@@ -519,6 +518,22 @@ def _analyze_tex_file(
except KeyError:
pass

# usepackage(local) support:
# analyze existing local packages or stylesheets
elif g("command") == "usepackage" and g("args") is not None:
fn = os.path.join(base_path, os.path.splitext(g("args").strip('"'))[0])
for ext in (".sty", ".tex"):
open_file = fn + ext
if os.path.isfile(open_file):
process_file_stack.append(file_name)
_analyze_tex_file(tex_root, open_file, process_file_stack, ana)
process_file_stack.pop()
break

# check that we still need to analyze
if only_preamble and ana._state.get("preamble_finished", False):
return ana

return ana


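
Note on the new usepackage branch above: package arguments are resolved relative to base_path (the tex root's directory, or the active import path), probing .sty before .tex, and only existing local files are recursed into. A minimal restatement of that lookup with a made-up package name for illustration; resolve_local_package is not a function in the codebase:

    import os

    def resolve_local_package(base_path, args):
        # mirror the branch above: strip quotes and any extension, then
        # probe for a local .sty or .tex file relative to the base path
        stem = os.path.join(base_path, os.path.splitext(args.strip('"'))[0])
        for ext in (".sty", ".tex"):
            candidate = stem + ext
            if os.path.isfile(candidate):
                return candidate
        return None

    # e.g. \usepackage{mystyle} in /project/main.tex would resolve to
    # /project/mystyle.sty if present, otherwise /project/mystyle.tex.
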
77 changes: 32 additions & 45 deletions latextools/utils/cache.py
@@ -50,9 +50,7 @@ def hash_digest(text):
Arguments:
text -- the text for which the digest should be created
"""
text_encoded = text.encode("utf8")
hash_result = hashlib.md5(text_encoded)
return hash_result.hexdigest()
return hashlib.md5(text.encode("utf-8")).hexdigest()


def cache_local(tex_root, key, func):
@@ -173,27 +171,28 @@ def _global_cache_path():

# marker class for invalidated result
class InvalidObject:
_HASH = hash("_LaTeXTools_InvalidObject")
__slots__ = []
__hash = hash("_LaTeXTools_InvalidObject")

def __eq__(self, other):
@classmethod
def __hash__(cls):
return cls.__hash

@classmethod
def __eq__(cls, other):
# in general, this is a bad pattern, since it will treat the
# literal string "_LaTeXTools_InvalidObject" as being an invalid
# object; nevertheless, we need an object identity that persists
# across reloads, and this seems to be the only way to guarantee
# that
return self._HASH == hash(other)

def __ne__(self, other):
return not self == other

def __hash__(self):
return self._HASH

try:
return cls.__hash == hash(other)
except TypeError:
return False

try:
_invalid_object
except NameError:
_invalid_object = InvalidObject()
@classmethod
def __ne__(cls, other):
return not cls == other


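
Note on the InvalidObject rework above: the module-level _invalid_object singleton is gone and the class object itself now serves as the invalidation marker, with __eq__/__hash__ turned into classmethods and the equality comparison guarded against unhashable operands. A condensed sketch of how the marker is used by the cache methods further down (not the real class, method bodies abbreviated):

    class CacheSketch:
        def invalidate(self, key):
            # mark the entry as invalid instead of deleting it
            self._objects[key] = InvalidObject

        def get(self, key):
            result = self._objects[key]
            if result == InvalidObject:
                raise CacheMiss("{0} is invalid".format(key))
            return result

        def save_all(self):
            # invalidated entries are dropped from the copy written to disk
            objs = {k: v for k, v in self._objects.items()
                    if v != InvalidObject}
            return objs
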
class Cache:
@@ -216,14 +215,12 @@ def __init__(self):
self._disk_lock = threading.Lock()
if not hasattr(self, "_write_lock"):
self._write_lock = threading.Lock()
if not hasattr(self, "_save_lock"):
self._save_lock = threading.Lock()
if not hasattr(self, "_objects"):
self._objects = {}
if not hasattr(self, "_dirty"):
self._dirty = False
if not hasattr(self, "_save_queue"):
self._save_queue = []
self._save_queue = 0
if not hasattr(self, "_pool"):
self._pool = ThreadPool(2)

@@ -247,7 +244,7 @@ def get(self, key):
# note: will raise CacheMiss if can't be found
result = self.load(key)

if result == _invalid_object:
if result == InvalidObject:
raise CacheMiss("{0} is invalid".format(key))

# return a copy of any objects
@@ -269,7 +266,7 @@ def has(self, key):
if key is None:
raise ValueError("key cannot be None")

return key in self._objects and self._objects[key] != _invalid_object
return key in self._objects and self._objects[key] != InvalidObject

def set(self, key, obj):
"""
@@ -284,11 +281,6 @@ def set(self, key, obj):
if key is None:
raise ValueError("key cannot be None")

try:
pickle.dumps(obj, protocol=-1)
except pickle.PicklingError:
raise ValueError("obj must be picklable")

if isinstance(obj, list):
obj = tuple(obj)
elif isinstance(obj, dict):
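
Note on the set() hunk above: the up-front pickle round-trip check appears to have been dropped, and mutable containers are normalized before being stored (lists become tuples; the dict branch is truncated in this view, so its target type is not shown here). A small illustration using the bib_files key seen in the listener diff; the file names are made up:

    bib_files = ["main.bib", "extra.bib"]
    cache.set("bib_files", bib_files)   # stored as the tuple ("main.bib", "extra.bib")
    bib_files.append("local.bib")       # later mutation of the list does not reach the cache
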
@@ -336,7 +328,7 @@ def invalidate(self, key=None):

def _invalidate(key):
try:
self._objects[key] = _invalid_object
self._objects[key] = InvalidObject
except Exception:
logger.error("error occurred while invalidating %s", key)
traceback.print_exc()
@@ -412,12 +404,12 @@ def save(self, key=None):
with self._disk_lock:
# operate on a stable copy of the object
with self._write_lock:
_objs = pickle.loads(pickle.dumps(self._objects, protocol=-1))
_objs = self._objects.copy()
self._dirty = False

if key is None:
# remove all InvalidObjects
delete_keys = [k for k in _objs if _objs[k] == _invalid_object]
delete_keys = [k for k in _objs if _objs[k] == InvalidObject]

for k in delete_keys:
del _objs[k]
@@ -442,7 +434,7 @@ def save(self, key=None):
logger.error("error while deleting %s: %s", self.cache_path, e)

elif key in _objs:
if _objs[key] == _invalid_object:
if _objs[key] == InvalidObject:
file_path = os.path.join(self.cache_path, key)
try:
os.remove(file_path)
@@ -475,17 +467,16 @@ def _write(self, key, obj):
raise CacheMiss()

def _schedule_save(self):
with self._save_lock:
self._save_queue.append(0)
threading.Timer(0.5, self._debounce_save).start()

def _debounce_save(self):
with self._save_lock:
if len(self._save_queue) > 1:
self._save_queue.pop()
def _debounce():
self._save_queue -= 1
if self._save_queue > 0:
sublime.set_timeout(_debounce, 1000)
else:
self._save_queue = []
sublime.set_timeout(self.save_async, 0)
self._save_queue = 0
self.save_async()

self._save_queue += 1
sublime.set_timeout(_debounce, 1000)

# ensure cache is saved to disk when removed from memory
def __del__(self):
@@ -561,8 +552,6 @@ def get(self, key):

return super(ValidatingCache, self).get(key)

get.__doc__ = Cache.get.__doc__

def set(self, key, obj):
if key is None:
raise ValueError("key cannot be None")
@@ -571,8 +560,6 @@ def set(self, key, obj):

return super(ValidatingCache, self).set(key, obj)

set.__doc__ = Cache.set.__doc__


class InstanceTrackingCache(Cache):
"""
