Run python2 compatible black
pgrunewald committed Mar 8, 2024
1 parent be7cbe2 commit 4ef6e99
Showing 15 changed files with 288 additions and 147 deletions.
3 changes: 2 additions & 1 deletion CHANGES.rst
@@ -5,7 +5,8 @@ Changelog
1.13 (unreleased)
-----------------

- Nothing changed yet.
- Add and run a black version that is compatible with Python 2.
[pgrunewald]


1.12 (2024-03-08)
7 changes: 6 additions & 1 deletion DEVELOP.rst
@@ -7,7 +7,7 @@ Create a virtualenv in the package::

Install requirements with pip::

$ ./bin/pip install -r requirements.txt
$ ./bin/pip install -r requirements.txt -r requirements_black.txt

Run buildout::

@@ -38,3 +38,8 @@ run a specific tox env:

$ tox -e plone52-py38


Format Python code
------------------

$ ./bin/black .
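
Black also offers a check-only mode that reports which files it would reformat without modifying them; these are standard black flags, noted here for reference rather than added by this commit:

    $ ./bin/black --check --diff .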
4 changes: 4 additions & 0 deletions requirements_black.txt
@@ -0,0 +1,4 @@
# black 21.12b0 is the last version supporting Python2 syntax
black==21.12b0
# ensure compatibility with our black version
click==8.0.3
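
The click pin most likely exists because black releases before 22.3.0 import private click internals that click 8.1 removed, so an unpinned click would make black crash on startup; this rationale is inferred rather than stated in the commit. The formatting toolchain alone can be installed with:

    $ ./bin/pip install -r requirements_black.txt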
4 changes: 2 additions & 2 deletions src/collective/exportimport/config.py
@@ -12,7 +12,7 @@
os.path.expandvars(os.getenv("COLLECTIVE_EXPORTIMPORT_CENTRAL_DIRECTORY", ""))
)

SITE_ROOT = 'plone_site_root'
SITE_ROOT = "plone_site_root"

# Discussion Item has its own export / import views, don't show it in the exportable content type list
SKIPPED_CONTENTTYPE_IDS = ['Discussion Item']
SKIPPED_CONTENTTYPE_IDS = ["Discussion Item"]
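
The quote changes in this file (and in the files below) come from black's default string normalization, which rewrites single-quoted literals to double quotes. For reference only, and not something this commit does, that behaviour could be disabled with black's --skip-string-normalization flag:

    $ ./bin/black --skip-string-normalization .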
52 changes: 37 additions & 15 deletions src/collective/exportimport/export_content.py
@@ -112,7 +112,7 @@ def __call__(
download_to_server=False,
migration=True,
include_revisions=False,
write_errors=False
write_errors=False,
):
self.portal_type = portal_type or []
if isinstance(self.portal_type, str):
@@ -150,7 +150,9 @@ def __call__(
return self.template()

if not self.portal_type:
api.portal.show_message(_(u"Select at least one type to export"), self.request)
api.portal.show_message(
_(u"Select at least one type to export"), self.request
)
return self.template()

if self.include_blobs == 1:
@@ -250,7 +252,11 @@ def __call__(
json.dump(errors, f, indent=4)
f.write("]")
msg = _(u"Exported {} items ({}) as {} to {} with {} errors").format(
number, ", ".join(self.portal_type), filename, filepath, len(self.errors)
number,
", ".join(self.portal_type),
filename,
filepath,
len(self.errors),
)
logger.info(msg)
api.portal.show_message(msg, self.request)
@@ -274,12 +280,14 @@ def __call__(
f.write(",")
json.dump(datum, f, sort_keys=True, indent=4)
if number:
if self.errors and self.write_errors:
if self.errors and self.write_errors:
f.write(",")
errors = {"unexported_paths": self.errors}
json.dump(errors, f, indent=4)
f.write("]")
msg = _(u"Exported {} {} with {} errors").format(number, self.portal_type, len(self.errors))
msg = _(u"Exported {} {} with {} errors").format(
number, self.portal_type, len(self.errors)
)
logger.info(msg)
api.portal.show_message(msg, self.request)
response = self.request.response
@@ -350,13 +358,13 @@ def export_content(self):
obj = brain.getObject()
except Exception:
msg = u"Error getting brain {}".format(brain.getPath())
self.errors.append({'path':None, 'message': msg})
self.errors.append({"path": None, "message": msg})
logger.exception(msg, exc_info=True)
continue
if obj is None:
msg = u"brain.getObject() is None {}".format(brain.getPath())
logger.error(msg)
self.errors.append({'path':None, 'message': msg})
self.errors.append({"path": None, "message": msg})
continue
obj = self.global_obj_hook(obj)
if not obj:
@@ -375,7 +383,7 @@ def export_content(self):
yield item
except Exception:
msg = u"Error exporting {}".format(obj.absolute_url())
self.errors.append({'path':obj.absolute_url(), 'message':msg})
self.errors.append({"path": obj.absolute_url(), "message": msg})
logger.exception(msg, exc_info=True)

def portal_types(self):
@@ -395,7 +403,9 @@ def portal_types(self):
"number": number,
"value": fti.id,
"title": translate(
safe_unicode(fti.title), domain="plone", context=self.request
safe_unicode(fti.title),
domain="plone",
context=self.request,
),
}
)
@@ -580,15 +590,27 @@ def export_revisions(self, item, obj):
item_version = self.update_data_for_migration(item_version, obj)
item["exportimport.versions"][version_id] = item_version
# inject metadata (missing for Archetypes content):
comment = history_metadata.retrieve(version_id)["metadata"]["sys_metadata"]["comment"]
if comment and comment != item["exportimport.versions"][version_id].get("changeNote"):
comment = history_metadata.retrieve(version_id)["metadata"]["sys_metadata"][
"comment"
]
if comment and comment != item["exportimport.versions"][version_id].get(
"changeNote"
):
item["exportimport.versions"][version_id]["changeNote"] = comment
principal = history_metadata.retrieve(version_id)["metadata"]["sys_metadata"]["principal"]
if principal and principal != item["exportimport.versions"][version_id].get("changeActor"):
principal = history_metadata.retrieve(version_id)["metadata"][
"sys_metadata"
]["principal"]
if principal and principal != item["exportimport.versions"][version_id].get(
"changeActor"
):
item["exportimport.versions"][version_id]["changeActor"] = principal
# current changenote
item["changeNote"] = history_metadata.retrieve(-1)["metadata"]["sys_metadata"]["comment"]
item["changeActor"] = history_metadata.retrieve(-1)["metadata"]["sys_metadata"]["principal"]
item["changeNote"] = history_metadata.retrieve(-1)["metadata"]["sys_metadata"][
"comment"
]
item["changeActor"] = history_metadata.retrieve(-1)["metadata"]["sys_metadata"][
"principal"
]
return item


7 changes: 5 additions & 2 deletions src/collective/exportimport/export_other.py
@@ -644,7 +644,9 @@ def __call__(self, download_to_server=False):
def all_portlets(self):
self.results = []
portal = api.portal.get()
portal.ZopeFindAndApply(self.context, search_sub=True, apply_func=self.get_portlets)
portal.ZopeFindAndApply(
self.context, search_sub=True, apply_func=self.get_portlets
)
self.get_root_portlets()
return self.results

@@ -669,7 +671,7 @@ def _get_portlets(self, obj, uid):
obj_results["uuid"] = uid
self.results.append(obj_results)
return

def get_root_portlets(self):
site = api.portal.get()
self._get_portlets(site, PORTAL_PLACEHOLDER)
@@ -681,6 +683,7 @@ def local_portlets_hook(self, portlets):
def portlets_blacklist_hook(self, blacklist):
return blacklist


def export_local_portlets(obj):
"""Serialize portlets for one content object
Code mostly taken from https://github.com/plone/plone.restapi/pull/669
5 changes: 3 additions & 2 deletions src/collective/exportimport/export_topic.py
@@ -3,7 +3,8 @@


class ExportTopic(ExportContent):
""" Export ATTopic """
"""Export ATTopic"""

def build_query(self):
""" Build the query based on the topic criterias """
"""Build the query based on the topic criterias"""
return self.context.buildQuery()
41 changes: 33 additions & 8 deletions src/collective/exportimport/fix_html.py
@@ -235,7 +235,7 @@ def find_object(base, path):
obj = api.portal.get()
portal_path = obj.absolute_url_path() + "/"
if path.startswith(portal_path):
path = path[len(portal_path):]
path = path[len(portal_path) :]
else:
obj = aq_parent(base) # relative urls start at the parent...

@@ -324,14 +324,18 @@ def table_class_fixer(text, obj=None):
query["path"] = "/".join(context.getPhysicalPath())
brains = catalog(**query)
total = len(brains)
logger.info("There are %s content items in total, starting migration...", len(brains))
logger.info(
"There are %s content items in total, starting migration...", len(brains)
)
fixed_fields = 0
fixed_items = 0
for index, brain in enumerate(brains, start=1):
try:
obj = brain.getObject()
except Exception:
logger.warning("Could not get object for: %s", brain.getPath(), exc_info=True)
logger.warning(
"Could not get object for: %s", brain.getPath(), exc_info=True
)
continue
if obj is None:
logger.error(u"brain.getObject() is None %s", brain.getPath())
@@ -343,11 +347,19 @@
if text and IRichTextValue.providedBy(text) and text.raw:
clean_text = text.raw
for fixer in fixers:
logger.debug("Fixing html for %s with %s", obj.absolute_url(), fixer.__name__)
logger.debug(
"Fixing html for %s with %s",
obj.absolute_url(),
fixer.__name__,
)
try:
clean_text = fixer(clean_text, obj)
except Exception:
logger.info(u"Error while fixing html of %s for %s", fieldname, obj.absolute_url())
logger.info(
u"Error while fixing html of %s for %s",
fieldname,
obj.absolute_url(),
)
raise

if clean_text and clean_text != text.raw:
@@ -359,7 +371,11 @@
)
setattr(obj, fieldname, textvalue)
changed = True
logger.debug(u"Fixed html for field %s of %s", fieldname, obj.absolute_url())
logger.debug(
u"Fixed html for field %s of %s",
fieldname,
obj.absolute_url(),
)
fixed_fields += 1
if changed:
fixed_items += 1
@@ -371,11 +387,20 @@
# Commit every 1000 changed items.
logger.info(
u"Fix html for %s (%s) of %s items (changed %s fields in %s items)",
index, round(index / total * 100, 2), total, fixed_fields, fixed_items)
index,
round(index / total * 100, 2),
total,
fixed_fields,
fixed_items,
)
if commit:
transaction.commit()

logger.info(u"Finished fixing html in content fields (changed %s fields in %s items)", fixed_fields, fixed_items)
logger.info(
u"Finished fixing html in content fields (changed %s fields in %s items)",
fixed_fields,
fixed_items,
)
if commit:
# commit remaining items
transaction.commit()