Skip to content

Commit

Permalink
Merge pull request #164 from Progress1/template
Browse files Browse the repository at this point in the history
Adding new reports (templates) + documentation
  • Loading branch information
milankowww authored Sep 27, 2023
2 parents a27a4da + 2dcad0b commit 83a6a90
Show file tree
Hide file tree
Showing 13 changed files with 4,865 additions and 202 deletions.
Binary file added doc/static/Reports.docx
Binary file not shown.
1,382 changes: 1,382 additions & 0 deletions src/core/migrations/versions/1c4eed243364_new_reports.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
"""JP: add cascade delete to report_item related tables
Revision ID: 4f24c634cd22
Revises: aaf3d8b31972
Create Date: 2023-05-11 08:50:57.791722
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4f24c634cd22'
down_revision = 'aaf3d8b31972'
branch_labels = None
depends_on = None


def upgrade():
delete_previous()
# report_item_attribute
op.create_foreign_key('report_item_attribute_report_item_id_fkey', 'report_item_attribute', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')
# report_item_remote_report_item
op.create_foreign_key('report_item_remote_report_item_report_item_id_fkey', 'report_item_remote_report_item', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key('report_item_remote_report_item_remote_report_item_id_fkey', 'report_item_remote_report_item', 'report_item', ['remote_report_item_id'], ['id'], ondelete='CASCADE')
# asset_vulnerability
op.create_foreign_key('asset_vulnerability_report_item_id_fkey', 'asset_vulnerability', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')
# product_report_item
op.create_foreign_key('product_report_item_report_item_id_fkey', 'product_report_item', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')
# report_item_cpe
op.create_foreign_key('report_item_cpe_report_item_id_fkey', 'report_item_cpe', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')
# report_item_news_item_aggregate
op.create_foreign_key('report_item_news_item_aggregate_report_item_id_fkey', 'report_item_news_item_aggregate', 'report_item', ['report_item_id'], ['id'], ondelete='CASCADE')

def downgrade():
    """Restore the original foreign keys without ON DELETE CASCADE.

    Drops the cascading FKs (via delete_previous) and re-creates them as
    plain references to report_item.id, matching the pre-upgrade schema.
    """
    delete_previous()
    # (constraint name, child table, child column) — all reference report_item.id
    plain_fks = (
        ('report_item_attribute_report_item_id_fkey', 'report_item_attribute', 'report_item_id'),
        ('report_item_remote_report_item_report_item_id_fkey', 'report_item_remote_report_item', 'report_item_id'),
        ('report_item_remote_report_item_remote_report_item_id_fkey', 'report_item_remote_report_item', 'remote_report_item_id'),
        ('asset_vulnerability_report_item_id_fkey', 'asset_vulnerability', 'report_item_id'),
        ('product_report_item_report_item_id_fkey', 'product_report_item', 'report_item_id'),
        ('report_item_cpe_report_item_id_fkey', 'report_item_cpe', 'report_item_id'),
        ('report_item_news_item_aggregate_report_item_id_fkey', 'report_item_news_item_aggregate', 'report_item_id'),
    )
    for fk_name, source_table, source_column in plain_fks:
        op.create_foreign_key(fk_name, source_table, 'report_item',
                              [source_column], ['id'])

def delete_previous():
    """Drop every existing report_item foreign-key constraint.

    Shared by upgrade() and downgrade(): both first remove the current
    constraints before re-creating them with the desired ON DELETE behavior.
    """
    print("deleting previous objects...", flush=True)
    # (constraint name, table it lives on)
    existing_fks = (
        ('report_item_attribute_report_item_id_fkey', 'report_item_attribute'),
        ('report_item_remote_report_item_report_item_id_fkey', 'report_item_remote_report_item'),
        ('report_item_remote_report_item_remote_report_item_id_fkey', 'report_item_remote_report_item'),
        ('asset_vulnerability_report_item_id_fkey', 'asset_vulnerability'),
        ('product_report_item_report_item_id_fkey', 'product_report_item'),
        ('report_item_cpe_report_item_id_fkey', 'report_item_cpe'),
        ('report_item_news_item_aggregate_report_item_id_fkey', 'report_item_news_item_aggregate'),
    )
    for fk_name, source_table in existing_fks:
        op.drop_constraint(fk_name, source_table, type_='foreignkey')
12 changes: 4 additions & 8 deletions src/core/model/report_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class ReportItemAttribute(db.Model):
current = db.Column(db.Boolean, default=True)

attribute_group_item_id = db.Column(db.Integer, db.ForeignKey('attribute_group_item.id'))
attribute_group_item = db.relationship("AttributeGroupItem")
attribute_group_item = db.relationship("AttributeGroupItem", viewonly=True)
attribute_group_item_title = db.Column(db.String)

report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), nullable=True)
Expand All @@ -62,10 +62,6 @@ def find(cls, attribute_id):
report_item_attribute = cls.query.get(attribute_id)
return report_item_attribute

# Sort-key helper for ReportItemAttribute lists (this commit removes it in
# favor of ordering by attribute-group indices in ReportItem.reconstruct).
@staticmethod
def sort(report_item_attribute):
    # NOTE(review): assumes last_updated is populated on every attribute row
    # — confirm against the model definition (not visible in this view).
    return report_item_attribute.last_updated


class NewReportItemSchema(ReportItemBaseSchema):
news_item_aggregates = fields.Nested(NewsItemAggregateIdSchema, many=True, missing=[])
Expand Down Expand Up @@ -94,11 +90,11 @@ class ReportItem(db.Model):
completed = db.Column(db.Boolean, default=False)

user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True)
user = db.relationship("User")
user = db.relationship("User", viewonly=True)
remote_user = db.Column(db.String())

report_item_type_id = db.Column(db.Integer, db.ForeignKey('report_item_type.id'), nullable=True)
report_item_type = db.relationship("ReportItemType")
report_item_type = db.relationship("ReportItemType", viewonly=True)

news_item_aggregates = db.relationship("NewsItemAggregate", secondary='report_item_news_item_aggregate')

Expand Down Expand Up @@ -141,7 +137,7 @@ def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item
def reconstruct(self):
self.subtitle = ""
self.tag = "mdi-file-table-outline"
self.attributes.sort(key=ReportItemAttribute.sort)
self.attributes.sort(key=lambda obj: (obj.attribute_group_item.attribute_group.index, obj.attribute_group_item.index, obj.id))

@classmethod
def count_all(cls, is_completed):
Expand Down
2 changes: 1 addition & 1 deletion src/core/model/report_item_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class AttributeGroupItem(db.Model):
max_occurrence = db.Column(db.Integer)

attribute_group_id = db.Column(db.Integer, db.ForeignKey('attribute_group.id'))
attribute_group = db.relationship("AttributeGroup")
attribute_group = db.relationship("AttributeGroup", viewonly=True)

attribute_id = db.Column(db.Integer, db.ForeignKey('attribute.id'))
attribute = db.relationship("Attribute")
Expand Down
72 changes: 59 additions & 13 deletions src/presenters/presenters/base_presenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ class BasePresenter:
name = "Base Presenter"
description = "Base abstract type for all presenters"

parameters = []
parameters = list()

# helper class
@staticmethod
Expand Down Expand Up @@ -57,19 +57,53 @@ def __init__(self, report_item, report_types, attribute_map):

self.attrs = BasePresenter.AttributesObject()

# group the values ; identify attributes with the same names
attribute_group_items = dict()
attribute_group_items_by_name = dict()

# print (dir(report_item), flush=True)

for attribute in report_item.attributes:
if attribute.value is not None:
attr_type = attribute_map[attribute.attribute_group_item_id]
attr_key = attr_type.title.lower().replace(" ", "_")
if hasattr(self.attrs, attr_key):
if attribute_map[attribute.attribute_group_item_id].max_occurrence > 1:
attr = getattr(self.attrs, attr_key)
attr.append(attribute.value)
else:
if attribute_map[attribute.attribute_group_item_id].max_occurrence == 1:
setattr(self.attrs, attr_key, attribute.value)
else:
setattr(self.attrs, attr_key, [attribute.value])
attribute_group_item_id = attribute.attribute_group_item_id
if attribute_group_item_id not in attribute_group_items:
attribute_group_items[attribute_group_item_id] = list()
attribute_group_items[attribute_group_item_id].append(attribute) ######

attr_type = attribute_map[attribute_group_item_id]
attr_key = attr_type.title.lower().replace(" ", "_")
if attr_key not in attribute_group_items_by_name:
attribute_group_items_by_name[attr_key] = 1
else:
attribute_group_items_by_name[attr_key] += 1
# print(">>>", attr_key + ":", attribute.value, flush=True)

for attribute_group_item_id in attribute_group_items.keys():
attr_type = attribute_map[attribute_group_item_id]
attr_key = attr_type.title.lower().replace(" ", "_")

attribute_group_item = attribute_group_items[attribute_group_item_id]
# print("=>>", attribute_group_item, flush=True)

min_occurrence = attribute_map[attribute_group_item_id].min_occurrence
max_occurrence = attribute_map[attribute_group_item_id].max_occurrence

value_to_add = None
if max_occurrence == 1:
if len(attribute_group_item) > 0:
value_to_add = attribute_group_item[0].value
else:
value_to_add = list()
for attribute in attribute_group_item:
value_to_add.append(attribute.value)

how_many_with_the_same_name = attribute_group_items_by_name[attr_key]
# print("===", attr_key + ":", value_to_add, how_many_with_the_same_name, flush=True)
if how_many_with_the_same_name == 1:
setattr(self.attrs, attr_key, value_to_add)
else:
if not hasattr(self.attrs, attr_key):
setattr(self.attrs, attr_key, list())
getattr(self.attrs, attr_key).append(value_to_add)

# object holding all that we received from the CORE
class InputDataObject:
Expand Down Expand Up @@ -159,3 +193,15 @@ def generate_input_data(presenter_input):

def generate(self, presenter_input):
pass

# used in JINJA templating for formating "string date" to "date"
def _filter_datetime(date, fmtin=None, fmtout=None):
if date == "":
return ""
if not fmtin:
fmtin = "%Y.%m.%d"
date = datetime.datetime.strptime(date, fmtin)
native = date.replace(tzinfo=None)
if not fmtout:
fmtout = "%-d.%-m.%Y"
return native.strftime(fmtout)
2 changes: 2 additions & 0 deletions src/presenters/presenters/html_presenter.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@

import os
from base64 import b64encode
import jinja2
Expand All @@ -24,6 +25,7 @@ def generate(self, presenter_input):
head, tail = os.path.split(presenter_input.parameter_values_map['HTML_TEMPLATE_PATH'])
input_data = BasePresenter.generate_input_data(presenter_input)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(head))
env.filters["strfdate"] = BasePresenter._filter_datetime
output_text = env.get_template(tail).render(data=input_data).encode()
base64_bytes = b64encode(output_text)
data = base64_bytes.decode('UTF-8')
Expand Down
2 changes: 1 addition & 1 deletion src/presenters/presenters/pdf_presenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def generate(self, presenter_input):
input_data = BasePresenter.generate_input_data(presenter_input)

env = jinja2.Environment(loader=jinja2.FileSystemLoader(head))

env.filters["strfdate"] = BasePresenter._filter_datetime
body = env.get_template(tail)
output_text = body.render(data=input_data)
with open(output_body_html, 'w') as output_file:
Expand Down
1 change: 1 addition & 0 deletions src/presenters/presenters/text_presenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ def generate(self, presenter_input):
input_data = BasePresenter.generate_input_data(presenter_input)

env = jinja2.Environment(loader=jinja2.FileSystemLoader(head))
env.filters["strfdate"] = BasePresenter._filter_datetime

func_dict = {
"vars": vars,
Expand Down
Loading

0 comments on commit 83a6a90

Please sign in to comment.