From 61138143dc5e2c499e708a4b24d5c337ab2dfd4c Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 9 Jan 2024 13:03:36 +0100 Subject: [PATCH 001/146] addition of Czech language variables and files to code --- src/gui/src/i18n/messages.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/gui/src/i18n/messages.js b/src/gui/src/i18n/messages.js index 525a7e200..3048e932c 100644 --- a/src/gui/src/i18n/messages.js +++ b/src/gui/src/i18n/messages.js @@ -1,9 +1,11 @@ +import messages_cs from "@/i18n/cs/messages"; import messages_en from "@/i18n/en/messages"; import messages_sk from "@/i18n/sk/messages"; const messages = { + cs: messages_cs, en: messages_en, sk: messages_sk }; -export default messages \ No newline at end of file +export default messages From 2bf81e99801e4e8b19b61d2cfae0c7e872dc4550 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 9 Jan 2024 13:05:49 +0100 Subject: [PATCH 002/146] fix inconsistency for English --- src/gui/src/i18n/en/messages.js | 48 ++++++++++++++++----------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/src/gui/src/i18n/en/messages.js b/src/gui/src/i18n/en/messages.js index e4ebf10e0..dc1b77334 100644 --- a/src/gui/src/i18n/en/messages.js +++ b/src/gui/src/i18n/en/messages.js @@ -143,7 +143,7 @@ const messages_en = { country: "Country", successful: "New organization was successfully added", successful_edit: "Organization was successfully updated", - removed: "Organization was successfully removed", + removed: "Organization was successfully deleted", removed_error: "Organization is in use and could not be deleted", total_count: "Organizations count: " }, @@ -161,7 +161,7 @@ const messages_en = { name: "Name", successful: "New user was successfully added", successful_edit: "User was successfully updated", - removed: "User was successfully removed", + removed: "User was successfully deleted", removed_error: "User is in use and could not be deleted", organizations: "Organizations", roles: "Roles", @@ -185,7 +185,7 @@ const messages_en = { key: "Key", successful: "New collectors node was successfully added", successful_edit: "Collectors node was successfully updated", - removed: "Collectors node was successfully removed", + removed: "Collectors node was successfully deleted", removed_error: "Collectors node is in use and could not be deleted", total_count: "Collectors nodes count: " }, @@ -204,7 +204,7 @@ const messages_en = { description: "Description", successful: "New OSINT source was successfully added", successful_edit: "OSINT source was successfully updated", - removed: "OSINT source was successfully removed", + removed: "OSINT source was successfully deleted", removed_error: "OSINT source is in use and could not be deleted", word_lists: "Word Lists", type: "Type", @@ -242,7 +242,7 @@ const messages_en = { description: "Description", successful: "New OSINT source group was successfully added", successful_edit: "OSINT source group was successfully updated", - removed: "OSINT source group was successfully removed", + removed: "OSINT source group was successfully deleted", removed_error: "OSINT source group is in use and could not be deleted", title: "Title", osint_sources: "OSINT Sources", @@ -264,7 +264,7 @@ const messages_en = { description: "Description", successful: "New role was successfully added", successful_edit: "Role was successfully updated", - removed: "Role was successfully removed", + removed: "Role was successfully deleted", removed_error: "Role is in use and could not be deleted", title: "Title",
permissions: "Permissions", @@ -291,7 +291,7 @@ const messages_en = { modify: "Modify", successful: "New ACL was successfully added", successful_edit: "ACL was successfully updated", - removed: "ACL was successfully removed", + removed: "ACL was successfully deleted", removed_error: "ACL is in use and could not be deleted", roles: "Roles", users: "Users", @@ -314,7 +314,7 @@ const messages_en = { use_for_notifications: "Use for all global notifications", successful: "New publisher preset was successfully added", successful_edit: "Publisher preset was successfully updated", - removed: "Publisher preset was successfully removed", + removed: "Publisher preset was successfully deleted", removed_error: "Publisher preset is in use and could not be deleted", total_count: "Publisher presets count: " }, @@ -334,7 +334,7 @@ const messages_en = { description: "Description", successful: "New product type was successfully added", successful_edit: "Product type was successfully updated", - removed: "Product type was successfully removed", + removed: "Product type was successfully deleted", removed_error: "Product type is in use and could not be deleted", total_count: "Product types count: ", help: "Template parameters description", @@ -374,7 +374,7 @@ const messages_en = { key: "Key", successful: "New presenters node was successfully added", successful_edit: "Presenters node was successfully updated", - removed: "Presenters node was successfully removed", + removed: "Presenters node was successfully deleted", removed_error: "Presenters node is in use and could not be deleted", total_count: "Presenters nodes count: " }, @@ -394,7 +394,7 @@ const messages_en = { key: "Key", successful: "New publishers node was successfully added", successful_edit: "Publishers node was successfully updated", - removed: "Publishers node was successfully removed", + removed: "Publishers node was successfully deleted", removed_error: "Publishers node is in use and could not be deleted", total_count: "Publishers nodes count: " }, @@ -414,7 +414,7 @@ const messages_en = { key: "Key", successful: "New bots node was successfully added", successful_edit: "Bots node was successfully updated", - removed: "Bots node was successfully removed", + removed: "Bots node was successfully deleted", removed_error: "Bots node is in use and could not be deleted", total_count: "Bots nodes count: " }, @@ -434,7 +434,7 @@ const messages_en = { description: "Description", successful: "New bot preset was successfully added", successful_edit: "Bot preset was successfully updated", - removed: "Bot preset was successfully removed", + removed: "Bot preset was successfully deleted", removed_error: "Bot preset is in use and could not be deleted", total_count: "Bot presets count: " }, @@ -460,7 +460,7 @@ const messages_en = { default_value: "Default Value", successful: "New attribute was successfully added", successful_edit: "Attribute was successfully updated", - removed: "Attribute was successfully removed", + removed: "Attribute was successfully deleted", removed_error: "Attribute is in use and could not be deleted", value: "Value", value_text: "Value text", @@ -679,7 +679,7 @@ const messages_en = { successful: "New report item type was successfully added", successful_edit: "Report item type was successfully updated", removed_error: "Report item type is in use and could not be deleted", - removed: "Report item type was successfully removed", + removed: "Report item type was successfully deleted", total_count: "Report types count: " }, @@ -696,7 +696,7 
@@ const messages_en = { report_type: "Report Item Type", successful: "New report item was successfully added", successful_edit: "Report item was successfully saved", - removed: "Report item was successfully removed", + removed: "Report item was successfully deleted", removed_error: "Report item is in use and could not be deleted", select: "Select Report Items", select_remote: "Select Report Items from Remote Nodes", @@ -729,7 +729,7 @@ const messages_en = { report_type: "Product Type", successful: "New product was successfully added", successful_edit: "Product was successfully saved", - removed: "Product was successfully removed", + removed: "Product was successfully deleted", removed_error: "Product is in use and could not be deleted", preview: "Show product preview", publish: "Publish product", @@ -900,7 +900,7 @@ const messages_en = { use_for_stop_words: "Use as stop word list", successful: "New word list was successfully added", successful_edit: "Word list was successfully updated", - remove: "Word list was successfully removed", + remove: "Word list was successfully deleted", removed_error: "Word list is in use and could not be deleted", value: "Value", new_word: "New Word", @@ -929,7 +929,7 @@ const messages_en = { allowed_users: "Allowed users (If none is selected than all users are allowed)", successful: "New asset group was successfully added", successful_edit: "Asset group was successfully updated", - removed: "Asset group was successfully removed", + removed: "Asset group was successfully deleted", removed_error: "Asset group is in use and could not be deleted", total_count: "Asset groups count: " }, @@ -951,7 +951,7 @@ const messages_en = { recipients: "Recipients", successful: "New notification template was successfully added", successful_edit: "Notification template was successfully updated", - removed: "Notification template was successfully removed", + removed: "Notification template was successfully deleted", removed_error: "Notification template is in use and could not be deleted", total_count: "Notification templates count: " }, @@ -974,7 +974,7 @@ const messages_en = { value: "Value", successful: "New asset was successfully added", successful_edit: "Asset was successfully updated", - removed: "Asset was successfully removed", + removed: "Asset was successfully deleted", removed_error: "Asset is in use and could not be deleted", total_count: "Assets count: ", vulnerabilities: "Vulnerabilities", @@ -1002,7 +1002,7 @@ const messages_en = { enabled: "Enabled", successful: "New remote access was successfully added", successful_edit: "Remote access was successfully updated", - removed: "Remote access was successfully removed", + removed: "Remote access was successfully deleted", removed_error: "Remote access is in use and could not be deleted", osint_sources: "OSINT Sources to share", report_item_types: "Report Item Types to share", @@ -1031,7 +1031,7 @@ const messages_en = { osint_source_group: "Synchronize to OSINT source group", successful: "New remote node was successfully added", successful_edit: "Remote node was successfully updated", - removed: "Remote node was successfully removed", + removed: "Remote node was successfully deleted", removed_error: "Remote node is in use and could not be deleted", total_count: "Remote nodes count: " }, @@ -1054,4 +1054,4 @@ const messages_en = { } }; -export default messages_en \ No newline at end of file +export default messages_en From 92ee614f6c556733a927bf2a45a058258ac166b1 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 9 
Jan 2024 13:15:09 +0100 Subject: [PATCH 003/146] use newer version of vue-i18n --- src/gui/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gui/package.json b/src/gui/package.json index 272750546..0c60e41d0 100644 --- a/src/gui/package.json +++ b/src/gui/package.json @@ -44,7 +44,7 @@ "vue-cookies": "^1.7.4", "vue-csv-import": "^3.3.10", "vue-cvss": "^1.1.2", - "vue-i18n": "^8.26.7", + "vue-i18n": "^8.28.2", "vue-keypress": "^1.4.0", "vue-quick-chat": "^1.2.8", "vue-quill-editor": "^3.0.6", From b48e35f824d05095c1a85ffc7f9a59c8dc0a354c Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 9 Jan 2024 19:45:19 +0100 Subject: [PATCH 004/146] improve language settings and selection --- docker/.env | 3 +++ docker/docker-compose.yml | 2 +- src/gui/src/main.js | 29 ++++++++++++++++++++--------- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/docker/.env b/docker/.env index f71c53f63..000a566f0 100644 --- a/docker/.env +++ b/docker/.env @@ -12,6 +12,9 @@ TARANIS_NG_TAG=v23.09.1 # Timezone for all containers TZ=Europe/Bratislava +# Force language +VUE_APP_TARANIS_NG_LOCALE="" + # Default passwords. CHANGE THESE FOR PRODUCTION! POSTGRES_PASSWORD=supersecret POSTGRES_KEYCLOAK_PASSWORD=supersecret diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 2f4be5755..d9fa83369 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -222,7 +222,7 @@ services: VUE_APP_TARANIS_NG_URL: "${TARANIS_NG_HTTPS_URI}" VUE_APP_TARANIS_NG_CORE_API: "${TARANIS_NG_HTTPS_URI}/api/v1" VUE_APP_TARANIS_NG_CORE_SSE: "${TARANIS_NG_HTTPS_URI}/sse" - VUE_APP_TARANIS_NG_LOCALE: en + VUE_APP_TARANIS_NG_LOCALE: "${VUE_APP_TARANIS_NG_LOCALE}" TZ: "${TZ}" labels: traefik.enable: "true" diff --git a/src/gui/src/main.js b/src/gui/src/main.js index 855933616..b86c4a8f6 100644 --- a/src/gui/src/main.js +++ b/src/gui/src/main.js @@ -4,14 +4,14 @@ import Vue from 'vue'; import Vuetify from 'vuetify/lib'; //import colors from 'vuetify/lib/util/colors' import App from './App.vue' -import {router} from './router' -import {store} from '@/store/store' +import { router } from './router' +import { store } from '@/store/store' import ApiService from "@/services/api_service"; import VueI18n from 'vue-i18n' import messages from "@/i18n/messages"; import VeeValidate from 'vee-validate'; import Themes from './assets/themes'; -import {Scroll} from 'vuetify/lib/directives'; +import { Scroll } from 'vuetify/lib/directives'; import CKEditor from '@ckeditor/ckeditor5-vue' import VueCookies from 'vue-cookies' import VueSSE from 'vue-sse'; @@ -24,7 +24,7 @@ import layout_config from "./assets/layout_config"; const CSL = { install(Vue) { Vue.prototype.UI = layout_config - this.UI = () => {} + this.UI = () => { } } } Vue.use(CSL); @@ -44,11 +44,11 @@ Vue.use(Vuetify, { }); Vue.use(Vuetify, { - iconfont: 'md' + iconfont: 'md' }); Vue.use(Vuetify, { - iconfont: 'mdi' + iconfont: 'mdi' }); Vue.use(CKEditor); @@ -62,8 +62,19 @@ const vuetify = new Vuetify({ Vue.use(VueI18n); +let bash_locale = "$VUE_APP_TARANIS_NG_LOCALE"; +let locale; + +if (bash_locale) { + locale = bash_locale; +} else if (typeof (process.env.VUE_APP_TARANIS_NG_LOCALE) !== "undefined") { + locale = process.env.VUE_APP_TARANIS_NG_LOCALE; +} else { + locale = navigator.language.split('-')[0]; +} + const i18n = new VueI18n({ - locale: ((typeof(process.env.VUE_APP_TARANIS_NG_LOCALE) == "undefined") ? 
"$VUE_APP_TARANIS_NG_LOCALE" : process.env.VUE_APP_TARANIS_NG_LOCALE), + locale: locale, fallbackLocale: 'en', messages }); @@ -73,7 +84,7 @@ Vue.use(VeeValidate, { i18n, }); -ApiService.init(((typeof(process.env.VUE_APP_TARANIS_NG_CORE_API) == "undefined") ? "$VUE_APP_TARANIS_NG_CORE_API" : process.env.VUE_APP_TARANIS_NG_CORE_API)); +ApiService.init(((typeof (process.env.VUE_APP_TARANIS_NG_CORE_API) == "undefined") ? "$VUE_APP_TARANIS_NG_CORE_API" : process.env.VUE_APP_TARANIS_NG_CORE_API)); if (localStorage.ACCESS_TOKEN) { store.dispatch('setToken', (localStorage.ACCESS_TOKEN)).then() @@ -92,4 +103,4 @@ export const vm = new Vue({ this.$store.commit('setVerticalView', val); } -}).$mount('#app'); \ No newline at end of file +}).$mount('#app'); From c1d42af6656eac6de1a6921175feb2ce273b8ac7 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 5 Feb 2024 14:29:10 +0100 Subject: [PATCH 005/146] English inconsistencies --- src/gui/src/i18n/en/messages.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/gui/src/i18n/en/messages.js b/src/gui/src/i18n/en/messages.js index dc1b77334..4bfdb81ea 100644 --- a/src/gui/src/i18n/en/messages.js +++ b/src/gui/src/i18n/en/messages.js @@ -321,7 +321,7 @@ const messages_en = { product_type: { add_new: "Add new product type", - edit: "Edit new product type", + edit: "Edit product type", node: "Presenters Node", presenter: "Presenter", add: "Add", @@ -361,7 +361,7 @@ const messages_en = { presenters_node: { add_new: "Add new presenters node", - edit: "Edit new presenters node", + edit: "Edit presenters node", add: "Add", save: "Save", add_btn: "Add New", @@ -381,7 +381,7 @@ const messages_en = { publishers_node: { add_new: "Add new publishers node", - edit: "Add new publishers node", + edit: "Edit publishers node", add: "Add", save: "Save", add_btn: "Add New", @@ -401,7 +401,7 @@ const messages_en = { bots_node: { add_new: "Add new bots node", - edit: "Add new bots node", + edit: "Edit bots node", add: "Add", save: "Save", add_btn: "Add New", From 003c9a453614736901772a9f3efb327c6a46e688 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 5 Feb 2024 16:58:55 +0100 Subject: [PATCH 006/146] Czech translation --- src/gui/src/i18n/cs/messages.js | 1057 +++++++++++++++++++++++++++++++ 1 file changed, 1057 insertions(+) create mode 100644 src/gui/src/i18n/cs/messages.js diff --git a/src/gui/src/i18n/cs/messages.js b/src/gui/src/i18n/cs/messages.js new file mode 100644 index 000000000..a78ce7ecf --- /dev/null +++ b/src/gui/src/i18n/cs/messages.js @@ -0,0 +1,1057 @@ +const messages_cs = { + + validations: { + messages: { + _default: "Vyplňte prosím povinné pole" + }, + + custom: { + username: { + required: "Vyplňte prosím své uživatelské jméno" + }, + password: { + required: "Heslo je povinné" + }, + url: { + required: "URL je povinné" + }, + key: { + required: "API klíč je povinný" + }, + name: { + required: "Jméno je povinné" + }, + parameter: { + required: "Povinné pole" + }, + password_check: { + required: "Heslo je povinné", + confirmed: "Hesla se neshodují" + }, + collector_node: { + required: "Vyberte instanci kolektoru" + }, + file: { + required: "Soubor je povinný" + } + }, + }, + + login: { + title: "Prosím přihlaste se", + username: "Uživatelské jméno", + password: "Heslo", + submit: "Přihlásit se", + error: "Uživatelské jméno nebo heslo je nesprávné" + }, + + user_menu: { + settings: "Nastavení uživatele", + logout: "Odhlásit", + dark_theme: "Tmavý motiv", + }, + + main_menu: { + enter: "Vložit", + assess: "Vyhodnotit", + 
analyze: "Analyzovat", + publish: "Zveřejnit", + config: "Nastavení", + dashboard: "Dashboard", + my_assets: "Aktiva" + }, + + nav_menu: { + enter: "Vytvořit novinku", + newsitems: "Novinky", + products: "Reporty", + publications: "Publikace", + recent: "Recent", + popular: "Popular", + favourites: "Oblíbené", + configuration: "Nastavení", + collectors_nodes: "Instance kolektorů", + presenters_nodes: "Instance prezenterů", + publishers_nodes: "Instance vydavatelů", + bots_nodes: "Instance robotů", + osint_sources: "Zdroje OSINT", + osint_source_groups: "Skupiny zdrojů OSINT", + publisher_presets: "Publikační kanály", + bot_presets: "Roboti", + collectors: "Kolektory", + report_items: "Analýzy", + attributes: "Atributy", + report_types: "Typy analýz", + product_types: "Typy publikací", + roles: "Role", + acls: "ACL", + users: "Uživatelé", + organizations: "Organizace", + word_lists: "Seznamy slov", + asset_groups: "Skupiny aktiv", + notification_templates: "Šablony oznámení", + remote_access: "Vzdálený přístup", + remote_nodes: "Vzdálené instance", + local: "Lokální" + }, + + notification: { + close: "Zavřít" + }, + + enter: { + create: "Vytvořit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Novinku se nepodařilo vytvořit.", + title: "Název", + review: "Souhrn", + source: "Zdroj", + link: "Odkaz", + successful: "Novinka byla vytvořena", + }, + + card_item: { + title: "Název", + created: "Vytvořeno", + collected: "Indexováno", + published: "Publikováno", + source: "Zdroj", + status: "Stav", + node: "Instance", + description: "Popis", + in_analyze: "Probíhá analýza", + url: "URL", + name: "Name", + username: "Uživatelské jméno", + aggregated_items: "Sloučené novinky", + }, + + organization: { + add_new: "Přidat novou organizaci", + edit: "Upravit organizaci", + add: "Přidat", + add_btn: "Přidat", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Organizaci se nepodařilo vytvořit.", + name: "Název", + description: "Popis", + street: "Ulice", + city: "Město", + zip: "PSČ", + country: "Země", + successful: "Nová organizace byla vytvořena", + successful_edit: "Organizace byla upravena", + removed: "Organizace byla smazána", + removed_error: "Organizace je používána a nelze ji smazat", + total_count: "Počet organizací: " + }, + + user: { + add_new: "Přidat nového uživatele", + edit: "Upravit uživatele", + add: "Přidat", + add_btn: "Přidat", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Uživatele se nepodařilo vytvořit.", + username: "Uživatelské jméno", + name: "Jméno", + successful: "Uživatel byl vytvořen", + successful_edit: "Uživatel byl upraven", + removed: "Uživatel byl smazán", + removed_error: "Uživatel je používán a nelze jej smazat", + organizations: "Organizace", + roles: "Role", + permissions: "Práva", + total_count: "Počet uživatelů: ", + password: "Heslo", + password_check: "Zadejte heslo znovu", + }, + + collectors_node: { + add_new: "Přidat novou instanci kolektorů", + edit: "Upravit instanci kolektorů", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "K tomuto uzlu kolektoru se nepodařilo připojit.", + name: "Název", + description: "Popis", + url: "URL", + key: "Klíč", + successful: "Nový kolektor byl přidán", + successful_edit: "Instance kolektoru byl upraven", + removed: "Instance kolektoru byla smazána", + removed_error: "Instance kolektoru je používám a nelze 
jej smazat", + total_count: "Počet uzlů kolektorů: " + }, + + osint_source: { + add_new: "Přidat nový zdroj OSINT", + edit: "Editovat zdroj OSINT", + node: "Instance kolektoru", + collector: "Kolektor", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Tento zdroj OSINT se nepodařilo uložit", + name: "Název", + description: "Popis", + successful: "Nový zdroj OSINT byl přidán", + successful_edit: "Zdroj OSINT byl upraven", + removed: "Zdroj OSINT byl úspěšně smazán", + removed_error: "Zdroj OSINT je používán a nelze jej smazat", + word_lists: "Seznamy slov", + type: "Typ", + total_count: "Počet zdrojů OSINT: ", + osint_source_groups: "Skupiny zdrojů OSINT", + tooltip: { + group_items: "Seskupit novinky do souhrnu", + ungroup_items: "Oddělit zprávy ze souhrnu", + analyze_items: "Vytvořit analýzu z novinek", + read_items: "Označit novinky jako přečtené", + important_items: "Označit novinky jako důležité", + like_items: "Označit jako To se mi líbí", + dislike_items: "Označit jako To se mi nelíbí", + delete_items: "Smazat novinky", + select_all: "Vybrat vše", + unselect_all: "Odznačit vše" + }, + notification: { + success: "Kolektory byly přidány" + }, + dialog_import: "Importovat zdroje OSINT", + import: "Importovat", + export: "Exportovat" + }, + + osint_source_group: { + add_new: "Přidat novou skupinu zdrojů OSINT", + edit: "Upravit skupinu zdrojů OSINT", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Tuto skupinu zdrojů OSINT se nepodařilo uložit.", + name: "Name", + description: "Popis", + successful: "Nová skupina zdrojů OSINT byla přidána", + successful_edit: "Skupina zdrojů OSINT byla upravena", + removed: "Skupina zdrojů OSINT byla smazána", + removed_error: "Skupina zdrojů OSINT je používána a nelze ji odstranit", + title: "Název", + osint_sources: "Zdroje OSINT", + total_count: "Počet skupin zdrojů OSINT: ", + default_group: "Nezařazeno", + default_group_description: "Skupina pro nezařazené zdroje OSINT" + }, + + role: { + add_new: "Přidat novou roli", + edit: "Upravit roli", + add: "Přidat", + add_btn: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Tuto roli se nepodařilo uložit.", + name: "Název", + description: "Popis", + successful: "Nová role byla přidána", + successful_edit: "Role byla upravena", + removed: "Role byla smazána", + removed_error: "Role je používána a nelze ji smazat", + title: "Název", + permissions: "Práva", + total_count: "Počet rolí: " + }, + + acl: { + full_title: "Seznamy pro řízení přístupu (ACL)", + add_new: "Přidat nový ACL", + edit: "Upravit ACL", + add: "Přidat", + add_btn: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Toto ACL se nepodařilo uložit.", + name: "Název", + description: "Popis", + item_type: "Typ položky", + item_id: "ID položky", + everyone: "Everyone", + see: "Prohlížet", + access: "Access", + modify: "Upravit", + successful: "Nový ACL bylo přidáno", + successful_edit: "ACL bylo přidáno", + removed: "ACL bylo smazán", + removed_error: "ACL je používáno a nelze jej smazat", + roles: "Role", + users: "Uživatelé", + total_count: "Počet ACL: " + }, + + publisher_preset: { + add_new: "Přidat nové přednastavení vydavatele", + edit: "Upravit přednastavení vydavatele", + node: "Instance vydavatelů", + publisher: "Vydavatel", + add: "Přidat", + save: 
"Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit přednastavení.", + name: "Název", + description: "Popis", + use_for_notifications: "Použit pro všechna globální upozornění", + successful: "Nové přednastavení bylo přidáno", + successful_edit: "Přednastavení bylo upraveno", + removed: "Přednastavení bylo smazáno", + removed_error: "Přednastavení vydavatele se používá a nelze jej smazat", + total_count: "Počet přednastavení vydavatelů: " + }, + + product_type: { + add_new: "Přidat nový typ publikace", + edit: "Upravit typ publikace", + node: "Instance prezenterů", + presenter: "Prezenter", + add: "Přidat", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit typ publikace.", + name: "Název", + description: "Popis", + successful: "Nový typ publikace byla přidána", + successful_edit: "Typ publikace byl upraven", + removed: "Typ publikace byl smazán", + removed_error: "Typ publikace se používá a nelze jej smazat", + total_count: "Počet typů publikací: ", + help: "Popis parametrů šablony", + close: "Zavřít", + choose_report_type: "Vyberte typ analýzy pro zobrazení popisu parametrů", + report_items: "Analýzy", + report_items_object: { + name: "Název", + name_prefix: "Předpona názvu", + type: "Typ analýzy" + }, + news_items: "Novinky", + news_items_object: { + title: "Název", + review: "Souhrn", + content: "Obsah", + author: "Autor", + source: "Zdroj", + link: "Odkaz", + collected: "Datum indexace", + published: "Datum zveřejnění", + } + }, + + presenters_node: { + add_new: "Přidat novou instanci prezenterů", + edit: "Upravit instanci prezenterů", + add: "Přidat", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se připojit k této instanci prezenterů.", + name: "Název", + description: "Popis", + url: "URL", + key: "Heslo", + successful: "Nová instance prezenterů byla přidána", + successful_edit: "Instance prezenterů byla upravena", + removed: "Instance prezenterů byla smazána", + removed_error: "Instance prezenterů je používána a nelze ji smazat", + total_count: "Počet instancí prezenterů: " + }, + + publishers_node: { + add_new: "Přidat novou instanci vydavatelů", + edit: "Upravit instanci vydavatelů", + add: "Přidat", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se připojit k této instanci vydavatelů.", + name: "Name", + description: "Popis", + url: "URL", + key: "Klíč", + successful: "Nová instance vydavatelů byla přidána", + successful_edit: "Instance vydavatelů byla upravena", + removed: "Instance vydavatelů byla smazána", + removed_error: "Instance vydavatelů je používána a nelze ji smazat", + total_count: "Počet instancí vydavatelů: " + }, + + bots_node: { + add_new: "Přidat instanci robotů", + edit: "Upravit instanci robotů", + add: "Přidat", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se připojit k instanci robotů.", + name: "Název", + description: "Popis", + url: "URL", + key: "Klíč", + successful: "Nová instance robotů byla přidána", + successful_edit: "Instance robotů byla upravena", + removed: "Instance robotů byla smazána", + removed_error: "Instance robotů se používá a nelze ji smazat", + total_count: "Počet instancí 
robotů: " + }, + + bot_preset: { + add_new: "Přidat nové přednastavení robota", + edit: "Upravit přednastavení robota", + node: "Instance robotů", + bot: "Robot", + add: "Přidat", + save: "Uložit", + add_btn: "Přidat", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit tohoto robota.", + name: "Název", + description: "Popis", + successful: "Nové přednastavení robota bylo přidáno", + successful_edit: "Přednastavení robota bylo upraveno", + removed: "Přednastavení robota bylo smazáno", + removed_error: "Přednastavení robota je používáno a nelze jej odstranit", + total_count: "Počet přednastavení robotů: " + }, + + attribute: { + add: "Přidat", + add_btn: "Přidat", + add_new: "Přidat nový atribut", + edit: "Upravit atribut", + add_attachment: "Přidat přílohu", + add_value: "Přidat hodnotu", + select_attachment: "Vybrat přílohu", + select_file: "Vybrat soubor", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit tento atribut", + name: "Název", + description: "Popis", + type: "Typ", + validator: "Validátor", + validator_parameter: "Parametr validátoru", + default_value: "Původní hodnota", + successful: "Nový atribut byl přidán", + successful_edit: "Atribut byl upraven", + removed: "Atribut byl smazán", + removed_error: "Atribut je používán a nemohl být smazán", + value: "Hodnota", + value_text: "Popis hodnoty", + tlp_clear: "TLP:CLEAR", + tlp_green: "TLP:GREEN", + tlp_amber: "TLP:AMBER", + tlp_amber_strict: "TLP:AMBER+STRICT", + tlp_red: "TLP:RED", + attribute_parameters: "Parametry atributu", + attribute_constants: "Konstanty atributu", + import_from_csv: "Importovat z CSV", + new_constant: "Nová konstanta", + attribute: "atribut", + attributes: "Atributy", + new_attribute: "Nový atribut", + min_occurrence: "Minimální výskyt", + max_occurrence: "Maximální výskyt", + total_count: "Počet atributů: ", + import: "Importovat", + load_csv_file: "Načíst CSV soubor", + file_has_header: "Soubor má hlavičku", + search: "Hledat", + reload_cpe: "Znovu načíst CPE slovník", + reload_cve: "Znovu načíst CVE slovník", + delete_existing: "Smazat všechny existující hodnoty", + select_enum: "Vybrat hodnotu konstanty", + reloading: "Opakované načítání slovníku...", + status: "Stav", + select_date: "Vybrat datum", + select_time: "Vybrat čas", + select_datetime: "Vybrat datum a čas", + done: "Hotovo" + }, + + cvss_calculator: { + title: "CVSS Calculator 3.1", + base_score: "Base Score", + attack_vector: "Attack Vector (AV)", + attack_complexity: "Attack Complexity (AC)", + privileges_required: "Privileges Required (PR)", + user_interaction: "User Interaction (UI)", + scope: "Scope (S)", + confidentiality: "Confidentiality (C)", + integrity: "Integrity (I)", + availability: "Availability (A)", + + temporal_score: "Temporal Score", + exploitability_code_maturity: "Exploitability Code Maturity (E)", + remediation_level: "Remediation Level (RL)", + report_confidence: "Report Confidence (RC)", + + environmental_score: "Environmental Score", + confidentiality_requirement: "Confidentiality Requirement (CR)", + integrity_requirement: "Integrity Requirement (IR)", + availability_requirement: "Availability Requirement (AR)", + modified_attack_vector: "Modified Attack Vector (MAV)", + modified_attack_complexity: "Modified Attack Complexity (MAC)", + modified_privileges_required: "Modified Privileges Required (MPR)", + modified_user_interaction: "Modified User Interaction (MUI)", + 
modified_scope: "Modified Scope (MS)", + modified_confidentiality: "Modified Confidentiality (MC)", + modified_integrity: "Modified Integrity (MI)", + modified_availability: "Modified Availability (MA)", + + network: "Network", + adjacent: "Adjacent", + adjacent_network: "Adjacent Network", + local: "Local", + physical: "Physical", + required: "Required", + unchanged: "Unchanged", + changed: "Changed", + + not_defined: "Not Defined", + none: "None", + low: "Low", + medium: "Medium", + high: "High", + critical: "Critical", + + unproven: "Unproven", + proof_of_concept: "Proof-of-Concept", + functional: "Functional", + + official_fix: "Official Fix", + temporary_fix: "Temporary Fix", + workaround: "Workaround", + unavailable: "Unavailable", + + unknown: "Unknown", + reasonable: "Reasonable", + confirmed: "Confirmed", + + validator: "Neplatný nebo nekompletní textový řetězec" + + }, + + cvss_calculator_tooltip: { + baseMetricGroup_Legend: "The Base Metric group represents the intrinsic characteristics of a vulnerability that are constant over time and across user environments. Determine the vulnerable component and score Attack Vector, Attack Complexity, Privileges Required and User Interaction relative to this.", + AV_Heading: "This metric reflects the context by which vulnerability exploitation is possible. The Base Score increases the more remote (logically, and physically) an attacker can be in order to exploit the vulnerable component.", + AV_N_Label: "The vulnerable component is bound to the network stack and the set of possible attackers extends beyond the other options listed, up to and including the entire Internet. Such a vulnerability is often termed 'remotely exploitable' and can be thought of as an attack being exploitable at the protocol level one or more network hops away (e.g., across one or more routers).", + AV_A_Label: "The vulnerable component is bound to the network stack, but the attack is limited at the protocol level to a logically adjacent topology. This can mean an attack must be launched from the same shared physical (e.g., Bluetooth or IEEE 802.11) or logical (e.g., local IP subnet) network, or from within a secure or otherwise limited administrative domain (e.g., MPLS, secure VPN to an administrative network zone).", + AV_L_Label: "The vulnerable component is not bound to the network stack and the attacker’s path is via read/write/execute capabilities. Either: the attacker exploits the vulnerability by accessing the target system locally (e.g., keyboard, console), or remotely (e.g., SSH); or the attacker relies on User Interaction by another person to perform actions required to exploit the vulnerability (e.g., tricking a legitimate user into opening a malicious document).", + AV_P_Label: "The attack requires the attacker to physically touch or manipulate the vulnerable component. Physical interaction may be brief or persistent.", + AC_Heading: "This metric describes the conditions beyond the attacker’s control that must exist in order to exploit the vulnerability. Such conditions may require the collection of more information about the target or computational exceptions. The assessment of this metric excludes any requirements for user interaction in order to exploit the vulnerability. If a specific configuration is required for an attack to succeed, the Base metrics should be scored assuming the vulnerable component is in that configuration.", + AC_L_Label: "Specialized access conditions or extenuating circumstances do not exist. 
An attacker can expect repeatable success against the vulnerable component.", + AC_H_Label: "A successful attack depends on conditions beyond the attacker's control. That is, a successful attack cannot be accomplished at will, but requires the attacker to invest in some measurable amount of effort in preparation or execution against the vulnerable component before a successful attack can be expected. For example, a successful attack may require an attacker to: gather knowledge about the environment in which the vulnerable target/component exists; prepare the target environment to improve exploit reliability; or inject themselves into the logical network path between the target and the resource requested by the victim in order to read and/or modify network communications (e.g., a man in the middle attack).", + PR_Heading: "This metric describes the level of privileges an attacker must possess before successfully exploiting the vulnerability.", + PR_N_Label: "The attacker is unauthorized prior to attack, and therefore does not require any access to settings or files to carry out an attack.", + PR_L_Label: "The attacker is authorized with (i.e., requires) privileges that provide basic user capabilities that could normally affect only settings and files owned by a user. Alternatively, an attacker with Low privileges may have the ability to cause an impact only to non-sensitive resources.", + PR_H_Label: "The attacker is authorized with (i.e., requires) privileges that provide significant (e.g., administrative) control over the vulnerable component that could affect component-wide settings and files.", + UI_Heading: "This metric captures the requirement for a user, other than the attacker, to participate in the successful compromise the vulnerable component. This metric determines whether the vulnerability can be exploited solely at the will of the attacker, or whether a separate user (or user-initiated process) must participate in some manner.", + UI_N_Label: "The vulnerable system can be exploited without any interaction from any user.", + UI_R_Label: "Successful exploitation of this vulnerability requires a user to take some action before the vulnerability can be exploited.", + S_Heading: "Does a successful attack impact a component other than the vulnerable component? If so, the Base Score increases and the Confidentiality, Integrity and Authentication metrics should be scored relative to the impacted component.", + S_U_Label: "An exploited vulnerability can only affect resources managed by the same security authority. In this case, the vulnerable component and the impacted component are either the same, or both are managed by the same security authority.", + S_C_Label: "An exploited vulnerability can affect resources beyond the security scope managed by the security authority of the vulnerable component. In this case, the vulnerable component and the impacted component are different and managed by different security authorities.", + C_Heading: "This metric measures the impact to the confidentiality of the information resources managed by a software component due to a successfully exploited vulnerability. Confidentiality refers to limiting information access and disclosure to only authorized users, as well as preventing access by, or disclosure to, unauthorized ones.", + C_N_Label: "There is no loss of confidentiality within the impacted component.", + C_L_Label: "There is some loss of confidentiality. 
Access to some restricted information is obtained, but the attacker does not have control over what information is obtained, or the amount or kind of loss is limited. The information disclosure does not cause a direct, serious loss to the impacted component.", + C_H_Label: "There is total loss of confidentiality, resulting in all resources within the impacted component being divulged to the attacker. Alternatively, access to only some restricted information is obtained, but the disclosed information presents a direct, serious impact.", + I_Heading: "This metric measures the impact to integrity of a successfully exploited vulnerability. Integrity refers to the trustworthiness and veracity of information.", + I_N_Label: "There is no loss of integrity within the impacted component.", + I_L_Label: "Modification of data is possible, but the attacker does not have control over the consequence of a modification, or the amount of modification is limited. The data modification does not have a direct, serious impact on the impacted component.", + I_H_Label: "There is a total loss of integrity, or a complete loss of protection. For example, the attacker is able to modify any/all files protected by the impacted component. Alternatively, only some files can be modified, but malicious modification would present a direct, serious consequence to the impacted component.", + A_Heading: "This metric measures the impact to the availability of the impacted component resulting from a successfully exploited vulnerability. It refers to the loss of availability of the impacted component itself, such as a networked service (e.g., web, database, email). Since availability refers to the accessibility of information resources, attacks that consume network bandwidth, processor cycles, or disk space all impact the availability of an impacted component.", + A_N_Label: "There is no impact to availability within the impacted component.", + A_L_Label: "Performance is reduced or there are interruptions in resource availability. Even if repeated exploitation of the vulnerability is possible, the attacker does not have the ability to completely deny service to legitimate users. The resources in the impacted component are either partially available all of the time, or fully available only some of the time, but overall there is no direct, serious consequence to the impacted component.", + A_H_Label: "There is total loss of availability, resulting in the attacker being able to fully deny access to resources in the impacted component; this loss is either sustained (while the attacker continues to deliver the attack) or persistent (the condition persists even after the attack has completed). 
Alternatively, the attacker has the ability to deny some availability, but the loss of availability presents a direct, serious consequence to the impacted component (e.g., the attacker cannot disrupt existing connections, but can prevent new connections; the attacker can repeatedly exploit a vulnerability that, in each instance of a successful attack, leaks a only small amount of memory, but after repeated exploitation causes a service to become completely unavailable).", + temporalMetricGroup_Legend: "The Temporal metrics measure the current state of exploit techniques or code availability, the existence of any patches or workarounds, or the confidence that one has in the description of a vulnerability.", + E_Heading: "This metric measures the likelihood of the vulnerability being attacked, and is typically based on the current state of exploit techniques, exploit code availability, or active, 'in-the-wild' exploitation.", + E_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Temporal Score, i.e., it has the same effect on scoring as assigning High.", + E_U_Label: "No exploit code is available, or an exploit is theoretical.", + E_P_Label: "Proof-of-concept exploit code is available, or an attack demonstration is not practical for most systems. The code or technique is not functional in all situations and may require substantial modification by a skilled attacker.", + E_F_Label: "Functional exploit code is available. The code works in most situations where the vulnerability exists.", + E_H_Label: "Functional autonomous code exists, or no exploit is required (manual trigger) and details are widely available. Exploit code works in every situation, or is actively being delivered via an autonomous agent (such as a worm or virus). Network-connected systems are likely to encounter scanning or exploitation attempts. Exploit development has reached the level of reliable, widely-available, easy-to-use automated tools.", + RL_Heading: "The Remediation Level of a vulnerability is an important factor for prioritization. The typical vulnerability is unpatched when initially published. Workarounds or hotfixes may offer interim remediation until an official patch or upgrade is issued. Each of these respective stages adjusts the temporal score downwards, reflecting the decreasing urgency as remediation becomes final.", + RL_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Temporal Score, i.e., it has the same effect on scoring as assigning Unavailable.", + RL_O_Label: "A complete vendor solution is available. Either the vendor has issued an official patch, or an upgrade is available.", + RL_T_Label: "There is an official but temporary fix available. This includes instances where the vendor issues a temporary hotfix, tool, or workaround.", + RL_W_Label: "There is an unofficial, non-vendor solution available. In some cases, users of the affected technology will create a patch of their own or provide steps to work around or otherwise mitigate the vulnerability.", + RL_U_Label: "There is either no solution available or it is impossible to apply.", + RC_Heading: "This metric measures the degree of confidence in the existence of the vulnerability and the credibility of the known technical details. Sometimes only the existence of vulnerabilities are publicized, but without specific details. 
For example, an impact may be recognized as undesirable, but the root cause may not be known. The vulnerability may later be corroborated by research which suggests where the vulnerability may lie, though the research may not be certain. Finally, a vulnerability may be confirmed through acknowledgement by the author or vendor of the affected technology. The urgency of a vulnerability is higher when a vulnerability is known to exist with certainty. This metric also suggests the level of technical knowledge available to would-be attackers.", + RC_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Temporal Score, i.e., it has the same effect on scoring as assigning Confirmed.", + RC_U_Label: "There are reports of impacts that indicate a vulnerability is present. The reports indicate that the cause of the vulnerability is unknown, or reports may differ on the cause or impacts of the vulnerability. Reporters are uncertain of the true nature of the vulnerability, and there is little confidence in the validity of the reports or whether a static Base score can be applied given the differences described. An example is a bug report which notes that an intermittent but non-reproducible crash occurs, with evidence of memory corruption suggesting that denial of service, or possible more serious impacts, may result.", + RC_R_Label: "Significant details are published, but researchers either do not have full confidence in the root cause, or do not have access to source code to fully confirm all of the interactions that may lead to the result. Reasonable confidence exists, however, that the bug is reproducible and at least one impact is able to be verified (Proof-of-concept exploits may provide this). An example is a detailed write-up of research into a vulnerability with an explanation (possibly obfuscated or 'left as an exercise to the reader') that gives assurances on how to reproduce the results.", + RC_C_Label: "Detailed reports exist, or functional reproduction is possible (functional exploits may provide this). Source code is available to independently verify the assertions of the research, or the author or vendor of the affected code has confirmed the presence of the vulnerability.", + environmentalMetricGroup_Legend: "These metrics enable the analyst to customize the CVSS score depending on the importance of the affected IT asset to a user’s organization, measured in terms of complementary/alternative security controls in place, Confidentiality, Integrity, and Availability. The metrics are the modified equivalent of base metrics and are assigned metric values based on the component placement in organization infrastructure.", + CR_Heading: "These metrics enable the analyst to customize the CVSS score depending on the importance of the Confidentiality of the affected IT asset to a user’s organization, relative to other impacts. 
This metric modifies the environmental score by reweighting the Modified Confidentiality impact metric versus the other modified impacts.", + CR_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Environmental Score, i.e., it has the same effect on scoring as assigning Medium.", + CR_L_Label: "Loss of Confidentiality is likely to have only a limited adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + CR_M_Label: "Assigning this value to the metric will not influence the score.", + CR_H_Label: "Loss of Confidentiality is likely to have a catastrophic adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + IR_Heading: "These metrics enable the analyst to customize the CVSS score depending on the importance of the Integrity of the affected IT asset to a user’s organization, relative to other impacts. This metric modifies the environmental score by reweighting the Modified Integrity impact metric versus the other modified impacts.", + IR_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Environmental Score, i.e., it has the same effect on scoring as assigning Medium.", + IR_L_Label: "Loss of Integrity is likely to have only a limited adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + IR_M_Label: "Assigning this value to the metric will not influence the score.", + IR_H_Label: "Loss of Integrity is likely to have a catastrophic adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + AR_Heading: "These metrics enable the analyst to customize the CVSS score depending on the importance of the Availability of the affected IT asset to a user’s organization, relative to other impacts. This metric modifies the environmental score by reweighting the Modified Availability impact metric versus the other modified impacts.", + AR_X_Label: "Assigning this value indicates there is insufficient information to choose one of the other values, and has no impact on the overall Environmental Score, i.e., it has the same effect on scoring as assigning Medium.", + AR_L_Label: "Loss of Availability is likely to have only a limited adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + AR_M_Label: "Assigning this value to the metric will not influence the score.", + AR_H_Label: "Loss of Availability is likely to have a catastrophic adverse effect on the organization or individuals associated with the organization (e.g., employees, customers).", + MAV_Heading: "This metric reflects the context by which vulnerability exploitation is possible. The Environmental Score increases the more remote (logically, and physically) an attacker can be in order to exploit the vulnerable component.", + MAV_X_Label: "The value assigned to the corresponding Base metric is used.", + MAV_N_Label: "The vulnerable component is bound to the network stack and the set of possible attackers extends beyond the other options listed, up to and including the entire Internet. 
Such a vulnerability is often termed 'remotely exploitable' and can be thought of as an attack being exploitable at the protocol level one or more network hops away.", + MAV_A_Label: "The vulnerable component is bound to the network stack, but the attack is limited at the protocol level to a logically adjacent topology. This can mean an attack must be launched from the same shared physical (e.g., Bluetooth or IEEE 802.11) or logical (e.g., local IP subnet) network, or from within a secure or otherwise limited administrative domain (e.g., MPLS, secure VPN).", + MAV_L_Label: "The vulnerable component is not bound to the network stack and the attacker’s path is via read/write/execute capabilities. Either: the attacker exploits the vulnerability by accessing the target system locally (e.g., keyboard, console), or remotely (e.g., SSH); or the attacker relies on User Interaction by another person to perform actions required to exploit the vulnerability (e.g., tricking a legitimate user into opening a malicious document).", + MAV_P_Label: "The attack requires the attacker to physically touch or manipulate the vulnerable component. Physical interaction may be brief or persistent.", + MAC_Heading: "This metric describes the conditions beyond the attacker’s control that must exist in order to exploit the vulnerability. Such conditions may require the collection of more information about the target or computational exceptions. The assessment of this metric excludes any requirements for user interaction in order to exploit the vulnerability. If a specific configuration is required for an attack to succeed, the Base metrics should be scored assuming the vulnerable component is in that configuration.", + MAC_X_Label: "The value assigned to the corresponding Base metric is used.", + MAC_L_Label: "Specialized access conditions or extenuating circumstances do not exist. An attacker can expect repeatable success against the vulnerable component.", + MAC_H_Label: "A successful attack depends on conditions beyond the attacker's control. That is, a successful attack cannot be accomplished at will, but requires the attacker to invest in some measurable amount of effort in preparation or execution against the vulnerable component before a successful attack can be expected. For example, a successful attack may require an attacker to: gather knowledge about the environment in which the vulnerable target/component exists; prepare the target environment to improve exploit reliability; or inject themselves into the logical network path between the target and the resource requested by the victim in order to read and/or modify network communications (e.g., a man in the middle attack).", + MPR_Heading: "This metric describes the level of privileges an attacker must possess before successfully exploiting the vulnerability.", + MPR_X_Label: "The value assigned to the corresponding Base metric is used.", + MPR_N_Label: "The attacker is unauthorized prior to attack, and therefore does not require any access to settings or files to carry out an attack.", + MPR_L_Label: "The attacker is authorized with (i.e., requires) privileges that provide basic user capabilities that could normally affect only settings and files owned by a user. 
Alternatively, an attacker with Low privileges may have the ability to cause an impact only to non-sensitive resources.", + MPR_H_Label: "The attacker is authorized with (i.e., requires) privileges that provide significant (e.g., administrative) control over the vulnerable component that could affect component-wide settings and files.", + MUI_Heading: "This metric captures the requirement for a user, other than the attacker, to participate in the successful compromise the vulnerable component. This metric determines whether the vulnerability can be exploited solely at the will of the attacker, or whether a separate user (or user-initiated process) must participate in some manner.", + MUI_X_Label: "The value assigned to the corresponding Base metric is used.", + MUI_N_Label: "The vulnerable system can be exploited without any interaction from any user.", + MUI_R_Label: "Successful exploitation of this vulnerability requires a user to take some action before the vulnerability can be exploited.", + MS_Heading: "Does a successful attack impact a component other than the vulnerable component? If so, the Base Score increases and the Confidentiality, Integrity and Authentication metrics should be scored relative to the impacted component.", + MS_X_Label: "The value assigned to the corresponding Base metric is used.", + MS_U_Label: "An exploited vulnerability can only affect resources managed by the same security authority. In this case, the vulnerable component and the impacted component are either the same, or both are managed by the same security authority.", + MS_C_Label: "An exploited vulnerability can affect resources beyond the security scope managed by the security authority of the vulnerable component. In this case, the vulnerable component and the impacted component are different and managed by different security authorities.", + MC_Heading: "This metric measures the impact to the confidentiality of the information resources managed by a software component due to a successfully exploited vulnerability. Confidentiality refers to limiting information access and disclosure to only authorized users, as well as preventing access by, or disclosure to, unauthorized ones.", + MC_X_Label: "The value assigned to the corresponding Base metric is used.", + MC_N_Label: "There is no loss of confidentiality within the impacted component.", + MC_L_Label: "There is some loss of confidentiality. Access to some restricted information is obtained, but the attacker does not have control over what information is obtained, or the amount or kind of loss is limited. The information disclosure does not cause a direct, serious loss to the impacted component.", + MC_H_Label: "There is total loss of confidentiality, resulting in all resources within the impacted component being divulged to the attacker. Alternatively, access to only some restricted information is obtained, but the disclosed information presents a direct, serious impact.", + MI_Heading: "This metric measures the impact to integrity of a successfully exploited vulnerability. Integrity refers to the trustworthiness and veracity of information.", + MI_X_Label: "The value assigned to the corresponding Base metric is used.", + MI_N_Label: "There is no loss of integrity within the impacted component.", + MI_L_Label: "Modification of data is possible, but the attacker does not have control over the consequence of a modification, or the amount of modification is limited. 
The data modification does not have a direct, serious impact on the impacted component.", + MI_H_Label: "There is a total loss of integrity, or a complete loss of protection. For example, the attacker is able to modify any/all files protected by the impacted component. Alternatively, only some files can be modified, but malicious modification would present a direct, serious consequence to the impacted component.", + MA_Heading: "This metric measures the impact to the availability of the impacted component resulting from a successfully exploited vulnerability. It refers to the loss of availability of the impacted component itself, such as a networked service (e.g., web, database, email). Since availability refers to the accessibility of information resources, attacks that consume network bandwidth, processor cycles, or disk space all impact the availability of an impacted component.", + MA_X_Label: "The value assigned to the corresponding Base metric is used.", + MA_N_Label: "There is no impact to availability within the impacted component.", + MA_L_Label: "Performance is reduced or there are interruptions in resource availability. Even if repeated exploitation of the vulnerability is possible, the attacker does not have the ability to completely deny service to legitimate users. The resources in the impacted component are either partially available all of the time, or fully available only some of the time, but overall there is no direct, serious consequence to the impacted component.", + MA_H_Label: "There is total loss of availability, resulting in the attacker being able to fully deny access to resources in the impacted component; this loss is either sustained (while the attacker continues to deliver the attack) or persistent (the condition persists even after the attack has completed). Alternatively, the attacker has the ability to deny some availability, but the loss of availability presents a direct, serious consequence to the impacted component (e.g., the attacker cannot disrupt existing connections, but can prevent new connections; the attacker can repeatedly exploit a vulnerability that, in each instance of a successful attack, leaks only a small amount of memory, but after repeated exploitation causes a service to become completely unavailable)."
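Each of the Modified (environmental) metrics described above falls back to its Base counterpart when it is left as Not Defined ("X"). A minimal sketch of that fallback, not taken from this patch; the metric values are illustrative only:

# Sketch only: how a calculator could resolve the CVSS v3.1 Modified metrics described above.
# "X" (Not Defined) means the value assigned to the corresponding Base metric is used.
base = {"AV": "N", "AC": "L", "PR": "N", "UI": "N", "S": "U", "C": "H", "I": "H", "A": "H"}
modified = {"MAV": "A", "MAC": "X", "MPR": "X", "MUI": "X", "MS": "X", "MC": "X", "MI": "X", "MA": "X"}


def effective(metric: str) -> str:
    """Return the value to score with: the Modified override unless it is 'X'."""
    override = modified.get("M" + metric, "X")
    return base[metric] if override == "X" else override


print(", ".join(f"M{m}:{effective(m)}" for m in base))
# MAV:A, MAC:L, MPR:N, MUI:N, MS:U, MC:H, MI:H, MA:H -- only MAV overrides its Base value here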
+ }, + + report_type: { + add_new: "Přidat nový typ analýzy", + edit: "Upravit typ analýzy", + add_btn: "Přidat", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit tento typ analýzy", + name: "Název", + description: "Popis", + section_title: "Section", + new_group: "Nová skupina atributů", + successful: "Nový typ analýzy byl přidán", + successful_edit: "Typ analýzy byl upraven", + removed_error: "Typ analýzy je používán a nemohl být smazán", + removed: "Typ analýzy byl smazán", + total_count: "Počet typů analýz: " + }, + + report_item: { + add_new: "Nová analýza", + edit: "Upravit analýzu", + read: "Report item preview", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit tuto analýzu", + title: "Název", + title_prefix: "Předpona názvu", + report_type: "Typ analýzy", + successful: "Analýza byla přidána", + successful_edit: "Analýza byla uložena", + removed: "Analýza byla smazána", + removed_error: "Analýza se používá a nelze ji smazat", + select: "Vybrat analýzy", + select_remote: "Vybrat novinky ze vzdálených instancí", + add: "Přidat", + attributes: "Atributy", + import_csv: "Importovat z CSV", + import_from_csv: "Importovat CVE/CPE z CSV", + delete_existing_codes: "Smazat stávající CVE/CPE kódy", + tooltip: { + sort_time: "Seřadit hodnoty od nejnovější", + sort_user: "Zobrazit vlastní hodnoty před ostatními", + cvss_detail: "Zobrazit definice CVSS kalkulačky", + enum_selector: "Zobrazit okno s hledáním hodnot", + delete_value: "Smazat hodnotu tohoto atributu", + add_value: "Přidat novou hodnotu k tomuto atributu", + } + }, + + product: { + add_new: "Nový typ publikace", + add_btn: "Přidat", + edit: "Upravit typ publikace", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit tento typ publikace", + title: "Nadpis", + name: "Název", + description: "Popis", + report_type: "Typ publikace", + successful: "Nový typ publikace byl úspěšně přidán", + successful_edit: "Typ publikace byl uložen", + removed: "Typ publikace byl úspěšně smazán", + removed_error: "Typ publikace je používán a nelze jej smazat", + preview: "Zobrazit náhled publikace", + publish: "Zveřejnit publikaci", + total_count: "Počet typů publikací: " + }, + + analyze: { + sort: "Seřadit podle", + from: "Od", + to: "Do", + add_new: "Přidat", + total_count: "Počet analýz: ", + tooltip: { + filter_completed: "Zobrazit/Skrýt hotové analýzy", + filter_incomplete: "Zobrazit/Skrýt nedokončené analýzy", + range: { + ALL: "Zobrazit všechny analýzy", + TODAY: "Zobrazit dnešní analýzy", + WEEK: "Zobrazit analýzy za poslední týden", + MONTH: "Zobrazit analýzy za poslední měsíc" + }, + sort: { + time: { + ascending: "Seřadit analýzy podle data vytvoření vzestupně", + descending: "Seřadit analýzy podle data vytvoření sestupně" + } + }, + toggle_selection: "Toggle analýzy selection mode", + delete_items: "Smazat analýzy", + publish_items: "Vytvořit z analýz report", + delete_item: "Smazat analýzu", + publish_item: "Vytvořit report z analýzy", + } + }, + + assess: { + source: "Zdroj", + comments: "Komentáře", + collected: "Indexováno", + published: "Publikováno", + author: "Autor", + add_news_item: "Přidat novinku", + select_news_item: "Vybrat novinku", + add: "Přidat", + aggregate_detail: "Detail sloučené novinky", + aggregate_info: "Info", + aggregate_title: "Název", + aggregate_description: "Popis", + attributes: 
"Atributy", + title: "Název", + description: "Popis", + download: "Stáhnout", + total_count: "Počet novinek: ", + selected_count: "Počet vybraných novinek: ", + tooltip: { + filter_read: "Zobrazit/Skrýt nepřečtené novinky", + filter_important: "Zobrazit/Skrýt důležité novinky", + filter_relevant: "Zobrazit/Skrýt relevantní novinky", + filter_in_analyze: "Zobrazit/Skrýt analyzované novinky", + range: { + ALL: "Zobrazit všechny novinky", + TODAY: "Zobrazit dnešní novinky", + WEEK: "Zobrazit novinky za minulý týden", + MONTH: "Zobrazit novinky za minulý měsíc" + }, + sort: { + time: { + ascending: "Seřadit novinky podle data indexace vzestupně", + descending: "Seřadit novinky podle data indexace sestupně" + }, + relevance: { + ascending: "Seřadit novinky podle relevance vzestupně", + descending: "Seřadit novinky podle relevance sestupně" + } + }, + highlight_wordlist: "Zvýraznit slova ze seznamů slov", + toggle_selection: "Mód výběru novinek", + group_items: "Sloučit novinky", + ungroup_items: "Rozdělit novinky", + analyze_items: "Vytvořit z novinek analýzu", + read_items: "Označit novinky jako přečtené", + important_items: "Označit novinky jako důležité", + like_items: "To se mi líbí", + dislike_items: "To se mi nelíbí", + delete_items: "Smazat novinky", + open_source: "Otevřít zdroj novinky v nové záložce", + ungroup_item: "Oddělit novinku ze skupiny", + analyze_item: "Vytvořit z novinky analýzu", + read_item: "Označit jako přečtené", + important_item: "Označit jako důležité", + like_item: "To se mi líbí", + dislike_item: "To se mi nelíbí", + delete_item: "Smazat novinku", + }, + shortcuts: { + enter_filter_mode: "Zapnut mód zkratek 'filtr'. Ukončete klávesou Escape.", + enter_view_mode: "Zapnut mód zkratek 'náhled'. Ukončete klávesou Escape.", + default_mode: "Mód zkratek 'původní'.", + aggregate_no_group: "Nelze otevřít neagregovanou novinku, funguje pouze se skupinou novinek.", + }, + }, + + publish: { + tooltip: { + range: { + ALL: "Zobrazit všechny reporty", + TODAY: "Zobrazit dnešní reporty", + WEEK: "Zobrazit reporty za minulý týden", + MONTH: "Zobrazit reporty za minulý měsíc" + }, + sort: { + time: { + ascending: "Seřadit reporty podle data vytvoření vzestupně", + descending: "Seřadit reporty podle data vytvoření sestupně" + } + }, + delete_item: "Smazat report", + } + }, + + toolbar_filter: { + search: "Hledání", + all: "Vše", + today: "Dnes", + this_week: "Tento týden", + this_month: "Tento měsíc", + custom_filter: "Vlastní filtrování" + }, + + settings: { + user_settings: "Nastavení uživatele", + tab_general: "Obecné", + tab_wordlists: "Seznamy slov", + tab_hotkeys: "Zkratky", + save: "Uložit", + close_item: "Zavřít", + collection_up: "Posunout nahoru", + collection_down: "Posunout dolů", + show_item: "Show", + read_item: "Označit jako přečtené", + important_item: "Označit jako důležité", + like_item: "Označit jako To se mi líbí", + unlike_item: "Označit jako To se mi nelíbí", + delete_item: "Smazat", + spellcheck: "Kontrolovat pravopis", + dark_theme: "Tmavý motiv", + press_key: "Stiskněte klávesu pro ", + cancel_press_key: "Zrušit", + selection: "Výběr", + group: "Seskupit", + ungroup: "Zrušit seskupení", + new_product: "Nový produkt", + aggregate_open: "Otevřít sloučenou novinku" + }, + + word_list: { + add_new: "Přidat nový seznam slov", + edit: "Upravit seznam slov", + add: "Přidat", + add_btn: "Přidat", + save: "Uložit", + cancel: "Zrušit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit tento seznam slov", + name: "Název", + 
description: "Popis", + link: "URL", + use_for_stop_words: "Použít jako seznam stopslov", + successful: "Nový seznam slov byl přidán", + successful_edit: "Seznam slov byl přidán byl upraven", + remove: "Seznam slov byl smazán", + removed_error: "Seznam slov se používá a nelze jej smazat", + value: "Hodnota", + new_word: "Nové slovo", + words: "Slova", + new_category: "Nová kategorie", + total_count: "Počet seznamů slov: ", + file_has_header: "Soubor má hlavičku", + import_from_csv: "Importovat z CSV", + load_csv_file: "Načíst CSV soubor", + download_from_link: "Stáhnout z URL", + import: "Importovat", + close: "Zavřít", + }, + + asset_group: { + add_new: "Přidat novou skupinu aktiv", + edit: "Upravit skupinu aktiv", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se vytvořit skupinu aktiv", + name: "Název", + description: "Popis", + notification_templates: "Šablony upozornění", + allowed_users: "Povolení uživatelé (Pokud není žádný vybrán, získávají povolení všichni)", + successful: "Nová skupina aktiv byla přidána", + successful_edit: "Skupina aktiv byla upravena", + removed: "Skupina aktiv byla smazána", + removed_error: "Skupina aktiv je používána a nelze ji smazat", + total_count: "Počet skupin aktiv: " + }, + + notification_template: { + add_new: "Přidat novou šablonu upozornění", + edit: "Upravit šablonu upozornění", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit šablonu upozornění", + name: "Název", + description: "Popis", + message_title: "Nadpis zprávy", + message_body: "Tělo zprávy", + new_recipient: "Nový příjemce", + email: "E-mailová adresa", + recipients: "Příjemci", + successful: "Nová šablona upozornění byla přidána", + successful_edit: "Šablona upozornění byla upravena", + removed: "Šablona upozornění byla smazána", + removed_error: "Šablona upozornění se používá a nelze ji smazat", + total_count: "Počet šablon upozornění: " + }, + + asset: { + add_new: "Přidat nové aktivum", + add_group_info: "Přidejte prosím skupinu aktiv", + edit: "Upravit aktivum", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit toto aktivum", + name: "Název", + serial: "Sériové číslo", + description: "Popis", + cpe: "CPE kód", + new_cpe: "Přidat CPE kód", + cpes: "CPE kódy", + value: "Hodnota", + successful: "Nové aktivum bylo přidáno", + successful_edit: "Aktivum bylo upraveno", + removed: "Aktivum bylo smazáno", + removed_error: "Aktivum se používá a nelze jej smazat", + total_count: "Počet aktiv: ", + vulnerabilities: "Zranitelnosti", + vulnerabilities_count: "Počet zranitelností: ", + no_vulnerabilities: "Bez zranitelností", + import_csv: "Importovat CSV", + import_from_csv: "Importovat CPE z CSV", + file_has_header: "Soubor má hlavičku", + load_csv_file: "Načíst CSV soubor", + import: "Importovat", + close: "Zavřít" + }, + + remote_access: { + add_new: "Přidat vzdálený přístup", + edit: "Upravit vzdálený přístup", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit vzdálený přístup", + name: "Název", + description: "Popis", + access_key: "Přístupový klíč", + enabled: "Enabled", + successful: "Nový vzdálený přístup byl přidán", + successful_edit: "Vzdálený přístup byl upraven", + removed: "Vzdálený přístup byl smazán", + 
removed_error: "Vzdálený přístup se používá a nelze jej smazat", + osint_sources: "Sdílené zdroje OSINT", + report_item_types: "Sdílené analýzy", + total_count: "Počet vzdálených přístupů: " + }, + + remote_node: { + add_new: "Přidat vzdálenou instanci", + edit: "Upravit vzdálenou instanci", + add: "Přidat", + cancel: "Zrušit", + save: "Uložit", + validation_error: "Prosím vyplňte všechna povinná pole", + error: "Nepodařilo se uložit tuto vzdálenou instanci", + name: "Name", + description: "Popis", + remote_url: "URL vzdálené instance", + event_url: "URL vzdáleného zdroje událostí", + access_key: "Přístupový klíč", + enabled: "Zapnuto", + connect: "Připojit ke vzdálené instanci", + connect_error: "Připojení ke vzdálené instanci selhalo. Použit nesprávný přístupový klíč nebo instance není v dostupná.", + connect_info: "Připojeno ke vzdálené instanci.", + sync_news_items: "Synchronizovat novinky", + sync_report_items: "Synchronizovat analýzy", + osint_source_group: "Synchronizovat do skupiny OSINT zdrojů", + successful: "Nová vzdálená instance byla přidána", + successful_edit: "Vzdálená instance byla upravena", + removed: "Vzdálená instance byla smazána", + removed_error: "Vzdálená instance se používá a nelze ji odstranit", + total_count: "Počet vzdálených instancí: " + }, + + drop_zone: { + default_message: "Přetáhněte soubory sem nebo kliknutím vyberte", + file_description: "Popis", + last_updated: "Naposledy upraveno", + save: "Uložit", + download: "Stáhnout", + delete: "Smazat", + cancel: "Zrušit", + attachment_load: "Načíst přílohu", + attachment_detail: "Detaily přílohy" + }, + + error: { + aggregate_in_use: "Některé vybrané novinky nebo sloučené novinky jsou již připojeny k analýze", + server_error: "Neznámá chyba serveru..." + } +}; + +export default messages_cs From fc20dfb3a7f27fb5cc4fe7594e54257ce76af772 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 20 Feb 2024 23:30:52 +0100 Subject: [PATCH 007/146] use secrets for Docker, remove Docker .env from repository --- .gitignore | 6 + docker/{.env => .env.example} | 7 - docker/README.md | 2 +- docker/api_key.txt.example | 1 + docker/docker-compose-keycloak-serv.yml | 18 +- docker/docker-compose-keycloak.yml | 11 +- docker/docker-compose.yml | 34 +- docker/jwt_secret_key.txt.example | 1 + docker/keycloak_password.txt.example | 1 + docker/postgres_keycloak_password.txt.example | 1 + docker/postgres_password.txt.example | 1 + docker/prestart_core.sh | 8 +- src/bots/managers/auth_manager.py | 3 + src/bots/managers/sse_manager.py | 14 +- src/bots/remote/core_api.py | 171 ++++++++-- src/collectors/managers/auth_manager.py | 3 + src/collectors/remote/core_api.py | 93 ++++-- src/core/README.md | 4 +- src/core/config.py | 72 ++++- src/core/managers/auth_manager.py | 303 ++++++++++++++---- src/presenters/managers/auth_manager.py | 3 + src/publishers/managers/auth_manager.py | 3 + 22 files changed, 619 insertions(+), 141 deletions(-) rename docker/{.env => .env.example} (74%) create mode 100644 docker/api_key.txt.example create mode 100644 docker/jwt_secret_key.txt.example create mode 100644 docker/keycloak_password.txt.example create mode 100644 docker/postgres_keycloak_password.txt.example create mode 100644 docker/postgres_password.txt.example diff --git a/.gitignore b/.gitignore index 5fc30282a..27082c274 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,12 @@ build/ *.bak # sensitive data not to be commited +docker/.env +docker/postgres_password.txt +docker/postgres_keycloak_password.txt +docker/keycloak_password.txt 
+docker/jwt_secret_key.txt +docker/api_key.txt .env.local .env.*.local src/.env diff --git a/docker/.env b/docker/.env.example similarity index 74% rename from docker/.env rename to docker/.env.example index 9686c2df1..4797d556b 100644 --- a/docker/.env +++ b/docker/.env.example @@ -12,12 +12,6 @@ TARANIS_NG_TAG=v23.12.1 # Timezone for all containers TZ=Europe/Bratislava -# Default passwords. CHANGE THESE FOR PRODUCTION! -POSTGRES_PASSWORD=supersecret -POSTGRES_KEYCLOAK_PASSWORD=supersecret -JWT_SECRET_KEY=supersecret -COLLECTOR_PRESENTER_PUBLISHER_API_KEY=supersecret - # Paths CVE_UPDATE_FILE=/data/cve_dictionary.xml CPE_UPDATE_FILE=/data/cpe_dictionary.xml @@ -39,4 +33,3 @@ PRESENTER_PORT=5002 # Standalone Keycloak KEYCLOAK_VERSION=16.1.1 KEYCLOAK_USER=admin -KEYCLOAK_PASSWORD=supersecret diff --git a/docker/README.md b/docker/README.md index f03ff8add..af039aa70 100644 --- a/docker/README.md +++ b/docker/README.md @@ -149,7 +149,7 @@ Any configuration options are available at [https://hub.docker.com/_/postgres](h | `DB_POOL_SIZE` | SQLAlchemy QueuePool number of active connections to the database. | `100` | | `DB_POOL_RECYCLE` | SQLAlchemy QueuePool maximum connection age. | `300` | | `DB_POOL_TIMEOUT` | SQLAlchemy QueuePool connection timeout. | `5` | -| `JWT_SECRET_KEY` | JWT token secret key. | `J6flTliJ076zWg` | +| `JWT_SECRET_KEY` | JWT token secret key. | `supersecret` | | `OPENID_LOGOUT_URL` | Keycloak logout URL. | `https://example.com/auth/realms/master/protocol/openid-connect/logout` | | `WORKERS_PER_CORE` | Number of gunicorn worker threads to spawn per CPU core. | `4` | | `SKIP_DEFAULT_COLLECTOR` | Set to `true` to prevent initialization of a default docker collector at first run | `` | diff --git a/docker/api_key.txt.example b/docker/api_key.txt.example new file mode 100644 index 000000000..b5f907866 --- /dev/null +++ b/docker/api_key.txt.example @@ -0,0 +1 @@ +supersecret diff --git a/docker/docker-compose-keycloak-serv.yml b/docker/docker-compose-keycloak-serv.yml index aba75ba94..49f14d279 100644 --- a/docker/docker-compose-keycloak-serv.yml +++ b/docker/docker-compose-keycloak-serv.yml @@ -7,7 +7,7 @@ services: environment: POSTGRES_DB: "taranis-ng-keycloak" POSTGRES_USER: "taranis-ng-keycloak" - POSTGRES_PASSWORD: "${POSTGRES_KEYCLOAK_PASSWORD}" + POSTGRES_PASSWORD: /run/secrets/postgres_keycloak_password command: ["postgres", "-c", "shared_buffers=${DB_SHARED_BUFFERS}", "-c", "max_connections=${DB_MAX_CONNECTIONS}"] volumes: - "keycloak_db_data:/var/lib/postgresql/data" @@ -15,7 +15,9 @@ services: driver: "json-file" options: max-size: "200k" - max-file: "10" + max-file: "10" + secrets: + - postgres_keycloak_password keycloak: image: "skcert/taranis-ng-keycloak:${TARANIS_NG_TAG}" @@ -32,10 +34,12 @@ services: DB_DATABASE: taranis-ng-keycloak DB_USER: taranis-ng-keycloak DB_PASSWORD: "${POSTGRES_KEYCLOAK_PASSWORD}" + DB_PASSWORD_FILE: /run/secrets/postgres_keycloak_password KEYCLOAK_IMPORT: "/opt/jboss/keycloak/realm-export.json" KEYCLOAK_FRONTEND_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth" KEYCLOAK_USER: "${KEYCLOAK_USER}" KEYCLOAK_PASSWORD: "${KEYCLOAK_PASSWORD}" + KEYCLOAK_PASSWORD_FILE: /run/secrets/keycloak_password KEYCLOAK_DEFAULT_THEME: "taranis-ng" PROXY_ADDRESS_FORWARDING: "false" JAVA_OPTS: "-Dkeycloak.profile.feature.upload_scripts=enabled" @@ -57,8 +61,16 @@ services: traefik.http.routers.taranis-keycloak-443.tls.domains[0].main: "${TARANIS_NG_HOSTNAME}" traefik.http.routers.taranis-keycloak-443.middlewares: "taranis-keycloak-stripprefix" 
traefik.http.routers.taranis-keycloak-443.service: "taranis-keycloak" + secrets: + - postgres_keycloak_password + - keycloak_password + +secrets: + postgres_keycloak_password: + file: postgres_keycloak_password.txt + keycloak_password: + file: keycloak_password.txt volumes: keycloak_db_data: keycloak_data: - diff --git a/docker/docker-compose-keycloak.yml b/docker/docker-compose-keycloak.yml index eb7ff2b23..6a14c8295 100644 --- a/docker/docker-compose-keycloak.yml +++ b/docker/docker-compose-keycloak.yml @@ -2,7 +2,7 @@ version: "3.9" services: core: - environment: + environment: TARANIS_NG_AUTHENTICATOR: "keycloak" OPENID_LOGOUT_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth/realms/taranis-ng/protocol/openid-connect/logout?redirect_uri=GOTO_URL" @@ -13,12 +13,19 @@ services: KEYCLOAK_USER_MANAGEMENT: "true" KEYCLOAK_SERVER_URL: "http://keycloak:8080" KEYCLOAK_ADMIN_USERNAME: "admin" - KEYCLOAK_ADMIN_PASSWORD: "supersecret" + KEYCLOAK_ADMIN_PASSWORD: "${KEYCLOAK_PASSWORD}" + KEYCLOAK_ADMIN_PASSWORD_FILE: /run/secrets/keycloak_password KEYCLOAK_REALM_NAME: "taranis-ng" KEYCLOAK_CLIENT_SECRET_KEY: "supersecret" KEYCLOAK_VERIFY: "true" + secrets: + - keycloak_password gui: environment: VUE_APP_TARANIS_NG_LOGOUT_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/auth/logout?gotoUrl=TARANIS_GUI_URI" VUE_APP_TARANIS_NG_LOGIN_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth/realms/taranis-ng/protocol/openid-connect/auth?response_type=code&client_id=taranis-ng&redirect_uri=TARANIS_GUI_URI" + +secrets: + keycloak_password: + file: keycloak_password.txt diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 2f4be5755..641616f65 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -21,6 +21,7 @@ services: POSTGRES_DB: "taranis-ng" POSTGRES_USER: "taranis-ng" POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}" + POSTGRES_PASSWORD_FILE: /run/secrets/postgres_password TZ: "${TZ}" PGTZ: "${TZ}" command: ["postgres", "-c", "shared_buffers=${DB_SHARED_BUFFERS}", "-c", "max_connections=${DB_MAX_CONNECTIONS}"] @@ -31,6 +32,8 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - postgres_password core: depends_on: @@ -52,12 +55,14 @@ services: DB_DATABASE: "taranis-ng" DB_USER: "taranis-ng" DB_PASSWORD: "${POSTGRES_PASSWORD}" + DB_PASSWORD_FILE: /run/secrets/postgres_password DB_POOL_SIZE: 100 DB_POOL_RECYCLE: 300 DB_POOL_TIMEOUT: 30 TARANIS_NG_AUTHENTICATOR: "${TARANIS_NG_AUTHENTICATOR}" JWT_SECRET_KEY: "${JWT_SECRET_KEY}" + JWT_SECRET_KEY_FILE: /run/secrets/jwt_secret_key OPENID_LOGOUT_URL: "" WORKERS_PER_CORE: "1" @@ -68,7 +73,9 @@ services: DEBUG: "true" DEBUG_SQL: "false" # to allow automatic initialisation of collectors/presenters/publishers - COLLECTOR_PRESENTER_PUBLISHER_API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + # COLLECTOR_PRESENTER_PUBLISHER_API_KEY_FILE: "/run/secrets/api_key" + API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + API_KEY_FILE: "/run/secrets/api_key" labels: traefik.enable: "true" traefik.http.services.taranis-api.loadbalancer.server.port: "80" @@ -84,7 +91,6 @@ services: traefik.http.routers.taranis-sse-443.tls: "true" traefik.http.routers.taranis-sse-443.tls.domains[0].main: "${TARANIS_NG_HOSTNAME}" traefik.http.routers.taranis-sse-443.service: "taranis-api" - volumes: - "core_data:/data" logging: @@ -92,6 +98,10 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - postgres_password + - jwt_secret_key + - api_key bots: depends_on: @@ -109,6 +119,7 @@ services: https_proxy: "${HTTPS_PROXY}" environment: API_KEY: 
"${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + API_KEY_FILE: "/run/secrets/api_key" TARANIS_NG_CORE_URL: "http://core" TARANIS_NG_CORE_SSE: "http://core/sse" WORKERS_PER_CORE: "1" @@ -118,6 +129,8 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - api_key collectors: depends_on: @@ -136,6 +149,7 @@ services: environment: TARANIS_NG_CORE_URL: "http://core" API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + API_KEY_FILE: "/run/secrets/api_key" WORKERS_PER_CORE: "1" DEBUG: "true" TZ: "${TZ}" @@ -146,6 +160,8 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - api_key presenters: depends_on: @@ -164,6 +180,7 @@ services: environment: TARANIS_NG_CORE_URL: "http://core" API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + API_KEY_FILE: "/run/secrets/api_key" WORKERS_PER_CORE: "1" TZ: "${TZ}" ports: @@ -175,6 +192,8 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - api_key publishers: depends_on: @@ -193,6 +212,7 @@ services: environment: TARANIS_NG_CORE_URL: "http://core" API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" + API_KEY_FILE: "/run/secrets/api_key" WORKERS_PER_CORE: "1" TZ: "${TZ}" logging: @@ -200,6 +220,8 @@ services: options: max-size: "200k" max-file: "10" + secrets: + - api_key gui: depends_on: @@ -269,6 +291,14 @@ services: max-size: "200k" max-file: "10" +secrets: + postgres_password: + file: postgres_password.txt + jwt_secret_key: + file: jwt_secret_key.txt + api_key: + file: api_key.txt + volumes: redis_conf: database_data: diff --git a/docker/jwt_secret_key.txt.example b/docker/jwt_secret_key.txt.example new file mode 100644 index 000000000..b5f907866 --- /dev/null +++ b/docker/jwt_secret_key.txt.example @@ -0,0 +1 @@ +supersecret diff --git a/docker/keycloak_password.txt.example b/docker/keycloak_password.txt.example new file mode 100644 index 000000000..b5f907866 --- /dev/null +++ b/docker/keycloak_password.txt.example @@ -0,0 +1 @@ +supersecret diff --git a/docker/postgres_keycloak_password.txt.example b/docker/postgres_keycloak_password.txt.example new file mode 100644 index 000000000..b5f907866 --- /dev/null +++ b/docker/postgres_keycloak_password.txt.example @@ -0,0 +1 @@ +supersecret diff --git a/docker/postgres_password.txt.example b/docker/postgres_password.txt.example new file mode 100644 index 000000000..b5f907866 --- /dev/null +++ b/docker/postgres_password.txt.example @@ -0,0 +1 @@ +supersecret diff --git a/docker/prestart_core.sh b/docker/prestart_core.sh index 1b50c5cac..a76fe2f83 100644 --- a/docker/prestart_core.sh +++ b/docker/prestart_core.sh @@ -5,10 +5,14 @@ echo "Running inside /app/prestart.sh..." echo "Running migrations..." /app/db_migration.py db upgrade head -if [ `./manage.py collector --list | wc -l` = 0 -a x"$SKIP_DEFAULT_COLLECTOR" != "xtrue" ]; then +if [ "$(./manage.py collector --list | wc -l)" -eq 0 ] && [ x"$SKIP_DEFAULT_COLLECTOR" != "xtrue" ]; then ( echo "Adding default collector" - ./manage.py collector --create --name "Default Docker Collector" --description "A local collector node configured as a part of Taranis NG default installation." --api-url "http://collectors/" --api-key "$COLLECTOR_PRESENTER_PUBLISHER_API_KEY" + if [ -z "$API_KEY" ]; then + echo "API_KEY variable is not set, trying to read from file..." + API_KEY=$(cat "$API_KEY_FILE") + fi + ./manage.py collector --create --name "Default Docker Collector" --description "A local collector node configured as a part of Taranis NG default installation." 
--api-url "http://collectors/" --api-key "$API_KEY" ) & fi diff --git a/src/bots/managers/auth_manager.py b/src/bots/managers/auth_manager.py index 1ce891a05..5dca2e02c 100644 --- a/src/bots/managers/auth_manager.py +++ b/src/bots/managers/auth_manager.py @@ -9,6 +9,9 @@ import ssl api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() if os.getenv("SSL_VERIFICATION") == "False": try: diff --git a/src/bots/managers/sse_manager.py b/src/bots/managers/sse_manager.py index d9adceb64..fa77075fc 100644 --- a/src/bots/managers/sse_manager.py +++ b/src/bots/managers/sse_manager.py @@ -1,3 +1,4 @@ +"""This module is responsible for managing the Server-Sent Events (SSE) from the Core.""" import os import requests import sseclient @@ -6,19 +7,26 @@ from managers import bots_manager +api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() + + def initialize(): + """Start the SSE thread to listen to the Core's events.""" + class SSEThread(threading.Thread): @classmethod def run(cls): try: - response = requests.get(os.getenv("TARANIS_NG_CORE_SSE") + "?api_key=" + os.getenv("API_KEY"), - stream=True) + response = requests.get(f"{os.getenv('TARANIS_NG_CORE_SSE')}?api_key={api_key}", stream=True) client = sseclient.SSEClient(response) for event in client.events(): bots_manager.process_event(event.event, event.data) except requests.exceptions.ConnectionError: - print('Could not connect to Core SSE') + print("Could not connect to Core SSE") sse_thread = SSEThread() sse_thread.start() diff --git a/src/bots/remote/core_api.py b/src/bots/remote/core_api.py index 5609bc94e..3664ae2e0 100755 --- a/src/bots/remote/core_api.py +++ b/src/bots/remote/core_api.py @@ -1,92 +1,221 @@ +"""This module provides a class to interact with the Taranis-NG Core API.""" import os import json import requests class CoreApi: - api_url = os.getenv('TARANIS_NG_CORE_URL') + """A class that provides methods to interact with the Taranis-NG Core API. + + Attributes: + api_url (str): The URL of the Taranis-NG Core API. + api_key (str): The API key used for authentication. + headers (dict): The headers used for API requests. + + Methods: + get_bots_presets: Get the presets for a specific bot type. + get_news_items_data: Get news items data. + update_news_item_attributes: Update the attributes of a news item. + delete_word_list_category_entries: Delete entries from a word list category. + update_word_list_category_entries: Update the entries of a word list category. + get_categories: Get the categories for a specific bot. + add_word_list_category: Add a word list category. + get_news_items_aggregate: Get news items aggregate by source group. + news_items_grouping: Group news items based on certain criteria. + """ + + api_url = os.getenv("TARANIS_NG_CORE_URL") if api_url.endswith("/"): api_url = api_url[:-1] - api_key = os.getenv('API_KEY') - headers = {'Authorization': 'Bearer ' + api_key} + api_key = os.getenv("API_KEY") + if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() + headers = {"Authorization": "Bearer " + api_key} @classmethod def get_bots_presets(cls, bot_type): + """Get the presets for a specific bot type. + + This method sends a POST request to the API endpoint to retrieve the presets + for the specified bot type. + + Arguments: + bot_type (str): The type of bot for which to retrieve the presets. 
+ + Returns: + tuple: A tuple containing the JSON response and the HTTP status code. + The JSON response contains the presets for the specified bot type. + The HTTP status code indicates the success or failure of the request. + """ try: - response = requests.post(cls.api_url + '/api/v1/bots/bots-presets', json={'api_key': cls.api_key, - 'bot_type': bot_type}, - headers=cls.headers) + response = requests.post( + cls.api_url + "/api/v1/bots/bots-presets", json={"api_key": cls.api_key, "bot_type": bot_type}, headers=cls.headers + ) return response.json(), response.status_code except (requests.exceptions.ConnectionError, json.decoder.JSONDecodeError): return {}, 503 @classmethod def get_news_items_data(cls, limit): + """Get news items data. + + This method retrieves news items data from the API. + + Arguments: + limit (int): The maximum number of news items to retrieve. + + Returns: + tuple: A tuple containing the JSON response and the HTTP status code. + If an exception occurs, None is returned along with a status code of 400. + """ try: - response = requests.get(cls.api_url + '/api/v1/bots/news-item-data?limit=' + limit, headers=cls.headers) + response = requests.get(cls.api_url + "/api/v1/bots/news-item-data?limit=" + limit, headers=cls.headers) return response.json(), response.status_code except Exception: return None, 400 @classmethod def update_news_item_attributes(cls, id, attributes): + """Update the attributes of a news item. + + Arguments: + id (str): The ID of the news item. + attributes (dict): The attributes to update. + + Returns: + int: The status code of the API response. + """ try: - response = requests.put(cls.api_url + '/api/v1/bots/news-item-data/' + id + '/attributes', json=attributes, - headers=cls.headers) + response = requests.put(cls.api_url + "/api/v1/bots/news-item-data/" + id + "/attributes", json=attributes, headers=cls.headers) return response.status_code except Exception: return None, 400 @classmethod def delete_word_list_category_entries(cls, id, name): + """Delete entries from a word list category. + + This method sends a DELETE request to the API to delete entries from a word list category. + + Arguments: + id (str): The ID of the word list category. + name (str): The name of the entry to be deleted. + + Returns: + int: The status code of the response, or None if an exception occurred. + """ try: - response = requests.delete(cls.api_url + '/api/v1/bots/word-list-categories/' + id + '/entries/' + name, - headers=cls.headers) + response = requests.delete(cls.api_url + "/api/v1/bots/word-list-categories/" + id + "/entries/" + name, headers=cls.headers) return response.status_code except Exception: return None, 400 @classmethod def update_word_list_category_entries(cls, id, name, entries): + """Update the entries of a word list category. + + Arguments: + id (str): The ID of the word list category. + name (str): The name of the entry. + entries (list): The list of entries to update. + + Returns: + int: The status code of the API response. + """ try: - response = requests.put(cls.api_url + '/api/v1/bots/word-list-categories/' + id + '/entries/' + name, - json=entries, - headers=cls.headers) + response = requests.put( + cls.api_url + "/api/v1/bots/word-list-categories/" + id + "/entries/" + name, json=entries, headers=cls.headers + ) return response.status_code except Exception: return None, 400 @classmethod def get_categories(cls, id): + """Get the categories for a specific bot. + + Arguments: + id (str): The ID of the bot. 
+ + Returns: + dict: The categories for the bot. + + Raises: + None + + """ try: - response = requests.get(cls.api_url + '/api/v1/bots/word-list-categories/' + id, headers=cls.headers) + response = requests.get(cls.api_url + "/api/v1/bots/word-list-categories/" + id, headers=cls.headers) return response.json() except Exception: return None, 400 @classmethod def add_word_list_category(cls, id, category): + """Add a word list category. + + This method sends a PUT request to the API endpoint to add a word list category. + + Arguments: + id (str): The ID of the category. + category (dict): The category data to be added. + + Returns: + int: The status code of the response. + + Raises: + None + + """ try: - response = requests.put(cls.api_url + '/api/v1/bots/word-list-categories/' + id, json=category, - headers=cls.headers) + response = requests.put(cls.api_url + "/api/v1/bots/word-list-categories/" + id, json=category, headers=cls.headers) return response.status_code except Exception: return None, 400 @classmethod def get_news_items_aggregate(cls, source_group, limit): + """Get news items aggregate by source group. + + This method retrieves news item aggregates based on the specified source group and limit. + + Arguments: + source_group (str): The source group to filter the news item aggregates. + limit (int): The maximum number of news item aggregates to retrieve. + + Returns: + dict: A dictionary containing the news item aggregates. + + Raises: + None + + """ try: - response = requests.get(cls.api_url + '/api/v1/bots/news-item-aggregates-by-group/' + source_group, - json={'limit': limit}, headers=cls.headers) + response = requests.get( + cls.api_url + "/api/v1/bots/news-item-aggregates-by-group/" + source_group, json={"limit": limit}, headers=cls.headers + ) return response.json() except Exception: return None, 400 @classmethod def news_items_grouping(cls, data): + """Group news items based on certain criteria. + + This method sends a PUT request to the API endpoint '/api/v1/bots/news-item-aggregates-group-action' + with the provided data to group news items based on certain criteria. + + Arguments: + data (dict): The data to be sent in the request body. + + Returns: + int: The status code of the response if the request is successful. + None: If an exception occurs during the request. 
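Taken together, these methods give a bot a simple poll-and-annotate loop against the core. A hypothetical sketch: only the method names come from the class above; the import path and the attribute payload shape are illustrative assumptions.

# Hypothetical usage sketch inside the bots service; not taken from this patch.
from remote.core_api import CoreApi

news_items, status = CoreApi.get_news_items_data("50")  # the limit is concatenated as a string
if status == 200:
    for item in news_items:
        # Illustrative payload only; consult the core API for the real attribute schema.
        attributes = [{"key": "example_bot_tag", "value": "processed"}]
        CoreApi.update_news_item_attributes(str(item["id"]), attributes)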
+ + """ try: - response = requests.put(cls.api_url + '/api/v1/bots/news-item-aggregates-group-action', - json=data, headers=cls.headers) + response = requests.put(cls.api_url + "/api/v1/bots/news-item-aggregates-group-action", json=data, headers=cls.headers) return response.status_code except Exception: return None, 400 diff --git a/src/collectors/managers/auth_manager.py b/src/collectors/managers/auth_manager.py index 56bbdb59e..ae3e90605 100644 --- a/src/collectors/managers/auth_manager.py +++ b/src/collectors/managers/auth_manager.py @@ -9,6 +9,9 @@ import ssl api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() if os.getenv("SSL_VERIFICATION") == "False": try: diff --git a/src/collectors/remote/core_api.py b/src/collectors/remote/core_api.py index 491e510da..125fc60d7 100755 --- a/src/collectors/remote/core_api.py +++ b/src/collectors/remote/core_api.py @@ -1,36 +1,73 @@ +"""This module provides methods for interacting with the Taranis-NG API.""" import logging import os import urllib import requests -logger = logging.getLogger('gunicorn.error') +logger = logging.getLogger("gunicorn.error") logger.level = logging.INFO # increase logging level if "DEBUG" in os.environ and os.environ.get("DEBUG").lower() == "true": logger.setLevel(logging.DEBUG) + class CoreApi: - api_url = os.getenv('TARANIS_NG_CORE_URL') + """ + The CoreApi class provides methods for interacting with the Taranis-NG API. + + Attributes: + api_url (str): The URL of the Taranis-NG API. + api_key (str): The API key used for authentication. + headers (dict): The headers to be included in API requests. + + Methods: + get_osint_sources(collector_type): Retrieves the OSINT sources for a given collector type. + update_collector_status(): Updates the status of the collector. + add_news_items(news_items): Adds news items to the collector. + """ + + api_url = os.getenv("TARANIS_NG_CORE_URL") if api_url.endswith("/"): api_url = api_url[:-1] - api_key = os.getenv('API_KEY') - headers = {'Authorization': 'Bearer ' + api_key} + api_key = os.getenv("API_KEY") + if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() + headers = {"Authorization": "Bearer " + api_key} @classmethod def get_osint_sources(cls, collector_type): - id = '' - config_file = os.getenv('COLLECTOR_CONFIG_FILE') + """ + Retrieve the OSINT sources for a given collector type. + + Args: + collector_type (str): The type of collector. + + Returns: + tuple: A tuple containing the JSON response and the HTTP status code. + If an error occurs, returns None and 400 status code. 
+ """ + id = "" + config_file = os.getenv("COLLECTOR_CONFIG_FILE") try: - with open(config_file, 'r') as file: + with open(config_file, "r") as file: id = file.read().strip() except Exception as ex: logger.debug(ex) - return 'Cannot read collector config file.', 0 + return "Cannot read collector config file.", 0 try: - response = requests.get(cls.api_url + '/api/v1/collectors/' + urllib.parse.quote(id) + '/osint-sources?api_key=' + urllib.parse.quote(cls.api_key) + '&collector_type=' + urllib.parse.quote(collector_type), - headers=cls.headers) + response = requests.get( + cls.api_url + + "/api/v1/collectors/" + + urllib.parse.quote(id) + + "/osint-sources?api_key=" + + urllib.parse.quote(cls.api_key) + + "&collector_type=" + + urllib.parse.quote(collector_type), + headers=cls.headers, + ) return response.json(), response.status_code except Exception as ex: logger.debug(ex) @@ -38,18 +75,26 @@ def get_osint_sources(cls, collector_type): @classmethod def update_collector_status(cls): - id = '' - config_file = os.getenv('COLLECTOR_CONFIG_FILE') + """Update the status of the collector. + + This method retrieves the collector ID from the environment variable COLLECTOR_CONFIG_FILE, + reads the collector config file, and sends a GET request to the API endpoint to update the + collector status. + + Returns: + tuple: A tuple containing the JSON response and the HTTP status code. + """ + id = "" + config_file = os.getenv("COLLECTOR_CONFIG_FILE") try: - with open(config_file, 'r') as file: + with open(config_file, "r") as file: id = file.read().strip() except Exception as ex: logger.debug(ex) - return 'Cannot read collector config file.', 0 + return "Cannot read collector config file.", 0 try: - response = requests.get(cls.api_url + '/api/v1/collectors/' + urllib.parse.quote(id), - headers=cls.headers) + response = requests.get(cls.api_url + "/api/v1/collectors/" + urllib.parse.quote(id), headers=cls.headers) return response.json(), response.status_code except Exception as ex: logger.debug(ex) @@ -57,9 +102,21 @@ def update_collector_status(cls): @classmethod def add_news_items(cls, news_items): + """Add news items to the collector. + + This method sends a POST request to the API endpoint for adding news items to the collector. + + Arguments: + news_items (list): A list of news items to be added. + + Returns: + int: The HTTP status code of the response. + + Raises: + Exception: If an error occurs during the request. + """ try: - response = requests.post(cls.api_url + '/api/v1/collectors/news-items', json=news_items, - headers=cls.headers) + response = requests.post(cls.api_url + "/api/v1/collectors/news-items", json=news_items, headers=cls.headers) return response.status_code except Exception as ex: logger.debug(ex) diff --git a/src/core/README.md b/src/core/README.md index 457b230ff..df63135ec 100644 --- a/src/core/README.md +++ b/src/core/README.md @@ -6,7 +6,7 @@ `source venv/bin/activate` `pip3 install -r requirements.txt` 4. Set environment variables for taranis-ng-core: - DB_URL=127.0.0.1:5432;DB_DATABASE=taranisdb;DB_USER=;DB_PASSWORD=;JWT_SECRET_KEY=F1AE885322F1C + DB_URL=127.0.0.1:5432;DB_DATABASE=taranisdb;DB_USER=;DB_PASSWORD=;JWT_SECRET_KEY= 5. Before first run uncomment line with `import test.py` in app.py to create set of test data. After first run comment this line again. 6. Run taranis-ng-core: `python3 run.py` 7. Set environment variables for taranis-ng-collectors: @@ -33,7 +33,7 @@ Keycloak is not needed to run test version of TaranisNG at the moment. You can u 6. 
Create first admin account and log in to Master Realm 7. Choose **ADD REALM** to create realm with the name **taranisng** 8. In taranis-ng realm choose **IMPORT** and import file _realm-export.json_ from **taranis-ng-core** root -9. In CLIENTS choose taranis-ng and regenerate secret in CREDENTIALS -> REGENERATE SECRET and put secret it _into client_secrets.json_ inside **taranis-ng-core** root (_NOTE: this will be properly configurable inside admin interface in the future_) +9. In CLIENTS choose taranis-ng and regenerate secret in CREDENTIALS -> REGENERATE SECRET and put secret it _into client_secrets.json_ inside **taranis-ng-core** root (_NOTE: this will be properly configurable inside admin interface in the future_) 10. Create 2 users **user** and **admin** in USERS -> ADD USER. These are test users in TaranisNG at the moment. 11. In **taranis-ng-core** add environment variable TARANIS_NG_AUTHENTICATOR=openid (just for sign in) or TARANIS_NG_AUTHENTICATOR=keycloak (for identy management) 12. In **taranis-ng-core** add environment variable OPENID_LOGOUT_URL and set it according to your Keycloak installation e.g. http://127.0.0.1:8081/auth/realms/taranisng/protocol/openid-connect/logout?redirect_uri= diff --git a/src/core/config.py b/src/core/config.py index 2323c95a5..83bbd9ef1 100755 --- a/src/core/config.py +++ b/src/core/config.py @@ -1,20 +1,59 @@ +"""This module contains the configuration class for Taranis-NG.""" import os from dotenv import load_dotenv + load_dotenv() class Config(object): + """Configuration class for Taranis-NG. + + This class holds the configuration settings for the Taranis-NG application. + It provides access to environment variables and other configuration options. + + Attributes: + REDIS_URL (str): The URL of the Redis server. + DB_URL (str): The URL of the database server. + DB_DATABASE (str): The name of the database. + DB_USER (str): The username for the database connection. + DB_PASSWORD (str): The password for the database connection. + SQLALCHEMY_DATABASE_URI (str): The SQLAlchemy database URI. + SQLALCHEMY_TRACK_MODIFICATIONS (bool): Whether to track modifications in SQLAlchemy. + SQLALCHEMY_ECHO (bool): Whether to echo SQL queries in SQLAlchemy. + DB_POOL_SIZE (int): The size of the database connection pool. + DB_POOL_RECYCLE (int): The time in seconds before a connection is recycled. + DB_POOL_TIMEOUT (int): The maximum time in seconds to wait for a connection from the pool. + JWT_SECRET_KEY (str): The secret key for JWT token generation. + JWT_IDENTITY_CLAIM (str): The claim name for the JWT identity. + JWT_ACCESS_TOKEN_EXPIRES (int): The expiration time in seconds for JWT access tokens. + DEBUG (bool): Whether to enable debug mode. + SECRET_KEY (str): The secret key for the application. + OIDC_CLIENT_SECRETS (str): The path to the OIDC client secrets file. + OIDC_ID_TOKEN_COOKIE_SECURE (bool): Whether to secure the OIDC ID token cookie. + OIDC_REQUIRE_VERIFIED_EMAIL (bool): Whether to require verified email for OIDC. + OIDC_USER_INFO_ENABLED (bool): Whether to enable OIDC user info endpoint. + OIDC_OPENID_REALM (str): The OIDC realm. + OIDC_SCOPES (list): The list of OIDC scopes. + OIDC_INTROSPECTION_AUTH_METHOD (str): The OIDC introspection authentication method. + OIDC_TOKEN_TYPE_HINT (str): The OIDC token type hint. + OIDC_RESOURCE_CHECK_AUD (bool): Whether to check the audience of OIDC resource. + OIDC_CLOCK_SKEW (int): The clock skew in seconds for OIDC. + OPENID_LOGOUT_URL (str): The URL for OIDC logout. 
+ """ + REDIS_URL = os.getenv("REDIS_URL") DB_URL = os.getenv("DB_URL") DB_DATABASE = os.getenv("DB_DATABASE") DB_USER = os.getenv("DB_USER") DB_PASSWORD = os.getenv("DB_PASSWORD") + if not DB_PASSWORD: + with open(os.getenv("DB_PASSWORD_FILE"), "r") as file: + DB_PASSWORD = file.read() - SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://{user}:{pw}@{url}/{db}'.format(user=DB_USER, pw=DB_PASSWORD, - url=DB_URL, db=DB_DATABASE) + SQLALCHEMY_DATABASE_URI = "postgresql+psycopg2://{user}:{pw}@{url}/{db}".format(user=DB_USER, pw=DB_PASSWORD, url=DB_URL, db=DB_DATABASE) SQLALCHEMY_TRACK_MODIFICATIONS = False - SQLALCHEMY_ECHO = (os.getenv("DEBUG_SQL", "false").lower() == "true") # DEBUG SQL Queries + SQLALCHEMY_ECHO = os.getenv("DEBUG_SQL", "false").lower() == "true" # DEBUG SQL Queries if "DB_POOL_SIZE" in os.environ: DB_POOL_SIZE = os.getenv("DB_POOL_SIZE") @@ -22,26 +61,29 @@ class Config(object): DB_POOL_TIMEOUT = os.getenv("DB_POOL_TIMEOUT") SQLALCHEMY_ENGINE_OPTIONS = { - 'pool_size': int(DB_POOL_SIZE), - 'pool_recycle': int(DB_POOL_RECYCLE), - 'pool_pre_ping': True, - 'pool_timeout': int(DB_POOL_TIMEOUT) + "pool_size": int(DB_POOL_SIZE), + "pool_recycle": int(DB_POOL_RECYCLE), + "pool_pre_ping": True, + "pool_timeout": int(DB_POOL_TIMEOUT), } - JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY') - JWT_IDENTITY_CLAIM = 'sub' + JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY") + if not JWT_SECRET_KEY: + with open(os.getenv("JWT_SECRET_KEY_FILE"), "r") as file: + JWT_SECRET_KEY = file.read() + JWT_IDENTITY_CLAIM = "sub" JWT_ACCESS_TOKEN_EXPIRES = 14400 DEBUG = True - SECRET_KEY = 'OKdbmczZKFiteHVgKXiwFXZxKsLyRNvt' - OIDC_CLIENT_SECRETS = 'client_secrets.json' + SECRET_KEY = "OKdbmczZKFiteHVgKXiwFXZxKsLyRNvt" + OIDC_CLIENT_SECRETS = "client_secrets.json" OIDC_ID_TOKEN_COOKIE_SECURE = False OIDC_REQUIRE_VERIFIED_EMAIL = False OIDC_USER_INFO_ENABLED = True - OIDC_OPENID_REALM = 'taranis-ng' - OIDC_SCOPES = ['openid'] - OIDC_INTROSPECTION_AUTH_METHOD = 'client_secret_post' - OIDC_TOKEN_TYPE_HINT = 'access_token' + OIDC_OPENID_REALM = "taranis-ng" + OIDC_SCOPES = ["openid"] + OIDC_INTROSPECTION_AUTH_METHOD = "client_secret_post" + OIDC_TOKEN_TYPE_HINT = "access_token" OIDC_RESOURCE_CHECK_AUD = True OIDC_CLOCK_SKEW = 560 diff --git a/src/core/managers/auth_manager.py b/src/core/managers/auth_manager.py index 12758b9e4..5ded15cdc 100644 --- a/src/core/managers/auth_manager.py +++ b/src/core/managers/auth_manager.py @@ -1,3 +1,4 @@ +"""This module contains the authentication manager.""" import os from datetime import datetime, timedelta from enum import Enum, auto @@ -25,29 +26,46 @@ current_authenticator = None -api_key = os.getenv('API_KEY') +api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() def cleanup_token_blacklist(app): + """ + Clean up the token blacklist by deleting tokens older than one day. + + Arguments: + app -- The Flask application object. + """ with app.app_context(): TokenBlacklist.delete_older(datetime.today() - timedelta(days=1)) def initialize(app): + """ + Initialize the authentication manager. + + This function sets up the authentication manager based on the configured authenticator. + + Arguments: + app: The Flask application object. 
+ """ global current_authenticator JWTManager(app) - which = os.getenv('TARANIS_NG_AUTHENTICATOR') + which = os.getenv("TARANIS_NG_AUTHENTICATOR") if which is not None: which = which.lower() - if which == 'openid': + if which == "openid": current_authenticator = OpenIDAuthenticator() - elif which == 'keycloak': + elif which == "keycloak": current_authenticator = KeycloakAuthenticator() - elif which == 'password': + elif which == "password": current_authenticator = PasswordAuthenticator() - elif which == 'ldap': + elif which == "ldap": current_authenticator = LDAPAuthenticator() else: current_authenticator = PasswordAuthenticator() @@ -58,22 +76,69 @@ def initialize(app): def get_required_credentials(): + """Get the required credentials. + + This function returns the required credentials for the current authenticator. + + Returns: + The required credentials for the current authenticator. + """ return current_authenticator.get_required_credentials() def authenticate(credentials): + """Authenticate the user using the provided credentials. + + Arguments: + credentials -- The user's credentials. + + Returns: + The result of the authentication process. + """ return current_authenticator.authenticate(credentials) def refresh(user): + """Refresh the authentication token for the given user. + + Arguments: + user -- The user object for which the authentication token needs to be refreshed. + + Returns: + The refreshed authentication token. + """ return current_authenticator.refresh(user) def logout(token): + """Logout the user. + + This function logs out the user by calling the `logout` method of the current authenticator. + + Arguments: + token (str): The authentication token of the user. + + Returns: + None: This function does not return any value. + """ return current_authenticator.logout(token) class ACLCheck(Enum): + """Enumeration for ACL checks. + + This enumeration defines the different types of access control checks that can be performed. + + Attributes: + OSINT_SOURCE_GROUP_ACCESS: Access check for OSINT source group. + NEWS_ITEM_ACCESS: Access check for news item. + NEWS_ITEM_MODIFY: Modify check for news item. + REPORT_ITEM_ACCESS: Access check for report item. + REPORT_ITEM_MODIFY: Modify check for report item. + PRODUCT_TYPE_ACCESS: Access check for product type. + PRODUCT_TYPE_MODIFY: Modify check for product type. + """ + OSINT_SOURCE_GROUP_ACCESS = auto() NEWS_ITEM_ACCESS = auto() NEWS_ITEM_MODIFY = auto() @@ -84,11 +149,23 @@ class ACLCheck(Enum): def check_acl(item_id, acl_check, user): - check_see = 'SEE' in str(acl_check) - check_access = 'ACCESS' in str(acl_check) - check_modify = 'MODIFY' in str(acl_check) + """Check the access control list (ACL) for the given item. + + This function determines whether the user has the necessary permissions to perform the specified ACL check on the item. + + Arguments: + item_id (str): The ID of the item. + acl_check (str): The type of ACL check to perform. + user (str): The user performing the ACL check. + + Returns: + bool: True if the user is allowed to perform the ACL check, False otherwise. 
+ """ + check_see = "SEE" in str(acl_check) + check_access = "ACCESS" in str(acl_check) + check_modify = "MODIFY" in str(acl_check) allowed = False - item_type = 'UNKNOWN' + item_type = "UNKNOWN" if acl_check == ACLCheck.OSINT_SOURCE_GROUP_ACCESS: item_type = "OSINT Source Group" @@ -108,14 +185,23 @@ def check_acl(item_id, acl_check, user): if not allowed: if check_access: - log_manager.store_user_auth_error_activity(user, "Unauthorized access attempt to {}: {}".format(item_type, item_id)) + log_manager.store_user_auth_error_activity(user, f"Unauthorized access attempt to {item_type}: {item_id}") else: - log_manager.store_user_auth_error_activity(user, "Unauthorized modification attempt to {}: {}".format(item_type, item_id)) + log_manager.store_user_auth_error_activity(user, f"Unauthorized modification attempt to {item_type}: {item_id}") return allowed def no_auth(fn): + """Allow access to the decorated function without authentication. + + Arguments: + fn (function): The function to be decorated. + + Returns: + function: The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): log_manager.store_activity("API_ACCESS", None) @@ -125,6 +211,16 @@ def wrapper(*args, **kwargs): def get_id_name_by_acl(acl): + """Get the ID name based on the ACL. + + This function takes an ACL object and returns the corresponding ID name based on the ACL's name. + + Arguments: + acl -- The ACL object. + + Returns: + The ID name corresponding to the ACL's name. + """ if "NEWS_ITEM" in acl.name: return "item_id" elif "REPORT_ITEM" in acl.name: @@ -136,34 +232,33 @@ def get_id_name_by_acl(acl): def get_user_from_api_key(): - """ - Try to authenticate the user by API key + """Try to authenticate the user by API key. Returns: - (user) - user: User object or None + user (User object or None): The authenticated user object, or None if authentication fails. """ try: - if 'Authorization' not in request.headers or not request.headers['Authorization'].__contains__('Bearer '): + if "Authorization" not in request.headers or not request.headers["Authorization"].__contains__("Bearer "): return None - key_string = request.headers['Authorization'].replace('Bearer ', '') + key_string = request.headers["Authorization"].replace("Bearer ", "") api_key = ApiKey.find_by_key(key_string) if not api_key: return None user = User.find_by_id(api_key.user_id) return user except Exception as ex: - log_manager.store_auth_error_activity("Apikey check presence error: " + str(ex)) + log_manager.store_auth_error_activity(f"API key check presence error: {str(ex)}") return None def get_perm_from_user(user): - """ - Get user permmisions + """Get user permissions. + + Args: + user: User object representing the user. Returns: - (all_user_perms) - all_users_perms: set of user's Permissions or None + Set of user's permissions (as permission IDs) or None if an error occurs. """ try: all_users_perms = set() @@ -174,17 +269,17 @@ def get_perm_from_user(user): all_users_perms = all_users_perms.union(role_perms) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity("Get permmision from user error: " + str(ex)) + log_manager.store_auth_error_activity(f"Get permission from user error: {str(ex)}") return None def get_user_from_jwt_token(): - """ - Try to authenticate the user by API key + """Try to authenticate the user by API key. + + This function verifies the JWT token in the request and retrieves the user object associated with the token's identity. 
Returns: - (user) - user: User object or None + user (User object or None): The authenticated user object if successful, otherwise None. """ try: verify_jwt_in_request() @@ -195,43 +290,56 @@ def get_user_from_jwt_token(): # does it encode an identity? identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity("Missing identity in JWT: " + get_raw_jwt()) + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_raw_jwt()}") return None user = User.find(identity) if not user: - log_manager.store_auth_error_activity("Unknown identity in JWT: {}".format(identity)) + log_manager.store_auth_error_activity(f"Unknown identity in JWT: {identity}") return None return user def get_perm_from_jwt_token(user): - """ - Get user permmisions + """Get user permissions from JWT token. + + Args: + user: The user object. Returns: - (all_user_perms) - all_users_perms: set of user's Permissions or None + A set of user's permissions or None if permissions are missing or an error occurs. + """ try: # does it include permissions? claims = get_jwt_claims() - if not claims or 'permissions' not in claims: + if not claims or "permissions" not in claims: log_manager.store_user_auth_error_activity(user, "Missing permissions in JWT") return None - all_users_perms = set(claims['permissions']) + all_users_perms = set(claims["permissions"]) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity("Get permmision from JWT error: " + str(ex)) + log_manager.store_auth_error_activity(f"Get permission from JWT error: {str(ex)}") return None def auth_required(required_permissions, *acl_args): + """Check if the user has the required permissions and ACL access. + + Arguments: + required_permissions (str or list): The required permissions for the user. + *acl_args: Variable number of arguments representing the ACLs to check. + + Returns: + The decorated function. + + """ + def auth_required_wrap(fn): @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) if isinstance(required_permissions, list): required_permissions_set = set(required_permissions) @@ -251,12 +359,12 @@ def wrapper(*args, **kwargs): # is there at least one match with the permissions required by the call? if not required_permissions_set.intersection(active_permissions_set): - log_manager.store_user_auth_error_activity(user, "Insufficient permissions for user: {}".format(user.username)) + log_manager.store_user_auth_error_activity(user, f"Insufficient permissions for user: {user.username}") return error # if the object does have an ACL, do we match it? if len(acl_args) > 0 and not check_acl(kwargs[get_id_name_by_acl(acl_args[0])], acl_args[0], user): - log_manager.store_user_auth_error_activity(user, "Access denied by ACL for user: {}".format(user.username)) + log_manager.store_user_auth_error_activity(user, f"Access denied by ACL for user: {user.username}") return error # allow @@ -264,30 +372,40 @@ def wrapper(*args, **kwargs): return fn(*args, **kwargs) return wrapper + return auth_required_wrap def api_key_required(fn): + """Enforce API key authentication. + + Args: + fn (function): The function to be decorated. + + Returns: + function: The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) # do we have the authorization header? 
- if 'Authorization' not in request.headers: + if "Authorization" not in request.headers: log_manager.store_auth_error_activity("Missing Authorization header for external access") return error # is it properly encoded? - auth_header = request.headers['Authorization'] - if not auth_header.startswith('Bearer'): + auth_header = request.headers["Authorization"] + if not auth_header.startswith("Bearer"): log_manager.store_auth_error_activity("Missing Authorization Bearer for external access") return error # does it match some of our collector's keys? - api_key = auth_header.replace('Bearer ', '') + api_key = auth_header.replace("Bearer ", "") if not CollectorsNode.exists_by_api_key(api_key): api_key = log_manager.sensitive_value(api_key) - log_manager.store_auth_error_activity("Incorrect api key: " + api_key + " for external access") + log_manager.store_auth_error_activity(f"Incorrect api key: {api_key} for external access") return error # allow @@ -297,26 +415,36 @@ def wrapper(*args, **kwargs): def access_key_required(fn): + """Check for access key authorization. + + This decorator can be used to protect routes or functions that require access key authorization. + It checks if the request has a valid access key in the Authorization header. + + Arguments: + fn (function): The function to be decorated. + + Returns: + function: The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) # do we have the authorization header? - if 'Authorization' not in request.headers: + if "Authorization" not in request.headers: log_manager.store_auth_error_activity("Missing Authorization header for remote access") return error # is it properly encoded? - auth_header = request.headers['Authorization'] - if not auth_header.startswith('Bearer'): + auth_header = request.headers["Authorization"] + if not auth_header.startswith("Bearer"): log_manager.store_auth_error_activity("Missing Authorization Bearer for remote access") return error # does it match some of our remote peer's access keys? - if not RemoteAccess.exists_by_access_key(auth_header.replace('Bearer ', '')): - log_manager.store_auth_error_activity("Incorrect access key: " - + auth_header.replace('Bearer ', - '') + " for remote access") + if not RemoteAccess.exists_by_access_key(auth_header.replace("Bearer ", "")): + log_manager.store_auth_error_activity(f"Incorrect access key: {auth_header.replace('Bearer ', '')} for remote access") return error # allow @@ -326,24 +454,32 @@ def wrapper(*args, **kwargs): def jwt_required(fn): + """Check if a valid JWT is present in the request headers. + + Arguments: + fn -- The function to be decorated. + + Returns: + The decorated function. 
+ """ + @wraps(fn) def wrapper(*args, **kwargs): - try: verify_jwt_in_request() except JWTExtendedException: log_manager.store_auth_error_activity("Missing JWT") - return {'error': 'authorization required'}, 401 + return {"error": "authorization required"}, 401 identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity("Missing identity in JWT: {}".format(get_raw_jwt())) - return {'error': 'authorization failed'}, 401 + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_raw_jwt()}") + return {"error": "authorization failed"}, 401 user = User.find(identity) if user is None: - log_manager.store_auth_error_activity("Unknown identity: ".format(identity)) - return {'error': 'authorization failed'}, 401 + log_manager.store_auth_error_activity(f"Unknown identity: {identity}") + return {"error": "authorization failed"}, 401 log_manager.store_user_activity(user, "API_ACCESS", "Access permitted") return fn(*args, **kwargs) @@ -352,11 +488,27 @@ def wrapper(*args, **kwargs): def get_access_key(): - return request.headers['Authorization'].replace('Bearer ', '') + """Get the access key from the request headers. + + This function retrieves the access key from the "Authorization" header of the request. + The access key is expected to be in the format "Bearer ". + + Returns: + The access key extracted from the request headers. + """ + return request.headers["Authorization"].replace("Bearer ", "") def get_user_from_jwt(): - # obtain the identity and current permissions + """Obtain the identity and current permissions. + + This function retrieves the user information from the JWT token. If the user information + is not found in the JWT token, it falls back to retrieving the user information from the + API key. + + Returns: + The user object containing the identity and current permissions. + """ user = get_user_from_api_key() if user is None: user = get_user_from_jwt_token() @@ -364,21 +516,42 @@ def get_user_from_jwt(): def decode_user_from_jwt(jwt_token): + """Decode the user from a JWT token. + + Arguments: + jwt_token (str): The JWT token to decode. + + Returns: + User: The user object decoded from the JWT token. + """ decoded = None + jwt_secret_key = os.getenv("JWT_SECRET_KEY") + if not jwt_secret_key: + with open(os.getenv("JWT_SECRET_KEY_FILE"), "r") as file: + jwt_secret_key = file.read() try: - decoded = jwt.decode(jwt_token, os.getenv('JWT_SECRET_KEY')) + decoded = jwt.decode(jwt_token, jwt_secret_key) except Exception as ex: # e.g. "Signature has expired" - log_manager.store_auth_error_activity("Invalid JWT: " + str(ex)) + log_manager.store_auth_error_activity(f"Invalid JWT: {str(ex)}") if decoded is None: return None - return User.find(decoded['sub']) + return User.find(decoded["sub"]) def get_external_permissions_ids(): + """Get the external permissions IDs.""" return ["MY_ASSETS_ACCESS", "MY_ASSETS_CREATE", "MY_ASSETS_CONFIG"] def get_external_permissions(): + """Get the external permissions. + + This function retrieves a list of external permissions by calling the `get_external_permissions_ids` function + and then fetching the corresponding permission objects using the `Permission.find` method. + + Returns: + A list of external permission objects. 
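The secret-handling change introduced in this patch (fall back to a *_FILE path when the plain environment variable is unset) reappears below for the presenters and publishers managers and in docker-compose.yml. A minimal sketch of that pattern, with an invented helper name and assuming the usual docker-secrets convention:

import os

def read_secret(env_name):
    """Return the secret from ENV, or from the file named by ENV_FILE (docker secrets)."""
    value = os.getenv(env_name)
    if not value:
        # Hypothetical helper; mirrors the fallback added for JWT_SECRET_KEY and API_KEY.
        with open(os.environ[f"{env_name}_FILE"], "r") as secret_file:
            value = secret_file.read()
    return value

jwt_secret_key = read_secret("JWT_SECRET_KEY")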
+ """ permissions = [] for permission_id in get_external_permissions_ids(): permissions.append(Permission.find(permission_id)) diff --git a/src/presenters/managers/auth_manager.py b/src/presenters/managers/auth_manager.py index 1ce891a05..5dca2e02c 100644 --- a/src/presenters/managers/auth_manager.py +++ b/src/presenters/managers/auth_manager.py @@ -9,6 +9,9 @@ import ssl api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() if os.getenv("SSL_VERIFICATION") == "False": try: diff --git a/src/publishers/managers/auth_manager.py b/src/publishers/managers/auth_manager.py index 693900e91..034f1dfaa 100644 --- a/src/publishers/managers/auth_manager.py +++ b/src/publishers/managers/auth_manager.py @@ -9,6 +9,9 @@ import ssl api_key = os.getenv("API_KEY") +if not api_key: + with open(os.getenv("API_KEY_FILE"), "r") as file: + api_key = file.read() if os.getenv("SSL_VERIFICATION") == "False": try: From 577122ea9a86d3f0aaf60b723fd19bbac1a0e73e Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 20 Feb 2024 23:40:44 +0100 Subject: [PATCH 008/146] fix of leftover --- docker/docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 641616f65..8d72cd77b 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -73,7 +73,6 @@ services: DEBUG: "true" DEBUG_SQL: "false" # to allow automatic initialisation of collectors/presenters/publishers - # COLLECTOR_PRESENTER_PUBLISHER_API_KEY_FILE: "/run/secrets/api_key" API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}" API_KEY_FILE: "/run/secrets/api_key" labels: From f575da97a6806d0dd88e389d9bca0c972eeac130 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 20:55:44 +0100 Subject: [PATCH 009/146] upgrade core to use current Alpine and Python --- docker/Dockerfile.core | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile.core b/docker/Dockerfile.core index 2747b3351..c9afe20a5 100644 --- a/docker/Dockerfile.core +++ b/docker/Dockerfile.core @@ -1,4 +1,4 @@ -FROM python:3.7-alpine3.14 AS build_shared +FROM python:3.12-alpine3.19 AS build_shared WORKDIR /build_shared/ @@ -8,7 +8,7 @@ RUN python -m build -FROM python:3.7-alpine3.14 AS production +FROM python:3.12-alpine3.19 AS production WORKDIR /app/ From 23272befad79ab6ca6d72a83bb61332d9588c995 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 20:57:17 +0100 Subject: [PATCH 010/146] upgrade requirements.txt, remove unnecessary items --- src/core/requirements.txt | 58 ++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 34 deletions(-) diff --git a/src/core/requirements.txt b/src/core/requirements.txt index 5784a289b..bb43dbdd7 100644 --- a/src/core/requirements.txt +++ b/src/core/requirements.txt @@ -1,38 +1,28 @@ -alembic==1.10.2 -certifi==2019.11.28 -Flask==1.1.4 -Flask-Cors==3.0.10 -Flask-JWT-Extended==3.24.1 -Flask-Migrate==2.5.2 +alembic==1.11.0 +Flask==3.0.2 +Flask-Cors==4.0.0 +Flask-JWT-Extended==4.6.0 +Flask-Migrate==4.0.5 flask-oidc==1.4.0 -Flask-RESTful==0.3.7 -Flask-Script==2.0.6 -Flask-SSE==0.2.1 -Flask-SQLAlchemy==2.5.1 -gevent==21.8.0 -greenlet==1.1.1 -gunicorn==20.0.4 -idna==2.9 -Jinja2==2.11.3 +Flask-RESTful==0.3.10 +#Flask-Script==2.0.6 +Flask-SSE==1.0.0 +Flask-SQLAlchemy==3.0.5 +gevent==24.2.1 +gunicorn==21.2.0 +Jinja2==3.1.3 ldap3==2.9.1 -Mako==1.1.0 -MarkupSafe==1.1.0 -marshmallow==3.18.0 +# markupsafe==2.0.1 #remove after Jinja2 upgraded +marshmallow==3.19.0 
marshmallow-enum==1.5.1 psycogreen==1.0.2 -psycopg2-binary==2.9.6 -PyJWT==1.7.1 -python-dateutil==2.8.1 -python-dotenv==0.10.3 -python-editor==1.0.4 -python-keycloak==0.23.0 -pytz==2019.3 -requests==2.26.0 -schedule==0.6.0 -six==1.13.0 -sseclient-py==1.7 -soupsieve==1.9.5 -SQLAlchemy==1.4.47 -urllib3==1.26.7 -Werkzeug==0.16.0 -pycryptodomex==3.17 +psycopg2==2.9.9 +PyJWT==2.8.0 +python-dotenv==1.0.1 +python-keycloak==3.9.1 +requests==2.31.0 +schedule==1.2.1 +sseclient-py==1.8.0 +SQLAlchemy==1.4.51 #upgrade +Werkzeug==3.0.1 #update +pycryptodomex==3.20 From 7ae6548387dec924887168ae16c9d9568ef05b4a Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:17:57 +0100 Subject: [PATCH 011/146] update code for newer version of flask_jwt_extended, f-strings --- src/core/managers/auth_manager.py | 305 ++++++++++++++++++++++++------ 1 file changed, 245 insertions(+), 60 deletions(-) diff --git a/src/core/managers/auth_manager.py b/src/core/managers/auth_manager.py index 12758b9e4..1616b1251 100644 --- a/src/core/managers/auth_manager.py +++ b/src/core/managers/auth_manager.py @@ -1,10 +1,34 @@ +"""This module contains the authentication manager for the Taranis-NG application. + +The authentication manager handles user authentication and authorization using different authenticators. +It provides functions for initializing the manager, authenticating users, refreshing authentication tokens, +performing access control checks, and retrieving user permissions. + +Classes: + ACLCheck: Enumeration for ACL checks. + +Functions: + cleanup_token_blacklist: Clean up the token blacklist by deleting tokens older than one day. + initialize: Initialize the authentication manager. + get_required_credentials: Get the required credentials for authentication. + authenticate: Authenticate the user with the provided credentials. + refresh: Refresh the authentication token for the specified user. + logout: Log out a user. + check_acl: Check the access control list (ACL) for the given item. + no_auth: Decorator that allows access to the decorated function without authentication. + get_id_name_by_acl: Get the corresponding ID name based on the given ACL. + get_user_from_api_key: Try to authenticate the user by API key. + get_perm_from_user: Get user permissions. + get_user_from_jwt_token: Try to authenticate the user by JWT token. + get_perm_from_jwt_token: Get user permissions from JWT token. +""" import os from datetime import datetime, timedelta from enum import Enum, auto from functools import wraps import jwt from flask import request -from flask_jwt_extended import JWTManager, get_jwt_claims, get_jwt_identity, verify_jwt_in_request, get_raw_jwt +from flask_jwt_extended import JWTManager, get_jwt_identity, verify_jwt_in_request, get_jwt from flask_jwt_extended.exceptions import JWTExtendedException from managers import log_manager, time_manager @@ -25,29 +49,44 @@ current_authenticator = None -api_key = os.getenv('API_KEY') +api_key = os.getenv("API_KEY") def cleanup_token_blacklist(app): + """Clean up the token blacklist by deleting tokens older than one day. + + Arguments: + app -- The Flask application object. + """ with app.app_context(): TokenBlacklist.delete_older(datetime.today() - timedelta(days=1)) def initialize(app): + """Initialize the authentication manager. + + This function sets up the authentication manager based on the configured authenticator. + + Arguments: + app: The Flask application object. 
+ + Returns: + None + """ global current_authenticator JWTManager(app) - which = os.getenv('TARANIS_NG_AUTHENTICATOR') + which = os.getenv("TARANIS_NG_AUTHENTICATOR") if which is not None: which = which.lower() - if which == 'openid': + if which == "openid": current_authenticator = OpenIDAuthenticator() - elif which == 'keycloak': + elif which == "keycloak": current_authenticator = KeycloakAuthenticator() - elif which == 'password': + elif which == "password": current_authenticator = PasswordAuthenticator() - elif which == 'ldap': + elif which == "ldap": current_authenticator = LDAPAuthenticator() else: current_authenticator = PasswordAuthenticator() @@ -58,22 +97,67 @@ def initialize(app): def get_required_credentials(): + """Get the required credentials for authentication. + + This function returns the required credentials for authentication. + + Returns: + The required credentials for authentication. + """ return current_authenticator.get_required_credentials() def authenticate(credentials): + """Authenticate the user with the provided credentials. + + Arguments: + credentials -- The user's credentials. + + Returns: + The result of the authentication process. + """ return current_authenticator.authenticate(credentials) def refresh(user): + """Refresh the authentication token for the specified user. + + Arguments: + user -- The user object for which the authentication token needs to be refreshed. + + Returns: + The refreshed authentication token. + """ return current_authenticator.refresh(user) def logout(token): + """Log out a user. + + Arguments: + token (str): The authentication token of the user. + + Returns: + None + """ return current_authenticator.logout(token) class ACLCheck(Enum): + """Enumeration for ACL checks. + + This enumeration defines the different types of access control checks that can be performed. + + Attributes: + OSINT_SOURCE_GROUP_ACCESS: Access check for OSINT source group. + NEWS_ITEM_ACCESS: Access check for news item. + NEWS_ITEM_MODIFY: Modify check for news item. + REPORT_ITEM_ACCESS: Access check for report item. + REPORT_ITEM_MODIFY: Modify check for report item. + PRODUCT_TYPE_ACCESS: Access check for product type. + PRODUCT_TYPE_MODIFY: Modify check for product type. + """ + OSINT_SOURCE_GROUP_ACCESS = auto() NEWS_ITEM_ACCESS = auto() NEWS_ITEM_MODIFY = auto() @@ -84,11 +168,23 @@ class ACLCheck(Enum): def check_acl(item_id, acl_check, user): - check_see = 'SEE' in str(acl_check) - check_access = 'ACCESS' in str(acl_check) - check_modify = 'MODIFY' in str(acl_check) + """Check the access control list (ACL) for the given item. + + This function determines whether the user has the necessary permissions to perform the specified ACL check on the item. + + Arguments: + item_id (int): The ID of the item. + acl_check (ACLCheck): The type of ACL check to perform. + user (User): The user performing the ACL check. + + Returns: + bool: True if the user is allowed to perform the ACL check, False otherwise. 
+ """ + check_see = "SEE" in str(acl_check) + check_access = "ACCESS" in str(acl_check) + check_modify = "MODIFY" in str(acl_check) allowed = False - item_type = 'UNKNOWN' + item_type = "UNKNOWN" if acl_check == ACLCheck.OSINT_SOURCE_GROUP_ACCESS: item_type = "OSINT Source Group" @@ -108,14 +204,23 @@ def check_acl(item_id, acl_check, user): if not allowed: if check_access: - log_manager.store_user_auth_error_activity(user, "Unauthorized access attempt to {}: {}".format(item_type, item_id)) + log_manager.store_user_auth_error_activity(user, f"Unauthorized access attempt to {item_type}: {item_id}") else: - log_manager.store_user_auth_error_activity(user, "Unauthorized modification attempt to {}: {}".format(item_type, item_id)) + log_manager.store_user_auth_error_activity(user, f"Unauthorized modification attempt to {item_type}: {item_id}") return allowed def no_auth(fn): + """Allow access to the decorated function without authentication. + + Arguments: + fn (function): The function to be decorated. + + Returns: + function: The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): log_manager.store_activity("API_ACCESS", None) @@ -125,6 +230,14 @@ def wrapper(*args, **kwargs): def get_id_name_by_acl(acl): + """Return the corresponding ID name based on the given ACL. + + Arguments: + acl -- The ACL object. + + Returns: + The ID name associated with the ACL. + """ if "NEWS_ITEM" in acl.name: return "item_id" elif "REPORT_ITEM" in acl.name: @@ -136,30 +249,28 @@ def get_id_name_by_acl(acl): def get_user_from_api_key(): - """ - Try to authenticate the user by API key + """Try to authenticate the user by API key. Returns: (user) user: User object or None """ try: - if 'Authorization' not in request.headers or not request.headers['Authorization'].__contains__('Bearer '): + if "Authorization" not in request.headers or not request.headers["Authorization"].__contains__("Bearer "): return None - key_string = request.headers['Authorization'].replace('Bearer ', '') + key_string = request.headers["Authorization"].replace("Bearer ", "") api_key = ApiKey.find_by_key(key_string) if not api_key: return None user = User.find_by_id(api_key.user_id) return user except Exception as ex: - log_manager.store_auth_error_activity("Apikey check presence error: " + str(ex)) + log_manager.store_auth_error_activity(f"ApiKey check presence error: {ex}") return None def get_perm_from_user(user): - """ - Get user permmisions + """Get user permissions. Returns: (all_user_perms) @@ -174,13 +285,12 @@ def get_perm_from_user(user): all_users_perms = all_users_perms.union(role_perms) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity("Get permmision from user error: " + str(ex)) + log_manager.store_auth_error_activity(f"Get permission from user error: {ex}") return None def get_user_from_jwt_token(): - """ - Try to authenticate the user by API key + """Try to authenticate the user by API key. Returns: (user) @@ -195,43 +305,55 @@ def get_user_from_jwt_token(): # does it encode an identity? 
identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity("Missing identity in JWT: " + get_raw_jwt()) + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_jwt()}") return None user = User.find(identity) if not user: - log_manager.store_auth_error_activity("Unknown identity in JWT: {}".format(identity)) + log_manager.store_auth_error_activity(f"Unknown identity in JWT: {identity}") return None return user def get_perm_from_jwt_token(user): - """ - Get user permmisions + """Get user permissions from JWT token. + + Args: + user: The user object. Returns: - (all_user_perms) - all_users_perms: set of user's Permissions or None + A set of user's permissions if available, otherwise None. """ try: # does it include permissions? - claims = get_jwt_claims() - if not claims or 'permissions' not in claims: + jwt_data = get_jwt() + claims = jwt_data["user_claims"] + if not claims or "permissions" not in claims: log_manager.store_user_auth_error_activity(user, "Missing permissions in JWT") return None - all_users_perms = set(claims['permissions']) + all_users_perms = set(claims["permissions"]) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity("Get permmision from JWT error: " + str(ex)) + log_manager.store_auth_error_activity(f"Get permission from JWT error: {ex}") return None def auth_required(required_permissions, *acl_args): + """Check if the user has the required permissions and ACL access. + + Arguments: + required_permissions (str or list): The required permissions for the user. + *acl_args: Additional arguments for ACL access. + + Returns: + The decorated function. + """ + def auth_required_wrap(fn): @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) if isinstance(required_permissions, list): required_permissions_set = set(required_permissions) @@ -251,12 +373,12 @@ def wrapper(*args, **kwargs): # is there at least one match with the permissions required by the call? if not required_permissions_set.intersection(active_permissions_set): - log_manager.store_user_auth_error_activity(user, "Insufficient permissions for user: {}".format(user.username)) + log_manager.store_user_auth_error_activity(user, f"Insufficient permissions for user: {user.username}") return error # if the object does have an ACL, do we match it? if len(acl_args) > 0 and not check_acl(kwargs[get_id_name_by_acl(acl_args[0])], acl_args[0], user): - log_manager.store_user_auth_error_activity(user, "Access denied by ACL for user: {}".format(user.username)) + log_manager.store_user_auth_error_activity(user, f"Access denied by ACL for user: {user.username}") return error # allow @@ -264,30 +386,40 @@ def wrapper(*args, **kwargs): return fn(*args, **kwargs) return wrapper + return auth_required_wrap def api_key_required(fn): + """Check for the presence of an API key in the Authorization header. + + Arguments: + fn: The function to be decorated. + + Returns: + The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) # do we have the authorization header? - if 'Authorization' not in request.headers: + if "Authorization" not in request.headers: log_manager.store_auth_error_activity("Missing Authorization header for external access") return error # is it properly encoded? 
- auth_header = request.headers['Authorization'] - if not auth_header.startswith('Bearer'): + auth_header = request.headers["Authorization"] + if not auth_header.startswith("Bearer"): log_manager.store_auth_error_activity("Missing Authorization Bearer for external access") return error # does it match some of our collector's keys? - api_key = auth_header.replace('Bearer ', '') + api_key = auth_header.replace("Bearer ", "") if not CollectorsNode.exists_by_api_key(api_key): api_key = log_manager.sensitive_value(api_key) - log_manager.store_auth_error_activity("Incorrect api key: " + api_key + " for external access") + log_manager.store_auth_error_activity(f"Incorrect api key: {api_key} for external access") return error # allow @@ -297,26 +429,36 @@ def wrapper(*args, **kwargs): def access_key_required(fn): + """Check for access key authorization. + + This decorator can be used to protect routes or functions that require access key authorization. + It checks if the request has a valid access key in the Authorization header. + + Arguments: + fn (function): The function to be decorated. + + Returns: + function: The decorated function. + """ + @wraps(fn) def wrapper(*args, **kwargs): - error = ({'error': 'not authorized'}, 401) + error = ({"error": "not authorized"}, 401) # do we have the authorization header? - if 'Authorization' not in request.headers: + if "Authorization" not in request.headers: log_manager.store_auth_error_activity("Missing Authorization header for remote access") return error # is it properly encoded? - auth_header = request.headers['Authorization'] - if not auth_header.startswith('Bearer'): + auth_header = request.headers["Authorization"] + if not auth_header.startswith("Bearer"): log_manager.store_auth_error_activity("Missing Authorization Bearer for remote access") return error # does it match some of our remote peer's access keys? - if not RemoteAccess.exists_by_access_key(auth_header.replace('Bearer ', '')): - log_manager.store_auth_error_activity("Incorrect access key: " - + auth_header.replace('Bearer ', - '') + " for remote access") + if not RemoteAccess.exists_by_access_key(auth_header.replace("Bearer ", "")): + log_manager.store_auth_error_activity(f"Incorrect access key: {auth_header.replace('Bearer ', '')} for remote access") return error # allow @@ -326,24 +468,32 @@ def wrapper(*args, **kwargs): def jwt_required(fn): + """Check if a valid JWT is present in the request headers. + + Arguments: + fn -- The function to be decorated. + + Returns: + The decorated function. 
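For orientation, a hedged sketch of how the auth_required decorator defined earlier in this module is typically attached to an endpoint; the permission id and the handler below are assumptions for illustration, only the item_id keyword follows get_id_name_by_acl():

# Hypothetical endpoint; "ASSESS_ACCESS" and the route handler are not taken from this patch.
@auth_required(["ASSESS_ACCESS"], ACLCheck.NEWS_ITEM_ACCESS)
def get_news_item(item_id):
    # The decorator has already verified permissions and the news-item ACL at this point.
    return {"id": item_id}, 200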
+ """ + @wraps(fn) def wrapper(*args, **kwargs): - try: verify_jwt_in_request() except JWTExtendedException: log_manager.store_auth_error_activity("Missing JWT") - return {'error': 'authorization required'}, 401 + return {"error": "authorization required"}, 401 identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity("Missing identity in JWT: {}".format(get_raw_jwt())) - return {'error': 'authorization failed'}, 401 + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_jwt()}") + return {"error": "authorization failed"}, 401 user = User.find(identity) if user is None: - log_manager.store_auth_error_activity("Unknown identity: ".format(identity)) - return {'error': 'authorization failed'}, 401 + log_manager.store_auth_error_activity(f"Unknown identity: {identity}") + return {"error": "authorization failed"}, 401 log_manager.store_user_activity(user, "API_ACCESS", "Access permitted") return fn(*args, **kwargs) @@ -352,11 +502,26 @@ def wrapper(*args, **kwargs): def get_access_key(): - return request.headers['Authorization'].replace('Bearer ', '') + """Get the access key from the request headers. + + This function retrieves the access key from the "Authorization" header of the request. + The access key is expected to be in the format "Bearer ". + + Returns: + The access key extracted from the request headers. + """ + return request.headers["Authorization"].replace("Bearer ", "") def get_user_from_jwt(): - # obtain the identity and current permissions + """Obtain the identity and current permissions. + + This function first tries to obtain the user from the API key. + If the user is not found, it then tries to obtain the user from the JWT token. + + Returns: + The user object if found, None otherwise. + """ user = get_user_from_api_key() if user is None: user = get_user_from_jwt_token() @@ -364,21 +529,41 @@ def get_user_from_jwt(): def decode_user_from_jwt(jwt_token): + """Decode the user from a JWT token. + + Arguments: + jwt_token (str): The JWT token to decode. + + Returns: + User: The user object decoded from the JWT token. + """ decoded = None try: - decoded = jwt.decode(jwt_token, os.getenv('JWT_SECRET_KEY')) + decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY")) except Exception as ex: # e.g. "Signature has expired" log_manager.store_auth_error_activity("Invalid JWT: " + str(ex)) if decoded is None: return None - return User.find(decoded['sub']) + return User.find(decoded["sub"]) def get_external_permissions_ids(): + """Return a list of external permissions IDs. + + This function returns a list of permission IDs that are related to accessing, creating, and configuring assets. + + Returns: + list: A list of external permission IDs. + """ return ["MY_ASSETS_ACCESS", "MY_ASSETS_CREATE", "MY_ASSETS_CONFIG"] def get_external_permissions(): + """Get the external permissions. + + Returns: + A list of external permissions. + """ permissions = [] for permission_id in get_external_permissions_ids(): permissions.append(Permission.find(permission_id)) From 2e607d4825bf86bc64f486f3fb12b646cbf5611f Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:26:22 +0100 Subject: [PATCH 012/146] remove flask_script, use click --- src/core/manage.py | 758 ++++++++++++++++++++++++--------------------- 1 file changed, 412 insertions(+), 346 deletions(-) diff --git a/src/core/manage.py b/src/core/manage.py index 2f0474ea0..5d315321e 100755 --- a/src/core/manage.py +++ b/src/core/manage.py @@ -1,4 +1,5 @@ #! 
/usr/bin/env python +"""This script is used to manage user accounts, roles, and collectors in the Taranis-NG application.""" from os import abort, getenv, read import random @@ -6,20 +7,18 @@ import string import time import logging +import click from flask import Flask -from flask_script import Manager,Command -from flask_script.commands import Option import traceback from managers import db_manager -from model import * +from model import user, role, permission, collectors_node, collector from model import apikey from remote.collectors_api import CollectorsApi app = Flask(__name__) -app.config.from_object('config.Config') -manager = Manager(app=app) -app.logger = logging.getLogger('gunicorn.error') +app.config.from_object("config.Config") +app.logger = logging.getLogger("gunicorn.error") app.logger.level = logging.INFO db_manager.initialize(app) @@ -28,48 +27,90 @@ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) while True: try: - s.connect((app.config.get('DB_URL'), 5432)) + s.connect((app.config.get("DB_URL"), 5432)) s.close() break - except socket.error as ex: + except socket.error: time.sleep(0.1) -# user account management -class AccountManagement(Command): - - option_list = ( - Option('--list', '-l', dest='opt_list', action='store_true'), - Option('--create', '-c', dest='opt_create', action='store_true'), - Option('--edit', '-e', dest='opt_edit', action='store_true'), - Option('--delete', '-d', dest='opt_delete', action='store_true'), - Option('--username', dest='opt_username'), - Option('--name', dest='opt_name', default=""), - Option('--password', dest='opt_password'), - Option('--roles', dest='opt_roles'), - ) - - def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name, opt_password, opt_roles): - - if (opt_list): - users = user.User.get_all() - for us in users: - roles = [] - for r in us.roles: - roles.append(r.id) - print('Id: {}\n\tUsername: {}\n\tName: {}\n\tRoles: {}'.format(us.id, us.username, us.name, roles)) - exit() - - if (opt_create): - if (not opt_username or not opt_password or not opt_roles): - app.logger.critical("Username, password or role not specified!") - abort() - if user.User.find(opt_username): - app.logger.critical("User already exists!") +@app.cli.command("account") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--username", "opt_username") +@click.option("--name", "opt_name", default="") +@click.option("--password", "opt_password") +@click.option("--roles", "opt_roles") +def account_management(opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name, opt_password, opt_roles): + """Manage user accounts. + + Args: + opt_list (bool): List all user accounts. + opt_create (bool): Create a new user account. + opt_edit (bool): Edit an existing user account. + opt_delete (bool): Delete an existing user account. + opt_username (str): Username of the user account. + opt_name (str): Name of the user. + opt_password (str): Password of the user account. + opt_roles (str): Roles assigned to the user account. 
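A rough usage sketch for the click commands registered on app.cli (assuming FLASK_APP points at this manage.py): they can be run from the shell, e.g. flask account --list, or exercised programmatically through Flask's CLI test runner:

# Minimal sketch; the command name "account" matches @app.cli.command("account") above.
runner = app.test_cli_runner()
result = runner.invoke(args=["account", "--list"])
print(result.exit_code, result.output)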
+ """ + if opt_list: + users = user.User.get_all() + for us in users: + roles = [] + for r in us.roles: + roles.append(r.id) + print("Id: {}\n\tUsername: {}\n\tName: {}\n\tRoles: {}".format(us.id, us.username, us.name, roles)) + exit() + + if opt_create: + if not opt_username or not opt_password or not opt_roles: + app.logger.critical("Username, password or role not specified!") + abort() + + if user.User.find(opt_username): + app.logger.critical("User already exists!") + abort() + + opt_roles = opt_roles.split(",") + roles = [] + for ro in opt_roles: + r = None + try: + r = role.Role.find(int(ro)) + except Exception: + r = role.Role.find_by_name(ro) + + if not r: + app.logger.critical("The specified role '{}' does not exist!".format(ro)) abort() - opt_roles = opt_roles.split(',') + roles.append(r) + + new_user = user.User(-1, opt_username, opt_name, opt_password, None, roles, None) + db_manager.db.session.add(new_user) + db_manager.db.session.commit() + + print("User '{}' with id {} created.".format(opt_name, new_user.id)) + + if opt_edit: + if not opt_username: + app.logger.critical("Username not specified!") + abort() + if not opt_password or not opt_roles: + app.logger.critical("Please specify a new password or role id!") + abort() + + if not user.User.find(opt_username): + app.logger.critical("User does not exist!") + abort() + + if opt_roles: + opt_roles = opt_roles.split(",") roles = [] + for ro in opt_roles: r = None try: @@ -83,362 +124,387 @@ def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name roles.append(r) - new_user = user.User(-1, opt_username, opt_name, opt_password, None, roles, None) - db_manager.db.session.add(new_user) - db_manager.db.session.commit() - - print('User \'{}\' with id {} created.'.format(opt_name, new_user.id)) - - if (opt_edit): - if (not opt_username): - app.logger.critical("Username not specified!") - abort() - if (not opt_password or not opt_roles): - app.logger.critical("Please specify a new password or role id!") - abort() + if opt_delete: + if not opt_username: + app.logger.critical("Username not specified!") + abort() - if not user.User.find(opt_username): - app.logger.critical("User does not exist!") - abort() + u = user.User.find(opt_username) + if not u: + app.logger.critical("User does not exist!") + abort() - if (opt_roles): - opt_roles = opt_roles.split(',') - roles = [] + user.User.delete(u.id) + print("The user '{}' has been deleted.".format(opt_username)) + + +@app.cli.command("role") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--filter", "-f", "opt_filter") +@click.option("--id", "opt_id") +@click.option("--name", "opt_name") +@click.option("--description", "opt_description", default="") +@click.option("--permissions", "opt_permissions") +def role_management(opt_list, opt_create, opt_edit, opt_delete, opt_filter, opt_id, opt_name, opt_description, opt_permissions): + """Manage roles. + + Args: + opt_list (bool): List all roles. + opt_create (bool): Create a new role. + opt_edit (bool): Edit an existing role. + opt_delete (bool): Delete an existing role. + opt_filter (str): Filter roles by name. + opt_id (str): ID of the role. + opt_name (str): Name of the role. + opt_description (str): Description of the role. + opt_permissions (str): Permissions assigned to the role. 
+ """ + if opt_list: + roles = None + if opt_filter: + roles = role.Role.get(opt_filter)[0] + else: + roles = role.Role.get_all() + + for ro in roles: + perms = [] + for p in ro.permissions: + perms.append(p.id) + print("Id: {}\n\tName: {}\n\tDescription: {}\n\tPermissions: {}".format(ro.id, ro.name, ro.description, perms)) + exit() - for ro in opt_roles: - r = None - try: - r = role.Role.find(int(ro)) - except Exception: - r = role.Role.find_by_name(ro) + if opt_create: + if not opt_name or not opt_permissions: + app.logger.critical("Role name or permissions not specified!") + abort() - if not r: - app.logger.critical("The specified role '{}' does not exist!".format(ro)) - abort() + opt_permissions = opt_permissions.split(",") + perms = [] - roles.append(r) + for pe in opt_permissions: + p = permission.Permission.find(pe) - if (opt_delete): - if (not opt_username): - app.logger.critical("Username not specified!") + if not p: + app.logger.critical("The specified permission '{}' does not exist!".format(pe)) abort() - u = user.User.find(opt_username) - if not u: - app.logger.critical("User does not exist!") - abort() + perms.append(p) - user.User.delete(u.id) - print('The user \'{}\' has been deleted.'.format(opt_username)) - -# role management -class RoleManagement(Command): - - option_list = ( - Option('--list', '-l', dest='opt_list', action='store_true'), - Option('--create', '-c', dest='opt_create', action='store_true'), - Option('--edit', '-e', dest='opt_edit', action='store_true'), - Option('--delete', '-d', dest='opt_delete', action='store_true'), - Option('--filter', '-f', dest='opt_filter'), - Option('--id', dest='opt_id'), - Option('--name', dest='opt_name'), - Option('--description', dest='opt_description', default=""), - Option('--permissions', dest='opt_permissions'), - ) - - def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_filter, opt_id, opt_name, opt_description, opt_permissions): - - if (opt_list): - roles = None - if (opt_filter): - roles = role.Role.get(opt_filter)[0] - else: - roles = role.Role.get_all() - - for ro in roles: - perms = [] - for p in ro.permissions: - perms.append(p.id) - print('Id: {}\n\tName: {}\n\tDescription: {}\n\tPermissions: {}'.format(ro.id, ro.name, ro.description, perms)) - exit() - - if (opt_create): - if (not opt_name or not opt_permissions): - app.logger.critical("Role name or permissions not specified!") - abort() + new_role = role.Role(-1, opt_name, opt_description, perms) + db_manager.db.session.add(new_role) + db_manager.db.session.commit() - opt_permissions = opt_permissions.split(',') - perms = [] + print("Role '{}' with id {} created.".format(opt_name, new_role.id)) - for pe in opt_permissions: - p = permission.Permission.find(pe) + if opt_edit: + if not opt_id or not opt_name: + app.logger.critical("Role id or name not specified!") + abort() + if not opt_name or not opt_description or not opt_permissions: + app.logger.critical("Please specify a new name, description or permissions!") + abort() - if not p: - app.logger.critical("The specified permission '{}' does not exist!".format(pe)) - abort() + if opt_delete: + if not opt_id or not opt_name: + app.logger.critical("Role id or name not specified!") + abort() - perms.append(p) - new_role = role.Role(-1, opt_name, opt_description, perms) - db_manager.db.session.add(new_role) - db_manager.db.session.commit() +@app.cli.command("collector") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) 
+@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--update", "-u", "opt_update", is_flag=True) +@click.option("--all", "-a", "opt_all", is_flag=True) +@click.option("--show-api-key", "opt_show_api_key", is_flag=True) +@click.option("--id", "opt_id") +@click.option("--name", "opt_name") +@click.option("--description", "opt_description", default="") +@click.option("--api-url", "opt_api_url") +@click.option("--api-key", "opt_api_key") +def collector_management( + opt_list, + opt_create, + opt_edit, + opt_delete, + opt_update, + opt_all, + opt_show_api_key, + opt_id, + opt_name, + opt_description, + opt_api_url, + opt_api_key, +): + """Manage collectors. + + Args: + opt_list (bool): List all collectors. + opt_create (bool): Create a new collector. + opt_edit (bool): Edit an existing collector. + opt_delete (bool): Delete an existing collector. + opt_update (bool): Update collectors. + opt_all (bool): Update all collectors. + opt_show_api_key (bool): Show API key in the output. + opt_id (str): ID of the collector. + opt_name (str): Name of the collector. + opt_description (str): Description of the collector. + opt_api_url (str): API URL of the collector. + opt_api_key (str): API key of the collector. + """ + if opt_list: + collector_nodes = collectors_node.CollectorsNode.get_all() + + for node in collector_nodes: + capabilities = [] + sources = [] + for c in node.collectors: + capabilities.append(c.type) + for s in c.sources: + sources.append("{} ({})".format(s.name, s.id)) + print( + "Id: {}\n\tName: {}\n\tURL: {}\n\t{}Created: {}\n\tLast seen: {}\n\tCapabilities: {}\n\tSources: {}".format( + node.id, + node.name, + node.api_url, + "API key: {}\n\t".format(node.api_key) if opt_show_api_key else "", + node.created, + node.last_seen, + capabilities, + sources, + ) + ) + exit() - print('Role \'{}\' with id {} created.'.format(opt_name, new_role.id)) + if opt_create: + if not opt_name or not opt_api_url or not opt_api_key: + app.logger.critical("Please specify the collector node name, API url and key!") + abort() - if (opt_edit): - if (not opt_id or not opt_name): - app.logger.critical("Role id or name not specified!") - abort() - if (not opt_name or not opt_description or not opt_permissions): - app.logger.critical("Please specify a new name, description or permissions!") - abort() + data = { + "id": "", + "name": opt_name, + "description": opt_description if opt_description else "", + "api_url": opt_api_url, + "api_key": opt_api_key, + "collectors": [], + "status": "0", + } + + print("Trying to contact a new collector node...") + retries, max_retries = 0, 30 + while retries < max_retries: + try: + collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info("") + break + except: # noqa: E722 + collectors_info = "Collector unavailable" + status_code = 0 + time.sleep(1) + retries += 1 + print("Retrying [{}/{}]...".format(retries, max_retries)) + + if status_code != 200: + print("Cannot create a new collector node!") + print("Response from collector: {}".format(collectors_info)) + abort() - if (opt_delete): - if (not opt_id or not opt_name): - app.logger.critical("Role id or name not specified!") - abort() + collectors = collector.Collector.create_all(collectors_info) + node = collectors_node.CollectorsNode.add_new(data, collectors) + collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info(node.id) -# collector management -class 
CollectorManagement(Command): - - option_list = ( - Option('--list', '-l', dest='opt_list', action='store_true'), - Option('--create', '-c', dest='opt_create', action='store_true'), - Option('--edit', '-e', dest='opt_edit', action='store_true'), - Option('--delete', '-d', dest='opt_delete', action='store_true'), - Option('--update', '-u', dest='opt_update', action='store_true'), - Option('--all', '-a', dest='opt_all', action='store_true'), - Option('--show-api-key', dest='opt_show_api_key', action='store_true'), - Option('--id', dest='opt_id'), - Option('--name', dest='opt_name'), - Option('--description', dest='opt_description', default=""), - Option('--api-url', dest='opt_api_url'), - Option('--api-key', dest='opt_api_key'), - ) - - def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_update, opt_all, opt_show_api_key, opt_id, opt_name, opt_description, opt_api_url, opt_api_key): - if (opt_list): - collector_nodes = collectors_node.CollectorsNode.get_all() - - for node in collector_nodes: - capabilities = [] - sources = [] - for c in node.collectors: - capabilities.append(c.type) - for s in c.sources: - sources.append('{} ({})'.format(s.name, s.id)) - print('Id: {}\n\tName: {}\n\tURL: {}\n\t{}Created: {}\n\tLast seen: {}\n\tCapabilities: {}\n\tSources: {}'.format(node.id, node.name, node.api_url, 'API key: {}\n\t'.format(node.api_key) if opt_show_api_key else '', node.created, node.last_seen, capabilities, sources)) - exit() - - if (opt_create): - if (not opt_name or not opt_api_url or not opt_api_key): - app.logger.critical("Please specify the collector node name, API url and key!") - abort() + print("Collector node '{}' with id {} created.".format(opt_name, node.id)) - data = { - 'id': '', - 'name': opt_name, - 'description': opt_description if opt_description else '', - 'api_url': opt_api_url, - 'api_key': opt_api_key, - 'collectors': [], - 'status': '0' - } - - print('Trying to contact a new collector node...') - retries, max_retries = 0, 30 - while retries < max_retries: - try: - collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info("") - break; - except: - collectors_info = 'Collector unavailable' - status_code = 0 - time.sleep(1) - retries += 1 - print('Retrying [{}/{}]...'.format(retries, max_retries)) - - - if status_code != 200: - print('Cannot create a new collector node!') - print('Response from collector: {}'.format(collectors_info)) - abort() + if opt_edit: + if not opt_id or not opt_name: + app.logger.critical("Collector node id or name not specified!") + abort() + if not opt_name or not opt_description or not opt_api_url or not opt_api_key: + app.logger.critical("Please specify a new name, description, API url or key!") + abort() - collectors = collector.Collector.create_all(collectors_info) - node = collectors_node.CollectorsNode.add_new(data, collectors) - collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info(node.id) + if opt_delete: + if not opt_id or not opt_name: + app.logger.critical("Collector node id or name not specified!") + abort() - print('Collector node \'{}\' with id {} created.'.format(opt_name, node.id)) + if opt_update: + if not opt_all and not opt_id and not opt_name: + app.logger.critical("Collector node id or name not specified!") + app.logger.critical("If you want to update all collectors, pass the --all parameter.") + abort() - if (opt_edit): - if (not opt_id or not opt_name): - app.logger.critical("Collector node id or name not specified!") + nodes = None + if 
opt_id: + nodes = [collectors_node.CollectorsNode.get_by_id(opt_id)] + if not nodes: + app.logger.critical("Collector node does not exit!") abort() - if (not opt_name or not opt_description or not opt_api_url or not opt_api_key): - app.logger.critical("Please specify a new name, description, API url or key!") + elif opt_name: + nodes, count = collectors_node.CollectorsNode.get(opt_name) + if not count: + app.logger.critical("Collector node does not exit!") abort() - - if (opt_delete): - if (not opt_id or not opt_name): - app.logger.critical("Collector node id or name not specified!") + else: + nodes, count = collectors_node.CollectorsNode.get(None) + if not count: + app.logger.critical("No collector nodes exist!") abort() - if (opt_update): - if (not opt_all and not opt_id and not opt_name): - app.logger.critical("Collector node id or name not specified!") - app.logger.critical("If you want to update all collectors, pass the --all parameter.") - abort() - - nodes = None - if opt_id: - nodes = [ collectors_node.CollectorsNode.get_by_id(opt_id) ] - if not nodes: - app.logger.critical("Collector node does not exit!") - abort() - elif opt_name: - nodes, count = collectors_node.CollectorsNode.get(opt_name) - if not count: - app.logger.critical("Collector node does not exit!") - abort() + for node in nodes: + # refresh collector node id + collectors_info, status_code = CollectorsApi(node.api_url, node.api_key).get_collectors_info(node.id) + if status_code == 200: + print("Collector node {} updated.".format(node.id)) else: - nodes, count = collectors_node.CollectorsNode.get(None) - if not count: - app.logger.critical("No collector nodes exist!") - abort() + print("Unable to update collector node {}.\n\tResponse: [{}] {}.".format(node.id, status_code, collectors_info)) - for node in nodes: - # refresh collector node id - collectors_info, status_code = CollectorsApi(node.api_url, node.api_key).get_collectors_info(node.id) - if status_code == 200: - print('Collector node {} updated.'.format(node.id)) - else: - print('Unable to update collector node {}.\n\tResponse: [{}] {}.'.format(node.id, status_code, collectors_info)) - -# dictionary management -class DictionaryManagement(Command): - - option_list = ( - Option('--upload-cve', dest='opt_cve', action='store_true'), - Option('--upload-cpe', dest='opt_cpe', action='store_true'), - ) - - def run(self, opt_cve, opt_cpe): - from model import attribute - - if (opt_cve): - cve_update_file = getenv('CVE_UPDATE_FILE') - if cve_update_file is None: - app.logger.critical("CVE_UPDATE_FILE is undefined") - abort() - self.upload_to(cve_update_file) - try: - attribute.Attribute.load_dictionaries('cve') - except Exception: - app.logger.debug(traceback.format_exc()) - app.logger.critical("File structure was not recognized!") - abort() +@app.cli.command("dictionary") +@click.option("--upload-cve", is_flag=True) +@click.option("--upload-cpe", is_flag=True) +def dictionary_management(upload_cve, upload_cpe): + """Manage the dictionaries by uploading and loading CVE and CPE files. - if (opt_cpe): - cpe_update_file = getenv('CPE_UPDATE_FILE') - if cpe_update_file is None: - app.logger.critical("CPE_UPDATE_FILE is undefined") - abort() + This function uploads the CVE and CPE files and loads the dictionaries accordingly. + If `upload_cve` is True, it uploads the CVE file and loads the CVE dictionary. + If `upload_cpe` is True, it uploads the CPE file and loads the CPE dictionary. 
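As a hedged usage sketch (file names and the FLASK_APP value are assumptions): the command expects the dictionary on standard input, because the upload_to() helper further below reads from file descriptor 0 and writes to the path given by CVE_UPDATE_FILE / CPE_UPDATE_FILE:

import os
import subprocess

# Hypothetical paths; pipe the dictionary file into the CLI command's stdin.
env = {**os.environ, "FLASK_APP": "manage.py", "CVE_UPDATE_FILE": "/tmp/cve_dictionary.xml"}
with open("cve_dictionary.xml", "rb") as dictionary_file:
    subprocess.run(["flask", "dictionary", "--upload-cve"], stdin=dictionary_file, check=True, env=env)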
- self.upload_to(cpe_update_file) - try: - attribute.Attribute.load_dictionaries('cpe') - except Exception: - app.logger.debug(traceback.format_exc()) - app.logger.critical("File structure was not recognized!") - abort() + Arguments: + upload_cve (bool): Indicates whether to upload the CVE file and load the CVE dictionary. + upload_cpe (bool): Indicates whether to upload the CPE file and load the CPE dictionary. + """ + from model import attribute - app.logger.error("Dictionary was uploaded.") - exit() + if upload_cve: + cve_update_file = getenv("CVE_UPDATE_FILE") + if cve_update_file is None: + app.logger.critical("CVE_UPDATE_FILE is undefined") + abort() - def upload_to(self, filename): + upload_to(cve_update_file) try: - with open(filename, 'wb') as out_file: - while True: - chunk = read(0, 131072) - if not chunk: - break - out_file.write(chunk) + attribute.Attribute.load_dictionaries("cve") except Exception: app.logger.debug(traceback.format_exc()) - app.logger.critical("Upload failed!") + app.logger.critical("File structure was not recognized!") abort() -# ApiKeys management -class ApiKeysManagement(Command): + if upload_cpe: + cpe_update_file = getenv("CPE_UPDATE_FILE") + if cpe_update_file is None: + app.logger.critical("CPE_UPDATE_FILE is undefined") + abort() - option_list = ( - Option('--list', '-l', dest='opt_list', action='store_true'), - Option('--create', '-c', dest='opt_create', action='store_true'), - Option('--delete', '-d', dest='opt_delete', action='store_true'), - Option('--name', '-n', dest='opt_name'), - Option('--user', '-u', dest='opt_user'), - Option('--expires', '-e', dest='opt_expires') - ) + upload_to(cpe_update_file) + try: + attribute.Attribute.load_dictionaries("cpe") + except Exception: + app.logger.debug(traceback.format_exc()) + app.logger.critical("File structure was not recognized!") + abort() + app.logger.error("Dictionary was uploaded.") + exit() - def run(self, opt_list, opt_create, opt_delete, opt_name, opt_user, opt_expires): - if (opt_list): - apikeys = apikey.ApiKey.get_all() - for k in apikeys: - print('Id: {}\n\tName: {}\n\tKey: {}\n\tCreated: {}\n\tUser id: {}\n\tExpires: {}'.format(k.id, k.name, k.key, k.created_at, k.user_id, k.expires_at)) - exit() +def upload_to(filename): + """Upload a file to the specified filename. - if (opt_create): - if (not opt_name): - app.logger.critical("Name not specified!") - abort() + Arguments: + filename (str): The name of the file to upload. + """ + try: + with open(filename, "wb") as out_file: + while True: + chunk = read(0, 131072) + if not chunk: + break + out_file.write(chunk) + except Exception: + app.logger.debug(traceback.format_exc()) + app.logger.critical("Upload failed!") + abort() + + +@app.cli.command("apikey") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--name", "-n", "opt_name") +@click.option("--user", "-u", "opt_user") +@click.option("--expires", "-e", "opt_expires") +def api_keys_management(opt_list, opt_create, opt_delete, opt_name, opt_user, opt_expires): + """Manage API keys. + + This function provides functionality to list, create, and delete API keys. + + Arguments: + opt_list (bool): If True, list all existing API keys. + opt_create (bool): If True, create a new API key. + opt_delete (bool): If True, delete an existing API key. + opt_name (str): The name of the API key. + opt_user (str): The user associated with the API key. 
+ opt_expires (str): The expiration date of the API key. + """ + if opt_list: + apikeys = apikey.ApiKey.get_all() + for k in apikeys: + print( + "Id: {}\n\tName: {}\n\tKey: {}\n\tCreated: {}\n\tUser id: {}\n\tExpires: {}".format( + k.id, k.name, k.key, k.created_at, k.user_id, k.expires_at + ) + ) + exit() - if apikey.ApiKey.find_by_name(opt_name): - app.logger.critical("Name already exists!") - abort() + if opt_create: + if not opt_name: + app.logger.critical("Name not specified!") + abort() - if (not opt_user): - app.logger.critical("User not specified!") - abort() + if apikey.ApiKey.find_by_name(opt_name): + app.logger.critical("Name already exists!") + abort() - u = None - if opt_user: - u = user.User.find(opt_user) - if not u: - app.logger.critical("The specified user '{}' does not exist!".format(opt_user)) - abort() + if not opt_user: + app.logger.critical("User not specified!") + abort() - data = { - #'id': None, - 'name': opt_name, - 'key': ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=40)), - 'user_id': u.id, - 'expires_at': opt_expires if opt_expires else None - } + u = None + if opt_user: + u = user.User.find(opt_user) + if not u: + app.logger.critical("The specified user '{}' does not exist!".format(opt_user)) + abort() - k = apikey.ApiKey.add_new(data) - print('ApiKey \'{}\' with id {} created.'.format(opt_name, k.id)) + data = { + # 'id': None, + "name": opt_name, + "key": "".join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=40)), + "user_id": u.id, + "expires_at": opt_expires if opt_expires else None, + } - if (opt_delete): - if (not opt_name): - app.logger.critical("Name not specified!") - abort() + k = apikey.ApiKey.add_new(data) + print("ApiKey '{}' with id {} created.".format(opt_name, k.id)) - k = apikey.ApiKey.find_by_name(opt_name) - if not k: - app.logger.critical("Name not found!") - abort() + if opt_delete: + if not opt_name: + app.logger.critical("Name not specified!") + abort() + + k = apikey.ApiKey.find_by_name(opt_name) + if not k: + app.logger.critical("Name not found!") + abort() - apikey.ApiKey.delete(k.id) - print('ApiKey \'{}\' has been deleted.'.format(opt_name)) + apikey.ApiKey.delete(k.id) + print("ApiKey '{}' has been deleted.".format(opt_name)) -manager.add_command('account', AccountManagement) -manager.add_command('role', RoleManagement) -manager.add_command('collector', CollectorManagement) -manager.add_command('dictionary', DictionaryManagement) -manager.add_command('apikey', ApiKeysManagement) -if __name__ == '__main__': - manager.run() +if __name__ == "__main__": + app.run() From 68eb7edc7c4bb83cd40e1156e591bcf6766d98a0 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:32:14 +0100 Subject: [PATCH 013/146] remove flask_script --- src/core/db_migration.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/src/core/db_migration.py b/src/core/db_migration.py index 2cf3a9af7..dc74bc7c4 100755 --- a/src/core/db_migration.py +++ b/src/core/db_migration.py @@ -1,16 +1,19 @@ #! /usr/bin/env python +"""This script is responsible for performing database migrations for the Taranis-NG application. +It initializes the Flask application, configures the database manager, and waits for the database to be ready. +Once the database is ready, it performs the necessary migrations using Flask-Migrate. 
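For illustration only (not part of this patch): once Migrate is bound to the app as below, pending Alembic revisions can be applied with the standard flask db upgrade command, or programmatically:

from flask_migrate import upgrade

# Hedged sketch; equivalent to running "flask db upgrade" against this app.
with app.app_context():
    upgrade()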
+""" import socket import time from flask import Flask -from flask_script import Manager -from flask_migrate import Migrate, MigrateCommand +from flask_migrate import Migrate from managers import db_manager -from model import * +from model import * # noqa: F401, F403 app = Flask(__name__) -app.config.from_object('config.Config') +app.config.from_object("config.Config") db_manager.initialize(app) @@ -18,17 +21,13 @@ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) while True: try: - s.connect((app.config.get('DB_URL'), 5432)) + s.connect((app.config.get("DB_URL"), 5432)) s.close() break - except socket.error as ex: + except socket.error: time.sleep(0.1) migrate = Migrate(app=app, db=db_manager.db) -manager = Manager(app=app) - -manager.add_command('db', MigrateCommand) - -if __name__ == '__main__': - manager.run() +if __name__ == "__main__": + app.run() From 9ae19281ba3fb736ff6243396ad6f8b6b011cec5 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:16:30 +0100 Subject: [PATCH 014/146] JWT fixes --- src/core/auth/base_authenticator.py | 82 ++++++++++++++++++++++++++--- src/core/managers/auth_manager.py | 2 +- 2 files changed, 75 insertions(+), 9 deletions(-) diff --git a/src/core/auth/base_authenticator.py b/src/core/auth/base_authenticator.py index 704638471..aa996606a 100644 --- a/src/core/auth/base_authenticator.py +++ b/src/core/auth/base_authenticator.py @@ -1,3 +1,8 @@ +"""This module contains the `BaseAuthenticator` class which provides the base functionality for authentication. + +The `BaseAuthenticator` class defines methods for authentication, token generation, token refresh, and user logout. +""" + from flask_jwt_extended import create_access_token from managers import log_manager @@ -6,42 +11,103 @@ class BaseAuthenticator: + """Base class for authenticators. + + This class provides the basic structure and methods for implementing an authenticator. + Subclasses should override the methods as needed for specific authentication mechanisms. + + Methods: + get_required_credentials: Return the required credentials for authentication. + authenticate: Authenticate the user based on the provided credentials. + refresh: Refresh the authentication token for the given user. + logout: Logout the user by adding the token to the blacklist. + initialize: Initialize the authenticator. + generate_error: Generate an error response for authentication failure. + generate_jwt: Generate a JSON Web Token (JWT) for the given username. + """ def get_required_credentials(self): + """Return the required credentials for authentication. + + Returns: + A list of required credentials. + """ return [] def authenticate(self, credentials): + """Authenticate the user based on the provided credentials. + + Arguments: + credentials -- The user's credentials. + + Returns: + The result of the authentication process. + """ return BaseAuthenticator.generate_error() def refresh(self, user): + """Refresh the authentication token for the given user. + + Arguments: + user -- The user object. + + Returns: + The refreshed authentication token. + """ return BaseAuthenticator.generate_jwt(user.username) @staticmethod def logout(token): + """Logout the user by adding the token to the blacklist. + + Arguments: + token -- The token to be blacklisted. + """ if token is not None: TokenBlacklist.add(token) @staticmethod def initialize(app): + """Initialize the authenticator. + + Arguments: + app -- The application object. 
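A hedged sketch of how this base class is meant to be extended; the token source and the hard-coded username below are invented for the example, only generate_jwt() and generate_error() come from the class above:

import os

class StaticTokenAuthenticator(BaseAuthenticator):
    """Toy authenticator that accepts one shared token from the environment."""

    def get_required_credentials(self):
        return ["token"]

    def authenticate(self, credentials):
        # Reuse the JWT generation and error response provided by the base class.
        if credentials.get("token") == os.getenv("STATIC_AUTH_TOKEN"):
            return BaseAuthenticator.generate_jwt("admin")
        return BaseAuthenticator.generate_error()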
+        """
         pass
 
     @staticmethod
     def generate_error():
-        return {'error': 'Authentication failed'}, 401
+        """Generate an error response for authentication failure.
+
+        Returns:
+            A tuple containing the error message and the HTTP status code.
+        """
+        return {"error": "Authentication failed"}, 401
 
     @staticmethod
     def generate_jwt(username):
+        """Generate a JSON Web Token (JWT) for the given username.
+
+        Arguments:
+            username (str): The username for which to generate the JWT.
+        Returns:
+            tuple: A tuple containing the generated access token and the HTTP status code.
+        """
         user = User.find(username)
         if not user:
-            log_manager.store_auth_error_activity("User does not exist after authentication: " + username)
+            log_manager.store_auth_error_activity(f"User does not exist after authentication: {username}")
             return BaseAuthenticator.generate_error()
         else:
             log_manager.store_user_activity(user, "LOGIN", "Successful")
-            access_token = create_access_token(identity=user.username,
-                                               user_claims={'id': user.id,
-                                                            'name': user.name,
-                                                            'organization_name': user.get_current_organization_name(),
-                                                            'permissions': user.get_permissions()})
+            access_token = create_access_token(
+                identity=user.username,
+                additional_claims={
+                    "id": user.id,
+                    "name": user.name,
+                    "organization_name": user.get_current_organization_name(),
+                    "permissions": user.get_permissions(),
+                },
+            )
 
-            return {'access_token': access_token}, 200
+            return {"access_token": access_token}, 200
diff --git a/src/core/managers/auth_manager.py b/src/core/managers/auth_manager.py
index 1616b1251..6d7d1a5a7 100644
--- a/src/core/managers/auth_manager.py
+++ b/src/core/managers/auth_manager.py
@@ -539,7 +539,7 @@ def decode_user_from_jwt(jwt_token):
     """
     decoded = None
     try:
-        decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY"))
+        decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY"), algorithms=["HS256"])
     except Exception as ex:  # e.g.
"Signature has expired" log_manager.store_auth_error_activity("Invalid JWT: " + str(ex)) if decoded is None: From 79ed0a2d34207be6754941c0864013b0aa5ae158 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:17:40 +0100 Subject: [PATCH 015/146] add git to build dependencies in dockerfile --- docker/Dockerfile.core | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile.core b/docker/Dockerfile.core index c9afe20a5..62ab44129 100644 --- a/docker/Dockerfile.core +++ b/docker/Dockerfile.core @@ -36,6 +36,7 @@ RUN \ apk add --no-cache --virtual .build-deps \ gcc \ g++ \ + git \ make \ glib-dev \ musl-dev \ From 200475e86455eb22c501d813a94fd491e13b2c30 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:17:57 +0100 Subject: [PATCH 016/146] update requirements --- src/core/requirements.txt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/core/requirements.txt b/src/core/requirements.txt index bb43dbdd7..60ff3c119 100644 --- a/src/core/requirements.txt +++ b/src/core/requirements.txt @@ -3,16 +3,15 @@ Flask==3.0.2 Flask-Cors==4.0.0 Flask-JWT-Extended==4.6.0 Flask-Migrate==4.0.5 -flask-oidc==1.4.0 +#https://github.com/puiterwijk/flask-oidc/issues/147 +git+https://github.com/puiterwijk/flask-oidc.git@b10e6bf881a3fe0c3972e4093648f2b77f32a97c Flask-RESTful==0.3.10 -#Flask-Script==2.0.6 Flask-SSE==1.0.0 Flask-SQLAlchemy==3.0.5 gevent==24.2.1 gunicorn==21.2.0 Jinja2==3.1.3 ldap3==2.9.1 -# markupsafe==2.0.1 #remove after Jinja2 upgraded marshmallow==3.19.0 marshmallow-enum==1.5.1 psycogreen==1.0.2 @@ -24,5 +23,5 @@ requests==2.31.0 schedule==1.2.1 sseclient-py==1.8.0 SQLAlchemy==1.4.51 #upgrade -Werkzeug==3.0.1 #update +Werkzeug==3.0.1 pycryptodomex==3.20 From 476f6f0a8150e465638d421cd57450970993c379 Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Thu, 7 Mar 2024 22:38:07 +0100 Subject: [PATCH 017/146] Fix error: Multiple head revisions are present for given argument 'head'; please specify a specific target revision --- src/core/migrations/versions/f0a4860000ff_cwe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/migrations/versions/f0a4860000ff_cwe.py b/src/core/migrations/versions/f0a4860000ff_cwe.py index 86822bad0..aec4b7e2d 100644 --- a/src/core/migrations/versions/f0a4860000ff_cwe.py +++ b/src/core/migrations/versions/f0a4860000ff_cwe.py @@ -1,7 +1,7 @@ """Add CWE attribute Revision ID: f0a4860000ff -Revises: d776f47ce040 +Revises: e87b34c74db0 Create Date: 2024-02-08 12:53:03.830779 """ @@ -18,7 +18,7 @@ # revision identifiers, used by Alembic. revision = "f0a4860000ff" -down_revision = "d776f47ce040" +down_revision = "e87b34c74db0" branch_labels = None depends_on = None From 6e18e784e776bbe084ac2e4be08cf9a4d59d30b2 Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Fri, 8 Mar 2024 15:44:34 +0100 Subject: [PATCH 018/146] Fix upgrade db error 2 (caused previous PR) --- src/core/migrations/versions/f0a4860000ff_cwe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/core/migrations/versions/f0a4860000ff_cwe.py b/src/core/migrations/versions/f0a4860000ff_cwe.py index aec4b7e2d..2ac1574d6 100644 --- a/src/core/migrations/versions/f0a4860000ff_cwe.py +++ b/src/core/migrations/versions/f0a4860000ff_cwe.py @@ -1,7 +1,7 @@ """Add CWE attribute Revision ID: f0a4860000ff -Revises: e87b34c74db0 +Revises: dfc12c30395b Create Date: 2024-02-08 12:53:03.830779 """ @@ -18,7 +18,7 @@ # revision identifiers, used by Alembic. 
revision = "f0a4860000ff" -down_revision = "e87b34c74db0" +down_revision = "dfc12c30395b" branch_labels = None depends_on = None From cc2e82aa9fb80d08295ab2631a6c27cb7665aa21 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sat, 9 Mar 2024 07:31:16 +0100 Subject: [PATCH 019/146] language priority and fallback --- src/gui/src/components/UserSettings.vue | 3 ++- src/gui/src/main.js | 11 +---------- src/gui/src/store/settings.js | 19 ++++++++++++------- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/src/gui/src/components/UserSettings.vue b/src/gui/src/components/UserSettings.vue index 0a29a8755..f6fcb884d 100644 --- a/src/gui/src/components/UserSettings.vue +++ b/src/gui/src/components/UserSettings.vue @@ -160,6 +160,7 @@ word_lists: [], selected_word_lists: [], languages: [ + { id: 'cs', value: 'Czech' }, { id: 'en', value: 'English' }, { id: 'sk', value: 'Slovak' }, ], @@ -240,4 +241,4 @@ } } } - \ No newline at end of file + diff --git a/src/gui/src/main.js b/src/gui/src/main.js index ef43a43b3..f6f3b196c 100644 --- a/src/gui/src/main.js +++ b/src/gui/src/main.js @@ -60,18 +60,9 @@ const vuetify = new Vuetify({ Vue.use(VueI18n); let bash_locale = "$VUE_APP_TARANIS_NG_LOCALE"; -let locale; - -if (bash_locale) { - locale = bash_locale; -} else if (typeof (process.env.VUE_APP_TARANIS_NG_LOCALE) !== "undefined") { - locale = process.env.VUE_APP_TARANIS_NG_LOCALE; -} else { - locale = navigator.language.split('-')[0]; -} const i18n = new VueI18n({ - locale: locale, + locale: bash_locale, fallbackLocale: 'en', messages }); diff --git a/src/gui/src/store/settings.js b/src/gui/src/store/settings.js index 919d5e91d..f340bcebb 100644 --- a/src/gui/src/store/settings.js +++ b/src/gui/src/store/settings.js @@ -109,12 +109,17 @@ const getters = { getProfileLanguage(state) { let lng = state.language; if (!lng) { - if (typeof (process.env.VUE_APP_TARANIS_NG_LOCALE) !== "undefined") { - lng = process.env.VUE_APP_TARANIS_NG_LOCALE; - } - if (!lng) { - lng = "en"; - } + lng = navigator.language.split('-')[0];; + } + if (!lng && typeof (process.env.VUE_APP_TARANIS_NG_LOCALE) !== "undefined") { + lng = process.env.VUE_APP_TARANIS_NG_LOCALE; + } + if (!lng) { + let bash_locale = "$VUE_APP_TARANIS_NG_LOCALE"; + lng = bash_locale; + } + if (!lng) { + lng = "en"; } return lng; } @@ -125,4 +130,4 @@ export const settings = { actions, mutations, getters -} \ No newline at end of file +} From 0172fa47cbdf04aca991421678153ac8b5917a03 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 19:00:19 +0100 Subject: [PATCH 020/146] default passwords in .env.example --- docker/.env.example | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/.env.example b/docker/.env.example index 4797d556b..f2c0311c1 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -12,9 +12,17 @@ TARANIS_NG_TAG=v23.12.1 # Timezone for all containers TZ=Europe/Bratislava +# Default passwords. Taranis-NG now supports Docker secrets. Use this in production! 
https://docs.docker.com/compose/use-secrets/ +# (rename files in secrets directory, change passwords stored there and remove variables below) +POSTGRES_PASSWORD=supersecret +POSTGRES_KEYCLOAK_PASSWORD=supersecret +JWT_SECRET_KEY=supersecret +COLLECTOR_PRESENTER_PUBLISHER_API_KEY=supersecret + # Paths CVE_UPDATE_FILE=/data/cve_dictionary.xml CPE_UPDATE_FILE=/data/cpe_dictionary.xml +CWE_UPDATE_FILE=/data/cwe_dictionary.xml # Web host and ports TARANIS_NG_HOSTNAME=localhost From 267466c5e22a7ca86b1879b388e800944325afeb Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 19:00:57 +0100 Subject: [PATCH 021/146] moved secrets to separate directory --- docker/{ => secrets}/api_key.txt.example | 0 docker/{ => secrets}/jwt_secret_key.txt.example | 0 docker/{ => secrets}/keycloak_password.txt.example | 0 docker/{ => secrets}/postgres_keycloak_password.txt.example | 0 docker/{ => secrets}/postgres_password.txt.example | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename docker/{ => secrets}/api_key.txt.example (100%) rename docker/{ => secrets}/jwt_secret_key.txt.example (100%) rename docker/{ => secrets}/keycloak_password.txt.example (100%) rename docker/{ => secrets}/postgres_keycloak_password.txt.example (100%) rename docker/{ => secrets}/postgres_password.txt.example (100%) diff --git a/docker/api_key.txt.example b/docker/secrets/api_key.txt.example similarity index 100% rename from docker/api_key.txt.example rename to docker/secrets/api_key.txt.example diff --git a/docker/jwt_secret_key.txt.example b/docker/secrets/jwt_secret_key.txt.example similarity index 100% rename from docker/jwt_secret_key.txt.example rename to docker/secrets/jwt_secret_key.txt.example diff --git a/docker/keycloak_password.txt.example b/docker/secrets/keycloak_password.txt.example similarity index 100% rename from docker/keycloak_password.txt.example rename to docker/secrets/keycloak_password.txt.example diff --git a/docker/postgres_keycloak_password.txt.example b/docker/secrets/postgres_keycloak_password.txt.example similarity index 100% rename from docker/postgres_keycloak_password.txt.example rename to docker/secrets/postgres_keycloak_password.txt.example diff --git a/docker/postgres_password.txt.example b/docker/secrets/postgres_password.txt.example similarity index 100% rename from docker/postgres_password.txt.example rename to docker/secrets/postgres_password.txt.example From fd5b22510317e7951742cd2b27265077be46f3ee Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 19:42:44 +0100 Subject: [PATCH 022/146] ignore .txt files in docker/secrets --- .gitignore | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 27082c274..d43bf0673 100644 --- a/.gitignore +++ b/.gitignore @@ -20,11 +20,7 @@ build/ # sensitive data not to be commited docker/.env -docker/postgres_password.txt -docker/postgres_keycloak_password.txt -docker/keycloak_password.txt -docker/jwt_secret_key.txt -docker/api_key.txt +docker/secrets/*.txt .env.local .env.*.local src/.env From cf0af1b7a0624091b77daee2405157424fe8cb1d Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 19:54:05 +0100 Subject: [PATCH 023/146] update docker compose files --- docker/docker-compose-keycloak-serv.yml | 4 ++-- docker/docker-compose-keycloak.yml | 2 +- docker/docker-compose.yml | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docker/docker-compose-keycloak-serv.yml b/docker/docker-compose-keycloak-serv.yml index 49f14d279..9a5b7eb44 100644 --- 
a/docker/docker-compose-keycloak-serv.yml +++ b/docker/docker-compose-keycloak-serv.yml @@ -67,9 +67,9 @@ services: secrets: postgres_keycloak_password: - file: postgres_keycloak_password.txt + file: secrets/postgres_keycloak_password.txt keycloak_password: - file: keycloak_password.txt + file: secrets/keycloak_password.txt volumes: keycloak_db_data: diff --git a/docker/docker-compose-keycloak.yml b/docker/docker-compose-keycloak.yml index 6a14c8295..068947262 100644 --- a/docker/docker-compose-keycloak.yml +++ b/docker/docker-compose-keycloak.yml @@ -28,4 +28,4 @@ services: secrets: keycloak_password: - file: keycloak_password.txt + file: secrets/keycloak_password.txt diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 8d72cd77b..53e156bda 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -292,11 +292,11 @@ services: secrets: postgres_password: - file: postgres_password.txt + file: secrets/postgres_password.txt jwt_secret_key: - file: jwt_secret_key.txt + file: secrets/jwt_secret_key.txt api_key: - file: api_key.txt + file: secrets/api_key.txt volumes: redis_conf: From 57eb2427824de9066c68bfe46e358e1f78bd1182 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 19:58:29 +0100 Subject: [PATCH 024/146] implement secrets to keycloak --- src/core/managers/external_auth_manager.py | 104 +++++++++++++++++---- 1 file changed, 86 insertions(+), 18 deletions(-) diff --git a/src/core/managers/external_auth_manager.py b/src/core/managers/external_auth_manager.py index 7faa619fa..a937df750 100644 --- a/src/core/managers/external_auth_manager.py +++ b/src/core/managers/external_auth_manager.py @@ -1,47 +1,115 @@ +"""This module provides functions for managing users in an external authentication system. + +The module includes functions for checking if Keycloak user management is enabled, +retrieving the Keycloak admin password, creating a KeycloakAdmin instance, +creating a user in the external authentication system, updating user information, +and deleting a user from the external authentication system. + +Functions: +- keycloak_user_management_enabled(): Check if Keycloak user management is enabled. +- get_keycloak_password(): Get the Keycloak admin password. +- get_keycloak_admin(): Return an instance of KeycloakAdmin. +- create_user(user_data): Create a user in the external authentication system. +- update_user(user_data, original_username): Update user information in the external authentication system. +- delete_user(username): Delete a user from the external authentication system. +""" + import os from keycloak import KeycloakAdmin def keycloak_user_management_enabled(): - if 'KEYCLOAK_USER_MANAGEMENT' in os.environ: - return os.getenv('KEYCLOAK_USER_MANAGEMENT').lower() == 'true' + """Check if Keycloak user management is enabled. + + Returns: + bool: True if Keycloak user management is enabled, False otherwise. + """ + if "KEYCLOAK_USER_MANAGEMENT" in os.environ: + return os.getenv("KEYCLOAK_USER_MANAGEMENT").lower() == "true" else: return False +def get_keycloak_password(): + """Get the Keycloak admin password. + + This function retrieves the Keycloak admin password from the environment variable + KEYCLOAK_ADMIN_PASSWORD. If the environment variable is not set, it reads the password + from the file specified by the environment variable KEYCLOAK_ADMIN_PASSWORD_FILE. + + Returns: + str: The Keycloak admin password. 
+    """
+    keycloak_password = os.getenv("KEYCLOAK_ADMIN_PASSWORD")
+    if not keycloak_password:
+        with open(os.getenv("KEYCLOAK_ADMIN_PASSWORD_FILE"), "r") as file:
+            keycloak_password = file.read()
+    return keycloak_password
+
+
 def get_keycloak_admin():
-    return KeycloakAdmin(server_url=os.getenv('KEYCLOAK_SERVER_URL'),
-                         username=os.getenv('KEYCLOAK_ADMIN_USERNAME'),
-                         password=os.getenv('KEYCLOAK_ADMIN_PASSWORD'),
-                         realm_name=os.getenv('KEYCLOAK_REALM_NAME'),
-                         client_secret_key=os.getenv('KEYCLOAK_CLIENT_SECRET_KEY'),
-                         verify=(os.getenv('KEYCLOAK_VERIFY').lower() == "true")
-                         )
+    """Return an instance of KeycloakAdmin.
+
+    This function retrieves the necessary environment variables and uses them to create
+    and configure a KeycloakAdmin object. The KeycloakAdmin object is then returned.
+
+    Returns:
+        KeycloakAdmin: An instance of the KeycloakAdmin class.
+    """
+    return KeycloakAdmin(
+        server_url=os.getenv("KEYCLOAK_SERVER_URL"),
+        username=os.getenv("KEYCLOAK_ADMIN_USERNAME"),
+        password=get_keycloak_password(),
+        realm_name=os.getenv("KEYCLOAK_REALM_NAME"),
+        client_secret_key=os.getenv("KEYCLOAK_CLIENT_SECRET_KEY"),
+        verify=(os.getenv("KEYCLOAK_VERIFY").lower() == "true"),
+    )
 
 
 def create_user(user_data):
+    """Create a user in the external authentication system.
+
+    Arguments:
+        user_data (dict): A dictionary containing user data.
+        - username (str): The username of the user.
+        - password (str): The password of the user.
+    """
     if keycloak_user_management_enabled():
         keycloak_admin = get_keycloak_admin()
-        keycloak_admin.create_user({'username': user_data['username'],
-                                    'credentials': [{'value': user_data['password'], 'type': 'password'}],
-                                    'enabled': True})
+        keycloak_admin.create_user(
+            {"username": user_data["username"], "credentials": [{"value": user_data["password"], "type": "password"}], "enabled": True}
+        )
 
 
 def update_user(user_data, original_username):
+    """Update user information in the external authentication system.
+
+    This function updates the user information in the external authentication system, such as Keycloak.
+
+    Arguments:
+        user_data (dict): A dictionary containing the updated user data.
+        original_username (str): The original username of the user.
+    """
     if keycloak_user_management_enabled():
-        if 'password' in user_data and user_data['password'] or original_username != user_data['username']:
+        if "password" in user_data and user_data["password"] or original_username != user_data["username"]:
             keycloak_admin = get_keycloak_admin()
             keycloak_user_id = keycloak_admin.get_user_id(original_username)
             if keycloak_user_id is not None:
-                if original_username != user_data['username']:
-                    keycloak_admin.update_user(user_id=keycloak_user_id, payload={'username': user_data['username']})
+                if original_username != user_data["username"]:
+                    keycloak_admin.update_user(user_id=keycloak_user_id, payload={"username": user_data["username"]})
 
-                if 'password' in user_data and user_data['password']:
-                    keycloak_admin.set_user_password(user_id=keycloak_user_id, password=user_data['password'],
-                                                     temporary=False)
+                if "password" in user_data and user_data["password"]:
+                    keycloak_admin.set_user_password(user_id=keycloak_user_id, password=user_data["password"], temporary=False)
 
 
 def delete_user(username):
+    """Delete a user from the external authentication system.
+
+    This function deletes a user from the external authentication system, such as Keycloak.
+
+    Arguments:
+        username (str): The username of the user to be deleted.
+ """ if keycloak_user_management_enabled(): keycloak_admin = get_keycloak_admin() keycloak_user_id = keycloak_admin.get_user_id(username) From b94ca54ce31f081b3a95085730a6a8c28f79d3c8 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 11 Mar 2024 20:08:59 +0100 Subject: [PATCH 025/146] update documentation --- docker/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/README.md b/docker/README.md index af039aa70..95740b3e1 100644 --- a/docker/README.md +++ b/docker/README.md @@ -56,7 +56,7 @@ git clone https://github.com/SK-CERT/Taranis-NG.git cd Taranis-NG ``` -_Then_, using your favorite text editor, please change the default passwords in `docker/.env` file. You can only skip this step when deploying a non-production testing environment. +_Then_, rename file `docker/.env.example` to `.env` and using your favorite text editor, please remove default passwords to use [Docker secrets](https://docs.docker.com/compose/use-secrets/). You should only use passwords in `.env` when deploying a non-production testing environment. ```bash vim docker/.env From 8e33a630ec799798791172ad83d7217d139e2ea8 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Mon, 11 Mar 2024 21:53:34 +0100 Subject: [PATCH 026/146] Create dependabot.yml --- .github/dependabot.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..2405ddfc7 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: "Docker" # See documentation for possible values + directory: "/docker" # Location of package manifests + schedule: + interval: "weekly" From e6649dce54afe2f84c6db1b50dc7c3e11cfff716 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Mon, 11 Mar 2024 21:56:06 +0100 Subject: [PATCH 027/146] Update dependabot.yml --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2405ddfc7..193cf596d 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,7 @@ version: 2 updates: - - package-ecosystem: "Docker" # See documentation for possible values + - package-ecosystem: "docker" # See documentation for possible values directory: "/docker" # Location of package manifests schedule: interval: "weekly" From 2febec05e7363746329e2d457f7f27155d7819ed Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Mon, 11 Mar 2024 22:01:41 +0100 Subject: [PATCH 028/146] Update dependabot.yml --- .github/dependabot.yml | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 193cf596d..57c88c0c0 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,7 +5,35 @@ version: 2 updates: - - package-ecosystem: "docker" # See documentation for possible values - directory: "/docker" # Location of package manifests + - package-ecosystem: "docker" + directory: "/docker" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/src/bots" + schedule: + interval: "weekly" + 
- package-ecosystem: "pip" + directory: "/src/collectors" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/src/core" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/src/core" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/src/presenters" + schedule: + interval: "weekly" + - package-ecosystem: "pip" + directory: "/src/publishers" + schedule: + interval: "weekly" + - package-ecosystem: "npm" + directory: "/src/gui" schedule: interval: "weekly" From ec020f4ace7400884c7575a94890d1e2f376b031 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Mon, 11 Mar 2024 22:09:41 +0100 Subject: [PATCH 029/146] Update dependabot.yml --- .github/dependabot.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 57c88c0c0..1a8b0b4f4 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -21,10 +21,6 @@ updates: directory: "/src/core" schedule: interval: "weekly" - - package-ecosystem: "pip" - directory: "/src/core" - schedule: - interval: "weekly" - package-ecosystem: "pip" directory: "/src/presenters" schedule: From 61353d81fd4e713fdf67923885713cf22ee63776 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Mon, 11 Mar 2024 22:18:33 +0100 Subject: [PATCH 030/146] Update dependabot.yml --- .github/dependabot.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 1a8b0b4f4..92f4d97ec 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -17,10 +17,10 @@ updates: directory: "/src/collectors" schedule: interval: "weekly" - - package-ecosystem: "pip" - directory: "/src/core" - schedule: - interval: "weekly" +# - package-ecosystem: "pip" +# directory: "/src/core" +# schedule: +# interval: "weekly" - package-ecosystem: "pip" directory: "/src/presenters" schedule: From 91b23a7671724ed143bab6703b9e26d9cfef8fdc Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 12 Mar 2024 09:41:46 +0100 Subject: [PATCH 031/146] prefer ENG --- src/gui/src/components/UserSettings.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gui/src/components/UserSettings.vue b/src/gui/src/components/UserSettings.vue index f6fcb884d..0c2ba545e 100644 --- a/src/gui/src/components/UserSettings.vue +++ b/src/gui/src/components/UserSettings.vue @@ -160,8 +160,8 @@ word_lists: [], selected_word_lists: [], languages: [ - { id: 'cs', value: 'Czech' }, { id: 'en', value: 'English' }, + { id: 'cs', value: 'Czech' }, { id: 'sk', value: 'Slovak' }, ], }), From 312d1fb53ae5dab628bed8252066a450b7dc0172 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:27:29 +0000 Subject: [PATCH 032/146] Bump python-dateutil from 2.8.2 to 2.9.0.post0 in /src/collectors Bumps [python-dateutil](https://github.com/dateutil/dateutil) from 2.8.2 to 2.9.0.post0. - [Release notes](https://github.com/dateutil/dateutil/releases) - [Changelog](https://github.com/dateutil/dateutil/blob/master/NEWS) - [Commits](https://github.com/dateutil/dateutil/compare/2.8.2...2.9.0.post0) --- updated-dependencies: - dependency-name: python-dateutil dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- src/collectors/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index 912bba00b..c8fc73e3d 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -10,7 +10,7 @@ gunicorn==21.2.0 marshmallow==3.21.0 marshmallow-enum==1.5.1 PySocks==1.7.1 -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 python-dotenv==1.0.1 requests==2.31.0 schedule==1.2.1 From 76cc5003eb94a13716281c260abd7c8688d0d435 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:27:29 +0000 Subject: [PATCH 033/146] Bump marshmallow from 3.21.0 to 3.21.1 in /src/collectors Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.0 to 3.21.1. - [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.0...3.21.1) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/collectors/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index 912bba00b..db72b61c5 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -7,7 +7,7 @@ Flask-Cors==4.0.0 Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==21.2.0 -marshmallow==3.21.0 +marshmallow==3.21.1 marshmallow-enum==1.5.1 PySocks==1.7.1 python-dateutil==2.8.2 From 62cf8c1d93e5dad0c3ada5b1bd3faae01215a18c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:27:30 +0000 Subject: [PATCH 034/146] Bump marshmallow from 3.21.0 to 3.21.1 in /src/bots Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.0 to 3.21.1. - [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.0...3.21.1) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/bots/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bots/requirements.txt b/src/bots/requirements.txt index ecff522c9..f63955063 100644 --- a/src/bots/requirements.txt +++ b/src/bots/requirements.txt @@ -3,7 +3,7 @@ Flask-Cors==4.0.0 Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==21.2.0 -marshmallow==3.21.0 +marshmallow==3.21.1 marshmallow-enum==1.5.1 oauthlib==3.2.2 PySocks==1.7.1 From 2fb7dd7865b494d79ed54462acc6531c140e0f21 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:27:31 +0000 Subject: [PATCH 035/146] Bump marshmallow from 3.21.0 to 3.21.1 in /src/publishers Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.0 to 3.21.1. - [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.0...3.21.1) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- src/publishers/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/publishers/requirements.txt b/src/publishers/requirements.txt index ef5db4b1b..e4df676ac 100644 --- a/src/publishers/requirements.txt +++ b/src/publishers/requirements.txt @@ -4,7 +4,7 @@ Flask-Cors==4.0.0 Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==21.2.0 -marshmallow==3.21.0 +marshmallow==3.21.1 marshmallow-enum==1.5.1 oauth2client==4.1.3 paramiko==3.4.0 From c7662d326d93ec2c4539dd991e689055069c43d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:27:35 +0000 Subject: [PATCH 036/146] Bump pymisp from 2.4.186 to 2.4.187 in /src/publishers Bumps [pymisp](https://github.com/MISP/PyMISP) from 2.4.186 to 2.4.187. - [Changelog](https://github.com/MISP/PyMISP/blob/main/CHANGELOG.txt) - [Commits](https://github.com/MISP/PyMISP/compare/v2.4.186...v2.4.187) --- updated-dependencies: - dependency-name: pymisp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/publishers/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/publishers/requirements.txt b/src/publishers/requirements.txt index ef5db4b1b..280dd08a4 100644 --- a/src/publishers/requirements.txt +++ b/src/publishers/requirements.txt @@ -12,4 +12,4 @@ python-dotenv==1.0.1 requests==2.31.0 tweepy==4.14.0 urllib3==2.2.1 -pymisp==2.4.186 +pymisp==2.4.187 From dfb3f5f35b8bb855fbc7ea0050497802252bf819 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:28:12 +0000 Subject: [PATCH 037/146] Bump weasyprint from 61.1 to 61.2 in /src/presenters Bumps [weasyprint](https://github.com/Kozea/WeasyPrint) from 61.1 to 61.2. - [Release notes](https://github.com/Kozea/WeasyPrint/releases) - [Changelog](https://github.com/Kozea/WeasyPrint/blob/main/docs/changelog.rst) - [Commits](https://github.com/Kozea/WeasyPrint/compare/v61.1...v61.2) --- updated-dependencies: - dependency-name: weasyprint dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- src/presenters/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/presenters/requirements.txt b/src/presenters/requirements.txt index dd996d5c9..dd92de445 100644 --- a/src/presenters/requirements.txt +++ b/src/presenters/requirements.txt @@ -7,4 +7,4 @@ Jinja2==3.1.3 marshmallow==3.21.0 marshmallow-enum==1.5.1 python-dotenv==1.0.1 -weasyprint==61.1 +weasyprint==61.2 From d4f75169315d4fbef3ce18ba46f48e1a876f6171 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:28:21 +0000 Subject: [PATCH 038/146] Bump marshmallow from 3.21.0 to 3.21.1 in /src/presenters Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.0 to 3.21.1. - [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.0...3.21.1) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- src/presenters/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/presenters/requirements.txt b/src/presenters/requirements.txt index dd996d5c9..87c0094d3 100644 --- a/src/presenters/requirements.txt +++ b/src/presenters/requirements.txt @@ -4,7 +4,7 @@ Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==21.2.0 Jinja2==3.1.3 -marshmallow==3.21.0 +marshmallow==3.21.1 marshmallow-enum==1.5.1 python-dotenv==1.0.1 weasyprint==61.1 From e1fdca873173903a5ebb9bfdc3cc82b9b8588cee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:39:03 +0000 Subject: [PATCH 039/146] Bump vue-cookies from 1.7.4 to 1.8.3 in /src/gui Bumps [vue-cookies](https://github.com/cmp-cc/vue-cookies) from 1.7.4 to 1.8.3. - [Commits](https://github.com/cmp-cc/vue-cookies/compare/v1.7.4...v1.8.3) --- updated-dependencies: - dependency-name: vue-cookies dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- src/gui/package-lock.json | 12 ++++++------ src/gui/package.json | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/gui/package-lock.json b/src/gui/package-lock.json index 0bf66c3c2..e42495d7f 100644 --- a/src/gui/package-lock.json +++ b/src/gui/package-lock.json @@ -16191,9 +16191,9 @@ } }, "vue-cookies": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/vue-cookies/-/vue-cookies-1.7.4.tgz", - "integrity": "sha512-mOS5Btr8V9zvAtkmQ7/TfqJIropOx7etDAgBywPCmHjvfJl2gFbH2XgoMghleLoyyMTi5eaJss0mPN7arMoslA==" + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/vue-cookies/-/vue-cookies-1.8.3.tgz", + "integrity": "sha512-VBRsyRMVdahBgFfh389TMHPmDdr4URDJNMk4FKSCfuNITs7+jitBDhwyL4RJd3WUsfOYNNjPAkfbehyH9AFuoA==" }, "vue-csv-import": { "version": "3.3.10", @@ -16321,9 +16321,9 @@ "dev": true }, "vue-i18n": { - "version": "8.26.7", - "resolved": "https://registry.npmjs.org/vue-i18n/-/vue-i18n-8.26.7.tgz", - "integrity": "sha512-7apa5PvRg1YCLoraE3lOgpCG8hJGupLCtywQWedWsgBbvF0TOgFvhitqK9xRH0PBGG1G8aiJz9oklyNDFfDxLg==" + "version": "8.28.2", + "resolved": "https://registry.npmjs.org/vue-i18n/-/vue-i18n-8.28.2.tgz", + "integrity": "sha512-C5GZjs1tYlAqjwymaaCPDjCyGo10ajUphiwA922jKt9n7KPpqR7oM1PCwYzhB/E7+nT3wfdG3oRre5raIT1rKA==" }, "vue-keypress": { "version": "1.4.0", diff --git a/src/gui/package.json b/src/gui/package.json index 0c60e41d0..66e28d1a2 100644 --- a/src/gui/package.json +++ b/src/gui/package.json @@ -41,7 +41,7 @@ "vue": "^2.6.14", "vue-ckeditor2": "^2.1.5", "vue-cli": "^2.9.6", - "vue-cookies": "^1.7.4", + "vue-cookies": "^1.8.3", "vue-csv-import": "^3.3.10", "vue-cvss": "^1.1.2", "vue-i18n": "^8.28.2", From 70845b4ef59979383d3e82878f9e1b8d230c7ea9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 09:39:48 +0000 Subject: [PATCH 040/146] Bump vue-i18n from 8.26.7 to 8.28.2 in /src/gui Bumps [vue-i18n](https://github.com/intlify/vue-i18n-next/tree/HEAD/packages/vue-i18n) from 8.26.7 to 8.28.2. - [Release notes](https://github.com/intlify/vue-i18n-next/releases) - [Changelog](https://github.com/intlify/vue-i18n-next/blob/master/CHANGELOG.md) - [Commits](https://github.com/intlify/vue-i18n-next/commits/HEAD/packages/vue-i18n) --- updated-dependencies: - dependency-name: vue-i18n dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot]
---
 src/gui/package-lock.json | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/gui/package-lock.json b/src/gui/package-lock.json
index 0bf66c3c2..e354a3280 100644
--- a/src/gui/package-lock.json
+++ b/src/gui/package-lock.json
@@ -16321,9 +16321,9 @@
       "dev": true
     },
     "vue-i18n": {
-      "version": "8.26.7",
-      "resolved": "https://registry.npmjs.org/vue-i18n/-/vue-i18n-8.26.7.tgz",
-      "integrity": "sha512-7apa5PvRg1YCLoraE3lOgpCG8hJGupLCtywQWedWsgBbvF0TOgFvhitqK9xRH0PBGG1G8aiJz9oklyNDFfDxLg=="
+      "version": "8.28.2",
+      "resolved": "https://registry.npmjs.org/vue-i18n/-/vue-i18n-8.28.2.tgz",
+      "integrity": "sha512-C5GZjs1tYlAqjwymaaCPDjCyGo10ajUphiwA922jKt9n7KPpqR7oM1PCwYzhB/E7+nT3wfdG3oRre5raIT1rKA=="
     },
     "vue-keypress": {
       "version": "1.4.0",

From 6df49b4d2071541f0518cc015c25c3517bd043e2 Mon Sep 17 00:00:00 2001
From: multiflexi
Date: Tue, 12 Mar 2024 11:31:18 +0100
Subject: [PATCH 041/146] rename example to sample, update doc

---
 docker/{.env.example => .env.sample} | 13 +++++++------
 docker/README.md | 2 +-
 .../{api_key.txt.example => api_key.txt.sample} | 0
 ...et_key.txt.example => jwt_secret_key.txt.sample} | 0
 ...ord.txt.example => keycloak_password.txt.sample} | 0
 ...xample => postgres_keycloak_password.txt.sample} | 0
 ...ord.txt.example => postgres_password.txt.sample} | 0
 7 files changed, 8 insertions(+), 7 deletions(-)
 rename docker/{.env.example => .env.sample} (63%)
 rename docker/secrets/{api_key.txt.example => api_key.txt.sample} (100%)
 rename docker/secrets/{jwt_secret_key.txt.example => jwt_secret_key.txt.sample} (100%)
 rename docker/secrets/{keycloak_password.txt.example => keycloak_password.txt.sample} (100%)
 rename docker/secrets/{postgres_keycloak_password.txt.example => postgres_keycloak_password.txt.sample} (100%)
 rename docker/secrets/{postgres_password.txt.example => postgres_password.txt.sample} (100%)

diff --git a/docker/.env.example b/docker/.env.sample
similarity index 63%
rename from docker/.env.example
rename to docker/.env.sample
index f2c0311c1..35515f37c 100644
--- a/docker/.env.example
+++ b/docker/.env.sample
@@ -12,12 +12,13 @@ TARANIS_NG_TAG=v23.12.1
 # Timezone for all containers
 TZ=Europe/Bratislava
 
-# Default passwords. Taranis-NG now supports Docker secrets. Use this in production! https://docs.docker.com/compose/use-secrets/
-# (rename files in secrets directory, change passwords stored there and remove variables below)
-POSTGRES_PASSWORD=supersecret
-POSTGRES_KEYCLOAK_PASSWORD=supersecret
-JWT_SECRET_KEY=supersecret
-COLLECTOR_PRESENTER_PUBLISHER_API_KEY=supersecret
+# Taranis-NG now supports Docker secrets. Use this in production! More here: https://docs.docker.com/compose/use-secrets/
+# These variables below are only for testing purposes and cannot be mixed with use of Docker secrets!
+# POSTGRES_PASSWORD=supersecret
+# POSTGRES_KEYCLOAK_PASSWORD=supersecret
+# JWT_SECRET_KEY=supersecret
+# COLLECTOR_PRESENTER_PUBLISHER_API_KEY=supersecret
+# KEYCLOAK_PASSWORD=supersecret
 
 # Paths
 CVE_UPDATE_FILE=/data/cve_dictionary.xml
diff --git a/docker/README.md b/docker/README.md
index 95740b3e1..c4982a9e8 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -56,7 +56,7 @@ git clone https://github.com/SK-CERT/Taranis-NG.git
 cd Taranis-NG
 ```
 
-_Then_, rename file `docker/.env.example` to `.env` and using your favorite text editor, please remove default passwords to use [Docker secrets](https://docs.docker.com/compose/use-secrets/).
You should only use passwords in `.env` when deploying a non-production testing environment. +_Then_, remove `.sample` extension from file `docker/.env.sample` and files in `docker/secrets`. Use your favorite text editor and change default passwords. Taranis NG uses [Docker secrets](https://docs.docker.com/compose/use-secrets/) to store sensitive data. (Saving passwords in variables defined in `docker/.env` is not advised and you will need to modify Docker compose YAML files to make it work correctly.) ```bash vim docker/.env diff --git a/docker/secrets/api_key.txt.example b/docker/secrets/api_key.txt.sample similarity index 100% rename from docker/secrets/api_key.txt.example rename to docker/secrets/api_key.txt.sample diff --git a/docker/secrets/jwt_secret_key.txt.example b/docker/secrets/jwt_secret_key.txt.sample similarity index 100% rename from docker/secrets/jwt_secret_key.txt.example rename to docker/secrets/jwt_secret_key.txt.sample diff --git a/docker/secrets/keycloak_password.txt.example b/docker/secrets/keycloak_password.txt.sample similarity index 100% rename from docker/secrets/keycloak_password.txt.example rename to docker/secrets/keycloak_password.txt.sample diff --git a/docker/secrets/postgres_keycloak_password.txt.example b/docker/secrets/postgres_keycloak_password.txt.sample similarity index 100% rename from docker/secrets/postgres_keycloak_password.txt.example rename to docker/secrets/postgres_keycloak_password.txt.sample diff --git a/docker/secrets/postgres_password.txt.example b/docker/secrets/postgres_password.txt.sample similarity index 100% rename from docker/secrets/postgres_password.txt.example rename to docker/secrets/postgres_password.txt.sample From ba3c58119e21e0954b3ec62283a877e853177822 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 12 Mar 2024 13:10:17 +0100 Subject: [PATCH 042/146] fixes --- .pre-commit-config.yaml | 7 ++++--- docker/docker-compose-keycloak-serv.yml | 10 +++++----- docker/secrets/api_key.txt.sample | 2 +- docker/secrets/jwt_secret_key.txt.sample | 2 +- docker/secrets/keycloak_password.txt.sample | 2 +- docker/secrets/keycloak_postgres_password.txt.sample | 1 + docker/secrets/postgres_keycloak_password.txt.sample | 1 - docker/secrets/postgres_password.txt.sample | 2 +- src/core/config.py | 2 ++ 9 files changed, 16 insertions(+), 13 deletions(-) create mode 100644 docker/secrets/keycloak_postgres_password.txt.sample delete mode 100644 docker/secrets/postgres_keycloak_password.txt.sample diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 982895371..decc7fcf1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,14 +1,14 @@ repos: - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 24.2.0 hooks: - id: black language_version: python3 args: [--line-length=142] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 7.0.0 hooks: - id: flake8 additional_dependencies: [flake8-docstrings] @@ -16,8 +16,9 @@ repos: types: ['python'] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-yaml - id: end-of-file-fixer + exclude: '.*\.sample$' - id: trailing-whitespace diff --git a/docker/docker-compose-keycloak-serv.yml b/docker/docker-compose-keycloak-serv.yml index 9a5b7eb44..bece0d5f7 100644 --- a/docker/docker-compose-keycloak-serv.yml +++ b/docker/docker-compose-keycloak-serv.yml @@ -7,7 +7,7 @@ services: environment: POSTGRES_DB: "taranis-ng-keycloak" POSTGRES_USER: "taranis-ng-keycloak" - POSTGRES_PASSWORD: /run/secrets/postgres_keycloak_password + 
POSTGRES_PASSWORD: /run/secrets/keycloak_postgres_password command: ["postgres", "-c", "shared_buffers=${DB_SHARED_BUFFERS}", "-c", "max_connections=${DB_MAX_CONNECTIONS}"] volumes: - "keycloak_db_data:/var/lib/postgresql/data" @@ -17,7 +17,7 @@ services: max-size: "200k" max-file: "10" secrets: - - postgres_keycloak_password + - keycloak_postgres_password keycloak: image: "skcert/taranis-ng-keycloak:${TARANIS_NG_TAG}" @@ -34,7 +34,7 @@ services: DB_DATABASE: taranis-ng-keycloak DB_USER: taranis-ng-keycloak DB_PASSWORD: "${POSTGRES_KEYCLOAK_PASSWORD}" - DB_PASSWORD_FILE: /run/secrets/postgres_keycloak_password + DB_PASSWORD_FILE: /run/secrets/keycloak_postgres_password KEYCLOAK_IMPORT: "/opt/jboss/keycloak/realm-export.json" KEYCLOAK_FRONTEND_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth" KEYCLOAK_USER: "${KEYCLOAK_USER}" @@ -62,12 +62,12 @@ services: traefik.http.routers.taranis-keycloak-443.middlewares: "taranis-keycloak-stripprefix" traefik.http.routers.taranis-keycloak-443.service: "taranis-keycloak" secrets: - - postgres_keycloak_password + - keycloak_postgres_password - keycloak_password secrets: postgres_keycloak_password: - file: secrets/postgres_keycloak_password.txt + file: secrets/keycloak_postgres_password.txt keycloak_password: file: secrets/keycloak_password.txt diff --git a/docker/secrets/api_key.txt.sample b/docker/secrets/api_key.txt.sample index b5f907866..d22774e91 100644 --- a/docker/secrets/api_key.txt.sample +++ b/docker/secrets/api_key.txt.sample @@ -1 +1 @@ -supersecret +supersecret \ No newline at end of file diff --git a/docker/secrets/jwt_secret_key.txt.sample b/docker/secrets/jwt_secret_key.txt.sample index b5f907866..d22774e91 100644 --- a/docker/secrets/jwt_secret_key.txt.sample +++ b/docker/secrets/jwt_secret_key.txt.sample @@ -1 +1 @@ -supersecret +supersecret \ No newline at end of file diff --git a/docker/secrets/keycloak_password.txt.sample b/docker/secrets/keycloak_password.txt.sample index b5f907866..d22774e91 100644 --- a/docker/secrets/keycloak_password.txt.sample +++ b/docker/secrets/keycloak_password.txt.sample @@ -1 +1 @@ -supersecret +supersecret \ No newline at end of file diff --git a/docker/secrets/keycloak_postgres_password.txt.sample b/docker/secrets/keycloak_postgres_password.txt.sample new file mode 100644 index 000000000..d22774e91 --- /dev/null +++ b/docker/secrets/keycloak_postgres_password.txt.sample @@ -0,0 +1 @@ +supersecret \ No newline at end of file diff --git a/docker/secrets/postgres_keycloak_password.txt.sample b/docker/secrets/postgres_keycloak_password.txt.sample deleted file mode 100644 index b5f907866..000000000 --- a/docker/secrets/postgres_keycloak_password.txt.sample +++ /dev/null @@ -1 +0,0 @@ -supersecret diff --git a/docker/secrets/postgres_password.txt.sample b/docker/secrets/postgres_password.txt.sample index b5f907866..d22774e91 100644 --- a/docker/secrets/postgres_password.txt.sample +++ b/docker/secrets/postgres_password.txt.sample @@ -1 +1 @@ -supersecret +supersecret \ No newline at end of file diff --git a/src/core/config.py b/src/core/config.py index 83bbd9ef1..022659f06 100755 --- a/src/core/config.py +++ b/src/core/config.py @@ -1,4 +1,5 @@ """This module contains the configuration class for Taranis-NG.""" + import os from dotenv import load_dotenv @@ -17,6 +18,7 @@ class Config(object): DB_DATABASE (str): The name of the database. DB_USER (str): The username for the database connection. DB_PASSWORD (str): The password for the database connection. 
+ DB_PASSWORD_FILE (str): The path to the file containing the database password. SQLALCHEMY_DATABASE_URI (str): The SQLAlchemy database URI. SQLALCHEMY_TRACK_MODIFICATIONS (bool): Whether to track modifications in SQLAlchemy. SQLALCHEMY_ECHO (bool): Whether to echo SQL queries in SQLAlchemy. From e8929cce59045cb2e6ba55442284586c73e213b1 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Thu, 14 Mar 2024 20:37:56 +0100 Subject: [PATCH 043/146] Update dependabot.yml --- .github/dependabot.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 92f4d97ec..7800ce3f1 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -17,6 +17,8 @@ updates: directory: "/src/collectors" schedule: interval: "weekly" + ignore: + - dependency-name: "slackclient" # - package-ecosystem: "pip" # directory: "/src/core" # schedule: From c5e885c6c282eee040266877759e59df46ddb6d4 Mon Sep 17 00:00:00 2001 From: martindruzbacky Date: Fri, 15 Mar 2024 22:10:01 +0100 Subject: [PATCH 044/146] Added ConfirmDelete component for confirming deletion of item + translation --- .../src/components/common/ConfirmDelete.vue | 43 +++++++++++++++++++ src/gui/src/i18n/cs/messages.js | 5 +++ src/gui/src/i18n/en/messages.js | 5 +++ src/gui/src/i18n/sk/messages.js | 7 +++ 4 files changed, 60 insertions(+) create mode 100644 src/gui/src/components/common/ConfirmDelete.vue diff --git a/src/gui/src/components/common/ConfirmDelete.vue b/src/gui/src/components/common/ConfirmDelete.vue new file mode 100644 index 000000000..eee387c1a --- /dev/null +++ b/src/gui/src/components/common/ConfirmDelete.vue @@ -0,0 +1,43 @@ + + + + + diff --git a/src/gui/src/i18n/cs/messages.js b/src/gui/src/i18n/cs/messages.js index a78ce7ecf..ffb5f4604 100644 --- a/src/gui/src/i18n/cs/messages.js +++ b/src/gui/src/i18n/cs/messages.js @@ -830,6 +830,11 @@ const messages_cs = { default_mode: "Mód zkratek 'původní'.", aggregate_no_group: "Nelze otevřít neagregovanou novinku, funguje pouze se skupinou novinek.", }, + confirmation: { + confirmation_text: "Jste si jistý, že chcete zmazat tuto položku?", + cancel: "Zrušit", + confirm: "Ano, zmazat", + }, }, publish: { diff --git a/src/gui/src/i18n/en/messages.js b/src/gui/src/i18n/en/messages.js index b7f2c82c7..6cdb22222 100644 --- a/src/gui/src/i18n/en/messages.js +++ b/src/gui/src/i18n/en/messages.js @@ -831,6 +831,11 @@ const messages_en = { default_mode: 'Back in shortcut mode "default".', aggregate_no_group: 'Cannot open unaggregated news item, only works with news item groups.', }, + confirmation: { + confirmation_text: "Are you sure you want to delete this item?", + cancel: "Cancel", + confirm: "Yes, Delete it", + }, }, assets: { diff --git a/src/gui/src/i18n/sk/messages.js b/src/gui/src/i18n/sk/messages.js index bb47b4ad4..e0ecfa012 100644 --- a/src/gui/src/i18n/sk/messages.js +++ b/src/gui/src/i18n/sk/messages.js @@ -85,6 +85,13 @@ const messages_sk = { name: "Meno", description: "Popis", successful: "Nový OSINT zdroj bol úspešne pridaný" + }, + assess: { + confirmation: { + confirmation_text: "Ste si istý, že chcete odstrániť túto položku?", + cancel: "Zrušiť", + confirm: "Áno, vymazať" + }, } }; From 77ff9ae8d3fa6b32770f47c553c90e47ae6d16de Mon Sep 17 00:00:00 2001 From: martindruzbacky Date: Fri, 15 Mar 2024 22:12:06 +0100 Subject: [PATCH 045/146] Added delete confirmation to assess section and single item detail --- src/gui/src/components/assess/CardAssess.vue | 23 +++++++++++++++---- .../assess/NewsItemSingleDetail.vue | 22 
++++++++++++++---- 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/src/gui/src/components/assess/CardAssess.vue b/src/gui/src/components/assess/CardAssess.vue index 29a7bf1d8..a9757197e 100644 --- a/src/gui/src/components/assess/CardAssess.vue +++ b/src/gui/src/components/assess/CardAssess.vue @@ -125,7 +125,7 @@ mdi-thumb-down - {{ UI.ICON.DELETE }} @@ -149,7 +149,11 @@ - + + +
({ toolbar: false, opened: false, - selected: false + selected: false, + showDeletePopup: false, }), computed: { canAccess() { @@ -421,7 +427,14 @@ export default { this.toolbar = false; this.$el.querySelector(".card .layout").classList.remove('focus'); } - } + }, + toggleDeletePopup() { + this.showDeletePopup = !this.showDeletePopup; + }, + handleDeletion() { + this.showDeletePopup = false; + this.cardItemToolbar('delete') + } }, created() { this.opened = this.aggregate_opened; diff --git a/src/gui/src/components/assess/NewsItemSingleDetail.vue b/src/gui/src/components/assess/NewsItemSingleDetail.vue index e2f415838..d6ff0b21e 100644 --- a/src/gui/src/components/assess/NewsItemSingleDetail.vue +++ b/src/gui/src/components/assess/NewsItemSingleDetail.vue @@ -1,4 +1,5 @@ diff --git a/src/gui/src/i18n/en/messages.js b/src/gui/src/i18n/en/messages.js index 6cdb22222..0496b0a34 100644 --- a/src/gui/src/i18n/en/messages.js +++ b/src/gui/src/i18n/en/messages.js @@ -832,7 +832,7 @@ const messages_en = { aggregate_no_group: 'Cannot open unaggregated news item, only works with news item groups.', }, confirmation: { - confirmation_text: "Are you sure you want to delete this item?", + confirmation_text: "Are you sure you want to delete following item?", cancel: "Cancel", confirm: "Yes, Delete it", }, diff --git a/src/gui/src/store/settings.js b/src/gui/src/store/settings.js index f340bcebb..5b990ffdd 100644 --- a/src/gui/src/store/settings.js +++ b/src/gui/src/store/settings.js @@ -109,7 +109,7 @@ const getters = { getProfileLanguage(state) { let lng = state.language; if (!lng) { - lng = navigator.language.split('-')[0];; + lng = navigator.language.split('-')[0]; } if (!lng && typeof (process.env.VUE_APP_TARANIS_NG_LOCALE) !== "undefined") { lng = process.env.VUE_APP_TARANIS_NG_LOCALE; From 265555fa03daf5ce5fda4f2076b75d1e2703ee7c Mon Sep 17 00:00:00 2001 From: martindruzbacky Date: Thu, 21 Mar 2024 13:16:53 +0100 Subject: [PATCH 054/146] Added title to delete card to remind what item is being deleted --- src/gui/src/components/analyze/CardAnalyze.vue | 2 +- src/gui/src/components/analyze/NewsItemSelector.vue | 3 ++- src/gui/src/components/assess/CardAssess.vue | 2 +- src/gui/src/components/assess/NewsItemSingleDetail.vue | 2 +- src/gui/src/components/assets/CardAsset.vue | 10 ++-------- src/gui/src/components/publish/CardProduct.vue | 2 +- 6 files changed, 8 insertions(+), 13 deletions(-) diff --git a/src/gui/src/components/analyze/CardAnalyze.vue b/src/gui/src/components/analyze/CardAnalyze.vue index 40ba683c1..0debc8363 100644 --- a/src/gui/src/components/analyze/CardAnalyze.vue +++ b/src/gui/src/components/analyze/CardAnalyze.vue @@ -64,7 +64,7 @@ diff --git a/src/gui/src/components/analyze/NewsItemSelector.vue b/src/gui/src/components/analyze/NewsItemSelector.vue index b11aa9b94..45dfb5cb3 100644 --- a/src/gui/src/components/analyze/NewsItemSelector.vue +++ b/src/gui/src/components/analyze/NewsItemSelector.vue @@ -68,7 +68,7 @@ @@ -249,6 +249,7 @@ toggleDeletePopup(aggregate) { this.showDeletePopup = !this.showDeletePopup; this.to_delete = aggregate; + }, }, diff --git a/src/gui/src/components/assess/CardAssess.vue b/src/gui/src/components/assess/CardAssess.vue index b2cb785d1..266c2bbcd 100644 --- a/src/gui/src/components/assess/CardAssess.vue +++ b/src/gui/src/components/assess/CardAssess.vue @@ -151,7 +151,7 @@
diff --git a/src/gui/src/components/assess/NewsItemSingleDetail.vue b/src/gui/src/components/assess/NewsItemSingleDetail.vue index d6ff0b21e..0759fbf08 100644 --- a/src/gui/src/components/assess/NewsItemSingleDetail.vue +++ b/src/gui/src/components/assess/NewsItemSingleDetail.vue @@ -146,7 +146,7 @@ diff --git a/src/gui/src/components/assets/CardAsset.vue b/src/gui/src/components/assets/CardAsset.vue index 74ce5ca65..b45b47ec4 100644 --- a/src/gui/src/components/assets/CardAsset.vue +++ b/src/gui/src/components/assets/CardAsset.vue @@ -47,9 +47,8 @@ - + @close="showDeletePopup = false" :title_name="card.title" + > @@ -70,7 +69,6 @@ toolbar: false, selected: false, status: "in_progress", - title_name: "", showDeletePopup: false, }), mixins: [AuthMixin], @@ -110,14 +108,10 @@ }, toggleDeletePopup() { this.showDeletePopup = !this.showDeletePopup; - this.setTitleForDeleteItem(); }, handleDeletion() { this.showDeletePopup = false; this.cardItemToolbar('delete') - }, - setTitleForDeleteItem(){ - this.title_name = this.card.title; } } } diff --git a/src/gui/src/components/publish/CardProduct.vue b/src/gui/src/components/publish/CardProduct.vue index 7125e5adc..f7f62948c 100644 --- a/src/gui/src/components/publish/CardProduct.vue +++ b/src/gui/src/components/publish/CardProduct.vue @@ -37,7 +37,7 @@ From 85acd3a27711da08de367277e05e202d1f673ee1 Mon Sep 17 00:00:00 2001 From: martindruzbacky Date: Thu, 21 Mar 2024 13:31:55 +0100 Subject: [PATCH 055/146] Aligned text of title to center if text is too short --- src/gui/src/components/common/ConfirmDelete.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/gui/src/components/common/ConfirmDelete.vue b/src/gui/src/components/common/ConfirmDelete.vue index ee1897050..716d094a0 100644 --- a/src/gui/src/components/common/ConfirmDelete.vue +++ b/src/gui/src/components/common/ConfirmDelete.vue @@ -2,7 +2,7 @@ {{ $t('assess.confirmation.confirmation_text') }} - {{ title_name }} + {{ title_name }} {{ UI.ICON.DELETE }} From 12f7e1e93497956a1e67cea52840c095c1950f80 Mon Sep 17 00:00:00 2001 From: martindruzbacky Date: Thu, 21 Mar 2024 15:06:03 +0100 Subject: [PATCH 056/146] Added confirm delete popUp --- src/gui/src/components/assess/CardAssess.vue | 2 -- .../components/common/card/CardCompact.vue | 19 +++++++++++++++++-- .../src/components/common/card/CardNode.vue | 19 +++++++++++++++++-- .../src/components/common/card/CardPreset.vue | 19 +++++++++++++++++-- .../config/osint_sources/CardGroup.vue | 19 +++++++++++++++++-- .../config/osint_sources/CardSource.vue | 19 +++++++++++++++++-- .../config/product_types/CardProductType.vue | 19 +++++++++++++++++-- .../src/components/config/user/CardUser.vue | 19 +++++++++++++++++-- 8 files changed, 119 insertions(+), 16 deletions(-) diff --git a/src/gui/src/components/assess/CardAssess.vue b/src/gui/src/components/assess/CardAssess.vue index 266c2bbcd..fa8b1b611 100644 --- a/src/gui/src/components/assess/CardAssess.vue +++ b/src/gui/src/components/assess/CardAssess.vue @@ -192,7 +192,6 @@ export default { toolbar: false, opened: false, selected: false, - title_name: "", showDeletePopup: false, }), computed: { @@ -431,7 +430,6 @@ export default { }, toggleDeletePopup() { this.showDeletePopup = !this.showDeletePopup; - this.title_name = this.card.title; }, handleDeletion() { this.showDeletePopup = false; diff --git a/src/gui/src/components/common/card/CardCompact.vue b/src/gui/src/components/common/card/CardCompact.vue index 7d8b596e7..05c7c3b2e 100644 --- 
a/src/gui/src/components/common/card/CardCompact.vue +++ b/src/gui/src/components/common/card/CardCompact.vue @@ -23,7 +23,7 @@ - + {{ UI.ICON.DELETE }} @@ -35,19 +35,27 @@ + + + + + diff --git a/src/gui/src/components/publish/NewProduct.vue b/src/gui/src/components/publish/NewProduct.vue index 3c2f4cf60..b37e0673b 100644 --- a/src/gui/src/components/publish/NewProduct.vue +++ b/src/gui/src/components/publish/NewProduct.vue @@ -32,8 +32,7 @@ v-model="selected_type" :items="product_types" item-text="title" - :label="$t('product.report_type')" - /> + :label="$t('product.report_type')" /> + :spellcheck="$store.state.settings.spellcheck" /> + :spellcheck="$store.state.settings.spellcheck" /> @@ -86,12 +83,18 @@ - + mdi-send-outline {{ $t('product.publish') }} + + + + @@ -111,14 +114,15 @@ \ No newline at end of file From d123e4b1365209f1f151170c2327bd415cf87599 Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Wed, 24 Apr 2024 13:27:18 +0200 Subject: [PATCH 070/146] - replaced ConfirmDelete component to universal MessageBox, no other functional changes - old ConfirmDelete deleted - code formating --- .../src/components/analyze/CardAnalyze.vue | 231 +++---- .../components/analyze/NewsItemSelector.vue | 156 ++--- src/gui/src/components/assess/CardAssess.vue | 567 +++++++++--------- .../src/components/assess/CardAssessItem.vue | 56 +- .../assess/NewsItemAggregateDetail.vue | 175 +++--- .../src/components/assess/NewsItemDetail.vue | 221 +++---- .../assess/NewsItemSingleDetail.vue | 274 ++++----- src/gui/src/components/assets/CardAsset.vue | 17 +- .../src/components/common/ConfirmDelete.vue | 37 -- src/gui/src/components/common/MessageBox.vue | 2 +- .../components/common/card/CardCompact.vue | 17 +- .../src/components/common/card/CardNode.vue | 19 +- .../src/components/common/card/CardPreset.vue | 19 +- .../config/osint_sources/CardGroup.vue | 121 ++-- .../config/osint_sources/CardSource.vue | 26 +- .../config/product_types/CardProductType.vue | 19 +- .../src/components/config/user/CardUser.vue | 19 +- .../src/components/publish/CardProduct.vue | 21 +- src/gui/src/components/publish/NewProduct.vue | 2 +- 19 files changed, 972 insertions(+), 1027 deletions(-) delete mode 100644 src/gui/src/components/common/ConfirmDelete.vue diff --git a/src/gui/src/components/analyze/CardAnalyze.vue b/src/gui/src/components/analyze/CardAnalyze.vue index ec28adff0..f98b5f5c8 100644 --- a/src/gui/src/components/analyze/CardAnalyze.vue +++ b/src/gui/src/components/analyze/CardAnalyze.vue @@ -62,135 +62,136 @@ - - - + + + + \ No newline at end of file diff --git a/src/gui/src/components/analyze/NewsItemSelector.vue b/src/gui/src/components/analyze/NewsItemSelector.vue index 45dfb5cb3..213e5f2ac 100644 --- a/src/gui/src/components/analyze/NewsItemSelector.vue +++ b/src/gui/src/components/analyze/NewsItemSelector.vue @@ -1,77 +1,77 @@ \ No newline at end of file diff --git a/src/gui/src/components/assess/CardAssess.vue b/src/gui/src/components/assess/CardAssess.vue index fa8b1b611..1227c1767 100644 --- a/src/gui/src/components/assess/CardAssess.vue +++ b/src/gui/src/components/assess/CardAssess.vue @@ -3,7 +3,7 @@ - + @@ -12,8 +12,7 @@ @click.stop="cardItemToolbar" @mouseenter.native="toolbar=true" @mouseleave.native="toolbar=cardFocus" - :color="selectedColor" - > + :color="selectedColor"> @@ -58,12 +57,11 @@ @click.stop="openCard"> mdi-arrow-down-drop-circle mdi-arrow-right-drop-circle -
{{ $t('card_item.aggregated_items') }}: {{ card.news_items.length }}
+
{{ $t('card_item.aggregated_items') }}: {{ card.news_items.length }}
@@ -122,7 +120,8 @@ - mdi-thumb-down + + mdi-thumb-down
- + +
+ @show-item-detail="showItemDetail(news_item)" />
\ No newline at end of file diff --git a/src/gui/src/components/assess/CardAssessItem.vue b/src/gui/src/components/assess/CardAssessItem.vue index 009943f68..f1f1a07b2 100644 --- a/src/gui/src/components/assess/CardAssessItem.vue +++ b/src/gui/src/components/assess/CardAssessItem.vue @@ -13,8 +13,7 @@ @click.stop="cardItemToolbar" @mouseenter.native="toolbar=true" @mouseleave.native="toolbar=cardFocus" - :color="selectedColor" - > + :color="selectedColor"> @@ -113,25 +112,26 @@
- + + diff --git a/src/gui/src/components/assess/NewsItemAggregateDetail.vue b/src/gui/src/components/assess/NewsItemAggregateDetail.vue index abd28c01e..1914d3342 100644 --- a/src/gui/src/components/assess/NewsItemAggregateDetail.vue +++ b/src/gui/src/components/assess/NewsItemAggregateDetail.vue @@ -1,99 +1,96 @@ - - diff --git a/src/gui/src/components/common/MessageBox.vue b/src/gui/src/components/common/MessageBox.vue index 416fbefe1..effd288e9 100644 --- a/src/gui/src/components/common/MessageBox.vue +++ b/src/gui/src/components/common/MessageBox.vue @@ -11,7 +11,7 @@ {{ message }} - {{ $t('common.messagebox.yes') }} + {{ $t('common.messagebox.yes') }} {{ $t('common.messagebox.cancel') }}
diff --git a/src/gui/src/components/common/card/CardCompact.vue b/src/gui/src/components/common/card/CardCompact.vue index 05c7c3b2e..beed86af2 100644 --- a/src/gui/src/components/common/card/CardCompact.vue +++ b/src/gui/src/components/common/card/CardCompact.vue @@ -36,9 +36,10 @@ - + + @@ -46,11 +47,11 @@ \ No newline at end of file diff --git a/src/gui/src/components/config/osint_sources/CardSource.vue b/src/gui/src/components/config/osint_sources/CardSource.vue index 12334a4c4..e1ae15732 100644 --- a/src/gui/src/components/config/osint_sources/CardSource.vue +++ b/src/gui/src/components/config/osint_sources/CardSource.vue @@ -3,14 +3,13 @@ - + + @click.stop="cardItemToolbar" :color="selected && multiSelect ? 'green lighten-4' : ''"> @@ -47,9 +46,10 @@ - + + @@ -57,13 +57,13 @@ \ No newline at end of file + diff --git a/src/gui/src/components/common/EnumSelector.vue b/src/gui/src/components/common/EnumSelector.vue index 4815466e1..d5b8496d0 100644 --- a/src/gui/src/components/common/EnumSelector.vue +++ b/src/gui/src/components/common/EnumSelector.vue @@ -9,44 +9,27 @@ {{ UI.ICON.CLOSE }} - {{$t('attribute.select_enum')}} + {{ $t('attribute.select_enum') }} - + @@ -59,92 +42,96 @@ \ No newline at end of file +} + diff --git a/src/gui/src/components/common/attribute/AttributeCWE.vue b/src/gui/src/components/common/attribute/AttributeCWE.vue index 52ab971d4..fb93649b2 100644 --- a/src/gui/src/components/common/attribute/AttributeCWE.vue +++ b/src/gui/src/components/common/attribute/AttributeCWE.vue @@ -17,6 +17,11 @@ :class="getLockedStyle(index)" :disabled="values[index].locked || !canModify" @fo="firstOne(index)"> + + diff --git a/src/gui/src/components/common/attribute/attributes_mixin.js b/src/gui/src/components/common/attribute/attributes_mixin.js index 588636a84..f648b284d 100644 --- a/src/gui/src/components/common/attribute/attributes_mixin.js +++ b/src/gui/src/components/common/attribute/attributes_mixin.js @@ -1,4 +1,4 @@ -import {getReportItemData, holdLockReportItem, lockReportItem, unlockReportItem, updateReportItem} from "@/api/analyze"; +import { getReportItemData, holdLockReportItem, lockReportItem, unlockReportItem, updateReportItem } from "@/api/analyze"; import AuthMixin from "@/services/auth/auth_mixin"; import Permissions from "@/services/auth/permissions"; @@ -35,6 +35,7 @@ var AttributesMixin = { methods: { enumSelected(data) { this.values[data.index].value = data.value + this.values[data.index].value_description = data.value_description this.onEdit(data.index) }, @@ -54,7 +55,7 @@ var AttributesMixin = { index: this.values.length, value: "", last_updated: data.attribute_last_updated, - user: {name: data.attribute_user} + user: { name: data.attribute_user } }) }) }) @@ -104,7 +105,7 @@ var AttributesMixin = { onFocus(field_index) { if (this.edit === true) { - lockReportItem(this.report_item_id, {'field_id': this.values[field_index].id}).then(() => { + lockReportItem(this.report_item_id, { 'field_id': this.values[field_index].id }).then(() => { }) } //window.console.debug("onFocus") @@ -115,7 +116,7 @@ var AttributesMixin = { this.onEdit(field_index) - unlockReportItem(this.report_item_id, {'field_id': this.values[field_index].id}).then(() => { + unlockReportItem(this.report_item_id, { 'field_id': this.values[field_index].id }).then(() => { }) } }, @@ -126,7 +127,7 @@ var AttributesMixin = { clearTimeout(this.key_timeout); let self = this; this.key_timeout = setTimeout(function () { - holdLockReportItem(self.report_item_id, {'field_id': 
self.values[field_index].id}).then(() => { + holdLockReportItem(self.report_item_id, { 'field_id': self.values[field_index].id }).then(() => { }) }, 1000); } @@ -140,6 +141,7 @@ var AttributesMixin = { data.attribute_id = this.values[field_index].id let value = this.values[field_index].value + let value_description = this.values[field_index].value_description if (this.attribute_group.attribute.type === 'CPE') { value = value.replace("*", "%") } else if (this.attribute_group.attribute.type === 'BOOLEAN') { @@ -150,11 +152,12 @@ var AttributesMixin = { } } data.attribute_value = value + data.value_description = value_description updateReportItem(this.report_item_id, data).then((update_response) => { getReportItemData(this.report_item_id, update_response.data).then((response) => { this.values[field_index].last_updated = response.data.attribute_last_updated - this.values[field_index].user = {name: response.data.attribute_user} + this.values[field_index].user = { name: response.data.attribute_user } }) }) } @@ -201,8 +204,9 @@ var AttributesMixin = { value = value === "true"; } this.values[i].value = value + this.values[i].value_description = data.value_description this.values[i].last_updated = data.attribute_last_updated - this.values[i].user = {name: data.attribute_user} + this.values[i].user = { name: data.attribute_user } break } } @@ -215,14 +219,15 @@ var AttributesMixin = { id: data.attribute_id, index: this.values.length, value: data.attribute_value, + value_description: data.value_description, binary_mime_type: data.binary_mime_type, binary_size: data.binary_size, binary_description: data.binary_description, last_updated: data.attribute_last_updated, - user: {name: data.attribute_user} + user: { name: data.attribute_user } }) if (this.attribute_group.attribute.type === 'ATTACHMENT') { - this.addFile(this.values[this.values.length-1]) + this.addFile(this.values[this.values.length - 1]) } }) } @@ -260,4 +265,4 @@ var AttributesMixin = { } }; -export default AttributesMixin \ No newline at end of file +export default AttributesMixin From 1ecc477440977b508e68e25e6ac3a97dc2da51c4 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 20 Jun 2024 20:57:51 +0200 Subject: [PATCH 113/146] modified model and schema --- src/core/model/report_item.py | 687 +++++++++++++++++++----- src/shared/shared/schema/report_item.py | 215 ++++++++ 2 files changed, 757 insertions(+), 145 deletions(-) diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py index b1bf3fd10..4bd749159 100644 --- a/src/core/model/report_item.py +++ b/src/core/model/report_item.py @@ -1,4 +1,24 @@ -from marshmallow import post_load +"""This module contains the ReportItem class and its associated schema. + +The ReportItem class represents a report item, which is a component of a larger report. It contains attributes such as ID, UUID, title, +created timestamp, and more. The class also includes methods for finding report item attributes by ID. + +The module also defines several schemas for creating and validating report items and their attributes. + +Classes: + - ReportItem: A class representing a report item. + - ReportItemAttribute: A class representing an attribute of a report item. + - ReportItemRemoteReportItem: A class representing the relationship between a report item and a remote report item. + +Schemas: + - NewReportItemSchema: Schema for creating a new report item. + - NewReportItemAttributeSchema: Schema for creating a new report item attribute. 
+ +Relationships: + - ReportItem has a many-to-one relationship with User and ReportItemType. + - ReportItem has a many-to-many relationship with NewsItemAggregate and ReportItem. +""" + from datetime import datetime import uuid as uuid_generator from sqlalchemy import orm, or_, func, text, and_ @@ -14,19 +34,76 @@ from shared.schema.acl_entry import ItemType from shared.schema.attribute import AttributeType from shared.schema.news_item import NewsItemAggregateIdSchema, NewsItemAggregateSchema -from shared.schema.report_item import ReportItemAttributeBaseSchema, ReportItemBaseSchema, ReportItemIdSchema, RemoteReportItemSchema, ReportItemRemoteSchema, ReportItemSchema, ReportItemPresentationSchema +from shared.schema.report_item import ( + ReportItemAttributeBaseSchema, + ReportItemBaseSchema, + ReportItemIdSchema, + RemoteReportItemSchema, + ReportItemRemoteSchema, + ReportItemSchema, + ReportItemPresentationSchema, +) class NewReportItemAttributeSchema(ReportItemAttributeBaseSchema): + """Schema for creating a new report item attribute. + + This schema is used to validate and deserialize data for creating a new report item attribute. + + Arguments: + ReportItemAttributeBaseSchema -- The base schema for report item attributes. + + Returns: + An instance of the ReportItemAttribute class. + """ @post_load def make_report_item_attribute(self, data, **kwargs): + """Create a report item attribute. + + This method takes in data and creates a ReportItemAttribute object. + + Arguments: + data (dict): A dictionary containing the data for the report item attribute. + + Returns: + ReportItemAttribute: The created report item attribute object. + """ return ReportItemAttribute(**data) class ReportItemAttribute(db.Model): + """A class representing an attribute of a report item. + + Attributes: + id (int): The unique identifier of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary data, if applicable. + binary_data (bytes): The binary data associated with the attribute. + binary_size (int): The size of the binary data in bytes. + binary_description (str): The description of the binary data. + created (datetime): The timestamp of when the attribute was created. + last_updated (datetime): The timestamp of when the attribute was last updated. + version (int): The version number of the attribute. + current (bool): Indicates whether the attribute is the current version. + attribute_group_item_id (int): The ID of the attribute group item that the attribute belongs to. + attribute_group_item (AttributeGroupItem): The attribute group item that the attribute belongs to. + attribute_group_item_title (str): The title of the attribute group item. + report_item_id (int): The ID of the report item that the attribute belongs to. + report_item (ReportItem): The report item that the attribute belongs to. + user_id (int): The ID of the user who created the attribute. + user (User): The user who created the attribute. + + Methods: + __init__: Initializes a new instance of the ReportItemAttribute class. + find: Finds a report item attribute by its ID. 
+ + """ + id = db.Column(db.Integer, primary_key=True) value = db.Column(db.String(), nullable=False) + value_description = db.Column(db.String()) binary_mime_type = db.Column(db.String()) binary_data = orm.deferred(db.Column(db.LargeBinary)) binary_size = db.Column(db.Integer) @@ -37,20 +114,42 @@ class ReportItemAttribute(db.Model): version = db.Column(db.Integer, default=1) current = db.Column(db.Boolean, default=True) - attribute_group_item_id = db.Column(db.Integer, db.ForeignKey('attribute_group_item.id')) + attribute_group_item_id = db.Column(db.Integer, db.ForeignKey("attribute_group_item.id")) attribute_group_item = db.relationship("AttributeGroupItem", viewonly=True) attribute_group_item_title = db.Column(db.String) - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), nullable=True) + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), nullable=True) report_item = db.relationship("ReportItem") - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) + user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=True) user = db.relationship("User") - def __init__(self, id, value, binary_mime_type, binary_size, binary_description, attribute_group_item_id, - attribute_group_item_title): + def __init__( + self, + id, + value, + value_description, + binary_mime_type, + binary_size, + binary_description, + attribute_group_item_id, + attribute_group_item_title, + ): + """Initialize a ReportItem object. + + Arguments: + id (int): The ID of the report item. + value (str): The value of the report item. + value_description (str): The description of the value. + binary_mime_type (str): The MIME type of the binary data. + binary_size (int): The size of the binary data. + binary_description (str): The description of the binary data. + attribute_group_item_id (int): The ID of the attribute group item. + attribute_group_item_title (str): The title of the attribute group item. + """ self.id = None self.value = value + self.value_description = value_description self.binary_mime_type = binary_mime_type self.binary_size = binary_size self.binary_description = binary_description @@ -59,26 +158,83 @@ def __init__(self, id, value, binary_mime_type, binary_size, binary_description, @classmethod def find(cls, attribute_id): + """Find a report item attribute by its ID. + + Args: + attribute_id (int): The ID of the attribute to find. + + Returns: + ReportItemAttribute: The report item attribute with the specified ID, or None if not found. + + """ report_item_attribute = cls.query.get(attribute_id) return report_item_attribute class NewReportItemSchema(ReportItemBaseSchema): + """Schema for creating a new report item. + + This schema defines the structure and validation rules for creating a new report item. + + Arguments: + ReportItemBaseSchema -- The base schema for report items. + + Returns: + An instance of the NewReportItemSchema class. + """ + news_item_aggregates = fields.Nested(NewsItemAggregateIdSchema, many=True, missing=[]) remote_report_items = fields.Nested(ReportItemIdSchema, many=True, missing=[]) attributes = fields.Nested(NewReportItemAttributeSchema, many=True) @post_load def make(self, data, **kwargs): + """Create a new ReportItem object. + + Arguments: + data (dict): A dictionary containing the data for the ReportItem. + + Returns: + ReportItem: A new ReportItem object. 
+ """ return ReportItem(**data) class ReportItemRemoteReportItem(db.Model): - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), primary_key=True) - remote_report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), primary_key=True) + """A class representing the relationship between a report item and a remote report item. + + Arguments: + db -- The database object used for defining the model. + """ + + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), primary_key=True) + remote_report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), primary_key=True) class ReportItem(db.Model): + """A class representing a report item. + + Attributes: + id (int): The unique identifier of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (datetime): The datetime when the report item was created. + last_updated (datetime): The datetime when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + user_id (int): The ID of the user associated with the report item. + user (User): The user associated with the report item. + remote_user (str): The remote user associated with the report item. + report_item_type_id (int): The ID of the report item type associated with the report item. + report_item_type (ReportItemType): The report item type associated with the report item. + news_item_aggregates (list): The list of news item aggregates associated with the report item. + remote_report_items (list): The list of remote report items associated with the report item. + attributes (list): The list of attributes associated with the report item. + report_item_cpes (list): The list of report item CPES associated with the report item. + subtitle (str): The subtitle of the report item. + tag (str): The tag of the report item. 
+ """ + id = db.Column(db.Integer, primary_key=True) uuid = db.Column(db.String(64)) @@ -89,26 +245,40 @@ class ReportItem(db.Model): last_updated = db.Column(db.DateTime, default=datetime.now) completed = db.Column(db.Boolean, default=False) - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) + user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=True) user = db.relationship("User", viewonly=True) remote_user = db.Column(db.String()) - report_item_type_id = db.Column(db.Integer, db.ForeignKey('report_item_type.id'), nullable=True) + report_item_type_id = db.Column(db.Integer, db.ForeignKey("report_item_type.id"), nullable=True) report_item_type = db.relationship("ReportItemType", viewonly=True) - news_item_aggregates = db.relationship("NewsItemAggregate", secondary='report_item_news_item_aggregate') + news_item_aggregates = db.relationship("NewsItemAggregate", secondary="report_item_news_item_aggregate") - remote_report_items = db.relationship("ReportItem", secondary='report_item_remote_report_item', - primaryjoin=ReportItemRemoteReportItem.report_item_id == id, - secondaryjoin=ReportItemRemoteReportItem.remote_report_item_id == id) + remote_report_items = db.relationship( + "ReportItem", + secondary="report_item_remote_report_item", + primaryjoin=ReportItemRemoteReportItem.report_item_id == id, + secondaryjoin=ReportItemRemoteReportItem.remote_report_item_id == id, + ) - attributes = db.relationship('ReportItemAttribute', back_populates="report_item", cascade="all, delete-orphan") + attributes = db.relationship("ReportItemAttribute", back_populates="report_item", cascade="all, delete-orphan") report_item_cpes = db.relationship("ReportItemCpe", cascade="all, delete-orphan") - def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item_aggregates, remote_report_items, - attributes, completed): - + def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item_aggregates, remote_report_items, attributes, completed): + """Initialize a new instance of the ReportItem class. + + Arguments: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item's title. + report_item_type_id (int): The ID of the report item type. + news_item_aggregates (list): A list of news item aggregates associated with the report item. + remote_report_items (list): A list of remote report items associated with the report item. + attributes (dict): A dictionary of attributes for the report item. + completed (bool): Indicates whether the report item is completed or not. + """ self.id = id if uuid is None: @@ -135,35 +305,75 @@ def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item @orm.reconstructor def reconstruct(self): + """Reconstructs the report item. + + This method clears the subtitle, sets the tag to "mdi-file-table-outline", + and sorts the attributes based on the attribute group index, attribute group item index, and attribute ID. + """ self.subtitle = "" self.tag = "mdi-file-table-outline" self.attributes.sort(key=lambda obj: (obj.attribute_group_item.attribute_group.index, obj.attribute_group_item.index, obj.id)) @classmethod def count_all(cls, is_completed): + """Count the number of report items based on completion status. + + Arguments: + is_completed (bool): A flag indicating whether to count completed or incomplete report items. 
+ Returns: + int: The count of report items matching the completion status. + """ return cls.query.filter_by(completed=is_completed).count() @classmethod def find(cls, report_item_id): + """Find a report item by its ID. + + Arguments: + report_item_id (int): The ID of the report item. + Returns: + ReportItem: The report item with the specified ID. + """ report_item = cls.query.get(report_item_id) return report_item @classmethod def find_by_uuid(cls, report_item_uuid): + """Find a report item by its UUID. + + Arguments: + report_item_uuid (str): The UUID of the report item. + Returns: + ReportItem: The report item with the specified UUID. + """ report_item = cls.query.filter_by(uuid=report_item_uuid) return report_item @classmethod def allowed_with_acl(cls, report_item_id, user, see, access, modify): - - query = db.session.query(ReportItem.id).distinct().group_by(ReportItem.id).filter( - ReportItem.id == report_item_id) - - query = query.outerjoin(ACLEntry, or_(and_(ReportItem.uuid == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM), - and_(cast(ReportItem.report_item_type_id, - sqlalchemy.String) == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE))) + """Check if the user is allowed to perform actions on a report item based on ACL. + + Arguments: + report_item_id (int): The ID of the report item. + user (User): The user object. + see (bool): Whether the user can see the report item. + access (bool): Whether the user can access the report item. + modify (bool): Whether the user can modify the report item. + Returns: + bool: True if the user is allowed, False otherwise. + """ + query = db.session.query(ReportItem.id).distinct().group_by(ReportItem.id).filter(ReportItem.id == report_item_id) + + query = query.outerjoin( + ACLEntry, + or_( + and_(ReportItem.uuid == ACLEntry.item_id, ACLEntry.item_type == ItemType.REPORT_ITEM), + and_( + cast(ReportItem.report_item_type_id, sqlalchemy.String) == ACLEntry.item_id, + ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE, + ), + ), + ) query = ACLEntry.apply_query(query, user, see, access, modify) @@ -171,14 +381,29 @@ def allowed_with_acl(cls, report_item_id, user, see, access, modify): @classmethod def get_for_sync(cls, last_synced, report_item_types): + """Retrieve report items for synchronization. + + This method retrieves report items that have been updated since the last synchronization time, + and belong to the specified report item types. + Args: + last_synced (datetime): The last synchronization time. + report_item_types (list): A list of report item types. + Returns: + tuple: A tuple containing two elements: + - items (list): A list of report items that need to be synchronized. + - last_sync_time (datetime): The current synchronization time. + """ report_item_type_ids = set() for report_item_type in report_item_types: report_item_type_ids.add(report_item_type.id) last_sync_time = datetime.now() - query = cls.query.filter(ReportItem.last_updated >= last_synced, ReportItem.last_updated <= last_sync_time, - ReportItem.report_item_type_id.in_(report_item_type_ids)) + query = cls.query.filter( + ReportItem.last_updated >= last_synced, + ReportItem.last_updated <= last_sync_time, + ReportItem.report_item_type_id.in_(report_item_type_ids), + ) report_items = query.all() @@ -193,70 +418,108 @@ def get_for_sync(cls, last_synced, report_item_types): @classmethod def get(cls, group, filter, offset, limit, user): - + """Retrieve report items based on specified criteria. 
+ + Arguments: + group (str): The remote user group. + filter (dict): The filter criteria. + offset (int): The offset for pagination. + limit (int): The limit for pagination. + user (str): The user performing the query. + Returns: + tuple: A tuple containing the list of report items and the total count. + """ if group: query = cls.query.filter(ReportItem.remote_user == group) else: - query = db.session.query(ReportItem, func.count().filter(ACLEntry.id > 0).label("acls"), - func.count().filter(ACLEntry.access == True).label("access"), - func.count().filter(ACLEntry.modify == True).label("modify")).distinct().group_by( - ReportItem.id) - - query = query.filter(ReportItem.remote_user == None) - - query = query.outerjoin(ACLEntry, or_(and_(ReportItem.uuid == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM), - and_(cast(ReportItem.report_item_type_id, - sqlalchemy.String) == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE))) + query = ( + db.session.query( + ReportItem, + func.count().filter(ACLEntry.id > 0).label("acls"), + func.count().filter(ACLEntry.access.is_(True)).label("access"), + func.count().filter(ACLEntry.modify.is_(True)).label("modify"), + ) + .distinct() + .group_by(ReportItem.id) + ) + + query = query.filter(ReportItem.remote_user.is_(None)) + + query = query.outerjoin( + ACLEntry, + or_( + and_(ReportItem.uuid == ACLEntry.item_id, ACLEntry.item_type == ItemType.REPORT_ITEM), + and_( + cast(ReportItem.report_item_type_id, sqlalchemy.String) == ACLEntry.item_id, + ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE, + ), + ), + ) query = ACLEntry.apply_query(query, user, True, False, False) - if 'search' in filter and filter['search'] != '': - search_string = '%' + filter['search'].lower() + '%' - query = query.join(ReportItemAttribute, ReportItem.id == ReportItemAttribute.report_item_id).filter(or_( - func.lower(ReportItemAttribute.value).like(search_string), - func.lower(ReportItem.title).like(search_string), - func.lower(ReportItem.title_prefix).like(search_string))) + search_string = filter.get("search", "").lower() + if search_string: + search_string = f"%{search_string}%" + query = query.join(ReportItemAttribute, ReportItem.id == ReportItemAttribute.report_item_id).filter( + or_( + func.lower(ReportItemAttribute.value).like(search_string), + func.lower(ReportItem.title).like(search_string), + func.lower(ReportItem.title_prefix).like(search_string), + ) + ) - if 'completed' in filter and filter['completed'].lower() == "true": - query = query.filter(ReportItem.completed == True) + if filter.get("completed", "").lower() == "true": + query = query.filter(ReportItem.completed) - if 'incompleted' in filter and filter['incompleted'].lower() == "true": - query = query.filter(ReportItem.completed == False) + if filter.get("incompleted", "").lower() == "true": + query = query.filter(ReportItem.completed.is_(False)) - if 'range' in filter and filter['range'] != 'ALL': + if filter.get("range", "ALL") != "ALL": date_limit = datetime.now() - if filter['range'] == 'TODAY': + if filter["range"] == "TODAY": date_limit = date_limit.replace(hour=0, minute=0, second=0, microsecond=0) - if filter['range'] == 'WEEK': - date_limit = date_limit.replace(day=date_limit.day - date_limit.weekday(), hour=0, minute=0, second=0, - microsecond=0) + if filter["range"] == "WEEK": + date_limit = date_limit.replace(day=date_limit.day - date_limit.weekday(), hour=0, minute=0, second=0, microsecond=0) - if filter['range'] == 'MONTH': + if filter["range"] == "MONTH": date_limit = 
date_limit.replace(day=1, hour=0, minute=0, second=0, microsecond=0) query = query.filter(ReportItem.created >= date_limit) - if 'sort' in filter: - if filter['sort'] == 'DATE_DESC': + if filter.get("sort"): + if filter["sort"] == "DATE_DESC": query = query.order_by(db.desc(ReportItem.created)) - elif filter['sort'] == 'DATE_ASC': + elif filter["sort"] == "DATE_ASC": query = query.order_by(db.asc(ReportItem.created)) return query.offset(offset).limit(limit).all(), query.count() @classmethod def identical(cls, uuid): + """Check if a report item with the given UUID exists. + + Arguments: + uuid -- The UUID of the report item to check. + Returns: + True if a report item with the given UUID exists, False otherwise. + """ return db.session.query(db.exists().where(ReportItem.uuid == uuid)).scalar() @classmethod def get_by_cpe(cls, cpes): - + """Retrieve report items by Common Platform Enumeration (CPE). + + This method queries the database to retrieve report items that match the provided CPEs. + Arguments: + cpes (list): A list of CPEs to search for. + Returns: + list: A list of report item IDs that match the provided CPEs. + """ if len(cpes) > 0: query_string = "SELECT DISTINCT report_item_id FROM report_item_cpe WHERE value LIKE ANY(:cpes) OR {}" - params = {'cpes': cpes} + params = {"cpes": cpes} inner_query = "" for i in range(len(cpes)): @@ -274,6 +537,18 @@ def get_by_cpe(cls, cpes): @classmethod def get_json(cls, group, filter, offset, limit, user): + """Get the JSON representation of report items. + + This method retrieves report items based on the provided parameters and returns them in JSON format. + Arguments: + group (str): The group parameter. + filter (str): The filter parameter. + offset (int): The offset parameter. + limit (int): The limit parameter. + user (str): The user parameter. + Returns: + dict: A dictionary containing the total count of report items and a list of report items in JSON format. + """ results, count = cls.get(group, filter, offset, limit, user) report_items = [] if group: @@ -292,18 +567,36 @@ def get_json(cls, group, filter, offset, limit, user): report_items.append(report_item) report_items_schema = ReportItemPresentationSchema(many=True) - return {'total_count': count, 'items': report_items_schema.dump(report_items)} + return {"total_count": count, "items": report_items_schema.dump(report_items)} @classmethod def get_detail_json(cls, id): + """Get the detailed JSON representation of a report item. + + Arguments: + cls -- The class object. + id -- The ID of the report item. + Returns: + The detailed JSON representation of the report item. + """ report_item = cls.query.get(id) report_item_schema = ReportItemSchema() return report_item_schema.dump(report_item) @classmethod def get_groups(cls): - result = db.session.query(ReportItem.remote_user).distinct().group_by(ReportItem.remote_user).filter( - ReportItem.remote_user != None).all() + """Get the distinct groups associated with the report items. + + Returns: + list: A list of distinct groups. + """ + result = ( + db.session.query(ReportItem.remote_user) + .distinct() + .group_by(ReportItem.remote_user) + .filter(ReportItem.remote_user.isnot(None)) + .all() + ) groups = set() for row in result: groups.add(row[0]) @@ -312,11 +605,21 @@ def get_groups(cls): @classmethod def add_report_item(cls, report_item_data, user): + """Add a report item to the database. + + This method takes in report_item_data and user as arguments and adds a new report item to the database. 
+ It performs authorization checks to ensure that the user has the necessary permissions to add the report item. + Arguments: + report_item_data (dict): The data for the report item. + user (User): The user adding the report item. + Returns: + tuple: A tuple containing the added report item and the HTTP status code. + """ report_item_schema = NewReportItemSchema() report_item = report_item_schema.load(report_item_data) if not ReportItemType.allowed_with_acl(report_item.report_item_type_id, user, False, False, True): - return 'Unauthorized access to report item type', 401 + return "Unauthorized access to report item type", 401 report_item.user_id = user.id for attribute in report_item.attributes: @@ -331,6 +634,16 @@ def add_report_item(cls, report_item_data, user): @classmethod def add_remote_report_items(cls, report_item_data, remote_node_name): + """Add remote report items to the database. + + This method takes a list of report item data and a remote node name, + and adds the report items to the database. If a report item with the + same UUID already exists, it updates the existing report item with the + new data. + Arguments: + report_item_data (list): A list of report item data. + remote_node_name (str): The name of the remote node. + """ report_item_schema = NewReportItemSchema(many=True) report_items = report_item_schema.load(report_item_data) @@ -350,66 +663,81 @@ def add_remote_report_items(cls, report_item_data, remote_node_name): @classmethod def update_report_item(cls, id, data, user): + """Update a report item with the given data. + + Arguments: + id (int): The ID of the report item. + data (dict): The data to update the report item with. + user (User): The user performing the update. + Returns: + tuple: A tuple containing a boolean indicating whether the report item was modified and the updated data. 
+ """ modified = False new_attribute = None report_item = cls.query.get(id) if report_item is not None: - if 'update' in data: - if 'title' in data: - if report_item.title != data['title']: + if "update" in data: + if "title" in data: + if report_item.title != data["title"]: modified = True - report_item.title = data['title'] - data['title'] = '' + report_item.title = data["title"] + data["title"] = "" - if 'title_prefix' in data: - if report_item.title_prefix != data['title_prefix']: + if "title_prefix" in data: + if report_item.title_prefix != data["title_prefix"]: modified = True - report_item.title_prefix = data['title_prefix'] - data['title_prefix'] = '' + report_item.title_prefix = data["title_prefix"] + data["title_prefix"] = "" - if 'completed' in data: - if report_item.completed != data['completed']: + if "completed" in data: + if report_item.completed != data["completed"]: modified = True - report_item.completed = data['completed'] - data['completed'] = '' + report_item.completed = data["completed"] + data["completed"] = "" - if 'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - # sometime we compare: int & int or int & str - if str(attribute.id) == str(data['attribute_id']): - if attribute.value != data['attribute_value']: + # Compare attribute IDs + if str(attribute.id) == str(data["attribute_id"]): + if attribute.value != data["attribute_value"]: + modified = True + attribute.value = data["attribute_value"] + data["attribute_value"] = "" + attribute.user = user + attribute.last_updated = datetime.now() + if attribute.value_description != data["value_description"]: modified = True - attribute.value = data['attribute_value'] - data['attribute_value'] = '' + attribute.value_description = data["value_description"] + data["value_description"] = "" attribute.user = user attribute.last_updated = datetime.now() break - if 'add' in data: - if 'attribute_id' in data: + if "add" in data: + if "attribute_id" in data: modified = True - new_attribute = ReportItemAttribute(None, "", None, 0, None, data['attribute_group_item_id'], None) + new_attribute = ReportItemAttribute(None, "", None, 0, None, data["attribute_group_item_id"], None) new_attribute.user = user report_item.attributes.append(new_attribute) - if 'aggregate_ids' in data: + if "aggregate_ids" in data: modified = True - for aggregate_id in data['aggregate_ids']: + for aggregate_id in data["aggregate_ids"]: aggregate = NewsItemAggregate.find(aggregate_id) report_item.news_item_aggregates.append(aggregate) - if 'remote_report_item_ids' in data: + if "remote_report_item_ids" in data: modified = True - for remote_report_item_id in data['remote_report_item_ids']: + for remote_report_item_id in data["remote_report_item_ids"]: remote_report_item = ReportItem.find(remote_report_item_id) report_item.remote_report_items.append(remote_report_item) - if 'delete' in data: - if 'attribute_id' in data: + if "delete" in data: + if "attribute_id" in data: attribute_to_delete = None for attribute in report_item.attributes: # sometime we compare: int & int or int & str - if str(attribute.id) == str(data['attribute_id']): + if str(attribute.id) == str(data["attribute_id"]): attribute_to_delete = attribute break @@ -417,10 +745,10 @@ def update_report_item(cls, id, data, user): modified = True report_item.attributes.remove(attribute_to_delete) - if 'aggregate_id' in data: + if "aggregate_id" in data: aggregate_to_delete = None for aggregate in report_item.news_item_aggregates: - if aggregate.id == 
data['aggregate_id']: + if aggregate.id == data["aggregate_id"]: aggregate_to_delete = aggregate break @@ -428,10 +756,10 @@ def update_report_item(cls, id, data, user): modified = True report_item.news_item_aggregates.remove(aggregate_to_delete) - if 'remote_report_item_id' in data: + if "remote_report_item_id" in data: remote_report_item_to_delete = None for remote_report_item in report_item.remote_report_items: - if remote_report_item.id == data['remote_report_item_id']: + if remote_report_item.id == data["remote_report_item_id"]: remote_report_item_to_delete = remote_report_item break @@ -441,73 +769,99 @@ def update_report_item(cls, id, data, user): if modified: report_item.last_updated = datetime.now() - data['user_id'] = user.id - data['report_item_id'] = int(id) + data["user_id"] = user.id + data["report_item_id"] = int(id) report_item.update_cpes() db.session.commit() if new_attribute is not None: - data['attribute_id'] = new_attribute.id + data["attribute_id"] = new_attribute.id return modified, data @classmethod def get_updated_data(cls, id, data): + """Get the updated data for a report item. + + This method retrieves the updated data for a report item based on the provided ID and data. + Arguments: + cls (class): The class object. + id (int): The ID of the report item. + data (dict): The data containing the updates. + Returns: + dict: The updated data for the report item. + """ report_item = cls.query.get(id) if report_item is not None: - if 'update' in data: - if 'title' in data: - data['title'] = report_item.title + if "update" in data: + if "title" in data: + data["title"] = report_item.title - if 'title_prefix' in data: - data['title_prefix'] = report_item.title_prefix + if "title_prefix" in data: + data["title_prefix"] = report_item.title_prefix - if 'completed' in data: - data['completed'] = report_item.completed + if "completed" in data: + data["completed"] = report_item.completed - if 'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - if str(attribute.id) == data['attribute_id']: - data['attribute_value'] = attribute.value - data['attribute_last_updated'] = attribute.last_updated.strftime('%d.%m.%Y - %H:%M') - data['attribute_user'] = attribute.user.name + if str(attribute.id) == data["attribute_id"]: + data["attribute_value"] = attribute.value + data["attribute_value_description"] = attribute.value_description + data["attribute_last_updated"] = attribute.last_updated.strftime("%d.%m.%Y - %H:%M") + data["attribute_user"] = attribute.user.name break - if 'add' in data: - if 'aggregate_ids' in data: + if "add" in data: + if "aggregate_ids" in data: schema = NewsItemAggregateSchema() - data['news_item_aggregates'] = [] - for aggregate_id in data['aggregate_ids']: + data["news_item_aggregates"] = [] + for aggregate_id in data["aggregate_ids"]: aggregate = NewsItemAggregate.find(aggregate_id) - data['news_item_aggregates'].append(schema.dump(aggregate)) + data["news_item_aggregates"].append(schema.dump(aggregate)) - if 'remote_report_item_ids' in data: + if "remote_report_item_ids" in data: schema = RemoteReportItemSchema() - data['remote_report_items'] = [] - for remote_report_item_id in data['remote_report_item_ids']: + data["remote_report_items"] = [] + for remote_report_item_id in data["remote_report_item_ids"]: remote_report_item = ReportItem.find(remote_report_item_id) - data['remote_report_items'].append(schema.dump(remote_report_item)) + data["remote_report_items"].append(schema.dump(remote_report_item)) - if 
'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - if str(attribute.id) == data['attribute_id']: - data['attribute_value'] = attribute.value - data['binary_mime_type'] = attribute.binary_mime_type - data['binary_size'] = attribute.binary_size - data['binary_description'] = attribute.binary_description - data['attribute_last_updated'] = attribute.last_updated.strftime('%d.%m.%Y - %H:%M') - data['attribute_user'] = attribute.user.name + if str(attribute.id) == data["attribute_id"]: + data["attribute_value"] = attribute.value + data["attribute_value_description"] = attribute.value_description + data["binary_mime_type"] = attribute.binary_mime_type + data["binary_size"] = attribute.binary_size + data["binary_description"] = attribute.binary_description + data["attribute_last_updated"] = attribute.last_updated.strftime("%d.%m.%Y - %H:%M") + data["attribute_user"] = attribute.user.name break return data @classmethod def add_attachment(cls, id, attribute_group_item_id, user, file, description): + """Add an attachment to a report item. + + Arguments: + id (int): The ID of the report item. + attribute_group_item_id (int): The ID of the attribute group item. + user (User): The user who is adding the attachment. + file (FileStorage): The file to be attached. + description (str): The description of the attachment. + Returns: + dict: A dictionary containing information about the attachment. + - "add" (bool): True if the attachment was added successfully. + - "user_id" (int): The ID of the user who added the attachment. + - "report_item_id" (int): The ID of the report item. + - "attribute_id" (int): The ID of the newly created attribute. + """ report_item = cls.query.get(id) file_data = file.read() - new_attribute = ReportItemAttribute(None, file.filename, file.mimetype, len(file_data), description, - attribute_group_item_id, None) + new_attribute = ReportItemAttribute(None, file.filename, file.mimetype, len(file_data), description, attribute_group_item_id, None) new_attribute.user = user new_attribute.binary_data = file_data report_item.attributes.append(new_attribute) @@ -515,10 +869,10 @@ def add_attachment(cls, id, attribute_group_item_id, user, file, description): report_item.last_updated = datetime.now() data = dict() - data['add'] = True - data['user_id'] = user.id - data['report_item_id'] = int(id) - data['attribute_id'] = new_attribute.id + data["add"] = True + data["user_id"] = user.id + data["report_item_id"] = int(id) + data["attribute_id"] = new_attribute.id db.session.commit() @@ -526,6 +880,20 @@ def add_attachment(cls, id, attribute_group_item_id, user, file, description): @classmethod def remove_attachment(cls, id, attribute_id, user): + """Remove an attachment from a report item. + + Arguments: + cls (ReportItem): The class object. + id (int): The ID of the report item. + attribute_id (int): The ID of the attribute to be removed. + user (User): The user performing the action. + Returns: + dict: A dictionary containing information about the deletion. + - delete (bool): Indicates whether the attribute was successfully deleted. + - user_id (int): The ID of the user performing the action. + - report_item_id (int): The ID of the report item. + - attribute_id (int): The ID of the attribute that was deleted. 
+ """ report_item = cls.query.get(id) attribute_to_delete = None for attribute in report_item.attributes: @@ -539,10 +907,10 @@ def remove_attachment(cls, id, attribute_id, user): report_item.last_updated = datetime.now() data = dict() - data['delete'] = True - data['user_id'] = user.id - data['report_item_id'] = int(id) - data['attribute_id'] = attribute_id + data["delete"] = True + data["user_id"] = user.id + data["report_item_id"] = int(id) + data["attribute_id"] = attribute_id db.session.commit() @@ -550,13 +918,27 @@ def remove_attachment(cls, id, attribute_id, user): @classmethod def delete_report_item(cls, id): + """Delete a report item by its ID. + + Arguments: + id (int): The ID of the report item to be deleted. + Returns: + tuple: A tuple containing the status message and the HTTP status code. + The status message is "success" if the report item was deleted successfully. + The HTTP status code is 200 if the report item was deleted successfully. + """ report_item = cls.query.get(id) if report_item is not None: db.session.delete(report_item) db.session.commit() - return 'success', 200 + return "success", 200 def update_cpes(self): + """Update the list of CPES for the report item. + + This method clears the existing list of CPES and populates it with new CPES + based on the attributes of the report item. Only attributes of type CPE are considered. + """ self.report_item_cpes = [] if self.completed is True: for attribute in self.attributes: @@ -566,10 +948,25 @@ def update_cpes(self): class ReportItemCpe(db.Model): + """A class representing a CPE (Common Platform Enumeration) report item. + + Attributes: + id (int): The unique identifier of the report item. + value (str): The value of the report item. + report_item_id (int): The foreign key referencing the parent report item. + Args: + db (object): The database object. + """ + id = db.Column(db.Integer, primary_key=True) value = db.Column(db.String()) - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id')) + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id")) def __init__(self, value): + """Initialize a ReportItemCpe object. + + Args: + value (any): The value of the report item. + """ self.id = None self.value = value diff --git a/src/shared/shared/schema/report_item.py b/src/shared/shared/schema/report_item.py index fb6723b47..502012a7e 100644 --- a/src/shared/shared/schema/report_item.py +++ b/src/shared/shared/schema/report_item.py @@ -1,3 +1,24 @@ +"""This module contains schemas and classes for representing report items and their attributes. + +The module includes the following classes: +- ReportItemAttributeBaseSchema: Schema for representing a report item attribute. +- ReportItemAttributeSchema: Schema for representing a report item attribute with additional fields. +- ReportItemAttribute: Class representing an attribute of a report item. +- ReportItemBaseSchema: Schema for the base report item. +- RemoteReportItemSchema: Schema for remote report items. +- ReportItemSchema: Schema for serializing and deserializing ReportItem objects. +- ReportItemAttributeRemoteSchema: Schema for representing a remote attribute of a report item. +- ReportItemRemoteSchema: Schema for representing a remote report item. +- ReportItemPresentationSchema: Schema for presenting a report item. +- ReportItem: Class representing a report item. + +The module also imports schemas from other modules: +- PresentationSchema: Schema for presentation. 
+- NewsItemAggregateSchema: Schema for representing news item aggregates. +- ACLEntryStatusSchema: Schema for the ACL entry status. +- UserSchemaBase: Schema for representing user data. +""" + from marshmallow import Schema, fields, post_load, EXCLUDE from shared.schema.presentation import PresentationSchema @@ -7,11 +28,27 @@ class ReportItemAttributeBaseSchema(Schema): + """Schema for representing a report item attribute. + + Attributes: + id (int): The ID of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary attribute. + binary_size (int): The size of the binary attribute. + binary_description (str): The description of the binary attribute. + attribute_group_item_title (str): The title of the attribute group item. + attribute_group_item_id (int): The ID of the attribute group item. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItemAttributeBase schema.""" + unknown = EXCLUDE id = fields.Int(load_default=None, allow_none=True) value = fields.Str() + value_description = fields.Str(load_default=None, allow_none=True) binary_mime_type = fields.Str(load_default=None, allow_none=True) binary_size = fields.Int(load_default=0) binary_description = fields.Str(load_default=None, allow_none=True) @@ -20,6 +57,17 @@ class Meta: class ReportItemAttributeSchema(ReportItemAttributeBaseSchema): + """Schema for representing a report item attribute. + + This schema defines the structure and validation rules for a report item attribute. + + Arguments: + ReportItemAttributeBaseSchema -- The base schema for a report item attribute. + + Returns: + An instance of the ReportItemAttributeSchema class. + """ + created = fields.DateTime("%d.%m.%Y - %H:%M") last_updated = fields.DateTime("%d.%m.%Y - %H:%M") version = fields.Int() @@ -28,10 +76,39 @@ class ReportItemAttributeSchema(ReportItemAttributeBaseSchema): @post_load def make(self, data, **kwargs): + """Create a new ReportItemAttribute object. + + This method takes in data and creates a new ReportItemAttribute object using the provided data. + + Arguments: + data (dict): A dictionary containing the data for creating the ReportItemAttribute object. + **kwargs: Additional keyword arguments. + + Returns: + ReportItemAttribute: The newly created ReportItemAttribute object. + """ return ReportItemAttribute(**data) class ReportItemAttribute: + """Represents an attribute of a report item. + + Attributes: + id (int): The ID of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary data associated with the attribute. + binary_size (int): The size of the binary data associated with the attribute. + binary_description (str): The description of the binary data associated with the attribute. + attribute_group_item_id (int): The ID of the attribute group item. + attribute_group_item_title (str): The title of the attribute group item. + created (datetime): The date and time when the attribute was created. + last_updated (datetime): The date and time when the attribute was last updated. + version (int): The version of the attribute. + current (bool): Indicates whether the attribute is the current version. + user (str): The user who created or last updated the attribute. 
+ """ + def __init__( self, id, @@ -47,6 +124,7 @@ def __init__( current, user, ): + """Initialize a new instance of the ReportItem class.""" self.id = id self.value = value self.created = created @@ -62,7 +140,22 @@ def __init__( class ReportItemBaseSchema(Schema): + """Schema for the base report item. + + Attributes: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (DateTime): The date and time when the report item was created. + last_updated (DateTime): The date and time when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + report_item_type_id (int): The ID of the report item type. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItem schema.""" + unknown = EXCLUDE id = fields.Int(load_default=None, allow_none=True) @@ -76,11 +169,41 @@ class Meta: class RemoteReportItemSchema(ReportItemBaseSchema, PresentationSchema): + """Schema for remote report items. + + This schema represents the structure and validation rules for remote report items. + + Arguments: + ReportItemBaseSchema -- Base schema for report items. + PresentationSchema -- Schema for presentation. + + Attributes: + remote_user (str): The remote user associated with the report item. + attributes (list): List of nested report item attributes. + """ + remote_user = fields.Str(allow_none=True) attributes = fields.Nested(ReportItemAttributeSchema, many=True) class ReportItemSchema(ReportItemBaseSchema): + """Schema for serializing and deserializing ReportItem objects. + + Inherits from ReportItemBaseSchema. + + Attributes: + news_item_aggregates (List[NewsItemAggregateSchema]): List of nested NewsItemAggregateSchema objects. + remote_report_items (List[RemoteReportItemSchema]): List of nested RemoteReportItemSchema objects. + attributes (List[ReportItemAttributeSchema]): List of nested ReportItemAttributeSchema objects. + remote_user (str): Remote user associated with the report item. + + Methods: + make(data, **kwargs): Post-load method to create a ReportItem object from deserialized data. + + Returns: + ReportItemSchema: An instance of the ReportItemSchema class. + """ + news_item_aggregates = fields.Nested(NewsItemAggregateSchema, many=True) remote_report_items = fields.Nested(RemoteReportItemSchema, many=True) attributes = fields.Nested(ReportItemAttributeSchema, many=True) @@ -88,15 +211,38 @@ class ReportItemSchema(ReportItemBaseSchema): @post_load def make(self, data, **kwargs): + """Create a new ReportItem object. + + This method takes in data and creates a new ReportItem object using the provided data. + + Arguments: + data (dict): A dictionary containing the data for the ReportItem. + + Returns: + ReportItem: A new ReportItem object. + """ return ReportItem(**data) class ReportItemAttributeRemoteSchema(Schema): + """A schema for representing a remote attribute of a report item. + + Attributes: + attribute_group_item_title (str): The title of the attribute group item. + value (str): The value of the attribute. + """ + attribute_group_item_title = fields.Str() value = fields.Str() class ReportItemRemoteSchema(Schema): + """A schema for representing a remote report item. + + Arguments: + Schema -- The base schema class. 
+ """ + uuid = fields.Str(allow_none=True) title = fields.Str() title_prefix = fields.Str() @@ -105,10 +251,41 @@ class ReportItemRemoteSchema(Schema): class ReportItemPresentationSchema(ReportItemBaseSchema, ACLEntryStatusSchema, PresentationSchema): + """Schema for presenting a report item. + + This schema inherits from the ReportItemBaseSchema, ACLEntryStatusSchema, and PresentationSchema classes. + + Arguments: + ReportItemBaseSchema -- Schema for the base report item. + ACLEntryStatusSchema -- Schema for the ACL entry status. + PresentationSchema -- Schema for the presentation. + + Attributes: + remote_user -- String field representing the remote user. Allows None as a value. + """ + remote_user = fields.Str(allow_none=True) class ReportItem: + """ + Represents a report item. + + Attributes: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (datetime): The date and time when the report item was created. + last_updated (datetime): The date and time when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + report_item_type_id (int): The ID of the report item type. + news_item_aggregates (list): A list of news item aggregates associated with the report item. + remote_report_items (list): A list of remote report items associated with the report item. + attributes (dict): Additional attributes of the report item. + remote_user (str): The remote user associated with the report item. + """ + def __init__( self, id, @@ -124,6 +301,7 @@ def __init__( attributes, remote_user, ): + """Initialize a ReportItem object.""" self.id = id self.uuid = uuid self.title = title @@ -139,16 +317,53 @@ def __init__( class ReportItemIdSchema(Schema): + """Schema for Report Item ID. + + This schema defines the structure for the Report Item ID. + + Arguments: + Schema -- The base schema class. + + Returns: + An instance of the ReportItemId class. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItemId schema.""" + unknown = EXCLUDE id = fields.Int() @post_load def make(self, data, **kwargs): + """Create a new ReportItemId object. + + This method takes in data and returns a new ReportItemId object. + + Arguments: + data (dict): The data used to create the ReportItemId object. + + Returns: + ReportItemId: A new ReportItemId object. + """ return ReportItemId(**data) class ReportItemId: + """A class representing the ID of a report item. + + Args: + id (int): The ID of the report item. + + Attributes: + id (int): The ID of the report item. + """ + def __init__(self, id): + """Initialize a ReportItem object. + + Args: + id (int): The ID of the report item. + """ self.id = id From 16ebe738640334e45d6b5abdbfacb923dbba380a Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 20 Jun 2024 20:57:51 +0200 Subject: [PATCH 114/146] modified model and schema --- src/core/model/report_item.py | 687 +++++++++++++++++++----- src/shared/shared/schema/report_item.py | 217 ++++++++ 2 files changed, 759 insertions(+), 145 deletions(-) diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py index b1bf3fd10..4bd749159 100644 --- a/src/core/model/report_item.py +++ b/src/core/model/report_item.py @@ -1,4 +1,24 @@ -from marshmallow import post_load +"""This module contains the ReportItem class and its associated schema. 
+ +The ReportItem class represents a report item, which is a component of a larger report. It contains attributes such as ID, UUID, title, +created timestamp, and more. The class also includes methods for finding report item attributes by ID. + +The module also defines several schemas for creating and validating report items and their attributes. + +Classes: + - ReportItem: A class representing a report item. + - ReportItemAttribute: A class representing an attribute of a report item. + - ReportItemRemoteReportItem: A class representing the relationship between a report item and a remote report item. + +Schemas: + - NewReportItemSchema: Schema for creating a new report item. + - NewReportItemAttributeSchema: Schema for creating a new report item attribute. + +Relationships: + - ReportItem has a many-to-one relationship with User and ReportItemType. + - ReportItem has a many-to-many relationship with NewsItemAggregate and ReportItem. +""" + from datetime import datetime import uuid as uuid_generator from sqlalchemy import orm, or_, func, text, and_ @@ -14,19 +34,76 @@ from shared.schema.acl_entry import ItemType from shared.schema.attribute import AttributeType from shared.schema.news_item import NewsItemAggregateIdSchema, NewsItemAggregateSchema -from shared.schema.report_item import ReportItemAttributeBaseSchema, ReportItemBaseSchema, ReportItemIdSchema, RemoteReportItemSchema, ReportItemRemoteSchema, ReportItemSchema, ReportItemPresentationSchema +from shared.schema.report_item import ( + ReportItemAttributeBaseSchema, + ReportItemBaseSchema, + ReportItemIdSchema, + RemoteReportItemSchema, + ReportItemRemoteSchema, + ReportItemSchema, + ReportItemPresentationSchema, +) class NewReportItemAttributeSchema(ReportItemAttributeBaseSchema): + """Schema for creating a new report item attribute. + + This schema is used to validate and deserialize data for creating a new report item attribute. + + Arguments: + ReportItemAttributeBaseSchema -- The base schema for report item attributes. + + Returns: + An instance of the ReportItemAttribute class. + """ @post_load def make_report_item_attribute(self, data, **kwargs): + """Create a report item attribute. + + This method takes in data and creates a ReportItemAttribute object. + + Arguments: + data (dict): A dictionary containing the data for the report item attribute. + + Returns: + ReportItemAttribute: The created report item attribute object. + """ return ReportItemAttribute(**data) class ReportItemAttribute(db.Model): + """A class representing an attribute of a report item. + + Attributes: + id (int): The unique identifier of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary data, if applicable. + binary_data (bytes): The binary data associated with the attribute. + binary_size (int): The size of the binary data in bytes. + binary_description (str): The description of the binary data. + created (datetime): The timestamp of when the attribute was created. + last_updated (datetime): The timestamp of when the attribute was last updated. + version (int): The version number of the attribute. + current (bool): Indicates whether the attribute is the current version. + attribute_group_item_id (int): The ID of the attribute group item that the attribute belongs to. + attribute_group_item (AttributeGroupItem): The attribute group item that the attribute belongs to. 
+ attribute_group_item_title (str): The title of the attribute group item. + report_item_id (int): The ID of the report item that the attribute belongs to. + report_item (ReportItem): The report item that the attribute belongs to. + user_id (int): The ID of the user who created the attribute. + user (User): The user who created the attribute. + + Methods: + __init__: Initializes a new instance of the ReportItemAttribute class. + find: Finds a report item attribute by its ID. + + """ + id = db.Column(db.Integer, primary_key=True) value = db.Column(db.String(), nullable=False) + value_description = db.Column(db.String()) binary_mime_type = db.Column(db.String()) binary_data = orm.deferred(db.Column(db.LargeBinary)) binary_size = db.Column(db.Integer) @@ -37,20 +114,42 @@ class ReportItemAttribute(db.Model): version = db.Column(db.Integer, default=1) current = db.Column(db.Boolean, default=True) - attribute_group_item_id = db.Column(db.Integer, db.ForeignKey('attribute_group_item.id')) + attribute_group_item_id = db.Column(db.Integer, db.ForeignKey("attribute_group_item.id")) attribute_group_item = db.relationship("AttributeGroupItem", viewonly=True) attribute_group_item_title = db.Column(db.String) - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), nullable=True) + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), nullable=True) report_item = db.relationship("ReportItem") - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) + user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=True) user = db.relationship("User") - def __init__(self, id, value, binary_mime_type, binary_size, binary_description, attribute_group_item_id, - attribute_group_item_title): + def __init__( + self, + id, + value, + value_description, + binary_mime_type, + binary_size, + binary_description, + attribute_group_item_id, + attribute_group_item_title, + ): + """Initialize a ReportItem object. + + Arguments: + id (int): The ID of the report item. + value (str): The value of the report item. + value_description (str): The description of the value. + binary_mime_type (str): The MIME type of the binary data. + binary_size (int): The size of the binary data. + binary_description (str): The description of the binary data. + attribute_group_item_id (int): The ID of the attribute group item. + attribute_group_item_title (str): The title of the attribute group item. + """ self.id = None self.value = value + self.value_description = value_description self.binary_mime_type = binary_mime_type self.binary_size = binary_size self.binary_description = binary_description @@ -59,26 +158,83 @@ def __init__(self, id, value, binary_mime_type, binary_size, binary_description, @classmethod def find(cls, attribute_id): + """Find a report item attribute by its ID. + + Args: + attribute_id (int): The ID of the attribute to find. + + Returns: + ReportItemAttribute: The report item attribute with the specified ID, or None if not found. + + """ report_item_attribute = cls.query.get(attribute_id) return report_item_attribute class NewReportItemSchema(ReportItemBaseSchema): + """Schema for creating a new report item. + + This schema defines the structure and validation rules for creating a new report item. + + Arguments: + ReportItemBaseSchema -- The base schema for report items. + + Returns: + An instance of the NewReportItemSchema class. 
+ """ + news_item_aggregates = fields.Nested(NewsItemAggregateIdSchema, many=True, missing=[]) remote_report_items = fields.Nested(ReportItemIdSchema, many=True, missing=[]) attributes = fields.Nested(NewReportItemAttributeSchema, many=True) @post_load def make(self, data, **kwargs): + """Create a new ReportItem object. + + Arguments: + data (dict): A dictionary containing the data for the ReportItem. + + Returns: + ReportItem: A new ReportItem object. + """ return ReportItem(**data) class ReportItemRemoteReportItem(db.Model): - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), primary_key=True) - remote_report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id'), primary_key=True) + """A class representing the relationship between a report item and a remote report item. + + Arguments: + db -- The database object used for defining the model. + """ + + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), primary_key=True) + remote_report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id"), primary_key=True) class ReportItem(db.Model): + """A class representing a report item. + + Attributes: + id (int): The unique identifier of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (datetime): The datetime when the report item was created. + last_updated (datetime): The datetime when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + user_id (int): The ID of the user associated with the report item. + user (User): The user associated with the report item. + remote_user (str): The remote user associated with the report item. + report_item_type_id (int): The ID of the report item type associated with the report item. + report_item_type (ReportItemType): The report item type associated with the report item. + news_item_aggregates (list): The list of news item aggregates associated with the report item. + remote_report_items (list): The list of remote report items associated with the report item. + attributes (list): The list of attributes associated with the report item. + report_item_cpes (list): The list of report item CPES associated with the report item. + subtitle (str): The subtitle of the report item. + tag (str): The tag of the report item. 
+ """ + id = db.Column(db.Integer, primary_key=True) uuid = db.Column(db.String(64)) @@ -89,26 +245,40 @@ class ReportItem(db.Model): last_updated = db.Column(db.DateTime, default=datetime.now) completed = db.Column(db.Boolean, default=False) - user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=True) + user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=True) user = db.relationship("User", viewonly=True) remote_user = db.Column(db.String()) - report_item_type_id = db.Column(db.Integer, db.ForeignKey('report_item_type.id'), nullable=True) + report_item_type_id = db.Column(db.Integer, db.ForeignKey("report_item_type.id"), nullable=True) report_item_type = db.relationship("ReportItemType", viewonly=True) - news_item_aggregates = db.relationship("NewsItemAggregate", secondary='report_item_news_item_aggregate') + news_item_aggregates = db.relationship("NewsItemAggregate", secondary="report_item_news_item_aggregate") - remote_report_items = db.relationship("ReportItem", secondary='report_item_remote_report_item', - primaryjoin=ReportItemRemoteReportItem.report_item_id == id, - secondaryjoin=ReportItemRemoteReportItem.remote_report_item_id == id) + remote_report_items = db.relationship( + "ReportItem", + secondary="report_item_remote_report_item", + primaryjoin=ReportItemRemoteReportItem.report_item_id == id, + secondaryjoin=ReportItemRemoteReportItem.remote_report_item_id == id, + ) - attributes = db.relationship('ReportItemAttribute', back_populates="report_item", cascade="all, delete-orphan") + attributes = db.relationship("ReportItemAttribute", back_populates="report_item", cascade="all, delete-orphan") report_item_cpes = db.relationship("ReportItemCpe", cascade="all, delete-orphan") - def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item_aggregates, remote_report_items, - attributes, completed): - + def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item_aggregates, remote_report_items, attributes, completed): + """Initialize a new instance of the ReportItem class. + + Arguments: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item's title. + report_item_type_id (int): The ID of the report item type. + news_item_aggregates (list): A list of news item aggregates associated with the report item. + remote_report_items (list): A list of remote report items associated with the report item. + attributes (dict): A dictionary of attributes for the report item. + completed (bool): Indicates whether the report item is completed or not. + """ self.id = id if uuid is None: @@ -135,35 +305,75 @@ def __init__(self, id, uuid, title, title_prefix, report_item_type_id, news_item @orm.reconstructor def reconstruct(self): + """Reconstructs the report item. + + This method clears the subtitle, sets the tag to "mdi-file-table-outline", + and sorts the attributes based on the attribute group index, attribute group item index, and attribute ID. + """ self.subtitle = "" self.tag = "mdi-file-table-outline" self.attributes.sort(key=lambda obj: (obj.attribute_group_item.attribute_group.index, obj.attribute_group_item.index, obj.id)) @classmethod def count_all(cls, is_completed): + """Count the number of report items based on completion status. + + Arguments: + is_completed (bool): A flag indicating whether to count completed or incomplete report items. 
+ Returns: + int: The count of report items matching the completion status. + """ return cls.query.filter_by(completed=is_completed).count() @classmethod def find(cls, report_item_id): + """Find a report item by its ID. + + Arguments: + report_item_id (int): The ID of the report item. + Returns: + ReportItem: The report item with the specified ID. + """ report_item = cls.query.get(report_item_id) return report_item @classmethod def find_by_uuid(cls, report_item_uuid): + """Find a report item by its UUID. + + Arguments: + report_item_uuid (str): The UUID of the report item. + Returns: + ReportItem: The report item with the specified UUID. + """ report_item = cls.query.filter_by(uuid=report_item_uuid) return report_item @classmethod def allowed_with_acl(cls, report_item_id, user, see, access, modify): - - query = db.session.query(ReportItem.id).distinct().group_by(ReportItem.id).filter( - ReportItem.id == report_item_id) - - query = query.outerjoin(ACLEntry, or_(and_(ReportItem.uuid == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM), - and_(cast(ReportItem.report_item_type_id, - sqlalchemy.String) == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE))) + """Check if the user is allowed to perform actions on a report item based on ACL. + + Arguments: + report_item_id (int): The ID of the report item. + user (User): The user object. + see (bool): Whether the user can see the report item. + access (bool): Whether the user can access the report item. + modify (bool): Whether the user can modify the report item. + Returns: + bool: True if the user is allowed, False otherwise. + """ + query = db.session.query(ReportItem.id).distinct().group_by(ReportItem.id).filter(ReportItem.id == report_item_id) + + query = query.outerjoin( + ACLEntry, + or_( + and_(ReportItem.uuid == ACLEntry.item_id, ACLEntry.item_type == ItemType.REPORT_ITEM), + and_( + cast(ReportItem.report_item_type_id, sqlalchemy.String) == ACLEntry.item_id, + ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE, + ), + ), + ) query = ACLEntry.apply_query(query, user, see, access, modify) @@ -171,14 +381,29 @@ def allowed_with_acl(cls, report_item_id, user, see, access, modify): @classmethod def get_for_sync(cls, last_synced, report_item_types): + """Retrieve report items for synchronization. + + This method retrieves report items that have been updated since the last synchronization time, + and belong to the specified report item types. + Args: + last_synced (datetime): The last synchronization time. + report_item_types (list): A list of report item types. + Returns: + tuple: A tuple containing two elements: + - items (list): A list of report items that need to be synchronized. + - last_sync_time (datetime): The current synchronization time. + """ report_item_type_ids = set() for report_item_type in report_item_types: report_item_type_ids.add(report_item_type.id) last_sync_time = datetime.now() - query = cls.query.filter(ReportItem.last_updated >= last_synced, ReportItem.last_updated <= last_sync_time, - ReportItem.report_item_type_id.in_(report_item_type_ids)) + query = cls.query.filter( + ReportItem.last_updated >= last_synced, + ReportItem.last_updated <= last_sync_time, + ReportItem.report_item_type_id.in_(report_item_type_ids), + ) report_items = query.all() @@ -193,70 +418,108 @@ def get_for_sync(cls, last_synced, report_item_types): @classmethod def get(cls, group, filter, offset, limit, user): - + """Retrieve report items based on specified criteria. 
+ + Arguments: + group (str): The remote user group. + filter (dict): The filter criteria. + offset (int): The offset for pagination. + limit (int): The limit for pagination. + user (str): The user performing the query. + Returns: + tuple: A tuple containing the list of report items and the total count. + """ if group: query = cls.query.filter(ReportItem.remote_user == group) else: - query = db.session.query(ReportItem, func.count().filter(ACLEntry.id > 0).label("acls"), - func.count().filter(ACLEntry.access == True).label("access"), - func.count().filter(ACLEntry.modify == True).label("modify")).distinct().group_by( - ReportItem.id) - - query = query.filter(ReportItem.remote_user == None) - - query = query.outerjoin(ACLEntry, or_(and_(ReportItem.uuid == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM), - and_(cast(ReportItem.report_item_type_id, - sqlalchemy.String) == ACLEntry.item_id, - ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE))) + query = ( + db.session.query( + ReportItem, + func.count().filter(ACLEntry.id > 0).label("acls"), + func.count().filter(ACLEntry.access.is_(True)).label("access"), + func.count().filter(ACLEntry.modify.is_(True)).label("modify"), + ) + .distinct() + .group_by(ReportItem.id) + ) + + query = query.filter(ReportItem.remote_user.is_(None)) + + query = query.outerjoin( + ACLEntry, + or_( + and_(ReportItem.uuid == ACLEntry.item_id, ACLEntry.item_type == ItemType.REPORT_ITEM), + and_( + cast(ReportItem.report_item_type_id, sqlalchemy.String) == ACLEntry.item_id, + ACLEntry.item_type == ItemType.REPORT_ITEM_TYPE, + ), + ), + ) query = ACLEntry.apply_query(query, user, True, False, False) - if 'search' in filter and filter['search'] != '': - search_string = '%' + filter['search'].lower() + '%' - query = query.join(ReportItemAttribute, ReportItem.id == ReportItemAttribute.report_item_id).filter(or_( - func.lower(ReportItemAttribute.value).like(search_string), - func.lower(ReportItem.title).like(search_string), - func.lower(ReportItem.title_prefix).like(search_string))) + search_string = filter.get("search", "").lower() + if search_string: + search_string = f"%{search_string}%" + query = query.join(ReportItemAttribute, ReportItem.id == ReportItemAttribute.report_item_id).filter( + or_( + func.lower(ReportItemAttribute.value).like(search_string), + func.lower(ReportItem.title).like(search_string), + func.lower(ReportItem.title_prefix).like(search_string), + ) + ) - if 'completed' in filter and filter['completed'].lower() == "true": - query = query.filter(ReportItem.completed == True) + if filter.get("completed", "").lower() == "true": + query = query.filter(ReportItem.completed) - if 'incompleted' in filter and filter['incompleted'].lower() == "true": - query = query.filter(ReportItem.completed == False) + if filter.get("incompleted", "").lower() == "true": + query = query.filter(ReportItem.completed.is_(False)) - if 'range' in filter and filter['range'] != 'ALL': + if filter.get("range", "ALL") != "ALL": date_limit = datetime.now() - if filter['range'] == 'TODAY': + if filter["range"] == "TODAY": date_limit = date_limit.replace(hour=0, minute=0, second=0, microsecond=0) - if filter['range'] == 'WEEK': - date_limit = date_limit.replace(day=date_limit.day - date_limit.weekday(), hour=0, minute=0, second=0, - microsecond=0) + if filter["range"] == "WEEK": + date_limit = date_limit.replace(day=date_limit.day - date_limit.weekday(), hour=0, minute=0, second=0, microsecond=0) - if filter['range'] == 'MONTH': + if filter["range"] == "MONTH": date_limit = 
date_limit.replace(day=1, hour=0, minute=0, second=0, microsecond=0) query = query.filter(ReportItem.created >= date_limit) - if 'sort' in filter: - if filter['sort'] == 'DATE_DESC': + if filter.get("sort"): + if filter["sort"] == "DATE_DESC": query = query.order_by(db.desc(ReportItem.created)) - elif filter['sort'] == 'DATE_ASC': + elif filter["sort"] == "DATE_ASC": query = query.order_by(db.asc(ReportItem.created)) return query.offset(offset).limit(limit).all(), query.count() @classmethod def identical(cls, uuid): + """Check if a report item with the given UUID exists. + + Arguments: + uuid -- The UUID of the report item to check. + Returns: + True if a report item with the given UUID exists, False otherwise. + """ return db.session.query(db.exists().where(ReportItem.uuid == uuid)).scalar() @classmethod def get_by_cpe(cls, cpes): - + """Retrieve report items by Common Platform Enumeration (CPE). + + This method queries the database to retrieve report items that match the provided CPEs. + Arguments: + cpes (list): A list of CPEs to search for. + Returns: + list: A list of report item IDs that match the provided CPEs. + """ if len(cpes) > 0: query_string = "SELECT DISTINCT report_item_id FROM report_item_cpe WHERE value LIKE ANY(:cpes) OR {}" - params = {'cpes': cpes} + params = {"cpes": cpes} inner_query = "" for i in range(len(cpes)): @@ -274,6 +537,18 @@ def get_by_cpe(cls, cpes): @classmethod def get_json(cls, group, filter, offset, limit, user): + """Get the JSON representation of report items. + + This method retrieves report items based on the provided parameters and returns them in JSON format. + Arguments: + group (str): The group parameter. + filter (str): The filter parameter. + offset (int): The offset parameter. + limit (int): The limit parameter. + user (str): The user parameter. + Returns: + dict: A dictionary containing the total count of report items and a list of report items in JSON format. + """ results, count = cls.get(group, filter, offset, limit, user) report_items = [] if group: @@ -292,18 +567,36 @@ def get_json(cls, group, filter, offset, limit, user): report_items.append(report_item) report_items_schema = ReportItemPresentationSchema(many=True) - return {'total_count': count, 'items': report_items_schema.dump(report_items)} + return {"total_count": count, "items": report_items_schema.dump(report_items)} @classmethod def get_detail_json(cls, id): + """Get the detailed JSON representation of a report item. + + Arguments: + cls -- The class object. + id -- The ID of the report item. + Returns: + The detailed JSON representation of the report item. + """ report_item = cls.query.get(id) report_item_schema = ReportItemSchema() return report_item_schema.dump(report_item) @classmethod def get_groups(cls): - result = db.session.query(ReportItem.remote_user).distinct().group_by(ReportItem.remote_user).filter( - ReportItem.remote_user != None).all() + """Get the distinct groups associated with the report items. + + Returns: + list: A list of distinct groups. + """ + result = ( + db.session.query(ReportItem.remote_user) + .distinct() + .group_by(ReportItem.remote_user) + .filter(ReportItem.remote_user.isnot(None)) + .all() + ) groups = set() for row in result: groups.add(row[0]) @@ -312,11 +605,21 @@ def get_groups(cls): @classmethod def add_report_item(cls, report_item_data, user): + """Add a report item to the database. + + This method takes in report_item_data and user as arguments and adds a new report item to the database. 
+ It performs authorization checks to ensure that the user has the necessary permissions to add the report item. + Arguments: + report_item_data (dict): The data for the report item. + user (User): The user adding the report item. + Returns: + tuple: A tuple containing the added report item and the HTTP status code. + """ report_item_schema = NewReportItemSchema() report_item = report_item_schema.load(report_item_data) if not ReportItemType.allowed_with_acl(report_item.report_item_type_id, user, False, False, True): - return 'Unauthorized access to report item type', 401 + return "Unauthorized access to report item type", 401 report_item.user_id = user.id for attribute in report_item.attributes: @@ -331,6 +634,16 @@ def add_report_item(cls, report_item_data, user): @classmethod def add_remote_report_items(cls, report_item_data, remote_node_name): + """Add remote report items to the database. + + This method takes a list of report item data and a remote node name, + and adds the report items to the database. If a report item with the + same UUID already exists, it updates the existing report item with the + new data. + Arguments: + report_item_data (list): A list of report item data. + remote_node_name (str): The name of the remote node. + """ report_item_schema = NewReportItemSchema(many=True) report_items = report_item_schema.load(report_item_data) @@ -350,66 +663,81 @@ def add_remote_report_items(cls, report_item_data, remote_node_name): @classmethod def update_report_item(cls, id, data, user): + """Update a report item with the given data. + + Arguments: + id (int): The ID of the report item. + data (dict): The data to update the report item with. + user (User): The user performing the update. + Returns: + tuple: A tuple containing a boolean indicating whether the report item was modified and the updated data. 
+ """ modified = False new_attribute = None report_item = cls.query.get(id) if report_item is not None: - if 'update' in data: - if 'title' in data: - if report_item.title != data['title']: + if "update" in data: + if "title" in data: + if report_item.title != data["title"]: modified = True - report_item.title = data['title'] - data['title'] = '' + report_item.title = data["title"] + data["title"] = "" - if 'title_prefix' in data: - if report_item.title_prefix != data['title_prefix']: + if "title_prefix" in data: + if report_item.title_prefix != data["title_prefix"]: modified = True - report_item.title_prefix = data['title_prefix'] - data['title_prefix'] = '' + report_item.title_prefix = data["title_prefix"] + data["title_prefix"] = "" - if 'completed' in data: - if report_item.completed != data['completed']: + if "completed" in data: + if report_item.completed != data["completed"]: modified = True - report_item.completed = data['completed'] - data['completed'] = '' + report_item.completed = data["completed"] + data["completed"] = "" - if 'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - # sometime we compare: int & int or int & str - if str(attribute.id) == str(data['attribute_id']): - if attribute.value != data['attribute_value']: + # Compare attribute IDs + if str(attribute.id) == str(data["attribute_id"]): + if attribute.value != data["attribute_value"]: + modified = True + attribute.value = data["attribute_value"] + data["attribute_value"] = "" + attribute.user = user + attribute.last_updated = datetime.now() + if attribute.value_description != data["value_description"]: modified = True - attribute.value = data['attribute_value'] - data['attribute_value'] = '' + attribute.value_description = data["value_description"] + data["value_description"] = "" attribute.user = user attribute.last_updated = datetime.now() break - if 'add' in data: - if 'attribute_id' in data: + if "add" in data: + if "attribute_id" in data: modified = True - new_attribute = ReportItemAttribute(None, "", None, 0, None, data['attribute_group_item_id'], None) + new_attribute = ReportItemAttribute(None, "", None, 0, None, data["attribute_group_item_id"], None) new_attribute.user = user report_item.attributes.append(new_attribute) - if 'aggregate_ids' in data: + if "aggregate_ids" in data: modified = True - for aggregate_id in data['aggregate_ids']: + for aggregate_id in data["aggregate_ids"]: aggregate = NewsItemAggregate.find(aggregate_id) report_item.news_item_aggregates.append(aggregate) - if 'remote_report_item_ids' in data: + if "remote_report_item_ids" in data: modified = True - for remote_report_item_id in data['remote_report_item_ids']: + for remote_report_item_id in data["remote_report_item_ids"]: remote_report_item = ReportItem.find(remote_report_item_id) report_item.remote_report_items.append(remote_report_item) - if 'delete' in data: - if 'attribute_id' in data: + if "delete" in data: + if "attribute_id" in data: attribute_to_delete = None for attribute in report_item.attributes: # sometime we compare: int & int or int & str - if str(attribute.id) == str(data['attribute_id']): + if str(attribute.id) == str(data["attribute_id"]): attribute_to_delete = attribute break @@ -417,10 +745,10 @@ def update_report_item(cls, id, data, user): modified = True report_item.attributes.remove(attribute_to_delete) - if 'aggregate_id' in data: + if "aggregate_id" in data: aggregate_to_delete = None for aggregate in report_item.news_item_aggregates: - if aggregate.id == 
data['aggregate_id']: + if aggregate.id == data["aggregate_id"]: aggregate_to_delete = aggregate break @@ -428,10 +756,10 @@ def update_report_item(cls, id, data, user): modified = True report_item.news_item_aggregates.remove(aggregate_to_delete) - if 'remote_report_item_id' in data: + if "remote_report_item_id" in data: remote_report_item_to_delete = None for remote_report_item in report_item.remote_report_items: - if remote_report_item.id == data['remote_report_item_id']: + if remote_report_item.id == data["remote_report_item_id"]: remote_report_item_to_delete = remote_report_item break @@ -441,73 +769,99 @@ def update_report_item(cls, id, data, user): if modified: report_item.last_updated = datetime.now() - data['user_id'] = user.id - data['report_item_id'] = int(id) + data["user_id"] = user.id + data["report_item_id"] = int(id) report_item.update_cpes() db.session.commit() if new_attribute is not None: - data['attribute_id'] = new_attribute.id + data["attribute_id"] = new_attribute.id return modified, data @classmethod def get_updated_data(cls, id, data): + """Get the updated data for a report item. + + This method retrieves the updated data for a report item based on the provided ID and data. + Arguments: + cls (class): The class object. + id (int): The ID of the report item. + data (dict): The data containing the updates. + Returns: + dict: The updated data for the report item. + """ report_item = cls.query.get(id) if report_item is not None: - if 'update' in data: - if 'title' in data: - data['title'] = report_item.title + if "update" in data: + if "title" in data: + data["title"] = report_item.title - if 'title_prefix' in data: - data['title_prefix'] = report_item.title_prefix + if "title_prefix" in data: + data["title_prefix"] = report_item.title_prefix - if 'completed' in data: - data['completed'] = report_item.completed + if "completed" in data: + data["completed"] = report_item.completed - if 'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - if str(attribute.id) == data['attribute_id']: - data['attribute_value'] = attribute.value - data['attribute_last_updated'] = attribute.last_updated.strftime('%d.%m.%Y - %H:%M') - data['attribute_user'] = attribute.user.name + if str(attribute.id) == data["attribute_id"]: + data["attribute_value"] = attribute.value + data["attribute_value_description"] = attribute.value_description + data["attribute_last_updated"] = attribute.last_updated.strftime("%d.%m.%Y - %H:%M") + data["attribute_user"] = attribute.user.name break - if 'add' in data: - if 'aggregate_ids' in data: + if "add" in data: + if "aggregate_ids" in data: schema = NewsItemAggregateSchema() - data['news_item_aggregates'] = [] - for aggregate_id in data['aggregate_ids']: + data["news_item_aggregates"] = [] + for aggregate_id in data["aggregate_ids"]: aggregate = NewsItemAggregate.find(aggregate_id) - data['news_item_aggregates'].append(schema.dump(aggregate)) + data["news_item_aggregates"].append(schema.dump(aggregate)) - if 'remote_report_item_ids' in data: + if "remote_report_item_ids" in data: schema = RemoteReportItemSchema() - data['remote_report_items'] = [] - for remote_report_item_id in data['remote_report_item_ids']: + data["remote_report_items"] = [] + for remote_report_item_id in data["remote_report_item_ids"]: remote_report_item = ReportItem.find(remote_report_item_id) - data['remote_report_items'].append(schema.dump(remote_report_item)) + data["remote_report_items"].append(schema.dump(remote_report_item)) - if 
'attribute_id' in data: + if "attribute_id" in data: for attribute in report_item.attributes: - if str(attribute.id) == data['attribute_id']: - data['attribute_value'] = attribute.value - data['binary_mime_type'] = attribute.binary_mime_type - data['binary_size'] = attribute.binary_size - data['binary_description'] = attribute.binary_description - data['attribute_last_updated'] = attribute.last_updated.strftime('%d.%m.%Y - %H:%M') - data['attribute_user'] = attribute.user.name + if str(attribute.id) == data["attribute_id"]: + data["attribute_value"] = attribute.value + data["attribute_value_description"] = attribute.value_description + data["binary_mime_type"] = attribute.binary_mime_type + data["binary_size"] = attribute.binary_size + data["binary_description"] = attribute.binary_description + data["attribute_last_updated"] = attribute.last_updated.strftime("%d.%m.%Y - %H:%M") + data["attribute_user"] = attribute.user.name break return data @classmethod def add_attachment(cls, id, attribute_group_item_id, user, file, description): + """Add an attachment to a report item. + + Arguments: + id (int): The ID of the report item. + attribute_group_item_id (int): The ID of the attribute group item. + user (User): The user who is adding the attachment. + file (FileStorage): The file to be attached. + description (str): The description of the attachment. + Returns: + dict: A dictionary containing information about the attachment. + - "add" (bool): True if the attachment was added successfully. + - "user_id" (int): The ID of the user who added the attachment. + - "report_item_id" (int): The ID of the report item. + - "attribute_id" (int): The ID of the newly created attribute. + """ report_item = cls.query.get(id) file_data = file.read() - new_attribute = ReportItemAttribute(None, file.filename, file.mimetype, len(file_data), description, - attribute_group_item_id, None) + new_attribute = ReportItemAttribute(None, file.filename, file.mimetype, len(file_data), description, attribute_group_item_id, None) new_attribute.user = user new_attribute.binary_data = file_data report_item.attributes.append(new_attribute) @@ -515,10 +869,10 @@ def add_attachment(cls, id, attribute_group_item_id, user, file, description): report_item.last_updated = datetime.now() data = dict() - data['add'] = True - data['user_id'] = user.id - data['report_item_id'] = int(id) - data['attribute_id'] = new_attribute.id + data["add"] = True + data["user_id"] = user.id + data["report_item_id"] = int(id) + data["attribute_id"] = new_attribute.id db.session.commit() @@ -526,6 +880,20 @@ def add_attachment(cls, id, attribute_group_item_id, user, file, description): @classmethod def remove_attachment(cls, id, attribute_id, user): + """Remove an attachment from a report item. + + Arguments: + cls (ReportItem): The class object. + id (int): The ID of the report item. + attribute_id (int): The ID of the attribute to be removed. + user (User): The user performing the action. + Returns: + dict: A dictionary containing information about the deletion. + - delete (bool): Indicates whether the attribute was successfully deleted. + - user_id (int): The ID of the user performing the action. + - report_item_id (int): The ID of the report item. + - attribute_id (int): The ID of the attribute that was deleted. 
+ """ report_item = cls.query.get(id) attribute_to_delete = None for attribute in report_item.attributes: @@ -539,10 +907,10 @@ def remove_attachment(cls, id, attribute_id, user): report_item.last_updated = datetime.now() data = dict() - data['delete'] = True - data['user_id'] = user.id - data['report_item_id'] = int(id) - data['attribute_id'] = attribute_id + data["delete"] = True + data["user_id"] = user.id + data["report_item_id"] = int(id) + data["attribute_id"] = attribute_id db.session.commit() @@ -550,13 +918,27 @@ def remove_attachment(cls, id, attribute_id, user): @classmethod def delete_report_item(cls, id): + """Delete a report item by its ID. + + Arguments: + id (int): The ID of the report item to be deleted. + Returns: + tuple: A tuple containing the status message and the HTTP status code. + The status message is "success" if the report item was deleted successfully. + The HTTP status code is 200 if the report item was deleted successfully. + """ report_item = cls.query.get(id) if report_item is not None: db.session.delete(report_item) db.session.commit() - return 'success', 200 + return "success", 200 def update_cpes(self): + """Update the list of CPES for the report item. + + This method clears the existing list of CPES and populates it with new CPES + based on the attributes of the report item. Only attributes of type CPE are considered. + """ self.report_item_cpes = [] if self.completed is True: for attribute in self.attributes: @@ -566,10 +948,25 @@ def update_cpes(self): class ReportItemCpe(db.Model): + """A class representing a CPE (Common Platform Enumeration) report item. + + Attributes: + id (int): The unique identifier of the report item. + value (str): The value of the report item. + report_item_id (int): The foreign key referencing the parent report item. + Args: + db (object): The database object. + """ + id = db.Column(db.Integer, primary_key=True) value = db.Column(db.String()) - report_item_id = db.Column(db.Integer, db.ForeignKey('report_item.id')) + report_item_id = db.Column(db.Integer, db.ForeignKey("report_item.id")) def __init__(self, value): + """Initialize a ReportItemCpe object. + + Args: + value (any): The value of the report item. + """ self.id = None self.value = value diff --git a/src/shared/shared/schema/report_item.py b/src/shared/shared/schema/report_item.py index fb6723b47..9dd0b97d9 100644 --- a/src/shared/shared/schema/report_item.py +++ b/src/shared/shared/schema/report_item.py @@ -1,3 +1,24 @@ +"""This module contains schemas and classes for representing report items and their attributes. + +The module includes the following classes: +- ReportItemAttributeBaseSchema: Schema for representing a report item attribute. +- ReportItemAttributeSchema: Schema for representing a report item attribute with additional fields. +- ReportItemAttribute: Class representing an attribute of a report item. +- ReportItemBaseSchema: Schema for the base report item. +- RemoteReportItemSchema: Schema for remote report items. +- ReportItemSchema: Schema for serializing and deserializing ReportItem objects. +- ReportItemAttributeRemoteSchema: Schema for representing a remote attribute of a report item. +- ReportItemRemoteSchema: Schema for representing a remote report item. +- ReportItemPresentationSchema: Schema for presenting a report item. +- ReportItem: Class representing a report item. + +The module also imports schemas from other modules: +- PresentationSchema: Schema for presentation. 
+- NewsItemAggregateSchema: Schema for representing news item aggregates. +- ACLEntryStatusSchema: Schema for the ACL entry status. +- UserSchemaBase: Schema for representing user data. +""" + from marshmallow import Schema, fields, post_load, EXCLUDE from shared.schema.presentation import PresentationSchema @@ -7,11 +28,27 @@ class ReportItemAttributeBaseSchema(Schema): + """Schema for representing a report item attribute. + + Attributes: + id (int): The ID of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary attribute. + binary_size (int): The size of the binary attribute. + binary_description (str): The description of the binary attribute. + attribute_group_item_title (str): The title of the attribute group item. + attribute_group_item_id (int): The ID of the attribute group item. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItemAttributeBase schema.""" + unknown = EXCLUDE id = fields.Int(load_default=None, allow_none=True) value = fields.Str() + value_description = fields.Str(load_default=None, allow_none=True) binary_mime_type = fields.Str(load_default=None, allow_none=True) binary_size = fields.Int(load_default=0) binary_description = fields.Str(load_default=None, allow_none=True) @@ -20,6 +57,17 @@ class Meta: class ReportItemAttributeSchema(ReportItemAttributeBaseSchema): + """Schema for representing a report item attribute. + + This schema defines the structure and validation rules for a report item attribute. + + Arguments: + ReportItemAttributeBaseSchema -- The base schema for a report item attribute. + + Returns: + An instance of the ReportItemAttributeSchema class. + """ + created = fields.DateTime("%d.%m.%Y - %H:%M") last_updated = fields.DateTime("%d.%m.%Y - %H:%M") version = fields.Int() @@ -28,14 +76,44 @@ class ReportItemAttributeSchema(ReportItemAttributeBaseSchema): @post_load def make(self, data, **kwargs): + """Create a new ReportItemAttribute object. + + This method takes in data and creates a new ReportItemAttribute object using the provided data. + + Arguments: + data (dict): A dictionary containing the data for creating the ReportItemAttribute object. + **kwargs: Additional keyword arguments. + + Returns: + ReportItemAttribute: The newly created ReportItemAttribute object. + """ return ReportItemAttribute(**data) class ReportItemAttribute: + """Represents an attribute of a report item. + + Attributes: + id (int): The ID of the attribute. + value (str): The value of the attribute. + value_description (str): The description of the attribute value. + binary_mime_type (str): The MIME type of the binary data associated with the attribute. + binary_size (int): The size of the binary data associated with the attribute. + binary_description (str): The description of the binary data associated with the attribute. + attribute_group_item_id (int): The ID of the attribute group item. + attribute_group_item_title (str): The title of the attribute group item. + created (datetime): The date and time when the attribute was created. + last_updated (datetime): The date and time when the attribute was last updated. + version (int): The version of the attribute. + current (bool): Indicates whether the attribute is the current version. + user (str): The user who created or last updated the attribute. 
+ """ + def __init__( self, id, value, + value_description, binary_mime_type, binary_size, binary_description, @@ -47,8 +125,10 @@ def __init__( current, user, ): + """Initialize a new instance of the ReportItem class.""" self.id = id self.value = value + self.value_description = value_description self.created = created self.last_updated = last_updated self.version = version @@ -62,7 +142,22 @@ def __init__( class ReportItemBaseSchema(Schema): + """Schema for the base report item. + + Attributes: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (DateTime): The date and time when the report item was created. + last_updated (DateTime): The date and time when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + report_item_type_id (int): The ID of the report item type. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItem schema.""" + unknown = EXCLUDE id = fields.Int(load_default=None, allow_none=True) @@ -76,11 +171,41 @@ class Meta: class RemoteReportItemSchema(ReportItemBaseSchema, PresentationSchema): + """Schema for remote report items. + + This schema represents the structure and validation rules for remote report items. + + Arguments: + ReportItemBaseSchema -- Base schema for report items. + PresentationSchema -- Schema for presentation. + + Attributes: + remote_user (str): The remote user associated with the report item. + attributes (list): List of nested report item attributes. + """ + remote_user = fields.Str(allow_none=True) attributes = fields.Nested(ReportItemAttributeSchema, many=True) class ReportItemSchema(ReportItemBaseSchema): + """Schema for serializing and deserializing ReportItem objects. + + Inherits from ReportItemBaseSchema. + + Attributes: + news_item_aggregates (List[NewsItemAggregateSchema]): List of nested NewsItemAggregateSchema objects. + remote_report_items (List[RemoteReportItemSchema]): List of nested RemoteReportItemSchema objects. + attributes (List[ReportItemAttributeSchema]): List of nested ReportItemAttributeSchema objects. + remote_user (str): Remote user associated with the report item. + + Methods: + make(data, **kwargs): Post-load method to create a ReportItem object from deserialized data. + + Returns: + ReportItemSchema: An instance of the ReportItemSchema class. + """ + news_item_aggregates = fields.Nested(NewsItemAggregateSchema, many=True) remote_report_items = fields.Nested(RemoteReportItemSchema, many=True) attributes = fields.Nested(ReportItemAttributeSchema, many=True) @@ -88,15 +213,38 @@ class ReportItemSchema(ReportItemBaseSchema): @post_load def make(self, data, **kwargs): + """Create a new ReportItem object. + + This method takes in data and creates a new ReportItem object using the provided data. + + Arguments: + data (dict): A dictionary containing the data for the ReportItem. + + Returns: + ReportItem: A new ReportItem object. + """ return ReportItem(**data) class ReportItemAttributeRemoteSchema(Schema): + """A schema for representing a remote attribute of a report item. + + Attributes: + attribute_group_item_title (str): The title of the attribute group item. + value (str): The value of the attribute. + """ + attribute_group_item_title = fields.Str() value = fields.Str() class ReportItemRemoteSchema(Schema): + """A schema for representing a remote report item. 
+ + Arguments: + Schema -- The base schema class. + """ + uuid = fields.Str(allow_none=True) title = fields.Str() title_prefix = fields.Str() @@ -105,10 +253,41 @@ class ReportItemRemoteSchema(Schema): class ReportItemPresentationSchema(ReportItemBaseSchema, ACLEntryStatusSchema, PresentationSchema): + """Schema for presenting a report item. + + This schema inherits from the ReportItemBaseSchema, ACLEntryStatusSchema, and PresentationSchema classes. + + Arguments: + ReportItemBaseSchema -- Schema for the base report item. + ACLEntryStatusSchema -- Schema for the ACL entry status. + PresentationSchema -- Schema for the presentation. + + Attributes: + remote_user -- String field representing the remote user. Allows None as a value. + """ + remote_user = fields.Str(allow_none=True) class ReportItem: + """ + Represents a report item. + + Attributes: + id (int): The ID of the report item. + uuid (str): The UUID of the report item. + title (str): The title of the report item. + title_prefix (str): The prefix of the report item title. + created (datetime): The date and time when the report item was created. + last_updated (datetime): The date and time when the report item was last updated. + completed (bool): Indicates whether the report item is completed or not. + report_item_type_id (int): The ID of the report item type. + news_item_aggregates (list): A list of news item aggregates associated with the report item. + remote_report_items (list): A list of remote report items associated with the report item. + attributes (dict): Additional attributes of the report item. + remote_user (str): The remote user associated with the report item. + """ + def __init__( self, id, @@ -124,6 +303,7 @@ def __init__( attributes, remote_user, ): + """Initialize a ReportItem object.""" self.id = id self.uuid = uuid self.title = title @@ -139,16 +319,53 @@ def __init__( class ReportItemIdSchema(Schema): + """Schema for Report Item ID. + + This schema defines the structure for the Report Item ID. + + Arguments: + Schema -- The base schema class. + + Returns: + An instance of the ReportItemId class. + """ + class Meta: + """Meta class for configuring the behavior of the ReportItemId schema.""" + unknown = EXCLUDE id = fields.Int() @post_load def make(self, data, **kwargs): + """Create a new ReportItemId object. + + This method takes in data and returns a new ReportItemId object. + + Arguments: + data (dict): The data used to create the ReportItemId object. + + Returns: + ReportItemId: A new ReportItemId object. + """ return ReportItemId(**data) class ReportItemId: + """A class representing the ID of a report item. + + Args: + id (int): The ID of the report item. + + Attributes: + id (int): The ID of the report item. + """ + def __init__(self, id): + """Initialize a ReportItem object. + + Args: + id (int): The ID of the report item. 
+ """ self.id = id From beedecbd477998f765dcf9fda206bb16d7cf04ac Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 20 Jun 2024 21:11:09 +0200 Subject: [PATCH 115/146] update presenter --- src/presenters/presenters/base_presenter.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/presenters/presenters/base_presenter.py b/src/presenters/presenters/base_presenter.py index b56d1bf06..b932c08f8 100644 --- a/src/presenters/presenters/base_presenter.py +++ b/src/presenters/presenters/base_presenter.py @@ -107,12 +107,15 @@ def __init__(self, report_item, report_types, attribute_map): attr_type = attribute_map[attribute_group_item_id] attr_key = attr_type.title.lower().replace(" ", "_") - max_occurrence = attr_type.max_occurrence - value_to_add = ( - attribute_group_item[0].value - if max_occurrence == 1 and attribute_group_item - else [attribute.value for attribute in attribute_group_item] - ) + if attr_key.startswith("cwe"): + value_to_add = {attribute.value: attribute.value_description for attribute in attribute_group_item} + else: + max_occurrence = attr_type.max_occurrence + value_to_add = ( + attribute_group_item[0].value + if max_occurrence == 1 and attribute_group_item + else [attribute.value for attribute in attribute_group_item] + ) how_many_with_the_same_name = len(attribute_groups[attr_key]) if how_many_with_the_same_name == 1: From 288d5230dcdd5388c355f38c4c5af3adca28463b Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Tue, 25 Jun 2024 16:08:10 +0200 Subject: [PATCH 116/146] Added "Last attempt" and "Last collected" date columns in OSINT sources configuration view --- src/collectors/collectors/atom_collector.py | 6 +++++ src/collectors/collectors/base_collector.py | 6 +++++ src/collectors/collectors/email_collector.py | 7 ++++- src/collectors/collectors/rss_collector.py | 2 ++ .../collectors/scheduled_tasks_collector.py | 6 +++++ src/collectors/collectors/slack_collector.py | 7 ++++- .../collectors/twitter_collector.py | 6 +++++ src/collectors/collectors/web_collector.py | 12 +++++---- src/collectors/managers/collectors_manager.py | 9 ++++--- src/collectors/remote/core_api.py | 14 ++++++++++ src/core/api/collectors.py | 11 ++++++++ src/core/model/news_item.py | 3 +++ src/core/model/osint_source.py | 27 ++++++++++++------- .../config/osint_sources/CardSource.vue | 9 ++++++- src/gui/src/i18n/cs/messages.js | 2 ++ src/gui/src/i18n/en/messages.js | 2 ++ src/gui/src/i18n/sk/messages.js | 2 ++ src/shared/shared/schema/osint_source.py | 2 ++ 18 files changed, 112 insertions(+), 21 deletions(-) diff --git a/src/collectors/collectors/atom_collector.py b/src/collectors/collectors/atom_collector.py index ce3ab2c4f..7ed4832cb 100644 --- a/src/collectors/collectors/atom_collector.py +++ b/src/collectors/collectors/atom_collector.py @@ -30,7 +30,13 @@ class AtomCollector(BaseCollector): @BaseCollector.ignore_exceptions def collect(self, source): + """Collect data from Atom feed. + Arguments: + source -- Source object. 
+ """ + + BaseCollector.update_last_attempt(source) feed_url = source.parameter_values['ATOM_FEED_URL'] user_agent = source.parameter_values['USER_AGENT'] interval = source.parameter_values['REFRESH_INTERVAL'] diff --git a/src/collectors/collectors/base_collector.py b/src/collectors/collectors/base_collector.py index b9b8f18e5..0d07a6095 100644 --- a/src/collectors/collectors/base_collector.py +++ b/src/collectors/collectors/base_collector.py @@ -35,6 +35,12 @@ def get_info(self): def collect(self, source): pass + @staticmethod + def update_last_attempt(source): + response, status_code = CoreApi.update_collector_last_attepmt(source.id) + if status_code != 200: + log_critical("Update last attempt: HTTP {}, response: {}".format(status_code, response)) + # wrap scheduled action with exception because scheduler fail plan next one @staticmethod def ignore_exceptions(func): diff --git a/src/collectors/collectors/email_collector.py b/src/collectors/collectors/email_collector.py index 40f751c96..34a7eecfe 100644 --- a/src/collectors/collectors/email_collector.py +++ b/src/collectors/collectors/email_collector.py @@ -33,9 +33,14 @@ class EmailCollector(BaseCollector): @BaseCollector.ignore_exceptions def collect(self, source): + """Collect data from email source. - news_items = [] + Arguments: + source -- Source object. + """ + BaseCollector.update_last_attempt(source) + news_items = [] email_server_type = source.parameter_values['EMAIL_SERVER_TYPE'] email_server_hostname = source.parameter_values['EMAIL_SERVER_HOSTNAME'] email_server_port = source.parameter_values['EMAIL_SERVER_PORT'] diff --git a/src/collectors/collectors/rss_collector.py b/src/collectors/collectors/rss_collector.py index 496251163..476a2d9c2 100644 --- a/src/collectors/collectors/rss_collector.py +++ b/src/collectors/collectors/rss_collector.py @@ -46,6 +46,8 @@ def collect(self, source): Arguments: source -- Source object. """ + + BaseCollector.update_last_attempt(source) feed_url = source.parameter_values["FEED_URL"] interval = source.parameter_values["REFRESH_INTERVAL"] links_limit = BaseCollector.read_int_parameter("LINKS_LIMIT", 0, source) diff --git a/src/collectors/collectors/scheduled_tasks_collector.py b/src/collectors/collectors/scheduled_tasks_collector.py index 58dd46ee8..61c87b3d0 100644 --- a/src/collectors/collectors/scheduled_tasks_collector.py +++ b/src/collectors/collectors/scheduled_tasks_collector.py @@ -25,7 +25,13 @@ class ScheduledTasksCollector(BaseCollector): @BaseCollector.ignore_exceptions def collect(self, source): + """Collect data from scheduled tasks. + Arguments: + source -- Source object. + """ + + BaseCollector.update_last_attempt(source) news_items = [] head, tail = os.path.split(source.parameter_values['TASK_COMMAND']) task_title = source.parameter_values['TASK_TITLE'] diff --git a/src/collectors/collectors/slack_collector.py b/src/collectors/collectors/slack_collector.py index 9241e822d..8ae4334fd 100644 --- a/src/collectors/collectors/slack_collector.py +++ b/src/collectors/collectors/slack_collector.py @@ -25,9 +25,14 @@ class SlackCollector(BaseCollector): @BaseCollector.ignore_exceptions def collect(self, source): + """Collect data from Slack source. - news_items = [] + Arguments: + source -- Source object. 
+ """ + BaseCollector.update_last_attempt(source) + news_items = [] proxy_server = source.parameter_values['PROXY_SERVER'] if proxy_server: diff --git a/src/collectors/collectors/twitter_collector.py b/src/collectors/collectors/twitter_collector.py index 888083f69..a3b6ddd73 100644 --- a/src/collectors/collectors/twitter_collector.py +++ b/src/collectors/collectors/twitter_collector.py @@ -30,8 +30,14 @@ class TwitterCollector(BaseCollector): @BaseCollector.ignore_exceptions def collect(self, source): + """Collect data from X source. + + Arguments: + source -- Source object. + """ try: + BaseCollector.update_last_attempt(source) news_items = [] attributes = [] diff --git a/src/collectors/collectors/web_collector.py b/src/collectors/collectors/web_collector.py index 346bbb0bb..3e845d6ac 100644 --- a/src/collectors/collectors/web_collector.py +++ b/src/collectors/collectors/web_collector.py @@ -462,6 +462,7 @@ def __run_tor(self): def collect(self, source): """Collects news items from this source (main function)""" + BaseCollector.update_last_attempt(source) self.source = source log_manager.log_collector_activity('web', self.source.name, 'Starting collector') @@ -597,9 +598,11 @@ def __process_title_page_articles(self, browser, title_page_handle, index_url): for item in article_items: count += 1 # try: - # print(item.get_attribute('outerHTML'), flush=True) - # except Exception: + # print("H: {0} {1:.200}".format(count, item.get_attribute('outerHTML')), flush=True) + # except Exception as ex: # pass + # if first item works but next items have problems - it's because this: + # https://www.selenium.dev/documentation/webdriver/troubleshooting/errors/#stale-element-reference-exception link = None try: link = item.get_attribute('href') @@ -632,10 +635,9 @@ def __process_title_page_articles(self, browser, title_page_handle, index_url): try: news_item = self.__process_article_page(index_url, browser) if news_item: - log_manager.log_collector_activity('web', self.source.name, 'Successfully parsed an article') # log_manager.log_collector_activity('web', self.source.name, '... Title : {0}'.format(news_item.title)) - # log_manager.log_collector_activity('web', self.source.name, '... Review : {0:.100}'.format(news_item.review)) - # log_manager.log_collector_activity('web', self.source.name, '... Content : {0:.100}'.format(news_item.content)) + # log_manager.log_collector_activity('web', self.source.name, '... Review : {0:.100}'.format(news_item.review.replace("\r", "").replace("\n", " ").strip())) + # log_manager.log_collector_activity('web', self.source.name, '... Content : {0:.100}'.format(news_item.content.replace("\r", "").replace("\n", " ").strip())) # log_manager.log_collector_activity('web', self.source.name, '... Author : {0}'.format(news_item.author)) # log_manager.log_collector_activity('web', self.source.name, '... 
Published: {0}'.format(news_item.published)) self.news_items.append(news_item) diff --git a/src/collectors/managers/collectors_manager.py b/src/collectors/managers/collectors_manager.py index 716982f7a..c9d23a2f5 100644 --- a/src/collectors/managers/collectors_manager.py +++ b/src/collectors/managers/collectors_manager.py @@ -2,7 +2,7 @@ import threading import time -from managers.log_manager import log_debug, log_system_activity +from managers.log_manager import log_debug, log_system_activity, log_warning from collectors.atom_collector import AtomCollector from collectors.email_collector import EmailCollector from collectors.manual_collector import ManualCollector @@ -19,9 +19,10 @@ def reportStatus(): while True: log_debug("[{}] Sending status update...".format(__name__)) - response, code = CoreApi.update_collector_status() - log_debug("[{}] Core responded with: HTTP {}, {}".format(__name__, code, response)) - # for debuging scheduler tasks + response, status_code = CoreApi.update_collector_status() + if status_code != 200: + log_warning("[{}] Core status update response: HTTP {}, {}".format(__name__, status_code, response)) + # for debugging scheduler tasks # for key in collectors: # for source in collectors[key].osint_sources: # if hasattr(source, "scheduler_job"): diff --git a/src/collectors/remote/core_api.py b/src/collectors/remote/core_api.py index 03947369b..44cfde4d8 100755 --- a/src/collectors/remote/core_api.py +++ b/src/collectors/remote/core_api.py @@ -103,6 +103,20 @@ def update_collector_status(cls): logger.debug(ex) return ex, 400 + @classmethod + def update_collector_last_attepmt(cls, source_id): + """Update collector's "last attempted" record with current datetime. + + Returns: + tuple: A tuple containing the JSON response and the HTTP status code. + """ + try: + response = requests.get(cls.api_url + "/api/v1/collectors/osint-sources/" + urllib.parse.quote(source_id) + "/attempt", headers=cls.headers) + return response.json(), response.status_code + except Exception as ex: + logger.debug(ex) + return ex, 400 + @classmethod def add_news_items(cls, news_items): """Add news items to the collector. 
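Illustrative sketch only, not part of this patch: the pattern PATCH 116 applies to every collector is to record the attempt first and publish at the end (publishing stored items is what makes the core record "last collected"). "ExampleCollector" below is a hypothetical name; the real collectors (atom, email, rss, slack, web) follow the same shape.

    # sketch of the collector-side pattern; assumes it lives next to base_collector.py
    from .base_collector import BaseCollector

    class ExampleCollector(BaseCollector):          # hypothetical collector
        type = "EXAMPLE_COLLECTOR"
        name = "Example Collector"

        @BaseCollector.ignore_exceptions
        def collect(self, source):
            """Collect data from an example source.

            Arguments:
                source -- Source object.
            """
            BaseCollector.update_last_attempt(source)   # records "last attempted" via the core API
            news_items = []
            # ... build NewsItemData objects from the source here ...
            BaseCollector.publish(news_items, source)   # stored items cause the core to update "last collected"
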
diff --git a/src/core/api/collectors.py b/src/core/api/collectors.py index c4018f134..c59737fb7 100644 --- a/src/core/api/collectors.py +++ b/src/core/api/collectors.py @@ -26,6 +26,16 @@ def get(self, collector_id): return osint_source.OSINTSource.get_all_for_collector_json(node, parameters.collector_type) +class OSINTSourceLastAttempt(Resource): + + @api_key_required + def get(self, osint_source_id): + source = osint_source.OSINTSource.get_by_id(osint_source_id) + if not source: + return {}, 404 + source.update_last_attempt(osint_source_id) + return {}, 200 + class AddNewsItems(Resource): @api_key_required @@ -68,5 +78,6 @@ def get(self, collector_id): def initialize(api): api.add_resource(OSINTSourcesForCollectors, "/api/v1/collectors//osint-sources") api.add_resource(OSINTSourceStatusUpdate, "/api/v1/collectors/osint-sources/") + api.add_resource(OSINTSourceLastAttempt, "/api/v1/collectors/osint-sources//attempt") api.add_resource(CollectorStatusUpdate, "/api/v1/collectors/") api.add_resource(AddNewsItems, "/api/v1/collectors/news-items") diff --git a/src/core/model/news_item.py b/src/core/model/news_item.py index 850f38944..21378a87e 100644 --- a/src/core/model/news_item.py +++ b/src/core/model/news_item.py @@ -555,6 +555,9 @@ def add_news_items(cls, news_items_data_list): db.session.commit() + for source_id in osint_source_ids: + OSINTSource.update_collected(source_id) + return osint_source_ids @classmethod diff --git a/src/core/model/osint_source.py b/src/core/model/osint_source.py index 02c4b1a7d..bf0fe3742 100644 --- a/src/core/model/osint_source.py +++ b/src/core/model/osint_source.py @@ -209,18 +209,27 @@ def update(cls, osint_source_id, data): return osint_source, default_group - def update_status(self, status_schema): - # if not collected, do not change last collected timestamp - if status_schema.last_collected: - self.last_collected = status_schema.last_collected + @classmethod + def update_collected(cls, osint_source_id): + """Update collector's "last collected" record with current datetime (only when some data is collected). - # if not attempted, do not change last collected timestamp - if status_schema.last_attempted: - self.last_attempted = status_schema.last_attempted + Args: + osint_source_id (int): Osint source Id. + """ + osint_source = cls.query.get(osint_source_id) + osint_source.last_collected = datetime.now() + db.session.commit() - self.last_error_message = status_schema.last_error_message - self.last_data = status_schema.last_data + @classmethod + def update_last_attempt(cls, osint_source_id): + """Update collector's "last attempted" record with current datetime. + Args: + osint_source_id (int): Osint source Id. + """ + osint_source = cls.query.get(osint_source_id) + osint_source.last_attempted = datetime.now() + db.session.commit() class OSINTSourceParameterValue(db.Model): osint_source_id = db.Column(db.String, db.ForeignKey('osint_source.id'), primary_key=True) diff --git a/src/gui/src/components/config/osint_sources/CardSource.vue b/src/gui/src/components/config/osint_sources/CardSource.vue index e1ae15732..96f08e2c1 100644 --- a/src/gui/src/components/config/osint_sources/CardSource.vue +++ b/src/gui/src/components/config/osint_sources/CardSource.vue @@ -24,11 +24,18 @@
   {{$t('card_item.description')}}
   {{card.description}}
+  {{$t('osint_source.last_attempt')}}
+  {{card.last_attempted}}
+  {{$t('osint_source.last_collected')}}
+  {{card.last_collected}}
   {{$t('osint_source.type')}}
   {{card.collector.type}}
- diff --git a/src/gui/src/i18n/cs/messages.js b/src/gui/src/i18n/cs/messages.js index 6e5f9ac25..fcf12b3dc 100644 --- a/src/gui/src/i18n/cs/messages.js +++ b/src/gui/src/i18n/cs/messages.js @@ -202,6 +202,8 @@ const messages_cs = { error: "Tento zdroj OSINT se nepodařilo uložit", name: "Název", description: "Popis", + last_attempt: "Poslední pokus", + last_collected: "Poslední sběr", successful: "Nový zdroj OSINT byl přidán", successful_edit: "Zdroj OSINT byl upraven", removed: "Zdroj OSINT byl úspěšně smazán", diff --git a/src/gui/src/i18n/en/messages.js b/src/gui/src/i18n/en/messages.js index 20b634b92..7525737f1 100644 --- a/src/gui/src/i18n/en/messages.js +++ b/src/gui/src/i18n/en/messages.js @@ -202,6 +202,8 @@ const messages_en = { error: "Could not save this OSINT source", name: "Name", description: "Description", + last_attempt: "Last attempt", + last_collected: "Last collected", successful: "New OSINT source was successfully added", successful_edit: "OSINT source was successfully updated", removed: "OSINT source was successfully deleted", diff --git a/src/gui/src/i18n/sk/messages.js b/src/gui/src/i18n/sk/messages.js index ee3df1014..f26fa17cd 100644 --- a/src/gui/src/i18n/sk/messages.js +++ b/src/gui/src/i18n/sk/messages.js @@ -84,6 +84,8 @@ const messages_sk = { error: "Nepodarilo sa vytvoriť zadaný zdroj.", name: "Meno", description: "Popis", + last_attempt: "Posledný pokus", + last_collected: "Posledný zber", successful: "Nový OSINT zdroj bol úspešne pridaný" }, diff --git a/src/shared/shared/schema/osint_source.py b/src/shared/shared/schema/osint_source.py index 0bfbc6063..d6ca76ec7 100644 --- a/src/shared/shared/schema/osint_source.py +++ b/src/shared/shared/schema/osint_source.py @@ -58,6 +58,8 @@ class OSINTSourceSchema(OSINTSourceSchemaBase): name = fields.Str() description = fields.Str() collector_id = fields.Str() + last_attempted = fields.DateTime("%d.%m.%Y - %H:%M:%S") + last_collected = fields.DateTime("%d.%m.%Y - %H:%M:%S") class OSINTSourceCollectorSchema(Schema): From a5418b31034e10527901d8fa920a08261a091302 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:19:23 +0000 Subject: [PATCH 117/146] Bump requests from 2.32.2 to 2.32.3 in /src/publishers Bumps [requests](https://github.com/psf/requests) from 2.32.2 to 2.32.3. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.32.2...v2.32.3) --- updated-dependencies: - dependency-name: requests dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/publishers/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/publishers/requirements.txt b/src/publishers/requirements.txt index c5bcb2a2c..d4631e3be 100644 --- a/src/publishers/requirements.txt +++ b/src/publishers/requirements.txt @@ -9,7 +9,7 @@ marshmallow-enum==1.5.1 oauth2client==4.1.3 paramiko==3.4.0 python-dotenv==1.0.1 -requests==2.32.2 +requests==2.32.3 tweepy==4.14.0 urllib3==2.2.2 pymisp==2.4.190 From 5acae5edb5406766ee6df6e88a60e72e1c84e783 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:19:44 +0000 Subject: [PATCH 118/146] Bump marshmallow from 3.21.2 to 3.21.3 in /src/bots Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.2 to 3.21.3. 
- [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.2...3.21.3) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/bots/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bots/requirements.txt b/src/bots/requirements.txt index 3e5cf18d2..79cdee0e5 100644 --- a/src/bots/requirements.txt +++ b/src/bots/requirements.txt @@ -3,7 +3,7 @@ Flask-Cors==4.0.1 Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==22.0.0 -marshmallow==3.21.2 +marshmallow==3.21.3 marshmallow-enum==1.5.1 oauthlib==3.2.2 PySocks==1.7.1 From 13fd4c41e45ab64359894adce55b5cab7dd1aa71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:21:21 +0000 Subject: [PATCH 119/146] Bump marshmallow from 3.21.2 to 3.21.3 in /src/presenters Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.2 to 3.21.3. - [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.2...3.21.3) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/presenters/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/presenters/requirements.txt b/src/presenters/requirements.txt index a959a5672..5c42fb35a 100644 --- a/src/presenters/requirements.txt +++ b/src/presenters/requirements.txt @@ -4,7 +4,7 @@ Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==22.0.0 Jinja2==3.1.4 -marshmallow==3.21.2 +marshmallow==3.21.3 marshmallow-enum==1.5.1 python-dotenv==1.0.1 weasyprint==62.2 From f1642020b3a702c4feeaf524f7ba81aa46706c92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:22:23 +0000 Subject: [PATCH 120/146] Bump selenium from 4.21.0 to 4.22.0 in /src/collectors Bumps [selenium](https://github.com/SeleniumHQ/Selenium) from 4.21.0 to 4.22.0. - [Release notes](https://github.com/SeleniumHQ/Selenium/releases) - [Commits](https://github.com/SeleniumHQ/Selenium/compare/selenium-4.21.0...selenium-4.22.0) --- updated-dependencies: - dependency-name: selenium dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- src/collectors/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index 6878f4958..236da4e0a 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -14,6 +14,6 @@ python-dateutil==2.9.0.post0 python-dotenv==1.0.1 requests==2.32.3 schedule==1.2.2 -selenium==4.21.0 +selenium==4.22.0 slackclient==1.3.2 tweepy==4.14.0 From c2e902f42ac83a6ced773e7504e4e6be82b92982 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:22:56 +0000 Subject: [PATCH 121/146] Bump marshmallow from 3.21.2 to 3.21.3 in /src/collectors Bumps [marshmallow](https://github.com/marshmallow-code/marshmallow) from 3.21.2 to 3.21.3. 
- [Changelog](https://github.com/marshmallow-code/marshmallow/blob/dev/CHANGELOG.rst) - [Commits](https://github.com/marshmallow-code/marshmallow/compare/3.21.2...3.21.3) --- updated-dependencies: - dependency-name: marshmallow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/collectors/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index 6878f4958..5213fbd6d 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -7,7 +7,7 @@ Flask-Cors==4.0.1 Flask-RESTful==0.3.10 gevent==24.2.1 gunicorn==22.0.0 -marshmallow==3.21.2 +marshmallow==3.21.3 marshmallow-enum==1.5.1 PySocks==1.7.1 python-dateutil==2.9.0.post0 From 2c1d020b57a5e0b4eb9ee638e49a64691987658d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:23:45 +0000 Subject: [PATCH 122/146] Bump pymisp from 2.4.190 to 2.4.193 in /src/publishers Bumps [pymisp](https://github.com/MISP/PyMISP) from 2.4.190 to 2.4.193. - [Changelog](https://github.com/MISP/PyMISP/blob/main/CHANGELOG.txt) - [Commits](https://github.com/MISP/PyMISP/compare/v2.4.190...v2.4.193) --- updated-dependencies: - dependency-name: pymisp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/publishers/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/publishers/requirements.txt b/src/publishers/requirements.txt index d4631e3be..2b000df92 100644 --- a/src/publishers/requirements.txt +++ b/src/publishers/requirements.txt @@ -12,4 +12,4 @@ python-dotenv==1.0.1 requests==2.32.3 tweepy==4.14.0 urllib3==2.2.2 -pymisp==2.4.190 +pymisp==2.4.194 From 94e4a70bc8da755b790b88134fd097b53646c751 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:25:42 +0000 Subject: [PATCH 123/146] Bump weasyprint from 62.1 to 62.3 in /src/presenters Bumps [weasyprint](https://github.com/Kozea/WeasyPrint) from 62.1 to 62.3. - [Release notes](https://github.com/Kozea/WeasyPrint/releases) - [Changelog](https://github.com/Kozea/WeasyPrint/blob/main/docs/changelog.rst) - [Commits](https://github.com/Kozea/WeasyPrint/compare/v62.1...v62.3) --- updated-dependencies: - dependency-name: weasyprint dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- src/presenters/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/presenters/requirements.txt b/src/presenters/requirements.txt index 5c42fb35a..8a8e3ab78 100644 --- a/src/presenters/requirements.txt +++ b/src/presenters/requirements.txt @@ -7,4 +7,4 @@ Jinja2==3.1.4 marshmallow==3.21.3 marshmallow-enum==1.5.1 python-dotenv==1.0.1 -weasyprint==62.2 +weasyprint==62.3 From 6ec1537fec2070872d6d3e6dded931c7efd2e584 Mon Sep 17 00:00:00 2001 From: Jaroslav Svoboda Date: Wed, 26 Jun 2024 23:30:11 +0200 Subject: [PATCH 124/146] Update linting.yaml with Python 3.12 --- .github/workflows/linting.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/linting.yaml b/.github/workflows/linting.yaml index e3eb2e6c0..e9cc1c9a2 100644 --- a/.github/workflows/linting.yaml +++ b/.github/workflows/linting.yaml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.10'] + python-version: ['3.8', '3.10', '3.12'] steps: - uses: actions/checkout@v2 - name: lint with black From 0208560f4929174bfe3ef32233692a39e9d1b54f Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Thu, 27 Jun 2024 22:15:05 +0200 Subject: [PATCH 125/146] Update/rewrite slackclient to 2.9.4 Docker was unable to build - dependency conflict by websocket-client by upgrading selenium --- src/collectors/collectors/base_collector.py | 7 +- src/collectors/collectors/slack_collector.py | 112 ++++++++++--------- src/collectors/collectors/web_collector.py | 1 - src/collectors/managers/log_manager.py | 6 +- src/collectors/requirements.txt | 2 +- 5 files changed, 65 insertions(+), 63 deletions(-) diff --git a/src/collectors/collectors/base_collector.py b/src/collectors/collectors/base_collector.py index 0d07a6095..f24238db2 100644 --- a/src/collectors/collectors/base_collector.py +++ b/src/collectors/collectors/base_collector.py @@ -8,7 +8,7 @@ from functools import wraps from managers import time_manager -from managers.log_manager import log_debug, log_info, log_warning, log_critical, log_debug_trace +from managers.log_manager import log_debug, log_info, log_warning, log_critical, log_debug_trace, log_collector_activity from remote.core_api import CoreApi from shared.schema import collector, osint_source, news_item from shared.schema.parameter import Parameter, ParameterType @@ -232,6 +232,7 @@ def sanitize_news_items(news_items, source): @staticmethod def publish(news_items, source): + log_collector_activity("", source.name, "Collected {} news items".format(len(news_items))) BaseCollector.sanitize_news_items(news_items, source) filtered_news_items = BaseCollector.filter_by_word_list(news_items, source) news_items_schema = news_item.NewsItemDataSchema(many=True) @@ -261,14 +262,14 @@ def refresh(self): source_schema = osint_source.OSINTSourceSchemaBase(many=True) self.osint_sources = source_schema.load(response) - log_debug("{} data loaded".format(len(self.osint_sources))) + log_debug("{} sources loaded for {}".format(len(self.osint_sources), self.type)) # start collection for source in self.osint_sources: interval = source.parameter_values["REFRESH_INTERVAL"] # do not schedule if no interval is set if interval == '' or interval == '0': - log_debug("scheduling '{}' disabled".format(str(source.name))) + log_debug("disabled '{}'".format(str(source.name))) continue self.collect(source) diff --git a/src/collectors/collectors/slack_collector.py b/src/collectors/collectors/slack_collector.py index 8ae4334fd..2db4f7a0e 
100644 --- a/src/collectors/collectors/slack_collector.py +++ b/src/collectors/collectors/slack_collector.py @@ -2,14 +2,16 @@ import hashlib import uuid import time -from slackclient import SlackClient +import traceback import socket +from slack import WebClient from .base_collector import BaseCollector +from managers import log_manager from shared.schema.news_item import NewsItemData from shared.schema.parameter import Parameter, ParameterType - +# the slackclient project is in maintenance mode now, "slack_sdk" is successor: https://pypi.org/project/slack-sdk/ class SlackCollector(BaseCollector): type = "SLACK_COLLECTOR" name = "Slack Collector" @@ -56,56 +58,56 @@ def collect(self, source): print('OSINTSource name: ' + source.name) print('Proxy connection failed') - slack_client = SlackClient(source.parameter_values['SLACK_API_TOKEN']) - - if slack_client.rtm_connect(): - - while True: - try: - data = slack_client.rtm_read() - - if data: - for item in data: - - ids = source.parameter_values['WORKSPACE_CHANNELS_ID'].replace(' ', '') - channels_list = ids.split(',') - - if item['type'] == 'message' and item['channel'] in channels_list: - published = time.ctime(float(item["ts"])) - content = item['text'] - preview = item['text'][:500] - - user = item['user'] - user_name = slack_client.api_call("users.info", user=user) - author = user_name['user']['real_name'] - - channel_id = item['channel'] - channel_name = slack_client.api_call("channels.info", channel=channel_id) - channel = channel_name['channel']['name'] - - team = item['team'] - team_name = slack_client.api_call("team.info", team=team) - workspace = team_name['team']['name'] - - title = 'Slack post from workspace ' + workspace + ' and channel ' + channel - link = '' - url = '' - - for_hash = author + channel + content - - news_item = NewsItemData(uuid.uuid4(), hashlib.sha256(for_hash.encode()).hexdigest(), - title, preview, url, link, published, author, - datetime.datetime.now(), content, source.id, []) - - news_items.append(news_item) - - BaseCollector.publish(news_items, source) - - time.sleep(1) - except KeyError: - print('Deleted message') - pass - else: - print('OSINTSource ID: ' + source.id) - print('OSINTSource name: ' + source.name) - print('ERROR') + ids = source.parameter_values['WORKSPACE_CHANNELS_ID'].replace(' ', '') + channels_list = ids.split(',') + + slack_client = WebClient(source.parameter_values['SLACK_API_TOKEN']) + + try: + for channel_id in channels_list: + log_manager.log_collector_activity('slack', source.name, "Channel: {0}".format(channel_id)) + channel_info = slack_client.conversations_info(channel=channel_id) + channel_name = channel_info['channel']['name'] + + # in future we can use parameter "oldest" - Only messages after this Unix timestamp will be included in results + data = slack_client.conversations_history(channel = channel_id, limit = 30) + count = 0 + for message in data["messages"]: + count += 1 + # log_manager.log_collector_activity('slack', source.name, "Message: {0}".format(count)) + published = time.ctime(float(message["ts"])) + content = message['text'] + preview = content[:500] + + user_id = message['user'] + user_name = slack_client.users_profile_get(user=user_id) + author = user_name["profile"]["real_name"] + + team_id = message.get("team", "") + if team_id: + team_info = slack_client.team_info(team=team_id) + team_name = team_info['team']['name'] + else: + team_name = "" + + title = f"Slack post from channel {channel_name}" + if team_name: + title += f" ({team_name})" + link = "" + url 
= "" + for_hash = user_id + channel_id + content + + # log_manager.log_collector_activity('slack', source.name, '... Title : {0}'.format(title)) + # log_manager.log_collector_activity('slack', source.name, '... Content : {0:.100}'.format(content.replace("\r", "").replace("\n", " ").strip())) + # log_manager.log_collector_activity('slack', source.name, '... Author : {0}'.format(author)) + # log_manager.log_collector_activity('slack', source.name, '... Published: {0}'.format(published)) + + news_item = NewsItemData(uuid.uuid4(), hashlib.sha256(for_hash.encode()).hexdigest(), + title, preview, url, link, published, author, + datetime.datetime.now(), content, source.id, []) + news_items.append(news_item) + + BaseCollector.publish(news_items, source) + + except Exception as ex: + log_manager.log_collector_activity('slack', source.name, 'Error: ' + traceback.format_exc()) diff --git a/src/collectors/collectors/web_collector.py b/src/collectors/collectors/web_collector.py index 3e845d6ac..d13cc1f29 100644 --- a/src/collectors/collectors/web_collector.py +++ b/src/collectors/collectors/web_collector.py @@ -577,7 +577,6 @@ def __browse_title_page(self, index_url): break self.__dispose_of_headless_driver(browser) - log_manager.log_collector_activity('web', self.source.name, 'Committing {} news items'.format(len(self.news_items))) BaseCollector.publish(self.news_items, self.source) return True, '', total_processed_articles, total_failed_articles diff --git a/src/collectors/managers/log_manager.py b/src/collectors/managers/log_manager.py index 176860ba7..0861730c6 100644 --- a/src/collectors/managers/log_manager.py +++ b/src/collectors/managers/log_manager.py @@ -83,13 +83,13 @@ def log_critical(message): sys_logger = None log_warning("Unable to connect to syslog server!") log_warning(ex) -else: +else: # disable log duplicities on the screen if we have SYSLOG disabled sys_logger = None def log_system_activity(module, message): log_info("[{}] {}".format(module, message)) -def log_collector_activity(collector_type, collector, message): - log_text = "COLLECTOR {} '{}': {}".format(collector_type, collector, message) +def log_collector_activity(collector_type, source_name, message): + log_text = "COLLECTOR {} '{}': {}".format(collector_type, source_name, message) log_debug(log_text) \ No newline at end of file diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index efc0e5524..d4e11bf16 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -15,5 +15,5 @@ python-dotenv==1.0.1 requests==2.32.3 schedule==1.2.2 selenium==4.22.0 -slackclient==1.3.2 +slackclient==2.9.4 tweepy==4.14.0 From e40ceb764d20ac1c40e6d4ee37f921d5d5d9dd11 Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Fri, 28 Jun 2024 13:57:19 +0200 Subject: [PATCH 126/146] Correct wrong formatting: %H:%M:%s -> %H:%M:%S for WEB collector show nice published date (no need microseconds, looks ugly) --- src/collectors/collectors/web_collector.py | 3 ++- src/shared/shared/schema/osint_source.py | 4 ++-- src/shared/shared/schema/remote.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/collectors/collectors/web_collector.py b/src/collectors/collectors/web_collector.py index d13cc1f29..4c7bfad83 100644 --- a/src/collectors/collectors/web_collector.py +++ b/src/collectors/collectors/web_collector.py @@ -687,6 +687,7 @@ def __process_article_page(self, index_url, browser): if not published_str: published_str = 'today' published = dateparser.parse(published_str, 
settings={'DATE_ORDER': 'DMY'}) + published_str = published.strftime("%Y-%m-%d %H:%M") # remove microseconds/seconds from the screen, looks ugly link = current_url @@ -698,7 +699,7 @@ def __process_article_page(self, index_url, browser): hashlib.sha256(for_hash.encode()).hexdigest(), title, article_description, self.web_url, link, - published, + published_str, author, datetime.datetime.now(), article_full_text, diff --git a/src/shared/shared/schema/osint_source.py b/src/shared/shared/schema/osint_source.py index d6ca76ec7..7ae6cf693 100644 --- a/src/shared/shared/schema/osint_source.py +++ b/src/shared/shared/schema/osint_source.py @@ -47,8 +47,8 @@ class OSINTSourceUpdateStatusSchema(Schema): class Meta: unknown = EXCLUDE - last_collected = fields.DateTime("%d.%m.%Y - %H:%M:%s") - last_attempted = fields.DateTime("%d.%m.%Y - %H:%M:%s") + last_collected = fields.DateTime("%d.%m.%Y - %H:%M:%S") + last_attempted = fields.DateTime("%d.%m.%Y - %H:%M:%S") last_error_message = fields.Str() last_data = fields.Raw() diff --git a/src/shared/shared/schema/remote.py b/src/shared/shared/schema/remote.py index 3ab39bc2d..b6f0284bc 100644 --- a/src/shared/shared/schema/remote.py +++ b/src/shared/shared/schema/remote.py @@ -16,7 +16,7 @@ class RemoteAccessSchema(Schema): class RemoteAccessPresentationSchema(RemoteAccessSchema, PresentationSchema): - last_synced = fields.DateTime("%d.%m.%Y - %H:%M:%s") + last_synced = fields.DateTime("%d.%m.%Y - %H:%M:%S") class RemoteNodeSchema(Schema): @@ -33,5 +33,5 @@ class RemoteNodeSchema(Schema): class RemoteNodePresentationSchema(RemoteNodeSchema, PresentationSchema): - last_synced = fields.DateTime("%d.%m.%Y - %H:%M:%s") + last_synced = fields.DateTime("%d.%m.%Y - %H:%M:%S") event_id = fields.Str() From abb12e6fc4cc96829a613ce5858a687a079eb421 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jul 2024 16:13:40 +0000 Subject: [PATCH 127/146] Bump envelope from 2.0.3 to 2.0.5 in /src/publishers Bumps [envelope](https://github.com/CZ-NIC/envelope) from 2.0.3 to 2.0.5. - [Changelog](https://github.com/CZ-NIC/envelope/blob/master/CHANGELOG.md) - [Commits](https://github.com/CZ-NIC/envelope/commits) --- updated-dependencies: - dependency-name: envelope dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- src/publishers/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/publishers/requirements.txt b/src/publishers/requirements.txt index 2b000df92..4b0654762 100644 --- a/src/publishers/requirements.txt +++ b/src/publishers/requirements.txt @@ -1,4 +1,4 @@ -envelope==2.0.3 +envelope==2.0.5 Flask==3.0.3 Flask-Cors==4.0.1 Flask-RESTful==0.3.10 From fe6f247e4e2143ab51d1fb6c4cbba5edbcb33eeb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jul 2024 16:29:06 +0000 Subject: [PATCH 128/146] Bump selenium from 4.22.0 to 4.23.0 in /src/collectors Bumps [selenium](https://github.com/SeleniumHQ/Selenium) from 4.22.0 to 4.23.0. - [Release notes](https://github.com/SeleniumHQ/Selenium/releases) - [Commits](https://github.com/SeleniumHQ/Selenium/compare/selenium-4.22.0...selenium-4.23.0) --- updated-dependencies: - dependency-name: selenium dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- src/collectors/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt index d4e11bf16..7e4e4bc0e 100644 --- a/src/collectors/requirements.txt +++ b/src/collectors/requirements.txt @@ -14,6 +14,6 @@ python-dateutil==2.9.0.post0 python-dotenv==1.0.1 requests==2.32.3 schedule==1.2.2 -selenium==4.22.0 +selenium==4.23.0 slackclient==2.9.4 tweepy==4.14.0 From 6a44a11ac9b4c6100f8e599e95c56c01754c8a27 Mon Sep 17 00:00:00 2001 From: Jan Polonsky Date: Tue, 23 Jul 2024 13:49:22 +0200 Subject: [PATCH 129/146] Run collector always in new class. If scheduler or source save action run on the "same time" local variables start overwriting. Collect function is running multiple times in same class instance. Result was bad category, bad source update times and it start mixing source parsing rules, properties across various sites! This update fix: - after some time scheduler cross runs - source save and collectors refresh on the already running scheduler - applying bad parsing attributes from another source !!!! --- src/collectors/collectors/base_collector.py | 24 ++++++++++++--------- src/collectors/collectors/web_collector.py | 1 - 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/collectors/collectors/base_collector.py b/src/collectors/collectors/base_collector.py index f24238db2..8b2704304 100644 --- a/src/collectors/collectors/base_collector.py +++ b/src/collectors/collectors/base_collector.py @@ -272,12 +272,12 @@ def refresh(self): log_debug("disabled '{}'".format(str(source.name))) continue - self.collect(source) + self.run_collector(source) # run task every day at XY if interval[0].isdigit() and ':' in interval: log_debug("scheduling '{}' at: {}".format(str(source.name), str(interval))) - source.scheduler_job = time_manager.schedule_job_every_day(interval, self.collect, source) + source.scheduler_job = time_manager.schedule_job_every_day(interval, self.run_collector, source) # run task at a specific day (XY, ZZ:ZZ:ZZ) elif interval[0].isalpha(): interval = interval.split(',') @@ -285,23 +285,23 @@ def refresh(self): at = interval[1].strip() log_debug("scheduling '{}' at: {} {}".format(str(source.name), str(day), str(at))) if day == 'Monday': - source.scheduler_job = time_manager.schedule_job_on_monday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_monday(at, self.run_collector, source) elif day == 'Tuesday': - source.scheduler_job = time_manager.schedule_job_on_tuesday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_tuesday(at, self.run_collector, source) elif day == 'Wednesday': - source.scheduler_job = time_manager.schedule_job_on_wednesday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_wednesday(at, self.run_collector, source) elif day == 'Thursday': - source.scheduler_job = time_manager.schedule_job_on_thursday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_thursday(at, self.run_collector, source) elif day == 'Friday': - source.scheduler_job = time_manager.schedule_job_on_friday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_friday(at, self.run_collector, source) elif day == 'Saturday': - source.scheduler_job = time_manager.schedule_job_on_saturday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_saturday(at, self.run_collector, source) elif day == 'Sunday': - 
source.scheduler_job = time_manager.schedule_job_on_sunday(at, self.collect, source) + source.scheduler_job = time_manager.schedule_job_on_sunday(at, self.run_collector, source) # run task every XY minutes else: log_debug("scheduling '{}' for {}".format(str(source.name), int(interval))) - source.scheduler_job = time_manager.schedule_job_minutes(int(interval), self.collect, source) + source.scheduler_job = time_manager.schedule_job_minutes(int(interval), self.run_collector, source) else: # TODO: send update to core with the error message log_warning("configuration not received, code: {}, response: {}".format(code, response)) @@ -310,6 +310,10 @@ def refresh(self): log_debug_trace() pass + def run_collector(self, source): + runner = self.__class__() # get right type of collector + runner.collect(source) + def initialize(self): self.refresh() diff --git a/src/collectors/collectors/web_collector.py b/src/collectors/collectors/web_collector.py index 4c7bfad83..779188755 100644 --- a/src/collectors/collectors/web_collector.py +++ b/src/collectors/collectors/web_collector.py @@ -494,7 +494,6 @@ def __browse_title_page(self, index_url): log_manager.log_collector_activity('web', self.source.name, 'Requesting title page: {}'.format(self.web_url)) try: browser.get(index_url) - log_manager.log_collector_activity('web', self.source.name, 'Title page obtained') except Exception: log_manager.log_collector_activity('web', self.source.name, 'Error obtaining title page') self.__dispose_of_headless_driver(browser) From 1bd20abcd34bc795c8be3d913d06ff12219dde63 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Wed, 24 Jul 2024 12:56:24 +0200 Subject: [PATCH 130/146] Folder for templates --- .gitignore | 4 ++++ src/presenters/templates/custom/README.md | 1 + 2 files changed, 5 insertions(+) create mode 100644 src/presenters/templates/custom/README.md diff --git a/.gitignore b/.gitignore index d43bf0673..d03da440b 100644 --- a/.gitignore +++ b/.gitignore @@ -47,3 +47,7 @@ local/ *.njsproj.user *.sln *.sw? 
+ +# ignore custom templates +src/presenters/templates/custom/* +!src/presenters/templates/custom/README.md diff --git a/src/presenters/templates/custom/README.md b/src/presenters/templates/custom/README.md new file mode 100644 index 000000000..5a3ea4d71 --- /dev/null +++ b/src/presenters/templates/custom/README.md @@ -0,0 +1 @@ +Place here your custom templates From b778b58e6f01a46fc1215d86da838a9f69ce86b6 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Wed, 24 Jul 2024 20:55:46 +0200 Subject: [PATCH 131/146] ignoring yml files in traefik/dynamic --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d03da440b..03e325756 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ build/ # sensitive data not to be commited docker/.env docker/secrets/*.txt +docker/traefik/dynamic/*.yml .env.local .env.*.local src/.env From 29b3d3971a2705c31e7b9fae659786347d19cd71 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 25 Jul 2024 12:56:23 +0200 Subject: [PATCH 132/146] fix error when adding CWE --- src/core/model/report_item.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py index 4bd749159..5c0c5dae1 100644 --- a/src/core/model/report_item.py +++ b/src/core/model/report_item.py @@ -716,7 +716,7 @@ def update_report_item(cls, id, data, user): if "add" in data: if "attribute_id" in data: modified = True - new_attribute = ReportItemAttribute(None, "", None, 0, None, data["attribute_group_item_id"], None) + new_attribute = ReportItemAttribute(None, "", "", None, 0, None, data["attribute_group_item_id"], None) new_attribute.user = user report_item.attributes.append(new_attribute) From e8faa0bf5ab58c8aebb0a3ca16aa027918da2101 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 25 Jul 2024 12:59:46 +0200 Subject: [PATCH 133/146] fix adding attribute for attachments --- src/core/model/report_item.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py index 5c0c5dae1..3bfdb9476 100644 --- a/src/core/model/report_item.py +++ b/src/core/model/report_item.py @@ -861,7 +861,9 @@ def add_attachment(cls, id, attribute_group_item_id, user, file, description): """ report_item = cls.query.get(id) file_data = file.read() - new_attribute = ReportItemAttribute(None, file.filename, file.mimetype, len(file_data), description, attribute_group_item_id, None) + new_attribute = ReportItemAttribute( + None, file.filename, "", file.mimetype, len(file_data), description, attribute_group_item_id, None + ) new_attribute.user = user new_attribute.binary_data = file_data report_item.attributes.append(new_attribute) From 314102266051aa1be35d95d096f91feb3a9be15f Mon Sep 17 00:00:00 2001 From: multiflexi Date: Thu, 25 Jul 2024 13:13:10 +0200 Subject: [PATCH 134/146] Fix missing value_description definition --- src/gui/src/components/analyze/NewReportItem.vue | 1 + 1 file changed, 1 insertion(+) diff --git a/src/gui/src/components/analyze/NewReportItem.vue b/src/gui/src/components/analyze/NewReportItem.vue index 465d21219..69c8f6f19 100644 --- a/src/gui/src/components/analyze/NewReportItem.vue +++ b/src/gui/src/components/analyze/NewReportItem.vue @@ -684,6 +684,7 @@ export default { if (data.remote_report_items[l].attributes[k].attribute_group_item_title === this.selected_type.attribute_groups[i].attribute_group_items[j].title) { let value = data.remote_report_items[l].attributes[k].value + let value_description = 
data.remote_report_items[l].attributes[k].value_description if (this.selected_type.attribute_groups[i].attribute_group_items[j].attribute.type === 'CPE') { value = value.replace("%", "*") } else if (this.selected_type.attribute_groups[i].attribute_group_items[j].attribute.type === 'BOOLEAN') { From 24c48453c972bddbc880f1caf6e87eefb7eeb45b Mon Sep 17 00:00:00 2001 From: multiflexi Date: Fri, 26 Jul 2024 15:02:16 +0200 Subject: [PATCH 135/146] fix LDAP env variables --- docker/.env.example | 9 ++++++++- docker/docker-compose.yml | 3 +++ src/core/auth/ldap_authenticator.py | 30 +++++++++++++++++++++-------- 3 files changed, 33 insertions(+), 9 deletions(-) diff --git a/docker/.env.example b/docker/.env.example index dec8bf37a..f68c9e644 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1,6 +1,5 @@ # General COMPOSE_PROJECT_NAME=taranis-ng -TARANIS_NG_AUTHENTICATOR=password HTTP_PROXY= HTTPS_PROXY= @@ -31,6 +30,14 @@ TARANIS_NG_HOSTNAME=localhost TARANIS_NG_HTTPS_PORT=4443 TARANIS_NG_HTTP_PORT=8080 TARANIS_NG_HTTPS_URI=https://localhost:4443 +TARANIS_NG_AUTHENTICATOR=password + +# To use LDAP authentication, adjust the following lines to your LDAP server, +# change TARANIS_NG_AUTHENTICATOR to ldap and add CA certificate to "auth/ldap_ca.pem" +# or point LDAP_CA_CERT_PATH to it. +LDAP_SERVER=ldaps://ldap.example.com +LDAP_BASE_DN="ou=people,dc=example,dc=com" + TRAEFIK_MANAGEMENT_PORT=127.0.0.1:8081 # Limits diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 76e09f0b1..aed8c4d06 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -60,6 +60,9 @@ services: DB_POOL_RECYCLE: 300 DB_POOL_TIMEOUT: 30 TARANIS_NG_AUTHENTICATOR: "${TARANIS_NG_AUTHENTICATOR}" + LDAP_SERVER: "${LDAP_SERVER}" + LDAP_BASE_DN: "${LDAP_BASE_DN}" + LDAP_CA_CERT_PATH: JWT_SECRET_KEY: "${JWT_SECRET_KEY}" JWT_SECRET_KEY_FILE: /run/secrets/jwt_secret_key diff --git a/src/core/auth/ldap_authenticator.py b/src/core/auth/ldap_authenticator.py index acb77a622..6e301966b 100644 --- a/src/core/auth/ldap_authenticator.py +++ b/src/core/auth/ldap_authenticator.py @@ -1,3 +1,15 @@ +"""This module provides an LDAPAuthenticator class that authenticates users against an LDAP server. + +Attributes: + LDAP_SERVER (str): The LDAP server URL. + LDAP_BASE_DN (str): The base DN (Distinguished Name) for LDAP queries. + LDAP_CA_CERT_PATH (str): The file path to the LDAP CA certificate. + +Classes: + LDAPAuthenticator: Authenticates users against an LDAP server. + +""" + from managers import log_manager from auth.base_authenticator import BaseAuthenticator from flask import request @@ -9,7 +21,7 @@ class LDAPAuthenticator(BaseAuthenticator): - """Authenticates users against an LDAP server. + """Authenticate users against an LDAP server. Args: BaseAuthenticator (_type_): _description_ @@ -18,15 +30,17 @@ class LDAPAuthenticator(BaseAuthenticator): _type_: _description_ """ - LDAP_SERVER = os.getenv('LDAP_SERVER') - LDAP_BASE_DN = os.getenv('LDAP_BASE_DN') - LDAP_CA_CERT_PATH = os.getenv('LDAP_CA_CERT_PATH') - if LDAP_CA_CERT_PATH is not None and not os.path.isfile(LDAP_CA_CERT_PATH): - LDAP_CA_CERT_PATH = None + LDAP_SERVER = os.getenv("LDAP_SERVER") + LDAP_BASE_DN = os.getenv("LDAP_BASE_DN") + if os.getenv("LDAP_CA_CERT_PATH") not in [None, ""]: + LDAP_CA_CERT_PATH = os.getenv("LDAP_CA_CERT_PATH") + elif os.path.isfile("auth/ldap_ca.pem"): + LDAP_CA_CERT_PATH = "auth/ldap_ca.pem" + else: log_manager.store_auth_error_activity("No LDAP CA certificate found. 
LDAP authentication might not work.") def get_required_credentials(self): - """Gets the username and the password. + """Get the username and the password. Returns: _type_: _description_ @@ -34,7 +48,7 @@ def get_required_credentials(self): return ["username", "password"] def authenticate(self, credentials): - """Tries to authenticate the user against the LDAP server. + """Try to authenticate the user against the LDAP server. Args: credentials (_type_): _description_ From 96742284fc1e95f7eabec09fecc852f69fce7cae Mon Sep 17 00:00:00 2001 From: multiflexi Date: Fri, 26 Jul 2024 16:17:47 +0200 Subject: [PATCH 136/146] update value desc only if found --- src/core/model/report_item.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py index 3bfdb9476..1dd47fd08 100644 --- a/src/core/model/report_item.py +++ b/src/core/model/report_item.py @@ -705,12 +705,13 @@ def update_report_item(cls, id, data, user): data["attribute_value"] = "" attribute.user = user attribute.last_updated = datetime.now() - if attribute.value_description != data["value_description"]: - modified = True - attribute.value_description = data["value_description"] - data["value_description"] = "" - attribute.user = user - attribute.last_updated = datetime.now() + if data.get("value_description", False): + if attribute.value_description != data["value_description"]: + modified = True + attribute.value_description = data["value_description"] + data["value_description"] = "" + attribute.user = user + attribute.last_updated = datetime.now() break if "add" in data: From 621773905625eceda717daccf04cb3bc89102eb4 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 29 Jul 2024 11:35:43 +0200 Subject: [PATCH 137/146] check if ldap authentication is used --- src/core/auth/ldap_authenticator.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/core/auth/ldap_authenticator.py b/src/core/auth/ldap_authenticator.py index 6e301966b..f11c1b398 100644 --- a/src/core/auth/ldap_authenticator.py +++ b/src/core/auth/ldap_authenticator.py @@ -32,11 +32,15 @@ class LDAPAuthenticator(BaseAuthenticator): LDAP_SERVER = os.getenv("LDAP_SERVER") LDAP_BASE_DN = os.getenv("LDAP_BASE_DN") + # Check if the LDAP CA certificate path is set in the environment variables or the certificate is in the default path + # Custom path if os.getenv("LDAP_CA_CERT_PATH") not in [None, ""]: LDAP_CA_CERT_PATH = os.getenv("LDAP_CA_CERT_PATH") + # Default path elif os.path.isfile("auth/ldap_ca.pem"): LDAP_CA_CERT_PATH = "auth/ldap_ca.pem" - else: + # No path and authentication method is LDAP + elif os.getenv("TARANIS_NG_AUTHENTICATOR").casefold() == "ldap": log_manager.store_auth_error_activity("No LDAP CA certificate found. 
LDAP authentication might not work.") def get_required_credentials(self): From 04c4be95b5e33b1d332a5f62cde20fa01bf95985 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Mon, 29 Jul 2024 11:43:35 +0200 Subject: [PATCH 138/146] use __init__ --- src/core/auth/ldap_authenticator.py | 30 +++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/src/core/auth/ldap_authenticator.py b/src/core/auth/ldap_authenticator.py index f11c1b398..157ef0a8b 100644 --- a/src/core/auth/ldap_authenticator.py +++ b/src/core/auth/ldap_authenticator.py @@ -30,18 +30,24 @@ class LDAPAuthenticator(BaseAuthenticator): _type_: _description_ """ - LDAP_SERVER = os.getenv("LDAP_SERVER") - LDAP_BASE_DN = os.getenv("LDAP_BASE_DN") - # Check if the LDAP CA certificate path is set in the environment variables or the certificate is in the default path - # Custom path - if os.getenv("LDAP_CA_CERT_PATH") not in [None, ""]: - LDAP_CA_CERT_PATH = os.getenv("LDAP_CA_CERT_PATH") - # Default path - elif os.path.isfile("auth/ldap_ca.pem"): - LDAP_CA_CERT_PATH = "auth/ldap_ca.pem" - # No path and authentication method is LDAP - elif os.getenv("TARANIS_NG_AUTHENTICATOR").casefold() == "ldap": - log_manager.store_auth_error_activity("No LDAP CA certificate found. LDAP authentication might not work.") + def __init__(self): + """Initialize the LDAPAuthenticator class. + + Returns: + _type_: _description_ + """ + self.LDAP_SERVER = os.getenv("LDAP_SERVER") + self.LDAP_BASE_DN = os.getenv("LDAP_BASE_DN") + # Check if the LDAP CA certificate path is set in the environment variables or the certificate is in the default path + # Custom path + if os.getenv("LDAP_CA_CERT_PATH") not in [None, ""]: + self.LDAP_CA_CERT_PATH = os.getenv("LDAP_CA_CERT_PATH") + # Default path + elif os.path.isfile("auth/ldap_ca.pem"): + self.LDAP_CA_CERT_PATH = "auth/ldap_ca.pem" + # No path and authentication method is LDAP + elif os.getenv("TARANIS_NG_AUTHENTICATOR").casefold() == "ldap": + log_manager.store_auth_error_activity("No LDAP CA certificate found. LDAP authentication might not work.") def get_required_credentials(self): """Get the username and the password. From dd6f547d32db571eba7e39b2795e73d90de9f85e Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 20:55:44 +0100 Subject: [PATCH 139/146] upgrade core to use current Alpine and Python --- docker/Dockerfile.core | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile.core b/docker/Dockerfile.core index 484735d30..354ea5a65 100644 --- a/docker/Dockerfile.core +++ b/docker/Dockerfile.core @@ -1,4 +1,4 @@ -FROM python:3.10-alpine3.14 AS build_shared +FROM python:3.12-alpine3.19 AS build_shared WORKDIR /build_shared/ @@ -17,7 +17,7 @@ COPY ./src/core/sse/forward.c . 
RUN gcc -o forward forward.c -FROM python:3.10-alpine3.14 AS production +FROM python:3.12-alpine3.19 AS production WORKDIR /app/ From 0d1ef9983de974ecb94e1a20326fd090496b9de3 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 20:57:17 +0100 Subject: [PATCH 140/146] upgrade requirements.txt, remove unnecessary items --- src/core/requirements.txt | 55 ++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/src/core/requirements.txt b/src/core/requirements.txt index f19f9fa9f..bb43dbdd7 100644 --- a/src/core/requirements.txt +++ b/src/core/requirements.txt @@ -1,37 +1,28 @@ -alembic==1.10.2 -certifi==2019.11.28 -Flask==1.1.4 -Flask-Cors==4.0.1 -Flask-JWT-Extended==3.25.1 -Flask-Migrate==2.5.2 +alembic==1.11.0 +Flask==3.0.2 +Flask-Cors==4.0.0 +Flask-JWT-Extended==4.6.0 +Flask-Migrate==4.0.5 flask-oidc==1.4.0 Flask-RESTful==0.3.10 -Flask-Script==2.0.6 -Flask-SQLAlchemy==2.5.1 -gevent==21.8.0 -greenlet==1.1.1 -gunicorn==20.0.4 -idna==2.9 -Jinja2==2.11.3 +#Flask-Script==2.0.6 +Flask-SSE==1.0.0 +Flask-SQLAlchemy==3.0.5 +gevent==24.2.1 +gunicorn==21.2.0 +Jinja2==3.1.3 ldap3==2.9.1 -Mako==1.1.0 -MarkupSafe==1.1.1 -marshmallow==3.18.0 +# markupsafe==2.0.1 #remove after Jinja2 upgraded +marshmallow==3.19.0 marshmallow-enum==1.5.1 psycogreen==1.0.2 -psycopg2-binary==2.9.6 -PyJWT==1.7.1 -python-dateutil==2.8.1 -python-dotenv==0.10.3 -python-editor==1.0.4 -python-keycloak==0.23.0 -pytz==2019.3 -requests==2.26.0 -schedule==0.6.0 -six==1.13.0 -sseclient-py==1.7 -soupsieve==1.9.5 -SQLAlchemy==1.4.52 -urllib3==1.26.7 -Werkzeug==1.0.1 -pycryptodomex==3.17 +psycopg2==2.9.9 +PyJWT==2.8.0 +python-dotenv==1.0.1 +python-keycloak==3.9.1 +requests==2.31.0 +schedule==1.2.1 +sseclient-py==1.8.0 +SQLAlchemy==1.4.51 #upgrade +Werkzeug==3.0.1 #update +pycryptodomex==3.20 From e8753057ccc3e1c206648be88792ed00c902c932 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:17:57 +0100 Subject: [PATCH 141/146] update code for newer version of flask_jwt_extended, f-strings --- src/core/managers/auth_manager.py | 82 ++++++++++++++++++++----------- 1 file changed, 54 insertions(+), 28 deletions(-) diff --git a/src/core/managers/auth_manager.py b/src/core/managers/auth_manager.py index a53776239..99fe11c5d 100644 --- a/src/core/managers/auth_manager.py +++ b/src/core/managers/auth_manager.py @@ -1,12 +1,34 @@ -"""This module contains the authentication manager.""" - +"""This module contains the authentication manager for the Taranis-NG application. + +The authentication manager handles user authentication and authorization using different authenticators. +It provides functions for initializing the manager, authenticating users, refreshing authentication tokens, +performing access control checks, and retrieving user permissions. + +Classes: + ACLCheck: Enumeration for ACL checks. + +Functions: + cleanup_token_blacklist: Clean up the token blacklist by deleting tokens older than one day. + initialize: Initialize the authentication manager. + get_required_credentials: Get the required credentials for authentication. + authenticate: Authenticate the user with the provided credentials. + refresh: Refresh the authentication token for the specified user. + logout: Log out a user. + check_acl: Check the access control list (ACL) for the given item. + no_auth: Decorator that allows access to the decorated function without authentication. + get_id_name_by_acl: Get the corresponding ID name based on the given ACL. 
+ get_user_from_api_key: Try to authenticate the user by API key. + get_perm_from_user: Get user permissions. + get_user_from_jwt_token: Try to authenticate the user by JWT token. + get_perm_from_jwt_token: Get user permissions from JWT token. +""" import os from datetime import datetime, timedelta from enum import Enum, auto from functools import wraps import jwt from flask import request -from flask_jwt_extended import JWTManager, get_jwt_claims, get_jwt_identity, verify_jwt_in_request, get_raw_jwt +from flask_jwt_extended import JWTManager, get_jwt_identity, verify_jwt_in_request, get_jwt from flask_jwt_extended.exceptions import JWTExtendedException from managers import log_manager, time_manager @@ -54,6 +76,9 @@ def initialize(app): Arguments: app: The Flask application object. + + Returns: + None """ global current_authenticator @@ -122,7 +147,7 @@ def logout(token): token (str): The authentication token of the user. Returns: - None: This function does not return any value. + None """ return current_authenticator.logout(token) @@ -250,7 +275,7 @@ def get_user_from_api_key(): user = User.find_by_id(api_key.user_id) return user except Exception as ex: - log_manager.store_auth_error_activity(f"API key check presence error: {str(ex)}") + log_manager.store_auth_error_activity(f"ApiKey check presence error: {ex}") return None @@ -272,7 +297,7 @@ def get_perm_from_user(user): all_users_perms = all_users_perms.union(role_perms) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity(f"Get permission from user error: {str(ex)}") + log_manager.store_auth_error_activity(f"Get permission from user error: {ex}") return None @@ -293,7 +318,7 @@ def get_user_from_jwt_token(): # does it encode an identity? identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_raw_jwt()}") + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_jwt()}") return None user = User.find(identity) @@ -310,12 +335,12 @@ def get_perm_from_jwt_token(user): user: The user object. Returns: - A set of user's permissions or None if permissions are missing or an error occurs. - + A set of user's permissions if available, otherwise None. """ try: # does it include permissions? - claims = get_jwt_claims() + jwt_data = get_jwt() + claims = jwt_data["user_claims"] if not claims or "permissions" not in claims: log_manager.store_user_auth_error_activity(user, "Missing permissions in JWT") return None @@ -323,7 +348,7 @@ def get_perm_from_jwt_token(user): all_users_perms = set(claims["permissions"]) return all_users_perms except Exception as ex: - log_manager.store_auth_error_activity(f"Get permission from JWT error: {str(ex)}") + log_manager.store_auth_error_activity(f"Get permission from JWT error: {ex}") return None @@ -332,11 +357,10 @@ def auth_required(required_permissions, *acl_args): Arguments: required_permissions (str or list): The required permissions for the user. - *acl_args: Variable number of arguments representing the ACLs to check. + *acl_args: Additional arguments for ACL access. Returns: The decorated function. - """ def auth_required_wrap(fn): @@ -380,13 +404,13 @@ def wrapper(*args, **kwargs): def api_key_required(fn): - """Enforce API key authentication. + """Check for the presence of an API key in the Authorization header. - Args: - fn (function): The function to be decorated. + Arguments: + fn: The function to be decorated. Returns: - function: The decorated function. + The decorated function. 
""" @wraps(fn) @@ -476,7 +500,7 @@ def wrapper(*args, **kwargs): identity = get_jwt_identity() if not identity: - log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_raw_jwt()}") + log_manager.store_auth_error_activity(f"Missing identity in JWT: {get_jwt()}") return {"error": "authorization failed"}, 401 user = User.find(identity) @@ -505,12 +529,11 @@ def get_access_key(): def get_user_from_jwt(): """Obtain the identity and current permissions. - This function retrieves the user information from the JWT token. If the user information - is not found in the JWT token, it falls back to retrieving the user information from the - API key. + This function first tries to obtain the user from the API key. + If the user is not found, it then tries to obtain the user from the JWT token. Returns: - The user object containing the identity and current permissions. + The user object if found, None otherwise. """ user = get_user_from_api_key() if user is None: @@ -533,7 +556,7 @@ def decode_user_from_jwt(jwt_token): with open(os.getenv("JWT_SECRET_KEY_FILE"), "r") as file: jwt_secret_key = file.read() try: - decoded = jwt.decode(jwt_token, jwt_secret_key) + decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY")) except Exception as ex: # e.g. "Signature has expired" log_manager.store_auth_error_activity(f"Invalid JWT: {str(ex)}") if decoded is None: @@ -542,18 +565,21 @@ def decode_user_from_jwt(jwt_token): def get_external_permissions_ids(): - """Get the external permissions IDs.""" + """Return a list of external permissions IDs. + + This function returns a list of permission IDs that are related to accessing, creating, and configuring assets. + + Returns: + list: A list of external permission IDs. + """ return ["MY_ASSETS_ACCESS", "MY_ASSETS_CREATE", "MY_ASSETS_CONFIG"] def get_external_permissions(): """Get the external permissions. - This function retrieves a list of external permissions by calling the `get_external_permissions_ids` function - and then fetching the corresponding permission objects using the `Permission.find` method. - Returns: - A list of external permission objects. + A list of external permissions. """ permissions = [] for permission_id in get_external_permissions_ids(): From 7d904ecfc054927fbdb4b240bb049b040f786e37 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:26:22 +0100 Subject: [PATCH 142/146] remove flask_script, use click --- src/core/manage.py | 857 +++++++++++++++++++++------------------------ 1 file changed, 395 insertions(+), 462 deletions(-) diff --git a/src/core/manage.py b/src/core/manage.py index bc771f898..ebc635907 100755 --- a/src/core/manage.py +++ b/src/core/manage.py @@ -1,26 +1,24 @@ #! 
/usr/bin/env python +"""This script is used to manage user accounts, roles, and collectors in the Taranis-NG application.""" """Module for managing the application from the command line.""" from os import abort, getenv, read +from flask import Flask import random import socket import string import time import logging -from flask import Flask -from flask_script import Manager, Command -from flask_script.commands import Option +import click import traceback from managers import db_manager -from model import user, role, collector, collectors_node, permission, osint_source # noqa: F401 -from model import apikey +from model import user, role, permission, collectors_node, collector, attribute, apikey from remote.collectors_api import CollectorsApi app = Flask(__name__) app.config.from_object("config.Config") -manager = Manager(app=app) app.logger = logging.getLogger("gunicorn.error") app.logger.level = logging.INFO @@ -37,57 +35,83 @@ time.sleep(0.1) -class AccountManagement(Command): +@app.cli.command("account") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--username", "opt_username") +@click.option("--name", "opt_name", default="") +@click.option("--password", "opt_password") +@click.option("--roles", "opt_roles") +def account_management(opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name, opt_password, opt_roles): """Manage user accounts. - Arguments: - Command -- _description_ + Args: + opt_list (bool): List all user accounts. + opt_create (bool): Create a new user account. + opt_edit (bool): Edit an existing user account. + opt_delete (bool): Delete an existing user account. + opt_username (str): Username of the user account. + opt_name (str): Name of the user. + opt_password (str): Password of the user account. + opt_roles (str): Roles assigned to the user account. """ + if opt_list: + users = user.User.get_all() + for us in users: + roles = [] + for r in us.roles: + roles.append(r.id) + print("Id: {}\n\tUsername: {}\n\tName: {}\n\tRoles: {}".format(us.id, us.username, us.name, roles)) + exit() - option_list = ( - Option("--list", "-l", dest="opt_list", action="store_true"), - Option("--create", "-c", dest="opt_create", action="store_true"), - Option("--edit", "-e", dest="opt_edit", action="store_true"), - Option("--delete", "-d", dest="opt_delete", action="store_true"), - Option("--username", dest="opt_username"), - Option("--name", dest="opt_name", default=""), - Option("--password", dest="opt_password"), - Option("--roles", dest="opt_roles"), - ) - - def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name, opt_password, opt_roles): - """Run the command. 
- - Arguments: - opt_list -- list all user accounts - opt_create -- create a new user account - opt_edit -- edit an existing user account - opt_delete -- delete a user account - opt_username -- specify the username - opt_name -- specify the user's name - opt_password -- specify the user's password - opt_roles -- specify a list of roles, divided by a comma (,), that the user belongs to - """ - if opt_list: - users = user.User.get_all() - for us in users: - roles = [] - for r in us.roles: - roles.append(r.id) - print("Id: {}\n\tUsername: {}\n\tName: {}\n\tRoles: {}".format(us.id, us.username, us.name, roles)) - exit() - - if opt_create: - if not opt_username or not opt_password or not opt_roles: - app.logger.critical("Username, password or role not specified!") - abort() + if opt_create: + if not opt_username or not opt_password or not opt_roles: + app.logger.critical("Username, password or role not specified!") + abort() + + if user.User.find(opt_username): + app.logger.critical("User already exists!") + abort() + + opt_roles = opt_roles.split(",") + roles = [] + for ro in opt_roles: + r = None + try: + r = role.Role.find(int(ro)) + except Exception: + r = role.Role.find_by_name(ro) - if user.User.find(opt_username): - app.logger.critical("User already exists!") + if not r: + app.logger.critical("The specified role '{}' does not exist!".format(ro)) abort() + roles.append(r) + + new_user = user.User(-1, opt_username, opt_name, opt_password, None, roles, None) + db_manager.db.session.add(new_user) + db_manager.db.session.commit() + + print("User '{}' with id {} created.".format(opt_name, new_user.id)) + + if opt_edit: + if not opt_username: + app.logger.critical("Username not specified!") + abort() + if not opt_password or not opt_roles: + app.logger.critical("Please specify a new password or role id!") + abort() + + if not user.User.find(opt_username): + app.logger.critical("User does not exist!") + abort() + + if opt_roles: opt_roles = opt_roles.split(",") roles = [] + for ro in opt_roles: r = None try: @@ -101,477 +125,386 @@ def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_username, opt_name roles.append(r) - new_user = user.User(-1, opt_username, opt_name, opt_password, None, roles, None) - db_manager.db.session.add(new_user) - db_manager.db.session.commit() - - print("User '{}' with id {} created.".format(opt_name, new_user.id)) - - if opt_edit: - if not opt_username: - app.logger.critical("Username not specified!") - abort() - if not opt_password or not opt_roles: - app.logger.critical("Please specify a new password or role id!") - abort() - - if not user.User.find(opt_username): - app.logger.critical("User does not exist!") - abort() - - if opt_roles: - opt_roles = opt_roles.split(",") - roles = [] - - for ro in opt_roles: - r = None - try: - r = role.Role.find(int(ro)) - except Exception: - r = role.Role.find_by_name(ro) - - if not r: - app.logger.critical("The specified role '{}' does not exist!".format(ro)) - abort() - - roles.append(r) - - if opt_delete: - if not opt_username: - app.logger.critical("Username not specified!") - abort() + if opt_delete: + if not opt_username: + app.logger.critical("Username not specified!") + abort() - u = user.User.find(opt_username) - if not u: - app.logger.critical("User does not exist!") - abort() + u = user.User.find(opt_username) + if not u: + app.logger.critical("User does not exist!") + abort() - user.User.delete(u.id) - print("The user '{}' has been deleted.".format(opt_username)) + user.User.delete(u.id) + print("The user '{}' 
has been deleted.".format(opt_username)) + + +@app.cli.command("role") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--filter", "-f", "opt_filter") +@click.option("--id", "opt_id") +@click.option("--name", "opt_name") +@click.option("--description", "opt_description", default="") +@click.option("--permissions", "opt_permissions") +def role_management(opt_list, opt_create, opt_edit, opt_delete, opt_filter, opt_id, opt_name, opt_description, opt_permissions): + """Manage roles. + + Args: + opt_list (bool): List all roles. + opt_create (bool): Create a new role. + opt_edit (bool): Edit an existing role. + opt_delete (bool): Delete an existing role. + opt_filter (str): Filter roles by name. + opt_id (str): ID of the role. + opt_name (str): Name of the role. + opt_description (str): Description of the role. + opt_permissions (str): Permissions assigned to the role. + """ + if opt_list: + roles = None + if opt_filter: + roles = role.Role.get(opt_filter)[0] + else: + roles = role.Role.get_all() + + for ro in roles: + perms = [] + for p in ro.permissions: + perms.append(p.id) + print("Id: {}\n\tName: {}\n\tDescription: {}\n\tPermissions: {}".format(ro.id, ro.name, ro.description, perms)) + exit() + if opt_create: + if not opt_name or not opt_permissions: + app.logger.critical("Role name or permissions not specified!") + abort() -class RoleManagement(Command): - """Manage user roles. + opt_permissions = opt_permissions.split(",") + perms = [] - Arguments: - Command -- _description_ - """ + for pe in opt_permissions: + p = permission.Permission.find(pe) - option_list = ( - Option("--list", "-l", dest="opt_list", action="store_true"), - Option("--create", "-c", dest="opt_create", action="store_true"), - Option("--edit", "-e", dest="opt_edit", action="store_true"), - Option("--delete", "-d", dest="opt_delete", action="store_true"), - Option("--filter", "-f", dest="opt_filter"), - Option("--id", dest="opt_id"), - Option("--name", dest="opt_name"), - Option("--description", dest="opt_description", default=""), - Option("--permissions", dest="opt_permissions"), - ) - - def run(self, opt_list, opt_create, opt_edit, opt_delete, opt_filter, opt_id, opt_name, opt_description, opt_permissions): - """Run the command. 
- - Arguments: - opt_list -- list all roles - opt_create -- create a new role - opt_edit -- edit an existing role - opt_delete -- delete a role - opt_filter -- filter roles by their name or description - opt_id -- specify the role id (in combination with --edit or --delete) - opt_name -- specify the role name - opt_description -- specify the role description (default is "") - opt_permissions -- specify a list of permissions, divided with a comma (,), that the role would allow - """ - if opt_list: - roles = None - if opt_filter: - roles = role.Role.get(opt_filter)[0] - else: - roles = role.Role.get_all() - - for ro in roles: - perms = [] - for p in ro.permissions: - perms.append(p.id) - print("Id: {}\n\tName: {}\n\tDescription: {}\n\tPermissions: {}".format(ro.id, ro.name, ro.description, perms)) - exit() - - if opt_create: - if not opt_name or not opt_permissions: - app.logger.critical("Role name or permissions not specified!") + if not p: + app.logger.critical("The specified permission '{}' does not exist!".format(pe)) abort() - opt_permissions = opt_permissions.split(",") - perms = [] - - for pe in opt_permissions: - p = permission.Permission.find(pe) + perms.append(p) - if not p: - app.logger.critical("The specified permission '{}' does not exist!".format(pe)) - abort() + new_role = role.Role(-1, opt_name, opt_description, perms) + db_manager.db.session.add(new_role) + db_manager.db.session.commit() - perms.append(p) + print("Role '{}' with id {} created.".format(opt_name, new_role.id)) - new_role = role.Role(-1, opt_name, opt_description, perms) - db_manager.db.session.add(new_role) - db_manager.db.session.commit() - - print("Role '{}' with id {} created.".format(opt_name, new_role.id)) + if opt_edit: + if not opt_id or not opt_name: + app.logger.critical("Role id or name not specified!") + abort() + if not opt_name or not opt_description or not opt_permissions: + app.logger.critical("Please specify a new name, description or permissions!") + abort() - if opt_edit: - if not opt_id or not opt_name: - app.logger.critical("Role id or name not specified!") - abort() - if not opt_name or not opt_description or not opt_permissions: - app.logger.critical("Please specify a new name, description or permissions!") - abort() + if opt_delete: + if not opt_id or not opt_name: + app.logger.critical("Role id or name not specified!") + abort() - if opt_delete: - if not opt_id or not opt_name: - app.logger.critical("Role id or name not specified!") - abort() +@app.cli.command("collector") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--edit", "-e", "opt_edit", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--update", "-u", "opt_update", is_flag=True) +@click.option("--all", "-a", "opt_all", is_flag=True) +@click.option("--show-api-key", "opt_show_api_key", is_flag=True) +@click.option("--id", "opt_id") +@click.option("--name", "opt_name") +@click.option("--description", "opt_description", default="") +@click.option("--api-url", "opt_api_url") +@click.option("--api-key", "opt_api_key") +def collector_management( + opt_list, + opt_create, + opt_edit, + opt_delete, + opt_update, + opt_all, + opt_show_api_key, + opt_id, + opt_name, + opt_description, + opt_api_url, + opt_api_key, +): + """Manage collectors. + + Args: + opt_list (bool): List all collectors. + opt_create (bool): Create a new collector. + opt_edit (bool): Edit an existing collector. 
+ opt_delete (bool): Delete an existing collector. + opt_update (bool): Update collectors. + opt_all (bool): Update all collectors. + opt_show_api_key (bool): Show API key in the output. + opt_id (str): ID of the collector. + opt_name (str): Name of the collector. + opt_description (str): Description of the collector. + opt_api_url (str): API URL of the collector. + opt_api_key (str): API key of the collector. + """ + if opt_list: + collector_nodes = collectors_node.CollectorsNode.get_all() + + for node in collector_nodes: + capabilities = [] + sources = [] + for c in node.collectors: + capabilities.append(c.type) + for s in c.sources: + sources.append("{} ({})".format(s.name, s.id)) + print( + "Id: {}\n\tName: {}\n\tURL: {}\n\t{}Created: {}\n\tLast seen: {}\n\tCapabilities: {}\n\tSources: {}".format( + node.id, + node.name, + node.api_url, + "API key: {}\n\t".format(node.api_key) if opt_show_api_key else "", + node.created, + node.last_seen, + capabilities, + sources, + ) + ) + exit() -class CollectorManagement(Command): - """Manage collector nodes. + if opt_create: + if not opt_name or not opt_api_url or not opt_api_key: + app.logger.critical("Please specify the collector node name, API url and key!") + abort() - Arguments: - Command -- _description_ - """ + data = { + "id": "", + "name": opt_name, + "description": opt_description if opt_description else "", + "api_url": opt_api_url, + "api_key": opt_api_key, + "collectors": [], + "status": "0", + } + + print("Trying to contact a new collector node...") + retries, max_retries = 0, 30 + while retries < max_retries: + try: + collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info("") + break + except: # noqa: E722 + collectors_info = "Collector unavailable" + status_code = 0 + time.sleep(1) + retries += 1 + print("Retrying [{}/{}]...".format(retries, max_retries)) + + if status_code != 200: + print("Cannot create a new collector node!") + print("Response from collector: {}".format(collectors_info)) + abort() - option_list = ( - Option("--list", "-l", dest="opt_list", action="store_true"), - Option("--create", "-c", dest="opt_create", action="store_true"), - Option("--edit", "-e", dest="opt_edit", action="store_true"), - Option("--delete", "-d", dest="opt_delete", action="store_true"), - Option("--update", "-u", dest="opt_update", action="store_true"), - Option("--all", "-a", dest="opt_all", action="store_true"), - Option("--show-api-key", dest="opt_show_api_key", action="store_true"), - Option("--id", dest="opt_id"), - Option("--name", dest="opt_name"), - Option("--description", dest="opt_description", default=""), - Option("--api-url", dest="opt_api_url"), - Option("--api-key", dest="opt_api_key"), - ) - - def run( - self, - opt_list, - opt_create, - opt_edit, - opt_delete, - opt_update, - opt_all, - opt_show_api_key, - opt_id, - opt_name, - opt_description, - opt_api_url, - opt_api_key, - ): - """Run the command. 
- - Arguments: - opt_list -- list all collector nodes - opt_create -- create a new node - opt_edit -- edit an existing node - opt_delete -- delete a node - opt_update -- re-initialize collector node - opt_all -- update all collector nodes (in combination with --update) - opt_show_api_key -- show API key in plaintext (in combination with --list) - opt_id -- specify the node id (in combination with --edit, --delete or --update) - opt_name -- specify the node name - opt_description -- specify the collector description (default is "") - opt_api_url -- specify the collector node API url - opt_api_key -- specify the collector node API key - """ - if opt_list: - collector_nodes = collectors_node.CollectorsNode.get_all() - - for node in collector_nodes: - capabilities = [] - sources = [] - for c in node.collectors: - capabilities.append(c.type) - for s in c.sources: - sources.append("{} ({})".format(s.name, s.id)) - print( - "Id: {}\n\tName: {}\n\tURL: {}\n\t{}Created: {}\n\tLast seen: {}\n\tCapabilities: {}\n\tSources: {}".format( - node.id, - node.name, - node.api_url, - "API key: {}\n\t".format(node.api_key) if opt_show_api_key else "", - node.created, - node.last_seen, - capabilities, - sources, - ) - ) - exit() + collectors = collector.Collector.create_all(collectors_info) + node = collectors_node.CollectorsNode.add_new(data, collectors) + collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info(node.id) - if opt_create: - if not opt_name or not opt_api_url or not opt_api_key: - app.logger.critical("Please specify the collector node name, API url and key!") - abort() + print("Collector node '{}' with id {} created.".format(opt_name, node.id)) - data = { - "id": "", - "name": opt_name, - "description": opt_description if opt_description else "", - "api_url": opt_api_url, - "api_key": opt_api_key, - "collectors": [], - "status": "0", - } - - print("Trying to contact a new collector node...") - retries, max_retries = 0, 30 - while retries < max_retries: - try: - collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info("") - break - except: # noqa: E722 - collectors_info = "Collector unavailable" - status_code = 0 - time.sleep(1) - retries += 1 - print("Retrying [{}/{}]...".format(retries, max_retries)) - - if status_code != 200: - print("Cannot create a new collector node!") - print("Response from collector: {}".format(collectors_info)) - abort() + if opt_edit: + if not opt_id or not opt_name: + app.logger.critical("Collector node id or name not specified!") + abort() + if not opt_name or not opt_description or not opt_api_url or not opt_api_key: + app.logger.critical("Please specify a new name, description, API url or key!") + abort() - collectors = collector.Collector.create_all(collectors_info) - node = collectors_node.CollectorsNode.add_new(data, collectors) - collectors_info, status_code = CollectorsApi(opt_api_url, opt_api_key).get_collectors_info(node.id) + if opt_delete: + if not opt_id or not opt_name: + app.logger.critical("Collector node id or name not specified!") + abort() - print("Collector node '{}' with id {} created.".format(opt_name, node.id)) + if opt_update: + if not opt_all and not opt_id and not opt_name: + app.logger.critical("Collector node id or name not specified!") + app.logger.critical("If you want to update all collectors, pass the --all parameter.") + abort() - if opt_edit: - if not opt_id or not opt_name: - app.logger.critical("Collector node id or name not specified!") - abort() - if not opt_name 
or not opt_description or not opt_api_url or not opt_api_key: - app.logger.critical("Please specify a new name, description, API url or key!") + nodes = None + if opt_id: + nodes = [collectors_node.CollectorsNode.get_by_id(opt_id)] + if not nodes: + app.logger.critical("Collector node does not exit!") abort() - - if opt_delete: - if not opt_id or not opt_name: - app.logger.critical("Collector node id or name not specified!") + elif opt_name: + nodes, count = collectors_node.CollectorsNode.get(opt_name) + if not count: + app.logger.critical("Collector node does not exit!") abort() - - if opt_update: - if not opt_all and not opt_id and not opt_name: - app.logger.critical("Collector node id or name not specified!") - app.logger.critical("If you want to update all collectors, pass the --all parameter.") + else: + nodes, count = collectors_node.CollectorsNode.get(None) + if not count: + app.logger.critical("No collector nodes exist!") abort() - nodes = None - if opt_id: - nodes = [collectors_node.CollectorsNode.get_by_id(opt_id)] - if not nodes: - app.logger.critical("Collector node does not exit!") - abort() - elif opt_name: - nodes, count = collectors_node.CollectorsNode.get(opt_name) - if not count: - app.logger.critical("Collector node does not exit!") - abort() + for node in nodes: + # refresh collector node id + collectors_info, status_code = CollectorsApi(node.api_url, node.api_key).get_collectors_info(node.id) + if status_code == 200: + print("Collector node {} updated.".format(node.id)) else: - nodes, count = collectors_node.CollectorsNode.get(None) - if not count: - app.logger.critical("No collector nodes exist!") - abort() + print("Unable to update collector node {}.\n\tResponse: [{}] {}.".format(node.id, status_code, collectors_info)) - for node in nodes: - # refresh collector node id - collectors_info, status_code = CollectorsApi(node.api_url, node.api_key).get_collectors_info(node.id) - if status_code == 200: - print("Collector node {} updated.".format(node.id)) - else: - print("Unable to update collector node {}.\n\tResponse: [{}] {}.".format(node.id, status_code, collectors_info)) +@app.cli.command("dictionary") +@click.option("--upload-cve", is_flag=True) +@click.option("--upload-cpe", is_flag=True) +def dictionary_management(upload_cve, upload_cpe): + """Manage the dictionaries by uploading and loading CVE and CPE files. -class DictionaryManagement(Command): - """Manage dictionaries. + This function uploads the CVE and CPE files and loads the dictionaries accordingly. + If `upload_cve` is True, it uploads the CVE file and loads the CVE dictionary. + If `upload_cpe` is True, it uploads the CPE file and loads the CPE dictionary. Arguments: - Command -- _description_ + upload_cve (bool): Indicates whether to upload the CVE file and load the CVE dictionary. + upload_cpe (bool): Indicates whether to upload the CPE file and load the CPE dictionary. """ - option_list = ( - Option("--upload-cve", dest="opt_cve", action="store_true"), - Option("--upload-cpe", dest="opt_cpe", action="store_true"), - Option("--upload-cwe", dest="opt_cwe", action="store_true"), - ) - - def run(self, opt_cve, opt_cpe, opt_cwe): - """Run the command. - - Arguments: - opt_cve -- upload the CPE dictionary (expected on STDIN in XML format) to the path indicated by CPE_UPDATE_FILE environment - variable, and update the database from that file. 
- opt_cpe -- upload the CVE dictionary (expected on STDIN in XML format) to the path indicated by CVE_UPDATE_FILE environment - variable, and update the database from that file. - opt_cwe -- upload the CWE dictionary (expected on STDIN in XML format) to the path indicated by CWE_UPDATE_FILE environment - variable, and update the database from that file. - """ - from model import attribute - - if opt_cve: - cve_update_file = getenv("CVE_UPDATE_FILE") - if cve_update_file is None: - app.logger.critical("CVE_UPDATE_FILE is undefined") - abort() - - self.upload_to(cve_update_file) - try: - attribute.Attribute.load_dictionaries("cve") - except Exception: - app.logger.debug(traceback.format_exc()) - app.logger.critical("File structure was not recognized!") - abort() - - if opt_cpe: - cpe_update_file = getenv("CPE_UPDATE_FILE") - if cpe_update_file is None: - app.logger.critical("CPE_UPDATE_FILE is undefined") - abort() - - self.upload_to(cpe_update_file) - try: - attribute.Attribute.load_dictionaries("cpe") - except Exception: - app.logger.debug(traceback.format_exc()) - app.logger.critical("File structure was not recognized!") - abort() - - if opt_cwe: - cwe_update_file = getenv("CWE_UPDATE_FILE") - if cwe_update_file is None: - app.logger.critical("CWE_UPDATE_FILE is undefined") - abort() - - self.upload_to(cwe_update_file) - try: - attribute.Attribute.load_dictionaries("cwe") - except Exception: - app.logger.debug(traceback.format_exc()) - app.logger.critical("File structure was not recognized!") - abort() + if upload_cve: + cve_update_file = getenv("CVE_UPDATE_FILE") + if cve_update_file is None: + app.logger.critical("CVE_UPDATE_FILE is undefined") + abort() - app.logger.critical("Dictionary was uploaded.") - exit() + upload_to(cve_update_file) + try: + attribute.Attribute.load_dictionaries("cve") + except Exception: + app.logger.debug(traceback.format_exc()) + app.logger.critical("File structure was not recognized!") + abort() - def upload_to(self, filename): - """Upload the file to the specified path. + if upload_cpe: + cpe_update_file = getenv("CPE_UPDATE_FILE") + if cpe_update_file is None: + app.logger.critical("CPE_UPDATE_FILE is undefined") + abort() - Arguments: - filename -- path specified by the environment variable - """ + upload_to(cpe_update_file) try: - with open(filename, "wb") as out_file: - while True: - chunk = read(0, 131072) - if not chunk: - break - out_file.write(chunk) + attribute.Attribute.load_dictionaries("cpe") except Exception: app.logger.debug(traceback.format_exc()) - app.logger.critical("Upload failed!") + app.logger.critical("File structure was not recognized!") abort() + app.logger.error("Dictionary was uploaded.") + exit() -class ApiKeysManagement(Command): - """Manage API keys. + +def upload_to(filename): + """Upload a file to the specified filename. Arguments: - Command -- _description_ + filename (str): The name of the file to upload. 
""" + try: + with open(filename, "wb") as out_file: + while True: + chunk = read(0, 131072) + if not chunk: + break + out_file.write(chunk) + except Exception: + app.logger.debug(traceback.format_exc()) + app.logger.critical("Upload failed!") + abort() + + +@app.cli.command("apikey") +@click.option("--list", "-l", "opt_list", is_flag=True) +@click.option("--create", "-c", "opt_create", is_flag=True) +@click.option("--delete", "-d", "opt_delete", is_flag=True) +@click.option("--name", "-n", "opt_name") +@click.option("--user", "-u", "opt_user") +@click.option("--expires", "-e", "opt_expires") +def api_keys_management(opt_list, opt_create, opt_delete, opt_name, opt_user, opt_expires): + """Manage API keys. - option_list = ( - Option("--list", "-l", dest="opt_list", action="store_true"), - Option("--create", "-c", dest="opt_create", action="store_true"), - Option("--delete", "-d", dest="opt_delete", action="store_true"), - Option("--name", "-n", dest="opt_name"), - Option("--user", "-u", dest="opt_user"), - Option("--expires", "-e", dest="opt_expires"), - ) - - def run(self, opt_list, opt_create, opt_delete, opt_name, opt_user, opt_expires): - """Run the command. - - Arguments: - opt_list -- list all apikeys - opt_create -- create a new apikey - opt_delete -- delete a apikey - opt_name -- specify the apikey name - opt_user -- specify the user's name - opt_expires -- specify the apikey expiration datetime - """ - if opt_list: - apikeys = apikey.ApiKey.get_all() - for k in apikeys: - print( - "Id: {}\n\tName: {}\n\tKey: {}\n\tCreated: {}\n\tUser id: {}\n\tExpires: {}".format( - k.id, k.name, k.key, k.created_at, k.user_id, k.expires_at - ) - ) - exit() + This function provides functionality to list, create, and delete API keys. - if opt_create: - if not opt_name: - app.logger.critical("Name not specified!") - abort() + Arguments: + opt_list (bool): If True, list all existing API keys. + opt_create (bool): If True, create a new API key. + opt_delete (bool): If True, delete an existing API key. + opt_name (str): The name of the API key. + opt_user (str): The user associated with the API key. + opt_expires (str): The expiration date of the API key. 
+ """ + if opt_list: + apikeys = apikey.ApiKey.get_all() + for k in apikeys: + print( + "Id: {}\n\tName: {}\n\tKey: {}\n\tCreated: {}\n\tUser id: {}\n\tExpires: {}".format( + k.id, k.name, k.key, k.created_at, k.user_id, k.expires_at + ) + ) + exit() - if apikey.ApiKey.find_by_name(opt_name): - app.logger.critical("Name already exists!") - abort() + if opt_create: + if not opt_name: + app.logger.critical("Name not specified!") + abort() - if not opt_user: - app.logger.critical("User not specified!") - abort() + if apikey.ApiKey.find_by_name(opt_name): + app.logger.critical("Name already exists!") + abort() - u = None - if opt_user: - u = user.User.find(opt_user) - if not u: - app.logger.critical("The specified user '{}' does not exist!".format(opt_user)) - abort() + if not opt_user: + app.logger.critical("User not specified!") + abort() - data = { - # 'id': None, - "name": opt_name, - "key": "".join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=40)), - "user_id": u.id, - "expires_at": opt_expires if opt_expires else None, - } + u = None + if opt_user: + u = user.User.find(opt_user) + if not u: + app.logger.critical("The specified user '{}' does not exist!".format(opt_user)) + abort() - k = apikey.ApiKey.add_new(data) - print("ApiKey '{}' with id {} created.".format(opt_name, k.id)) + data = { + # 'id': None, + "name": opt_name, + "key": "".join(random.choices(string.ascii_uppercase + string.ascii_lowercase + string.digits, k=40)), + "user_id": u.id, + "expires_at": opt_expires if opt_expires else None, + } - if opt_delete: - if not opt_name: - app.logger.critical("Name not specified!") - abort() + k = apikey.ApiKey.add_new(data) + print("ApiKey '{}' with id {} created.".format(opt_name, k.id)) - k = apikey.ApiKey.find_by_name(opt_name) - if not k: - app.logger.critical("Name not found!") - abort() + if opt_delete: + if not opt_name: + app.logger.critical("Name not specified!") + abort() - apikey.ApiKey.delete(k.id) - print("ApiKey '{}' has been deleted.".format(opt_name)) + k = apikey.ApiKey.find_by_name(opt_name) + if not k: + app.logger.critical("Name not found!") + abort() + apikey.ApiKey.delete(k.id) + print("ApiKey '{}' has been deleted.".format(opt_name)) -manager.add_command("account", AccountManagement) -manager.add_command("role", RoleManagement) -manager.add_command("collector", CollectorManagement) -manager.add_command("dictionary", DictionaryManagement) -manager.add_command("apikey", ApiKeysManagement) if __name__ == "__main__": - manager.run() + app.run() From 61e60e212b7660b6711979bb552414dd8cc2eb65 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Sun, 3 Mar 2024 21:32:14 +0100 Subject: [PATCH 143/146] remove flask_script --- src/core/db_migration.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/src/core/db_migration.py b/src/core/db_migration.py index 2cf3a9af7..dc74bc7c4 100755 --- a/src/core/db_migration.py +++ b/src/core/db_migration.py @@ -1,16 +1,19 @@ #! /usr/bin/env python +"""This script is responsible for performing database migrations for the Taranis-NG application. +It initializes the Flask application, configures the database manager, and waits for the database to be ready. +Once the database is ready, it performs the necessary migrations using Flask-Migrate. 
+""" import socket import time from flask import Flask -from flask_script import Manager -from flask_migrate import Migrate, MigrateCommand +from flask_migrate import Migrate from managers import db_manager -from model import * +from model import * # noqa: F401, F403 app = Flask(__name__) -app.config.from_object('config.Config') +app.config.from_object("config.Config") db_manager.initialize(app) @@ -18,17 +21,13 @@ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) while True: try: - s.connect((app.config.get('DB_URL'), 5432)) + s.connect((app.config.get("DB_URL"), 5432)) s.close() break - except socket.error as ex: + except socket.error: time.sleep(0.1) migrate = Migrate(app=app, db=db_manager.db) -manager = Manager(app=app) - -manager.add_command('db', MigrateCommand) - -if __name__ == '__main__': - manager.run() +if __name__ == "__main__": + app.run() From 7aac3f1118271b6bc576137cd7e1c8680d0d4426 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:16:30 +0100 Subject: [PATCH 144/146] JWT fixes --- src/core/auth/base_authenticator.py | 82 ++++++++++++++++++++++++++--- src/core/managers/auth_manager.py | 2 +- 2 files changed, 75 insertions(+), 9 deletions(-) diff --git a/src/core/auth/base_authenticator.py b/src/core/auth/base_authenticator.py index 704638471..aa996606a 100644 --- a/src/core/auth/base_authenticator.py +++ b/src/core/auth/base_authenticator.py @@ -1,3 +1,8 @@ +"""This module contains the `BaseAuthenticator` class which provides the base functionality for authentication. + +The `BaseAuthenticator` class defines methods for authentication, token generation, token refresh, and user logout. +""" + from flask_jwt_extended import create_access_token from managers import log_manager @@ -6,42 +11,103 @@ class BaseAuthenticator: + """Base class for authenticators. + + This class provides the basic structure and methods for implementing an authenticator. + Subclasses should override the methods as needed for specific authentication mechanisms. + + Methods: + get_required_credentials: Return the required credentials for authentication. + authenticate: Authenticate the user based on the provided credentials. + refresh: Refresh the authentication token for the given user. + logout: Logout the user by adding the token to the blacklist. + initialize: Initialize the authenticator. + generate_error: Generate an error response for authentication failure. + generate_jwt: Generate a JSON Web Token (JWT) for the given username. + """ def get_required_credentials(self): + """Return the required credentials for authentication. + + Returns: + A list of required credentials. + """ return [] def authenticate(self, credentials): + """Authenticate the user based on the provided credentials. + + Arguments: + credentials -- The user's credentials. + + Returns: + The result of the authentication process. + """ return BaseAuthenticator.generate_error() def refresh(self, user): + """Refresh the authentication token for the given user. + + Arguments: + user -- The user object. + + Returns: + The refreshed authentication token. + """ return BaseAuthenticator.generate_jwt(user.username) @staticmethod def logout(token): + """Logout the user by adding the token to the blacklist. + + Arguments: + token -- The token to be blacklisted. + """ if token is not None: TokenBlacklist.add(token) @staticmethod def initialize(app): + """Initialize the authenticator. + + Arguments: + app -- The application object. 
+ """ pass @staticmethod def generate_error(): - return {'error': 'Authentication failed'}, 401 + """Generate an error response for authentication failure. + + Returns: + A tuple containing the error message and the HTTP status code. + """ + return {"error": "Authentication failed"}, 401 @staticmethod def generate_jwt(username): + """Generate a JSON Web Token (JWT) for the given username. + + Arguments: + username (str): The username for which to generate the JWT. + Returns: + tuple: A tuple containing the generated access token and the HTTP status code. + """ user = User.find(username) if not user: - log_manager.store_auth_error_activity("User does not exist after authentication: " + username) + log_manager.store_auth_error_activity(f"User does not exist after authentication: {username}") return BaseAuthenticator.generate_error() else: log_manager.store_user_activity(user, "LOGIN", "Successful") - access_token = create_access_token(identity=user.username, - user_claims={'id': user.id, - 'name': user.name, - 'organization_name': user.get_current_organization_name(), - 'permissions': user.get_permissions()}) + access_token = create_access_token( + identity=user.username, + additional_claims={ + "id": user.id, + "name": user.name, + "organization_name": user.get_current_organization_name(), + "permissions": user.get_permissions(), + }, + ) - return {'access_token': access_token}, 200 + return {"access_token": access_token}, 200 diff --git a/src/core/managers/auth_manager.py b/src/core/managers/auth_manager.py index 99fe11c5d..7f4a9128f 100644 --- a/src/core/managers/auth_manager.py +++ b/src/core/managers/auth_manager.py @@ -556,7 +556,7 @@ def decode_user_from_jwt(jwt_token): with open(os.getenv("JWT_SECRET_KEY_FILE"), "r") as file: jwt_secret_key = file.read() try: - decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY")) + decoded = jwt.decode(jwt_token, os.getenv("JWT_SECRET_KEY"), algorithms=["HS256"]) except Exception as ex: # e.g. 
"Signature has expired" log_manager.store_auth_error_activity(f"Invalid JWT: {str(ex)}") if decoded is None: From c9359a76e924b8c4fa0f28ce2c02fbd8b8f56758 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:17:40 +0100 Subject: [PATCH 145/146] add git to build dependencies in dockerfile --- docker/Dockerfile.core | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile.core b/docker/Dockerfile.core index 354ea5a65..01e4b5a8d 100644 --- a/docker/Dockerfile.core +++ b/docker/Dockerfile.core @@ -49,6 +49,7 @@ RUN \ libc-dev\ zlib-dev \ linux-headers \ + git \ make \ glib-dev \ musl-dev \ From a55fe504089415cae2539a70ec6464f815e05db4 Mon Sep 17 00:00:00 2001 From: multiflexi Date: Tue, 5 Mar 2024 08:17:57 +0100 Subject: [PATCH 146/146] update requirements --- src/core/requirements.txt | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/core/requirements.txt b/src/core/requirements.txt index bb43dbdd7..60ff3c119 100644 --- a/src/core/requirements.txt +++ b/src/core/requirements.txt @@ -3,16 +3,15 @@ Flask==3.0.2 Flask-Cors==4.0.0 Flask-JWT-Extended==4.6.0 Flask-Migrate==4.0.5 -flask-oidc==1.4.0 +#https://github.com/puiterwijk/flask-oidc/issues/147 +git+https://github.com/puiterwijk/flask-oidc.git@b10e6bf881a3fe0c3972e4093648f2b77f32a97c Flask-RESTful==0.3.10 -#Flask-Script==2.0.6 Flask-SSE==1.0.0 Flask-SQLAlchemy==3.0.5 gevent==24.2.1 gunicorn==21.2.0 Jinja2==3.1.3 ldap3==2.9.1 -# markupsafe==2.0.1 #remove after Jinja2 upgraded marshmallow==3.19.0 marshmallow-enum==1.5.1 psycogreen==1.0.2 @@ -24,5 +23,5 @@ requests==2.31.0 schedule==1.2.1 sseclient-py==1.8.0 SQLAlchemy==1.4.51 #upgrade -Werkzeug==3.0.1 #update +Werkzeug==3.0.1 pycryptodomex==3.20