148 changes: 148 additions & 0 deletions cognition_objects/integration.py
@@ -11,6 +11,7 @@
CognitionIntegrationType,
)
from ..util import prevent_sql_injection
from submodules.model import enums

FINISHED_STATES = [
CognitionMarkdownFileState.FINISHED.value,
@@ -329,3 +330,150 @@ def get_distinct_item_ids_for_all_permissions(
return []

return [row[0] for row in results if row and row[0]]


def get_last_integrations_tasks() -> List[Dict[str, Any]]:
query = f"""
WITH embedding_agg AS (
SELECT
project_id,
jsonb_object_agg(
state,
jsonb_build_object(
'count', count,
'embeddings', embeddings
)
) AS embeddings_by_state
FROM (
SELECT
e.project_id,
e.state,
COUNT(*) AS count,
jsonb_agg(
jsonb_build_object(
'createdBy', e.created_by,
'finishedAt', e.finished_at,
'id', e.id,
'name', e.name,
'startedAt', e.started_at,
'state', e.state
) ORDER BY e.started_at DESC
) AS embeddings
FROM embedding e
GROUP BY e.project_id, e.state
) AS x
GROUP BY project_id
),

attribute_agg AS (
SELECT
project_id,
jsonb_object_agg(
state,
jsonb_build_object(
'count', count,
'attributes', attributes
)
) AS attributes_by_state
FROM (
SELECT
a.project_id,
a.state,
COUNT(*) AS count,
jsonb_agg(
jsonb_build_object(
'dataType', a.data_type,
'finishedAt', a.finished_at,
'id', a.id,
'name', a.name,
'startedAt', a.started_at,
'state', a.state
) ORDER BY a.started_at DESC
) AS attributes
FROM attribute a
WHERE a.state NOT IN ('UPLOADED','AUTOMATICALLY_CREATED')
GROUP BY a.project_id, a.state
) AS x
GROUP BY project_id
),

record_tokenization_task_agg AS (
SELECT
project_id,
jsonb_object_agg(
state,
jsonb_build_object(
'count', count,
'record_tokenization_tasks', record_tokenization_tasks
)
) AS record_tokenization_tasks_by_state
FROM (
SELECT
rtt.project_id,
rtt.state,
COUNT(*) AS count,
jsonb_agg(
jsonb_build_object(
'finishedAt', rtt.finished_at,
'id', rtt.id,
'startedAt', rtt.started_at,
'state', rtt.state,
'type', rtt.type
) ORDER BY rtt.started_at DESC
) AS record_tokenization_tasks
FROM record_tokenization_task rtt
GROUP BY rtt.project_id, rtt.state
) AS x
GROUP BY project_id
),

integration_data AS (
SELECT
i.id AS integration_id,
i.name AS integration_name,
i.error_message,
i.started_at,
i.finished_at,
i.state,
i.organization_id,
i.project_id,
i.created_by,
i.type,
o.name AS organization_name,
p.name AS project_name,
jsonb_build_object(
    'embeddingsByState', COALESCE(ea.embeddings_by_state, '{}'::jsonb),
    'attributesByState', COALESCE(aa.attributes_by_state, '{}'::jsonb),
    'recordTokenizationTasksByState', COALESCE(rtt.record_tokenization_tasks_by_state, '{}'::jsonb)
) AS full_data
FROM cognition.integration i
LEFT JOIN embedding_agg ea
ON ea.project_id = i.project_id
LEFT JOIN attribute_agg aa
ON aa.project_id = i.project_id
LEFT JOIN record_tokenization_task_agg rtt
ON rtt.project_id = i.project_id
JOIN organization o
ON o.id = i.organization_id
JOIN project p
ON p.id = i.project_id
)

SELECT
int_data.organization_id AS organization_id,
int_data.organization_name AS organization_name,
int_data.integration_id,
int_data.integration_name,
int_data.error_message,
int_data.started_at,
int_data.finished_at,
int_data.state,
int_data.full_data,
int_data.created_by,
int_data.type,
int_data.project_name
FROM integration_data int_data
ORDER BY int_data.organization_id, int_data.started_at DESC
"""

return general.execute_all(query)
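
For reference, a minimal consumption sketch for the new query. The `submodules.model.cognition_objects.integration` import path and attribute-style row access are assumptions based on this diff, not confirmed by it; `full_data` already bundles embeddings, attributes and tokenization tasks keyed by their state, so no client-side re-aggregation is needed.

from collections import defaultdict

from submodules.model.cognition_objects import integration  # assumed import path

# Group the latest integration tasks per organization.
tasks_by_org = defaultdict(list)
for row in integration.get_last_integrations_tasks():
    tasks_by_org[row.organization_name].append(
        {
            "integration": row.integration_name,
            "state": row.state,
            "details": row.full_data,  # state-keyed embeddings/attributes/tokenization info
        }
    )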
47 changes: 47 additions & 0 deletions cognition_objects/markdown_file.py
@@ -218,3 +218,50 @@ def delete_many(org_id: str, md_file_ids: List[str], with_commit: bool = True) -
CognitionMarkdownFile.id.in_(md_file_ids),
).delete(synchronize_session=False)
general.flush_or_commit(with_commit)


def get_last_etl_tasks(
states: List[str],
created_at_from: str,
created_at_to: Optional[str] = None,
) -> List[Any]:

states = [prevent_sql_injection(st, isinstance(st, str)) for st in states]
if len(states) == 0:
return []

created_at_from = prevent_sql_injection(
created_at_from, isinstance(created_at_from, str)
)
if created_at_to:
created_at_to = prevent_sql_injection(
created_at_to, isinstance(created_at_to, str)
)
created_at_to_filter = ""

if created_at_to:
created_at_to_filter = f"AND mf.created_at <= '{created_at_to}'"

states_filter_sql = ", ".join([f"'{state}'" for state in states])

query = f"""
SELECT *
FROM (
SELECT mf.created_at, mf.created_by, mf.started_at, mf.finished_at, mf.file_name, mf.error, mf.state, md.id AS dataset_id, md.name AS dataset_name, md.organization_id, o.name AS organization_name,
ROW_NUMBER() OVER (
PARTITION BY md.organization_id, md.id
ORDER BY mf.created_at DESC
) AS rn
FROM cognition.markdown_file mf
JOIN cognition.markdown_dataset md ON md.id = mf.dataset_id
JOIN organization o ON o.id = md.organization_id
WHERE
mf.created_at >= '{created_at_from}'
AND mf.state IN ({states_filter_sql})
{created_at_to_filter}
) sub
WHERE rn <= 5
ORDER BY organization_id, dataset_id, created_at DESC
"""

return general.execute_all(query)
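
A minimal usage sketch for the new helper, assuming the module is importable as `submodules.model.cognition_objects.markdown_file` and that `CognitionMarkdownFileState` lives in `submodules.model.enums` (both paths are assumptions); timestamps are passed as ISO strings because the query interpolates them directly.

from datetime import datetime, timedelta

from submodules.model.cognition_objects import markdown_file  # assumed import path
from submodules.model.enums import CognitionMarkdownFileState  # assumed enum location

# Last five finished ETL runs per dataset from the past seven days.
week_ago = (datetime.utcnow() - timedelta(days=7)).isoformat()
rows = markdown_file.get_last_etl_tasks(
    states=[CognitionMarkdownFileState.FINISHED.value],
    created_at_from=week_ago,
)
for row in rows:
    print(row.organization_name, row.dataset_name, row.file_name, row.state)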
47 changes: 47 additions & 0 deletions cognition_objects/message.py
@@ -1,5 +1,7 @@
from typing import Any, Dict, List, Optional, Union, Tuple
from datetime import datetime

from submodules.model.enums import MessageType
from ..business_objects import general
from ..session import session
from ..models import CognitionMessage
@@ -605,3 +607,48 @@ def get_count_by_project_id(project_id: str) -> int:
)
.count()
)


def get_last_chat_messages(
message_type: MessageType,
starting_from: str,
ending_to: Optional[str] = None,
) -> List[Any]:

message_type = prevent_sql_injection(message_type, isinstance(message_type, str))
starting_from = prevent_sql_injection(starting_from, isinstance(starting_from, str))
if ending_to:
ending_to = prevent_sql_injection(ending_to, isinstance(ending_to, str))

message_type_filter = ""
ending_to_filter = ""

if message_type == MessageType.WITH_ERROR:
message_type_filter = "AND c.error IS NOT NULL"
elif message_type == MessageType.WITHOUT_ERROR:
message_type_filter = "AND c.error IS NULL"
if ending_to:
ending_to_filter = f"AND m.created_at <= '{ending_to}'"

query = f"""
SELECT *
FROM (
SELECT m.created_at, m.created_by, m.question, m.answer, m.initiated_via, c.error, cp.id AS project_id, cp.name AS project_name, cp.organization_id, o.name AS organization_name, c.id AS conversation_id,
ROW_NUMBER() OVER (
PARTITION BY cp.organization_id, cp.id
ORDER BY m.created_at DESC
) AS rn
FROM cognition.message m
JOIN cognition.conversation c ON c.id = m.conversation_id
JOIN cognition.project cp ON cp.id = m.project_id
JOIN organization o ON o.id = cp.organization_id
WHERE
m.created_at >= '{starting_from}'
{message_type_filter}
{ending_to_filter}
) sub
WHERE rn <= 5
ORDER BY organization_id, project_id, created_at DESC
"""

return general.execute_all(query)
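
For illustration, a hedged usage sketch: the `submodules.model.cognition_objects.message` import path is an assumption, while `MessageType` comes from `submodules.model.enums` as added in this diff.

from datetime import datetime, timedelta

from submodules.model.cognition_objects import message  # assumed import path
from submodules.model.enums import MessageType

# Last five erroring chat messages per project from the past 24 hours.
since = (datetime.utcnow() - timedelta(hours=24)).isoformat()
rows = message.get_last_chat_messages(
    message_type=MessageType.WITH_ERROR,
    starting_from=since,
)
for row in rows:
    print(row.organization_name, row.project_name, row.conversation_id, row.error)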
81 changes: 79 additions & 2 deletions cognition_objects/strategy.py
@@ -1,10 +1,12 @@
from typing import List, Optional
from typing import Any, List, Optional
from datetime import datetime

from submodules.model.util import prevent_sql_injection

from ..business_objects import general
from ..session import session
from ..models import CognitionStrategy
from ..enums import StrategyComplexity
from ..enums import StrategyComplexity, StrategyStepType


def get(project_id: str, strategy_id: str) -> CognitionStrategy:
@@ -107,3 +109,78 @@ def delete_all_by_project_id(project_id: str, with_commit: bool = True) -> None:
CognitionStrategy.project_id == project_id
).delete()
general.flush_or_commit(with_commit)


def get_strategies_info(
step_types: List[str],
created_at_from: str,
created_at_to: Optional[str] = None,
) -> List[Any]:

step_types = [prevent_sql_injection(st, isinstance(st, str)) for st in step_types]
if len(step_types) == 0:
return []

created_at_from = prevent_sql_injection(
created_at_from, isinstance(created_at_from, str)
)
if created_at_to:
created_at_to = prevent_sql_injection(
created_at_to, isinstance(created_at_to, str)
)
created_at_to_filter = ""

if created_at_to:
created_at_to_filter = f"AND ss.created_at <= '{created_at_to}'"

step_types_sql = ", ".join([f"'{st}'" for st in step_types])

query = f"""
WITH step_data AS (
SELECT
s.id AS strategy_id, s.name AS strategy_name,
ss.id AS step_id, ss.created_by, ss.created_at, ss.name AS step_name, ss.step_type,
p.name AS project_name, p.id AS project_id,
o.name AS organization_name, o.id AS organization_id,
st.config::jsonb AS template_config,
CASE
WHEN ss.step_type = '{StrategyStepType.TEMPLATED.value}' AND st.config IS NOT NULL
THEN ARRAY(
SELECT (t->>'stepType') || ':' || (t->>'stepName')
FROM jsonb_array_elements((st.config->'steps')::jsonb) t
)
ELSE NULL
END AS template_step_names,
CASE
WHEN ss.step_type = '{StrategyStepType.TEMPLATED.value}' AND st.config IS NOT NULL
THEN ARRAY(
SELECT t->>'stepType'
FROM jsonb_array_elements((st.config->'steps')::jsonb) t
)
ELSE NULL
END AS template_step_types
FROM cognition.strategy s
JOIN cognition.strategy_step ss
ON ss.strategy_id = s.id
JOIN cognition.project p
ON p.id = s.project_id
JOIN organization o
ON o.id = p.organization_id
LEFT JOIN cognition.step_templates st
ON st.id = (ss.config->>'templateId')::uuid
WHERE ss.created_at >= '{created_at_from}'
{created_at_to_filter}
)
SELECT strategy_id, strategy_name, step_id, created_by, created_at, step_name, step_type, project_name, project_id, organization_name, organization_id,
CASE
WHEN step_type = '{StrategyStepType.TEMPLATED.value}' THEN template_step_names
ELSE ARRAY[step_type || ':' || step_name]
END AS templated_step_names
FROM step_data
WHERE
step_type IN ({step_types_sql})
OR (step_type = '{StrategyStepType.TEMPLATED.value}' AND template_step_types && ARRAY[{step_types_sql}])
ORDER BY strategy_id, created_at DESC
"""

return general.execute_all(query)
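
A sketch of how the strategy report might be queried. Import paths are assumptions; `StrategyStepType.TEMPLATED` is the only step type confirmed by this diff, so it serves as the illustrative filter (any other step type value would work the same way).

from datetime import datetime, timedelta

from submodules.model.cognition_objects import strategy  # assumed import path
from submodules.model.enums import StrategyStepType  # assumed enum location

# Strategies that used templated steps (directly or via a template) in the last 30 days.
month_ago = (datetime.utcnow() - timedelta(days=30)).isoformat()
rows = strategy.get_strategies_info(
    step_types=[StrategyStepType.TEMPLATED.value],
    created_at_from=month_ago,
)
for row in rows:
    print(row.project_name, row.strategy_name, row.templated_step_names)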
6 changes: 6 additions & 0 deletions enums.py
@@ -1015,5 +1015,11 @@ class MessageInitiationType(Enum):
MACRO = "MACRO"


class MessageType(Enum):
WITH_ERROR = "WITH_ERROR"
WITHOUT_ERROR = "WITHOUT_ERROR"
ALL = "ALL"


class TimedExecutionKey(Enum):
LAST_RESET_USER_MESSAGE_COUNT = "LAST_RESET_USER_MESSAGE_COUNT"