diff --git a/.claude/skills/flexus-bot-dev/references/FLEXUS_BOT_REFERENCE.md b/.claude/skills/flexus-bot-dev/references/FLEXUS_BOT_REFERENCE.md index f494e493..8dcea99f 100644 --- a/.claude/skills/flexus-bot-dev/references/FLEXUS_BOT_REFERENCE.md +++ b/.claude/skills/flexus-bot-dev/references/FLEXUS_BOT_REFERENCE.md @@ -383,6 +383,10 @@ Use `prompts_common.SCHED_TASK_SORT_10M` and `prompts_common.SCHED_TODO_5M` as d | `@rcx.on_tool_call("name")` | `async def(toolcall, args) -> str` | | `@rcx.on_erp_change("table")` | `async def(action, new_record, old_record)` | +**ERP action types**: `"INSERT"`, `"UPDATE"`, `"DELETE"`, `"ARCHIVE"` +- `ARCHIVE`: soft delete (archived_ts: 0 → >0) +- `DELETE`: hard delete (removed from db) + --- ## Setup Schema diff --git a/flexus_client_kit/ckit_bot_exec.py b/flexus_client_kit/ckit_bot_exec.py index d397372f..65981960 100644 --- a/flexus_client_kit/ckit_bot_exec.py +++ b/flexus_client_kit/ckit_bot_exec.py @@ -494,7 +494,7 @@ async def subscribe_and_produce_callbacks( elif upd.news_about.startswith("erp."): table_name = upd.news_about[4:] - if upd.news_action in ["INSERT", "UPDATE", "DELETE"]: + if upd.news_action in ["INSERT", "UPDATE", "DELETE", "ARCHIVE"]: handled = True new_record = upd.news_payload_erp_record_new old_record = upd.news_payload_erp_record_old diff --git a/flexus_client_kit/ckit_erp.py b/flexus_client_kit/ckit_erp.py index ed28b110..fd30b26c 100644 --- a/flexus_client_kit/ckit_erp.py +++ b/flexus_client_kit/ckit_erp.py @@ -4,7 +4,7 @@ import json import gql -from flexus_client_kit import ckit_client, gql_utils +from flexus_client_kit import ckit_client, gql_utils, erp_schema T = TypeVar('T') @@ -141,6 +141,33 @@ async def delete_erp_record( return r["erp_table_delete"] +async def batch_upsert_erp_records( + client: ckit_client.FlexusClient, + table_name: str, + ws_id: str, + upsert_key: str, + records: List[Any], +) -> dict: + http = await client.use_http() + async with http as h: + r = await h.execute(gql.gql(""" + mutation ErpTableBatchUpsert($schema_name: String!, $table_name: String!, $ws_id: String!, $upsert_key: String!, $records_json: String!) { + erp_table_batch_upsert(schema_name: $schema_name, table_name: $table_name, ws_id: $ws_id, upsert_key: $upsert_key, records_json: $records_json) + }"""), + variable_values={ + "schema_name": "erp", + "table_name": table_name, + "ws_id": ws_id, + "upsert_key": upsert_key, + "records_json": json.dumps([dataclass_or_dict_to_dict(r) for r in records]), + }, + ) + result = r["erp_table_batch_upsert"] + if isinstance(result, str): + return json.loads(result) + return result + + def check_record_matches_filters(record: dict, filters: List[Union[str, dict]], col_names: set = None) -> bool: """ Check if a record (dict) matches all filters. 
@@ -296,17 +323,9 @@ def check_record_matches_filter(record: dict, f: str, col_names: set = None) -> async def test(): - from flexus_client_kit.erp_schema import ProductTemplate, ProductProduct client = ckit_client.FlexusClient("ckit_erp_test") ws_id = "solarsystem" - products = await query_erp_table( - client, - "product_product", - ws_id, - ProductProduct, - limit=10, - include=["prodt"], - ) + products = await query_erp_table(client, "product_product", ws_id, erp_schema.ProductProduct, limit=10, include=["prodt"]) print(f"Found {len(products)} products:") for p in products: print(p) diff --git a/flexus_client_kit/erp_schema.py b/flexus_client_kit/erp_schema.py index d30fe954..3cd557b4 100644 --- a/flexus_client_kit/erp_schema.py +++ b/flexus_client_kit/erp_schema.py @@ -1,128 +1,164 @@ -import time from dataclasses import dataclass, field from typing import Optional, Dict, Type, List +def get_pkey_field(cls: Type) -> str: + for name, f in cls.__dataclass_fields__.items(): + if f.metadata.get("pkey"): + return name + raise ValueError(f"No pkey field in {cls.__name__}") + + +def get_important_fields(cls: Type) -> List[str]: + return [name for name, f in cls.__dataclass_fields__.items() if f.metadata.get("importance", 0) > 0] + + +def get_extra_search_fields(cls: Type) -> List[str]: + return [name for name, f in cls.__dataclass_fields__.items() if f.metadata.get("extra_search")] + + +def get_field_display(cls: Type, field_name: str) -> Optional[str]: + f = cls.__dataclass_fields__.get(field_name) + return f.metadata.get("display") if f else None + + +def get_field_enum(cls: Type, field_name: str) -> Optional[List[str]]: + f = cls.__dataclass_fields__.get(field_name) + return f.metadata.get("enum") if f else None + + +def get_field_display_name(cls: Type, field_name: str) -> Optional[str]: + f = cls.__dataclass_fields__.get(field_name) + return f.metadata.get("display_name") if f else None + + +def get_field_description(cls: Type, field_name: str) -> Optional[str]: + f = cls.__dataclass_fields__.get(field_name) + return f.metadata.get("description") if f else None + + @dataclass class CrmContact: ws_id: str - contact_first_name: str - contact_last_name: str - contact_email: str - contact_id: str = "" - contact_notes: str = "" - contact_details: dict = field(default_factory=dict) - contact_tags: List[str] = field(default_factory=list) - contact_address_line1: str = "" - contact_address_line2: str = "" - contact_address_city: str = "" - contact_address_state: str = "" - contact_address_zip: str = "" - contact_address_country: str = "" - contact_utm_first_source: str = "" - contact_utm_first_medium: str = "" - contact_utm_first_campaign: str = "" - contact_utm_first_term: str = "" - contact_utm_first_content: str = "" - contact_utm_last_source: str = "" - contact_utm_last_medium: str = "" - contact_utm_last_campaign: str = "" - contact_utm_last_term: str = "" - contact_utm_last_content: str = "" - contact_bant_score: int = -1 - contact_created_ts: float = 0.0 - contact_modified_ts: float = 0.0 - contact_archived_ts: float = 0.0 + contact_first_name: str = field(metadata={"importance": 1, "display_name": "First Name"}) + contact_last_name: str = field(metadata={"importance": 1, "display_name": "Last Name"}) + contact_email: str = field(metadata={"importance": 1, "extra_search": True, "display_name": "Email"}) + contact_phone: str = field(default="", metadata={"display_name": "Phone"}) + contact_id: str = field(default="", metadata={"pkey": True, "display_name": "Contact ID"}) + contact_notes: 
str = field(default="", metadata={"importance": 1, "display": "string_multiline", "display_name": "Notes"}) + contact_details: dict = field(default_factory=dict, metadata={"display_name": "Details", "description": "Custom JSON data: BANT qualification reasons, social profiles, preferences, custom attributes"}) + contact_tags: List[str] = field(default_factory=list, metadata={"importance": 1, "display_name": "Tags"}) + contact_address_line1: str = field(default="", metadata={"display_name": "Address Line 1"}) + contact_address_line2: str = field(default="", metadata={"display_name": "Address Line 2"}) + contact_address_city: str = field(default="", metadata={"display_name": "City"}) + contact_address_state: str = field(default="", metadata={"display_name": "State"}) + contact_address_zip: str = field(default="", metadata={"display_name": "ZIP Code"}) + contact_address_country: str = field(default="", metadata={"importance": 1, "display_name": "Country"}) + contact_utm_first_source: str = field(default="", metadata={"importance": 1, "display_name": "UTM Source (first touch)", "description": "First marketing interaction that brought this contact"}) + contact_utm_first_medium: str = field(default="", metadata={"display_name": "UTM Medium (first touch)"}) + contact_utm_first_campaign: str = field(default="", metadata={"display_name": "UTM Campaign (first touch)"}) + contact_utm_first_term: str = field(default="", metadata={"display_name": "UTM Term (first touch)"}) + contact_utm_first_content: str = field(default="", metadata={"display_name": "UTM Content (first touch)"}) + contact_utm_last_source: str = field(default="", metadata={"display_name": "UTM Source (last touch)", "description": "Most recent marketing interaction"}) + contact_utm_last_medium: str = field(default="", metadata={"display_name": "UTM Medium (last touch)"}) + contact_utm_last_campaign: str = field(default="", metadata={"display_name": "UTM Campaign (last touch)"}) + contact_utm_last_term: str = field(default="", metadata={"display_name": "UTM Term (last touch)"}) + contact_utm_last_content: str = field(default="", metadata={"display_name": "UTM Content (last touch)"}) + contact_bant_score: int = field(default=-1, metadata={"display_name": "BANT Qualification Score", "description": "Budget, Authority, Need, Timeline. 
-1 means not qualified, 0-4 scale"}) + contact_created_ts: float = field(default=0.0, metadata={"importance": 1, "display_name": "Created at"}) + contact_modified_ts: float = field(default=0.0, metadata={"display_name": "Modified at"}) + contact_archived_ts: float = field(default=0.0, metadata={"display_name": "Archived at"}) @dataclass -class CrmTask: +class CrmActivity: ws_id: str - contact_id: str - task_type: str - task_title: str - task_notes: str = "" - task_details: dict = field(default_factory=dict) - task_id: str = "" - task_due_ts: float = 0.0 - task_completed_ts: float = 0.0 - task_created_ts: float = field(default_factory=time.time) - task_modified_ts: float = field(default_factory=time.time) - contact: Optional['CrmContact'] = None + activity_title: str = field(metadata={"importance": 1, "display_name": "Title"}) + activity_type: str = field(metadata={"importance": 1, "display_name": "Type", "enum": ["WEB_CHAT", "MESSENGER_CHAT", "EMAIL", "CALL", "MEETING"]}) + activity_direction: str = field(metadata={"importance": 1, "display_name": "Direction", "enum": ["INBOUND", "OUTBOUND"]}) + activity_contact_id: str = field(metadata={"importance": 1, "display_name": "Contact"}) + activity_id: str = field(default="", metadata={"pkey": True, "display_name": "Activity ID"}) + activity_channel: str = field(default="", metadata={"importance": 1, "display_name": "Channel"}) + activity_ft_id: Optional[str] = field(default=None, metadata={"importance": 1, "display_name": "Thread"}) + activity_summary: str = field(default="", metadata={"importance": 1, "display": "string_multiline", "display_name": "Summary"}) + activity_details: dict = field(default_factory=dict, metadata={"display_name": "Details"}) + activity_occurred_ts: float = field(default=0.0, metadata={"importance": 1, "display_name": "Occurred at"}) + activity_created_ts: float = field(default=0.0, metadata={"display_name": "Created at"}) + activity_modified_ts: float = field(default=0.0, metadata={"display_name": "Modified at"}) @dataclass class ProductTemplate: - prodt_id: str - prodt_name: str - prodt_description: str - prodt_target_customers: str - prodt_type: str - prodt_pcat_id: str - prodt_list_price: int # stored in cents - prodt_standard_price: int # stored in cents - prodt_uom_id: str - prodt_active: bool - ws_id: str - prodt_chips: List[str] - pcat: Optional['ProductCategory'] = None - uom: Optional['ProductUom'] = None + prodt_id: str = field(metadata={"pkey": True, "display_name": "Product Template ID"}) + prodt_name: str = field(metadata={"importance": 1, "display_name": "Name"}) + prodt_description: str = field(metadata={"importance": 1, "display": "string_multiline", "display_name": "Description"}) + prodt_target_customers: str = field(metadata={"importance": 1, "display": "string_multiline", "display_name": "Target Customers"}) + prodt_type: str = field(metadata={"importance": 1, "display_name": "Type"}) + prodt_pcat_id: str = field(metadata={"display_name": "Category"}) + prodt_list_price: int = field(metadata={"importance": 1, "display_name": "List Price"}) + prodt_standard_price: int = field(metadata={"importance": 1, "display_name": "Standard Price"}) + prodt_uom_id: str = field(metadata={"display_name": "Unit of Measure"}) + prodt_active: bool = field(metadata={"importance": 1, "display_name": "Active"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) + prodt_chips: List[str] = field(metadata={"importance": 1, "display_name": "Chips"}) + pcat: Optional['ProductCategory'] = field(default=None, 
metadata={"display_name": "Category"}) + uom: Optional['ProductUom'] = field(default=None, metadata={"display_name": "Unit of Measure"}) @dataclass class ProductProduct: - prod_id: str - prodt_id: str - prod_default_code: Optional[str] - prod_barcode: Optional[str] - prod_active: bool - ws_id: str - prodt: Optional[ProductTemplate] = None # Optional not because it's not nullable, it's optional because you have an option to include or not include it when querying + prod_id: str = field(metadata={"pkey": True, "display_name": "Product ID"}) + prodt_id: str = field(metadata={"importance": 1, "display_name": "Product Template"}) + prod_default_code: Optional[str] = field(metadata={"importance": 1, "display_name": "Internal Reference"}) + prod_barcode: Optional[str] = field(metadata={"importance": 1, "display_name": "Barcode"}) + prod_active: bool = field(metadata={"importance": 1, "display_name": "Active"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) + prodt: Optional[ProductTemplate] = field(default=None, metadata={"display_name": "Product Template"}) @dataclass class ProductCategory: - pcat_id: str - pcat_name: str - pcat_parent_id: Optional[str] - pcat_active: bool - ws_id: str - parent: Optional['ProductCategory'] = None + pcat_id: str = field(metadata={"pkey": True, "display_name": "Category ID"}) + pcat_name: str = field(metadata={"importance": 1, "display_name": "Name"}) + pcat_parent_id: Optional[str] = field(metadata={"importance": 1, "display_name": "Parent Category"}) + pcat_active: bool = field(metadata={"importance": 1, "display_name": "Active"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) + parent: Optional['ProductCategory'] = field(default=None, metadata={"display_name": "Parent Category"}) @dataclass class ProductTag: - tag_id: str - tag_name: str - tag_sequence: int - tag_color: str - tag_visible_to_customers: bool - ws_id: str + tag_id: str = field(metadata={"pkey": True, "display_name": "Tag ID"}) + tag_name: str = field(metadata={"importance": 1, "display_name": "Name"}) + tag_sequence: int = field(metadata={"importance": 1, "display_name": "Sequence"}) + tag_color: str = field(metadata={"importance": 1, "display_name": "Color"}) + tag_visible_to_customers: bool = field(metadata={"importance": 1, "display_name": "Visible to Customers"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) @dataclass class ProductUom: - uom_id: str - uom_name: str - uom_category_id: Optional[str] - uom_active: bool - ws_id: str - category: Optional[ProductCategory] = None + uom_id: str = field(metadata={"pkey": True, "display_name": "UoM ID"}) + uom_name: str = field(metadata={"importance": 1, "display_name": "Name"}) + uom_category_id: Optional[str] = field(metadata={"importance": 1, "display_name": "Category"}) + uom_active: bool = field(metadata={"importance": 1, "display_name": "Active"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) + category: Optional[ProductCategory] = field(default=None, metadata={"display_name": "Category"}) @dataclass class ProductM2mTemplateTag: - id: str - tag_id: str - prodt_id: str - ws_id: str - tag: Optional[ProductTag] = None - prodt: Optional['ProductTemplate'] = None + id: str = field(metadata={"pkey": True, "display_name": "ID"}) + tag_id: str = field(metadata={"display_name": "Tag"}) + prodt_id: str = field(metadata={"display_name": "Product Template"}) + ws_id: str = field(metadata={"display_name": "Workspace ID"}) + tag: Optional[ProductTag] = field(default=None, 
metadata={"display_name": "Tag"}) + prodt: Optional['ProductTemplate'] = field(default=None, metadata={"display_name": "Product Template"}) ERP_TABLE_TO_SCHEMA: Dict[str, Type] = { "crm_contact": CrmContact, - "crm_task": CrmTask, + "crm_activity": CrmActivity, "product_template": ProductTemplate, "product_product": ProductProduct, "product_category": ProductCategory, @@ -131,57 +167,12 @@ class ProductM2mTemplateTag: "product_m2m_template_tag": ProductM2mTemplateTag, } - -ERP_DEFAULT_VISIBLE_FIELDS: Dict[str, List[str]] = { - "crm_contact": [ - "contact_first_name", - "contact_last_name", - "contact_email", - "contact_notes", - "contact_tags", - "contact_utm_first_source", - "contact_address_country", - "contact_created_ts", - ], - "crm_task": [ - "task_title", - "task_type", - "task_notes", - "task_due_ts", - "task_completed_ts", - "contact_id", - ], - "product_template": [ - "prodt_name", - "prodt_type", - "prodt_list_price", - "prodt_standard_price", - "prodt_active", - "prodt_chips", - "prodt_description", - "prodt_target_customers", - ], - "product_product": [ - "prodt_id", - "prod_default_code", - "prod_barcode", - "prod_active", - ], - "product_category": [ - "pcat_name", - "pcat_parent_id", - "pcat_active", - ], - "product_tag": [ - "tag_name", - "tag_sequence", - "tag_color", - "tag_visible_to_customers", - ], - "product_uom": [ - "uom_name", - "uom_category_id", - "uom_active", - ], +ERP_DISPLAY_NAME_CONFIGS: Dict[str, str] = { + "crm_contact": "{contact_first_name} {contact_last_name}", + "crm_activity": "{activity_title}", + "product_template": "{prodt_name}", + "product_product": "{prod_default_code} {prod_barcode}", + "product_category": "{pcat_name}", + "product_tag": "{tag_name}", + "product_uom": "{uom_name}", } - diff --git a/flexus_client_kit/integrations/fi_erp.py b/flexus_client_kit/integrations/fi_erp.py index cbf985dc..6b3e3c83 100644 --- a/flexus_client_kit/integrations/fi_erp.py +++ b/flexus_client_kit/integrations/fi_erp.py @@ -1,7 +1,10 @@ +import csv +import dataclasses +import io import json import time import logging -from typing import Dict, Any, Optional, List +from typing import Dict, Any, Optional, List, Type, Union, get_origin, get_args from pymongo.collection import Collection import gql.transport.exceptions @@ -53,7 +56,7 @@ "skip": {"type": "integer", "description": "Number of rows to skip (default 0)", "order": 1001}, "limit": {"type": "integer", "description": "Maximum number of rows to return (default 100, max 1000)", "order": 1002}, "sort_by": {"type": "array", "items": {"type": "string"}, "description": "Sort expressions ['column:ASC', 'another:DESC']", "order": 1003}, - "filters": {"type": "array", "description": "Filter expressions: strings like 'status:=:active' or dicts like {'OR': [...]}", "order": 1004}, + "filters": {"type": "array", "items": {"oneOf": [{"type": "string"}, {"type": "object"}]}, "description": "Filter expressions: strings like 'status:=:active' or dicts like {'OR': [...]}", "order": 1004}, "include": {"type": "array", "items": {"type": "string"}, "description": "Relation names to include ['prodt', 'pcat']", "order": 1005}, "safety_valve": {"type": "string", "description": "Output character limit '5k' or '10000' (default 5k)", "order": 1006}, }, @@ -70,7 +73,7 @@ description=( "Create, update (patch), or delete records in ERP tables. " "First call erp_table_meta to see available columns. 
" - "Example: erp_table_crud(op='create', table_name='crm_task', fields={'contact_id': '123', 'task_type': 'call', 'task_title': 'Follow up'})" + "Example: erp_table_crud(op='create', table_name='crm_contact', fields={'contact_first_name': 'John', 'contact_last_name': 'Doe', 'contact_email': 'john@example.com'})" ), parameters={ "type": "object", @@ -85,6 +88,25 @@ ) +ERP_CSV_IMPORT_TOOL = ckit_cloudtool.CloudTool( + strict=False, + name="erp_csv_import", + description=( + "Import a normalized CSV (columns must match ERP table fields) stored via mongo_store. " + "Provide mongo_path of the CSV, target table_name, and an optional upsert_key column." + ), + parameters={ + "type": "object", + "properties": { + "table_name": {"type": "string", "description": "Target ERP table name", "order": 1}, + "mongo_path": {"type": "string", "description": "Path of the CSV stored via mongo_store or python_execute artifacts", "order": 2}, + "upsert_key": {"type": "string", "description": "Column used to detect existing records (e.g., contact_email). Leave blank to always create.", "order": 3}, + }, + "required": ["table_name", "mongo_path"], + }, +) + + def _format_table_meta_text(table_name: str, schema_class: type) -> str: result = f"Table: erp.{table_name}\n" result += "\nColumns:\n" @@ -137,6 +159,42 @@ def _rows_to_text(rows: list, table_name: str, safety_valve_chars: int = 5000) - return "\n".join(result), full_json +def _resolve_field_type(field_type: Optional[Type[Any]]) -> Optional[Type[Any]]: + if not field_type: + return None + origin = get_origin(field_type) + if origin is Union: + if non_none := [arg for arg in get_args(field_type) if arg is not type(None)]: + return _resolve_field_type(non_none[0]) + if origin in (list, dict): + return origin + return field_type + + +def _convert_csv_value(raw_value: str, field_type: Optional[Type[Any]]) -> Any: + value = raw_value.strip() + if value == "": + return None + normalized_type = _resolve_field_type(field_type) + if normalized_type is bool: + lowered = value.lower() + if lowered in ("true", "1", "yes", "y"): + return True + if lowered in ("false", "0", "no", "n"): + return False + raise ValueError(f"Value {value!r} is not a valid boolean") + if normalized_type is int: + return int(value) + if normalized_type is float: + return float(value) + if normalized_type in (list, dict): + try: + return json.loads(value) + except json.JSONDecodeError as e: + raise ValueError(f"Expected JSON for {normalized_type.__name__}: {e}") + return value + + class IntegrationErp: def __init__( self, @@ -239,7 +297,7 @@ async def handle_erp_data(self, toolcall: ckit_cloudtool.FCloudtoolCall, args: D display_text, full_json = _rows_to_text(rows_as_dicts, table_name, safety_valve_chars) - if full_json and self.mongo_collection: + if full_json and self.mongo_collection is not None: mongo_path = f"erp_query_results/{table_name}_{int(time.time())}.json" try: await ckit_mongo.mongo_overwrite( @@ -341,3 +399,99 @@ async def handle_erp_crud(self, toolcall: ckit_cloudtool.FCloudtoolCall, model_p else: return f"❌ Error: Unknown operation '{op}'. Use create, patch, or delete." + + + async def handle_csv_import(self, toolcall: ckit_cloudtool.FCloudtoolCall, args: Dict[str, Any]) -> str: + if self.mongo_collection is None: + return "❌ Cannot read CSV because MongoDB storage is unavailable for this bot." 
+ + if not (table_name := args.get("table_name", "").strip()) or not (mongo_path := args.get("mongo_path", "").strip()): + return "❌ table_name and mongo_path are required" + upsert_key = args.get("upsert_key", "").strip() + + if not (schema_class := erp_schema.ERP_TABLE_TO_SCHEMA.get(table_name)): + return f"❌ Unknown table '{table_name}'. Run erp_table_meta for available tables." + pk_field = erp_schema.get_pkey_field(schema_class) + + if not (document := await ckit_mongo.mongo_retrieve_file(self.mongo_collection, mongo_path)): + return f"❌ File {mongo_path!r} not found in MongoDB." + + if not (file_bytes := document.get("data") or (json.dumps(document["json"]).encode("utf-8") if document.get("json") is not None else None)): + return f"❌ File {mongo_path!r} is empty." + + try: + csv_text = file_bytes.decode("utf-8-sig") + except UnicodeDecodeError: + return "❌ CSV must be UTF-8 encoded." + + reader = csv.DictReader(io.StringIO(csv_text)) + if not reader.fieldnames: + return "❌ CSV header row is missing." + reader.fieldnames = trimmed_headers = [(name or "").strip() for name in reader.fieldnames] + + allowed_fields = set(schema_class.__annotations__.keys()) + details_field = next((f for f in allowed_fields if f.endswith("_details")), None) + + if unknown_headers := [h for h in trimmed_headers if h and h not in allowed_fields]: + fix_hint = f"Fix: Remove them, add to '{details_field}' as JSON, or map to existing columns." if details_field else "Fix: Remove them or map to existing columns (use erp_table_meta to see valid columns)." + return f"❌ Unknown columns: {', '.join(unknown_headers)}\n\n{fix_hint}" + + if upsert_key and upsert_key not in trimmed_headers: + return f"❌ upsert_key '{upsert_key}' is not present in the CSV header." + + field_types = schema_class.__annotations__ + required_fields = {name for name, field_info in schema_class.__dataclass_fields__.items() if field_info.default == dataclasses.MISSING and field_info.default_factory == dataclasses.MISSING and name != pk_field and name != "ws_id"} + + errors: List[str] = [] + records = [] + for row_idx, row in enumerate(reader, start=1): + try: + record = {} + for column in trimmed_headers: + if column and column != pk_field and (raw_value := str(row.get(column, "")).strip()): + record[column] = _convert_csv_value(raw_value, field_types.get(column)) + + if "ws_id" in allowed_fields and not record.get("ws_id"): + record["ws_id"] = self.ws_id + + if upsert_key and not (key_value := str(row.get(upsert_key, "")).strip()): + raise ValueError(f"Missing value for upsert_key '{upsert_key}'") + + if missing := required_fields - record.keys(): + raise ValueError(f"Missing required fields: {', '.join(sorted(missing))}") + + records.append(record) + except Exception as e: + errors.append(f"Row {row_idx}: {e}") + + BATCH_SIZE = 1000 + total_created = total_updated = 0 + total_failed = sum(1 for e in errors if e.startswith('Row ')) + batch_errors = 0 + + for i in range(0, len(records), BATCH_SIZE): + try: + result = await ckit_erp.batch_upsert_erp_records(self.client, table_name, self.ws_id, upsert_key or "", records[i:i+BATCH_SIZE]) + total_created += result.get("created", 0) + total_updated += result.get("updated", 0) + total_failed += result.get("failed", 0) + errors.extend(f"Batch {i//BATCH_SIZE + 1}: {err}" for err in result.get("errors", [])) + except Exception as e: + total_failed += len(records[i:i+BATCH_SIZE]) + errors.append(f"Batch {i//BATCH_SIZE + 1} failed: {e}") + batch_errors += 1 + if batch_errors > 3: + errors.append("Aborting: 
too many batch errors") + break + + lines = [ + f"Processed {len(records) + sum(1 for e in errors if e.startswith('Row '))} row(s) from {mongo_path}.", + f"Created: {total_created}, Updated: {total_updated}, Failed: {total_failed}.", + ] + if errors: + lines.append("Errors:") + lines.extend(f" • {err}" for err in errors[:5]) + if len(errors) > 5: + lines.append(f" …and {len(errors) - 5} more errors.") + + return "\n".join(lines) diff --git a/flexus_client_kit/integrations/fi_gmail.py b/flexus_client_kit/integrations/fi_gmail.py index 1646df62..fccb4b21 100644 --- a/flexus_client_kit/integrations/fi_gmail.py +++ b/flexus_client_kit/integrations/fi_gmail.py @@ -19,6 +19,8 @@ from flexus_client_kit import ckit_cloudtool from flexus_client_kit import ckit_client from flexus_client_kit import ckit_external_auth +from flexus_client_kit import ckit_erp +from flexus_client_kit import erp_schema logger = logging.getLogger("gmail") @@ -219,7 +221,7 @@ async def called_by_model( try: if op == "send": - return await self._send_message(args) + return await self._send_message(args, toolcall.fcall_ft_id) elif op == "search": return await self._search_messages(args) elif op == "get": @@ -271,7 +273,7 @@ async def called_by_model( logger.error(error_msg) return f"❌ {error_msg}" - async def _send_message(self, args: Dict[str, Any]) -> str: + async def _send_message(self, args: Dict[str, Any], ft_id: str) -> str: to = args.get("to", "") subject = args.get("subject", "") body = args.get("body", "") @@ -312,8 +314,39 @@ async def _send_message(self, args: Dict[str, Any]) -> str: message_id = result.get("id") thread_id = result.get("threadId") + await self._create_activity_for_email(to, subject, body, ft_id) + return f"✅ Message sent successfully!\n Message ID: {message_id}\n Thread ID: {thread_id}" + async def _create_activity_for_email(self, to: str, subject: str, body: str, ft_id: str) -> None: + for email in to.split(","): + email = email.strip().lower() + if not email: + continue + try: + contacts = await ckit_erp.query_erp_table( + self.fclient, "crm_contact", self.rcx.persona.ws_id, erp_schema.CrmContact, + filters=[f"contact_email:ILIKE:{email}"], limit=1, + ) + if not contacts: + continue + contact = contacts[0] + await ckit_erp.create_erp_record(self.fclient, "crm_activity", self.rcx.persona.ws_id, { + "ws_id": self.rcx.persona.ws_id, + "activity_title": subject, + "activity_type": "EMAIL", + "activity_direction": "OUTBOUND", + "activity_channel": "GMAIL", + "activity_contact_id": contact.contact_id, + "activity_ft_id": ft_id, + "activity_summary": body[:500] if len(body) > 500 else body, + "activity_details": {"body": body}, + "activity_occurred_ts": time.time(), + }) + logger.info(f"Created CRM activity for email to {email}") + except Exception as e: + logger.warning(f"Failed to create CRM activity for {email}: {e}") + async def _search_messages(self, args: Dict[str, Any]) -> str: query = args.get("query", "") max_results = args.get("maxResults", 10) diff --git a/flexus_simple_bots/rick/rick_bot.py b/flexus_simple_bots/rick/rick_bot.py index 2ccbd4ba..6e037295 100644 --- a/flexus_simple_bots/rick/rick_bot.py +++ b/flexus_simple_bots/rick/rick_bot.py @@ -27,7 +27,7 @@ BOT_VERSION_INT = ckit_client.marketplace_version_as_int(BOT_VERSION) -ERP_TABLES = ["crm_task", "crm_contact"] +ERP_TABLES = ["crm_contact"] TOOLS = [ @@ -36,13 +36,14 @@ fi_erp.ERP_TABLE_META_TOOL, fi_erp.ERP_TABLE_DATA_TOOL, fi_erp.ERP_TABLE_CRUD_TOOL, + fi_erp.ERP_CSV_IMPORT_TOOL, fi_mongo_store.MONGO_STORE_TOOL, 
fi_crm_automations.CRM_AUTOMATION_TOOL, ] async def rick_main_loop(fclient: ckit_client.FlexusClient, rcx: ckit_bot_exec.RobotContext) -> None: - dbname = f"{rcx.persona.ws_id}__{rcx.persona.persona_id}" + dbname = f"{rcx.persona.persona_id}_db" mongo_conn_str = await ckit_mongo.mongo_fetch_creds(fclient, rcx.persona.persona_id) mongo = AsyncMongoClient(mongo_conn_str) mydb = mongo[dbname] @@ -91,6 +92,10 @@ async def toolcall_erp_data(toolcall: ckit_cloudtool.FCloudtoolCall, model_produ async def toolcall_erp_crud(toolcall: ckit_cloudtool.FCloudtoolCall, model_produced_args: Dict[str, Any]) -> str: return await erp_integration.handle_erp_crud(toolcall, model_produced_args) + @rcx.on_tool_call(fi_erp.ERP_CSV_IMPORT_TOOL.name) + async def toolcall_erp_csv_import(toolcall: ckit_cloudtool.FCloudtoolCall, model_produced_args: Dict[str, Any]) -> str: + return await erp_integration.handle_csv_import(toolcall, model_produced_args) + @rcx.on_tool_call(fi_mongo_store.MONGO_STORE_TOOL.name) async def toolcall_mongo_store(toolcall: ckit_cloudtool.FCloudtoolCall, model_produced_args: Dict[str, Any]) -> str: return await fi_mongo_store.handle_mongo_store(rcx.workdir, mongo_collection, toolcall, model_produced_args) diff --git a/flexus_simple_bots/rick/rick_prompts.py b/flexus_simple_bots/rick/rick_prompts.py index 3f12be60..aae2c9f1 100644 --- a/flexus_simple_bots/rick/rick_prompts.py +++ b/flexus_simple_bots/rick/rick_prompts.py @@ -1,20 +1,12 @@ from flexus_simple_bots import prompts_common from flexus_client_kit.integrations import fi_crm_automations -crm_prompt = f""" -Use erp_table_*() tools to interact with the CRM. -CRM tables always start with the prefix "crm_", such as crm_contact or crm_task. - -Contacts will be ingested very often from forms in landing pages or main websites, or imported from other systems. -Tasks are a short actionable item linked to a contact that some bot or human needs to do, like an email, follow-up or call. - -Extra fields that are not defined in the database schema will be in details, e.x. in contact_details, or task_details. - +crm_import_landing_pages_prompt = """ ## Importing Contacts from Landing Pages When users ask about importing contacts from landing pages or website forms, explain they need their form to POST to: -https://flexus.team/api/erp-ingest/crm-contact/{{{{ws_id}}}} +https://flexus.team/api/erp-ingest/crm-contact/{{ws_id}} Required fields: - contact_email @@ -37,6 +29,95 @@ ``` """ +crm_import_csv_prompt = """ +## Bulk Importing Records from CSV + +When a user wants to import records (e.g., contacts) from a CSV, follow this process: + +### Step 1: Get the CSV File + +Ask the user to upload their CSV file. They can attach it to the chat, and you will access it via mongo_store. + +### Step 2: Analyze CSV and Target Table + +1. Read the CSV (headers + sample rows) from Mongo +2. Call erp_table_meta() to retrieve the full schema of the target table (e.g., crm_contact) +3. Identify standard fields and the JSON details field for custom data + +### Step 3: Propose Field Mapping + +Create an intelligent mapping from CSV → table fields: + +1. Match columns by name similarity +2. Propose transformations where needed (e.g., split full name, normalize phone/email, parse dates) +3. Map unmatched CSV columns into the appropriate *_details JSON field +4. 
Suggest an upsert key for deduplication (e.g., contact_email) if possible
+
+Present the mapping to the user in a clear format:
+```
+CSV Column → Target Field (Transformation)
+-----------------------------------------
+Email → contact_email (lowercase, trim)
+Full Name → contact_first_name + contact_last_name (split on first space)
+Phone → contact_phone (format: remove non-digits)
+Company → contact_details.company (custom field)
+Source → contact_details.source (custom field)
+
+Upsert key: contact_email (will update existing contacts with same email)
+```
+
+### Step 4: Validate and Adjust
+
+Ask the user to confirm or adjust the field mappings, transformations, upsert behavior, and validation rules.
+
+### Step 5: Generate Python Script to Normalize the CSV
+Use python_execute() only to transform the uploaded file into a clean CSV whose columns exactly match the ERP table. Read from the Mongo attachment and write a new CSV:
+
+```python
+import pandas as pd
+
+SOURCE_FILE = "attachments/solar_root/leads_rows.csv"
+TARGET_TABLE = "crm_contact"
+OUTPUT_FILE = f"{{TARGET_TABLE}}_import.csv"
+
+df = pd.read_csv(SOURCE_FILE)
+records = []
+for _, row in df.iterrows():
+    full_name = str(row.get("Full Name", "")).strip()
+    parts = full_name.split(" ", 1)
+    first_name = parts[0] if parts else ""
+    last_name = parts[1] if len(parts) > 1 else ""
+    record = {{
+        "contact_first_name": first_name,
+        "contact_last_name": last_name,
+        "contact_email": str(row.get("Email", "")).strip().lower(),
+        "contact_phone": str(row.get("Phone", "")).strip(),
+        "contact_details": {{
+            "company": str(row.get("Company", "")).strip(),
+            "source": "csv_import"
+        }}
+    }}
+    records.append(record)
+
+normalized = pd.DataFrame(records)
+normalized.to_csv(OUTPUT_FILE, index=False)
+print(f"Saved {{OUTPUT_FILE}} with {{len(normalized)}} rows")
+```
+
+python_execute automatically uploads generated files back to Mongo under their filenames (e.g., `crm_contact_import.csv`), so you can reference them with mongo_store or the new import tool.
+
+### Step 6: Review the Normalized File
+1. Use `mongo_store(op="cat", args={{"path": "crm_contact_import.csv"}})` to show the first rows
+2. Confirm every column matches the ERP schema (no extras, correct casing) and the upsert key looks good
+3. Share stats (row count, notable transforms) with the user
+
+### Step 7: Import with `erp_csv_import`
+
+Use erp_csv_import() to import the cleaned CSV.
+
+After import, offer to create follow-up tasks or automations for the new contacts.
+"""
+
 rick_prompt_default = f"""
 You are Rick, the Deal King. A confident, results-oriented sales assistant who helps close deals and manage customer relationships.
@@ -52,7 +133,14 @@
 
 Relevant strategies and templates are in policy docs under `/sales-pipeline/`, set them up and use them when asked to.
 
-{crm_prompt}
+## CRM Usage
+
+Use erp_table_*() tools to interact with the CRM.
+CRM tables always start with the prefix "crm_", such as crm_contact.
+
+Contacts are often ingested from forms on landing pages or main websites, or imported from other systems.
+
+Extra fields that are not defined in the database schema will be in contact_details.
 
 If enabled in setup, and a template is configured in `/sales-pipeline/welcome-email`, new CRM contacts without a previous welcome email will receive one automatically, personalized based on contact and sales data.
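The erp_csv_import flow above ultimately calls the new `ckit_erp.batch_upsert_erp_records` helper added earlier in this diff. A minimal usage sketch follows, based on the signature in `ckit_erp.py` and the result keys (`created`/`updated`/`failed`/`errors`) consumed by `handle_csv_import`; the client name, workspace id, and sample contact are illustrative only:

```python
import asyncio

from flexus_client_kit import ckit_client, ckit_erp


async def main() -> None:
    # Illustrative client name and workspace id (the ckit_erp test uses "solarsystem").
    client = ckit_client.FlexusClient("batch_upsert_example")
    ws_id = "solarsystem"
    records = [
        {
            "ws_id": ws_id,
            "contact_first_name": "Ada",
            "contact_last_name": "Lovelace",
            "contact_email": "ada@example.com",
        },
    ]
    # Upsert keyed on contact_email: rows matching an existing email update that
    # contact, all others are created. An empty upsert_key always creates,
    # mirroring how the erp_csv_import tool calls this helper.
    result = await ckit_erp.batch_upsert_erp_records(client, "crm_contact", ws_id, "contact_email", records)
    print(result.get("created", 0), result.get("updated", 0), result.get("failed", 0), result.get("errors", []))


asyncio.run(main())
```

fi_erp.handle_csv_import wraps this same call in batches of 1000 validated rows and aggregates the created/updated/failed counts across batches.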
diff --git a/flexus_simple_bots/vix/vix_install.py b/flexus_simple_bots/vix/vix_install.py
index e009276a..4b5df407 100644
--- a/flexus_simple_bots/vix/vix_install.py
+++ b/flexus_simple_bots/vix/vix_install.py
@@ -97,7 +97,7 @@ async def install(
         marketable_setup_default=vix_setup_schema,
         marketable_featured_actions=[
             {"feat_question": "Help me qualify a lead", "feat_run_as_setup": False, "feat_depends_on_setup": []},
-            {"feat_question": "Set up my sales strategy", "feat_run_as_setup": False, "feat_depends_on_setup": []},
+            {"feat_question": "Set up my sales strategy", "feat_run_as_setup": False, "feat_depends_on_setup": [], "feat_expert": "setup"},
         ],
         marketable_intro_message="Hi there! I'm Vix, an AI sales assistant. I'm here to help you find the right solution. Before we dive in, what's your name?",
         marketable_preferred_model_default="grok-4-1-fast-non-reasoning",
diff --git a/flexus_simple_bots/vix/vix_prompts.py b/flexus_simple_bots/vix/vix_prompts.py
index 30cf029c..3e37b338 100644
--- a/flexus_simple_bots/vix/vix_prompts.py
+++ b/flexus_simple_bots/vix/vix_prompts.py
@@ -315,21 +315,28 @@
 While using CLOSER for the sales conversation, simultaneously gather BANT qualification data to prioritize leads effectively.
 
-**CRITICAL:** After qualifying a lead, you MUST update their contact record with the BANT score:
+**CRITICAL:** After qualifying a lead, you MUST update their contact record with the BANT score.
+First, read the existing contact to preserve any existing data in contact_details:
+```python
+erp_table_data(table_name="crm_contact", filters=["contact_email:=:[email]"], limit=1)
+```
+
+Then patch with the BANT data, merging with existing contact_details:
 ```python
 erp_table_crud(
     table_name="crm_contact",
     operation="patch",
     where={{"contact_email": "[email]"}},
     updates={{
-        "contact_bant_score": 0, # 0-4
+        "contact_bant_score": 2, # 0-4
         "contact_details": {{
+            **existing_contact_details, # preserve existing keys
             "bant": {{
-                "budget": {{"score": 0, "notes": "your assessment"}},
-                "authority": {{"score": 0, "notes": "your assessment"}},
-                "need": {{"score": 1, "notes": "your assessment"}},
-                "timeline": {{"score": 1, "notes": "your assessment"}}
+                "budget": {{"score": 1, "notes": "your assessment"}},
+                "authority": {{"score": 1, "notes": "your assessment"}},
+                "need": {{"score": 0, "notes": "your assessment"}},
+                "timeline": {{"score": 0, "notes": "your assessment"}}
+            }}
+        }}
     }}