Skip to content

Commit 0a6f98e

Browse files
Version 1.4.78
1 parent 39a3d0b commit 0a6f98e

File tree

26 files changed

+166
-34
lines changed

26 files changed

+166
-34
lines changed

abacusai/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -292,4 +292,4 @@
292292
from .workflow_node_template import WorkflowNodeTemplate
293293

294294

295-
__version__ = "1.4.77"
295+
__version__ = "1.4.78"

abacusai/api_class/ai_agents.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -233,12 +233,12 @@ def from_tool_variable_mappings(cls, tool_variable_mappings: dict):
233233
json_schema['properties'][mapping['name']]['format'] = 'data-url'
234234
ui_schema[mapping['name']] = {'ui:widget': 'file'}
235235
if mapping['variable_type'] == enums.PythonFunctionArgumentType.LIST:
236-
if mapping['item_type'] == enums.PythonFunctionArgumentType.ATTACHMENT:
236+
if mapping.get('item_type') == enums.PythonFunctionArgumentType.ATTACHMENT:
237237
json_schema['properties'][mapping['name']]['type'] = 'string'
238238
json_schema['properties'][mapping['name']]['format'] = 'data-url'
239239
ui_schema[mapping['name']] = {'ui:widget': 'file', 'ui:options': {'multiple': True}}
240240
else:
241-
json_schema['properties'][mapping['name']]['items'] = {'type': enums.PythonFunctionArgumentType.to_json_type(mapping['item_type'])}
241+
json_schema['properties'][mapping['name']]['items'] = {'type': enums.PythonFunctionArgumentType.to_json_type(mapping.get('item_type', 'STRING'))}
242242
if mapping['is_required']:
243243
json_schema['required'].append(mapping['name'])
244244
return cls(json_schema=json_schema, ui_schema=ui_schema)
@@ -738,6 +738,9 @@ def from_dict(cls, node: dict):
738738
class LLMAgentNode(WorkflowGraphNode):
739739
"""
740740
Represents an LLM agent node in an Agent workflow graph. The LLM Agent Node can be initialized using either chatbot_deployment_id or creation_parameters.
741+
The presence of chatbot_deployment_id indicates that the LLM agent node has already been trained and deployed. During creation of an LLM node using AI Engineer or DeepAgent,
742+
the chatbot_deployment_id is always passed as None pre-training.
743+
creation_parameters may contain training_config settings such as builtin tools, document_retrievers, behaviour instructions, etc.
741744
742745
Args:
743746
name (str): A unique name for the LLM agent node.

abacusai/api_class/enums.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -427,6 +427,7 @@ class ApplicationConnectorType(ApiEnum):
427427
OUTLOOK = 'OUTLOOK'
428428
BIGQUERY = 'BIGQUERY'
429429
AZURESTORAGE = 'AZURESTORAGE'
430+
SHOPIFY = 'SHOPIFY'
430431

431432
@classmethod
432433
def user_connectors(cls):
@@ -448,7 +449,8 @@ def user_connectors(cls):
448449
cls.DBC,
449450
cls.GENERIC_OAUTH,
450451
cls.OUTLOOK,
451-
cls.BIGQUERY
452+
cls.BIGQUERY,
453+
cls.SHOPIFY
452454
]
453455

454456
@classmethod

abacusai/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -667,7 +667,7 @@ class BaseApiClient:
667667
client_options (ClientOptions): Optional API client configurations
668668
skip_version_check (bool): If true, will skip checking the server's current API version on initializing the client
669669
"""
670-
client_version = '1.4.77'
670+
client_version = '1.4.78'
671671

672672
def __init__(self, api_key: str = None, server: str = None, client_options: ClientOptions = None, skip_version_check: bool = False, include_tb: bool = False):
673673
self.api_key = api_key

abacusai/dataset.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ class Dataset(AbstractApiClass):
3636
mergeFileSchemas (bool): If the merge file schemas policy is enabled.
3737
referenceOnlyDocumentset (bool): Signifies whether to save the data reference only. Only valid if is_documentset is True.
3838
versionLimit (int): Version limit for the dataset.
39+
disableRealtimeContentProcessing (bool): Whether realtime content processing is disabled for this dataset (only applicable to REALTIME_CONTENT_STORE datasets).
3940
latestDatasetVersion (DatasetVersion): The latest version of this dataset.
4041
schema (DatasetColumn): List of resolved columns.
4142
refreshSchedules (RefreshSchedule): List of schedules that determines when the next version of the dataset will be created.
@@ -44,7 +45,7 @@ class Dataset(AbstractApiClass):
4445
attachmentParsingConfig (AttachmentParsingConfig): The attachment parsing config used for dataset (eg. for salesforce attachment parsing)
4546
"""
4647

47-
def __init__(self, client, datasetId=None, sourceType=None, dataSource=None, createdAt=None, ignoreBefore=None, ephemeral=None, lookbackDays=None, databaseConnectorId=None, databaseConnectorConfig=None, connectorType=None, featureGroupTableName=None, applicationConnectorId=None, applicationConnectorConfig=None, incremental=None, isDocumentset=None, extractBoundingBoxes=None, mergeFileSchemas=None, referenceOnlyDocumentset=None, versionLimit=None, schema={}, refreshSchedules={}, latestDatasetVersion={}, parsingConfig={}, documentProcessingConfig={}, attachmentParsingConfig={}):
48+
def __init__(self, client, datasetId=None, sourceType=None, dataSource=None, createdAt=None, ignoreBefore=None, ephemeral=None, lookbackDays=None, databaseConnectorId=None, databaseConnectorConfig=None, connectorType=None, featureGroupTableName=None, applicationConnectorId=None, applicationConnectorConfig=None, incremental=None, isDocumentset=None, extractBoundingBoxes=None, mergeFileSchemas=None, referenceOnlyDocumentset=None, versionLimit=None, disableRealtimeContentProcessing=None, schema={}, refreshSchedules={}, latestDatasetVersion={}, parsingConfig={}, documentProcessingConfig={}, attachmentParsingConfig={}):
4849
super().__init__(client, datasetId)
4950
self.dataset_id = datasetId
5051
self.source_type = sourceType
@@ -65,6 +66,7 @@ def __init__(self, client, datasetId=None, sourceType=None, dataSource=None, cre
6566
self.merge_file_schemas = mergeFileSchemas
6667
self.reference_only_documentset = referenceOnlyDocumentset
6768
self.version_limit = versionLimit
69+
self.disable_realtime_content_processing = disableRealtimeContentProcessing
6870
self.schema = client._build_class(DatasetColumn, schema)
6971
self.refresh_schedules = client._build_class(
7072
RefreshSchedule, refreshSchedules)
@@ -78,8 +80,8 @@ def __init__(self, client, datasetId=None, sourceType=None, dataSource=None, cre
7880
self.deprecated_keys = {}
7981

8082
def __repr__(self):
81-
repr_dict = {f'dataset_id': repr(self.dataset_id), f'source_type': repr(self.source_type), f'data_source': repr(self.data_source), f'created_at': repr(self.created_at), f'ignore_before': repr(self.ignore_before), f'ephemeral': repr(self.ephemeral), f'lookback_days': repr(self.lookback_days), f'database_connector_id': repr(self.database_connector_id), f'database_connector_config': repr(self.database_connector_config), f'connector_type': repr(self.connector_type), f'feature_group_table_name': repr(self.feature_group_table_name), f'application_connector_id': repr(self.application_connector_id), f'application_connector_config': repr(
82-
self.application_connector_config), f'incremental': repr(self.incremental), f'is_documentset': repr(self.is_documentset), f'extract_bounding_boxes': repr(self.extract_bounding_boxes), f'merge_file_schemas': repr(self.merge_file_schemas), f'reference_only_documentset': repr(self.reference_only_documentset), f'version_limit': repr(self.version_limit), f'schema': repr(self.schema), f'refresh_schedules': repr(self.refresh_schedules), f'latest_dataset_version': repr(self.latest_dataset_version), f'parsing_config': repr(self.parsing_config), f'document_processing_config': repr(self.document_processing_config), f'attachment_parsing_config': repr(self.attachment_parsing_config)}
83+
repr_dict = {f'dataset_id': repr(self.dataset_id), f'source_type': repr(self.source_type), f'data_source': repr(self.data_source), f'created_at': repr(self.created_at), f'ignore_before': repr(self.ignore_before), f'ephemeral': repr(self.ephemeral), f'lookback_days': repr(self.lookback_days), f'database_connector_id': repr(self.database_connector_id), f'database_connector_config': repr(self.database_connector_config), f'connector_type': repr(self.connector_type), f'feature_group_table_name': repr(self.feature_group_table_name), f'application_connector_id': repr(self.application_connector_id), f'application_connector_config': repr(self.application_connector_config), f'incremental': repr(
84+
self.incremental), f'is_documentset': repr(self.is_documentset), f'extract_bounding_boxes': repr(self.extract_bounding_boxes), f'merge_file_schemas': repr(self.merge_file_schemas), f'reference_only_documentset': repr(self.reference_only_documentset), f'version_limit': repr(self.version_limit), f'disable_realtime_content_processing': repr(self.disable_realtime_content_processing), f'schema': repr(self.schema), f'refresh_schedules': repr(self.refresh_schedules), f'latest_dataset_version': repr(self.latest_dataset_version), f'parsing_config': repr(self.parsing_config), f'document_processing_config': repr(self.document_processing_config), f'attachment_parsing_config': repr(self.attachment_parsing_config)}
8385
class_name = "Dataset"
8486
repr_str = ',\n '.join([f'{key}={value}' for key, value in repr_dict.items(
8587
) if getattr(self, key, None) is not None and key not in self.deprecated_keys])
@@ -92,8 +94,8 @@ def to_dict(self):
9294
Returns:
9395
dict: The dict value representation of the class parameters
9496
"""
95-
resp = {'dataset_id': self.dataset_id, 'source_type': self.source_type, 'data_source': self.data_source, 'created_at': self.created_at, 'ignore_before': self.ignore_before, 'ephemeral': self.ephemeral, 'lookback_days': self.lookback_days, 'database_connector_id': self.database_connector_id, 'database_connector_config': self.database_connector_config, 'connector_type': self.connector_type, 'feature_group_table_name': self.feature_group_table_name, 'application_connector_id': self.application_connector_id, 'application_connector_config': self.application_connector_config, 'incremental': self.incremental, 'is_documentset': self.is_documentset,
96-
'extract_bounding_boxes': self.extract_bounding_boxes, 'merge_file_schemas': self.merge_file_schemas, 'reference_only_documentset': self.reference_only_documentset, 'version_limit': self.version_limit, 'schema': self._get_attribute_as_dict(self.schema), 'refresh_schedules': self._get_attribute_as_dict(self.refresh_schedules), 'latest_dataset_version': self._get_attribute_as_dict(self.latest_dataset_version), 'parsing_config': self._get_attribute_as_dict(self.parsing_config), 'document_processing_config': self._get_attribute_as_dict(self.document_processing_config), 'attachment_parsing_config': self._get_attribute_as_dict(self.attachment_parsing_config)}
97+
resp = {'dataset_id': self.dataset_id, 'source_type': self.source_type, 'data_source': self.data_source, 'created_at': self.created_at, 'ignore_before': self.ignore_before, 'ephemeral': self.ephemeral, 'lookback_days': self.lookback_days, 'database_connector_id': self.database_connector_id, 'database_connector_config': self.database_connector_config, 'connector_type': self.connector_type, 'feature_group_table_name': self.feature_group_table_name, 'application_connector_id': self.application_connector_id, 'application_connector_config': self.application_connector_config, 'incremental': self.incremental, 'is_documentset': self.is_documentset, 'extract_bounding_boxes': self.extract_bounding_boxes,
98+
'merge_file_schemas': self.merge_file_schemas, 'reference_only_documentset': self.reference_only_documentset, 'version_limit': self.version_limit, 'disable_realtime_content_processing': self.disable_realtime_content_processing, 'schema': self._get_attribute_as_dict(self.schema), 'refresh_schedules': self._get_attribute_as_dict(self.refresh_schedules), 'latest_dataset_version': self._get_attribute_as_dict(self.latest_dataset_version), 'parsing_config': self._get_attribute_as_dict(self.parsing_config), 'document_processing_config': self._get_attribute_as_dict(self.document_processing_config), 'attachment_parsing_config': self._get_attribute_as_dict(self.attachment_parsing_config)}
9799
return {key: value for key, value in resp.items() if value is not None and key not in self.deprecated_keys}
98100

99101
def get_raw_data_from_realtime(self, check_permissions: bool = False, start_time: str = None, end_time: str = None, column_filter: dict = None):

abacusai/video_gen_settings.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,19 +10,21 @@ class VideoGenSettings(AbstractApiClass):
1010
client (ApiClient): An authenticated API Client instance
1111
settings (dict): The settings for each model.
1212
warnings (dict): The warnings for each model.
13+
descriptions (dict): The descriptions for each model.
1314
model (VideoGenModel): Dropdown for models available for video generation.
1415
"""
1516

16-
def __init__(self, client, settings=None, warnings=None, model={}):
17+
def __init__(self, client, settings=None, warnings=None, descriptions=None, model={}):
1718
super().__init__(client, None)
1819
self.settings = settings
1920
self.warnings = warnings
21+
self.descriptions = descriptions
2022
self.model = client._build_class(VideoGenModel, model)
2123
self.deprecated_keys = {}
2224

2325
def __repr__(self):
2426
repr_dict = {f'settings': repr(self.settings), f'warnings': repr(
25-
self.warnings), f'model': repr(self.model)}
27+
self.warnings), f'descriptions': repr(self.descriptions), f'model': repr(self.model)}
2628
class_name = "VideoGenSettings"
2729
repr_str = ',\n '.join([f'{key}={value}' for key, value in repr_dict.items(
2830
) if getattr(self, key, None) is not None and key not in self.deprecated_keys])
@@ -36,5 +38,5 @@ def to_dict(self):
3638
dict: The dict value representation of the class parameters
3739
"""
3840
resp = {'settings': self.settings, 'warnings': self.warnings,
39-
'model': self._get_attribute_as_dict(self.model)}
41+
'descriptions': self.descriptions, 'model': self._get_attribute_as_dict(self.model)}
4042
return {key: value for key, value in resp.items() if value is not None and key not in self.deprecated_keys}

docs/.buildinfo

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# Sphinx build info version 1
22
# This file records the configuration used when building these files. When it is not found, a full rebuild will be done.
3-
config: df4b3fc78cc9b7bc30e9104def812ac3
3+
config: 83f5dc97472bfb08cd934f5b9735673a
44
tags: 645f666f9bcd5a90fca523b33c5a78b7

docs/.buildinfo.bak

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# Sphinx build info version 1
22
# This file records the configuration used when building these files. When it is not found, a full rebuild will be done.
3-
config: 1807596735468513042c35ba1a2f25ef
3+
config: df4b3fc78cc9b7bc30e9104def812ac3
44
tags: 645f666f9bcd5a90fca523b33c5a78b7

docs/_sources/autoapi/abacusai/api_class/ai_agents/index.rst.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -570,6 +570,9 @@ Module Contents
570570

571571

572572
Represents an LLM agent node in an Agent workflow graph. The LLM Agent Node can be initialized using either chatbot_deployment_id or creation_parameters.
573+
The presence of chatbot_deployment_id indicates that the LLM agent node has already been trained and deployed. During creation of an LLM node using AI Engineer or DeepAgent,
574+
the chatbot_deployment_id is always passed as None pre-training.
575+
creation_parameters may contain training_config settings such as builtin tools, document_retrievers, behaviour instructions, etc.
573576

574577
:param name: A unique name for the LLM agent node.
575578
:type name: str

docs/_sources/autoapi/abacusai/api_class/enums/index.rst.txt

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1866,6 +1866,11 @@ Module Contents
18661866

18671867

18681868

1869+
.. py:attribute:: SHOPIFY
1870+
:value: 'SHOPIFY'
1871+
1872+
1873+
18691874
.. py:method:: user_connectors()
18701875
:classmethod:
18711876

0 commit comments

Comments
 (0)