
Commit dbd3ed5

Author: Austin Zielman
Message: Version 0.30.10
Parent: 00166d0

13 files changed (+38, -27 lines)

abacusai/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 from .client import ApiClient, ApiException, ClientOptions

-__version__ = "0.30.8"
+__version__ = "0.30.9"

abacusai/batch_prediction.py

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
-from .batch_prediction_version import BatchPredictionVersion
-from .refresh_schedule import RefreshSchedule
 from .prediction_input import PredictionInput
+from .refresh_schedule import RefreshSchedule
+from .batch_prediction_version import BatchPredictionVersion


 class BatchPrediction():

abacusai/batch_prediction_version.py

Lines changed: 4 additions & 3 deletions
@@ -6,13 +6,14 @@ class BatchPredictionVersion():
     Batch Prediction Version
     '''

-    def __init__(self, client, batchPredictionVersion=None, batchPredictionId=None, status=None, deploymentId=None, modelVersion=None, predictionsStartedAt=None, predictionsCompletedAt=None, globalPredictionArgs=None, totalPredictions=None, failedPredictions=None, databaseConnectorId=None, databaseOutputConfiguration=None, explanations=None, fileConnectorOutputLocation=None, fileOutputFormat=None, connectorType=None, legacyInputLocation=None, error=None, csvInputPrefix=None, csvPredictionPrefix=None, csvExplanationsPrefix=None, batchInputs={}):
+    def __init__(self, client, batchPredictionVersion=None, batchPredictionId=None, status=None, deploymentId=None, modelId=None, modelVersion=None, predictionsStartedAt=None, predictionsCompletedAt=None, globalPredictionArgs=None, totalPredictions=None, failedPredictions=None, databaseConnectorId=None, databaseOutputConfiguration=None, explanations=None, fileConnectorOutputLocation=None, fileOutputFormat=None, connectorType=None, legacyInputLocation=None, error=None, csvInputPrefix=None, csvPredictionPrefix=None, csvExplanationsPrefix=None, batchInputs={}):
         self.client = client
         self.id = batchPredictionVersion
         self.batch_prediction_version = batchPredictionVersion
         self.batch_prediction_id = batchPredictionId
         self.status = status
         self.deployment_id = deploymentId
+        self.model_id = modelId
         self.model_version = modelVersion
         self.predictions_started_at = predictionsStartedAt
         self.predictions_completed_at = predictionsCompletedAt
@@ -33,13 +34,13 @@ def __init__(self, client, batchPredictionVersion=None, batchPredictionId=None,
         self.batch_inputs = client._build_class(PredictionInput, batchInputs)

     def __repr__(self):
-        return f"BatchPredictionVersion(batch_prediction_version={repr(self.batch_prediction_version)}, batch_prediction_id={repr(self.batch_prediction_id)}, status={repr(self.status)}, deployment_id={repr(self.deployment_id)}, model_version={repr(self.model_version)}, predictions_started_at={repr(self.predictions_started_at)}, predictions_completed_at={repr(self.predictions_completed_at)}, global_prediction_args={repr(self.global_prediction_args)}, total_predictions={repr(self.total_predictions)}, failed_predictions={repr(self.failed_predictions)}, database_connector_id={repr(self.database_connector_id)}, database_output_configuration={repr(self.database_output_configuration)}, explanations={repr(self.explanations)}, file_connector_output_location={repr(self.file_connector_output_location)}, file_output_format={repr(self.file_output_format)}, connector_type={repr(self.connector_type)}, legacy_input_location={repr(self.legacy_input_location)}, error={repr(self.error)}, csv_input_prefix={repr(self.csv_input_prefix)}, csv_prediction_prefix={repr(self.csv_prediction_prefix)}, csv_explanations_prefix={repr(self.csv_explanations_prefix)}, batch_inputs={repr(self.batch_inputs)})"
+        return f"BatchPredictionVersion(batch_prediction_version={repr(self.batch_prediction_version)}, batch_prediction_id={repr(self.batch_prediction_id)}, status={repr(self.status)}, deployment_id={repr(self.deployment_id)}, model_id={repr(self.model_id)}, model_version={repr(self.model_version)}, predictions_started_at={repr(self.predictions_started_at)}, predictions_completed_at={repr(self.predictions_completed_at)}, global_prediction_args={repr(self.global_prediction_args)}, total_predictions={repr(self.total_predictions)}, failed_predictions={repr(self.failed_predictions)}, database_connector_id={repr(self.database_connector_id)}, database_output_configuration={repr(self.database_output_configuration)}, explanations={repr(self.explanations)}, file_connector_output_location={repr(self.file_connector_output_location)}, file_output_format={repr(self.file_output_format)}, connector_type={repr(self.connector_type)}, legacy_input_location={repr(self.legacy_input_location)}, error={repr(self.error)}, csv_input_prefix={repr(self.csv_input_prefix)}, csv_prediction_prefix={repr(self.csv_prediction_prefix)}, csv_explanations_prefix={repr(self.csv_explanations_prefix)}, batch_inputs={repr(self.batch_inputs)})"

     def __eq__(self, other):
         return self.__class__ == other.__class__ and self.id == other.id

     def to_dict(self):
-        return {'batch_prediction_version': self.batch_prediction_version, 'batch_prediction_id': self.batch_prediction_id, 'status': self.status, 'deployment_id': self.deployment_id, 'model_version': self.model_version, 'predictions_started_at': self.predictions_started_at, 'predictions_completed_at': self.predictions_completed_at, 'global_prediction_args': self.global_prediction_args, 'total_predictions': self.total_predictions, 'failed_predictions': self.failed_predictions, 'database_connector_id': self.database_connector_id, 'database_output_configuration': self.database_output_configuration, 'explanations': self.explanations, 'file_connector_output_location': self.file_connector_output_location, 'file_output_format': self.file_output_format, 'connector_type': self.connector_type, 'legacy_input_location': self.legacy_input_location, 'error': self.error, 'csv_input_prefix': self.csv_input_prefix, 'csv_prediction_prefix': self.csv_prediction_prefix, 'csv_explanations_prefix': self.csv_explanations_prefix, 'batch_inputs': self.batch_inputs.to_dict() if self.batch_inputs else None}
+        return {'batch_prediction_version': self.batch_prediction_version, 'batch_prediction_id': self.batch_prediction_id, 'status': self.status, 'deployment_id': self.deployment_id, 'model_id': self.model_id, 'model_version': self.model_version, 'predictions_started_at': self.predictions_started_at, 'predictions_completed_at': self.predictions_completed_at, 'global_prediction_args': self.global_prediction_args, 'total_predictions': self.total_predictions, 'failed_predictions': self.failed_predictions, 'database_connector_id': self.database_connector_id, 'database_output_configuration': self.database_output_configuration, 'explanations': self.explanations, 'file_connector_output_location': self.file_connector_output_location, 'file_output_format': self.file_output_format, 'connector_type': self.connector_type, 'legacy_input_location': self.legacy_input_location, 'error': self.error, 'csv_input_prefix': self.csv_input_prefix, 'csv_prediction_prefix': self.csv_prediction_prefix, 'csv_explanations_prefix': self.csv_explanations_prefix, 'batch_inputs': self.batch_inputs.to_dict() if self.batch_inputs else None}

     def get_batch_prediction_result(self):
         return self.client.get_batch_prediction_result(self.batch_prediction_version)
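
Usage note: model_id is the new field on BatchPredictionVersion in this release. A minimal sketch of reading it, with a placeholder API key and version ID, assuming the client's describe_batch_prediction_version endpoint (not part of this diff):

    from abacusai import ApiClient

    client = ApiClient(api_key='YOUR_API_KEY')  # placeholder key
    # Endpoint assumed; only the BatchPredictionVersion class itself is shown in this diff.
    version = client.describe_batch_prediction_version('BATCH_PREDICTION_VERSION_ID')
    # model_id now appears alongside model_version in __repr__ and to_dict()
    print(version.model_id, version.model_version)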

abacusai/client.py

Lines changed: 10 additions & 6 deletions
@@ -73,7 +73,7 @@ def __str__(self):


 class ApiClient():
-    client_version = '0.30.8'
+    client_version = '0.30.9'

     def __init__(self, api_key: str = None, server: str = None, client_options: ClientOptions = None):
         self.api_key = api_key
@@ -478,7 +478,7 @@ def export_feature_group_version_to_file_connector(self, feature_group_version:
         '''Export Feature group to File Connector.'''
         return self._call_api('exportFeatureGroupVersionToFileConnector', 'POST', query_params={}, body={'featureGroupVersion': feature_group_version, 'location': location, 'exportFileFormat': export_file_format}, parse_type=FeatureGroupExport)

-    def export_feature_group_version_to_database_connector(self, feature_group_version: str, database_connector_id: str, object_name: str, write_mode: str, database_feature_mapping: dict, id_column: str) -> FeatureGroupExport:
+    def export_feature_group_version_to_database_connector(self, feature_group_version: str, database_connector_id: str, object_name: str, write_mode: str, database_feature_mapping: dict, id_column: str = None) -> FeatureGroupExport:
         '''Export Feature group to Database Connector.'''
         return self._call_api('exportFeatureGroupVersionToDatabaseConnector', 'POST', query_params={}, body={'featureGroupVersion': feature_group_version, 'databaseConnectorId': database_connector_id, 'objectName': object_name, 'writeMode': write_mode, 'databaseFeatureMapping': database_feature_mapping, 'idColumn': id_column}, parse_type=FeatureGroupExport)

@@ -546,9 +546,9 @@ def mark_upload_complete(self, upload_id: str) -> Upload:
         '''Marks an upload process as complete.'''
         return self._call_api('markUploadComplete', 'POST', query_params={}, body={'uploadId': upload_id}, parse_type=Upload)

-    def create_dataset_from_file_connector(self, name: str, table_name: str, location: str, file_format: str = None, refresh_schedule: str = None, csv_delimiter: str = None, filename_column: str = None) -> Dataset:
+    def create_dataset_from_file_connector(self, name: str, table_name: str, location: str, file_format: str = None, refresh_schedule: str = None, csv_delimiter: str = None, filename_column: str = None, start_prefix: str = None, until_prefix: str = None) -> Dataset:
         '''Creates a dataset from a file located in a cloud storage, such as Amazon AWS S3, using the specified dataset name and location.'''
-        return self._call_api('createDatasetFromFileConnector', 'POST', query_params={}, body={'name': name, 'tableName': table_name, 'location': location, 'fileFormat': file_format, 'refreshSchedule': refresh_schedule, 'csvDelimiter': csv_delimiter, 'filenameColumn': filename_column}, parse_type=Dataset)
+        return self._call_api('createDatasetFromFileConnector', 'POST', query_params={}, body={'name': name, 'tableName': table_name, 'location': location, 'fileFormat': file_format, 'refreshSchedule': refresh_schedule, 'csvDelimiter': csv_delimiter, 'filenameColumn': filename_column, 'startPrefix': start_prefix, 'untilPrefix': until_prefix}, parse_type=Dataset)

     def create_dataset_version_from_file_connector(self, dataset_id: str, location: str = None, file_format: str = None, csv_delimiter: str = None) -> DatasetVersion:
         '''Creates a new version of the specified dataset.'''
@@ -598,6 +598,10 @@ def set_streaming_retention_policy(self, dataset_id: str, retention_hours: int =
         '''Sets the streaming retention policy'''
         return self._call_api('setStreamingRetentionPolicy', 'GET', query_params={'datasetId': dataset_id, 'retentionHours': retention_hours, 'retentionRowCount': retention_row_count})

+    def set_dataset_column_native_data_type(self, dataset_id: str, column: str, native_data_type: str) -> Schema:
+        '''Creates a new schema and points the feature group to the new feature group schema id.'''
+        return self._call_api('setDatasetColumnNativeDataType', 'POST', query_params={'datasetId': dataset_id}, body={'column': column, 'nativeDataType': native_data_type}, parse_type=Schema)
+
     def get_file_connector_instructions(self, bucket: str, write_permission: bool = False) -> FileConnectorInstructions:
         '''Retrieves verification information to create a data connector to a cloud storage bucket.'''
         return self._call_api('getFileConnectorInstructions', 'GET', query_params={'bucket': bucket, 'writePermission': write_permission}, parse_type=FileConnectorInstructions)
@@ -787,12 +791,12 @@ def describe_model_version(self, model_version: str) -> ModelVersion:
         '''Retrieves a full description of the specified model version'''
         return self._call_api('describeModelVersion', 'GET', query_params={'modelVersion': model_version}, parse_type=ModelVersion)

-    def create_deployment(self, name: str = None, model_id: str = None, feature_group_id: str = None, description: str = None, calls_per_second: int = None, auto_deploy: bool = True) -> Deployment:
+    def create_deployment(self, name: str = None, model_id: str = None, feature_group_id: str = None, project_id: str = None, description: str = None, calls_per_second: int = None, auto_deploy: bool = True) -> Deployment:
         '''Creates a deployment with the specified name and description for the specified model or feature group.

         A Deployment makes the trained model or feature group available for prediction requests.
         '''
-        return self._call_api('createDeployment', 'POST', query_params={}, body={'name': name, 'modelId': model_id, 'featureGroupId': feature_group_id, 'description': description, 'callsPerSecond': calls_per_second, 'autoDeploy': auto_deploy}, parse_type=Deployment)
+        return self._call_api('createDeployment', 'POST', query_params={}, body={'name': name, 'modelId': model_id, 'featureGroupId': feature_group_id, 'projectId': project_id, 'description': description, 'callsPerSecond': calls_per_second, 'autoDeploy': auto_deploy}, parse_type=Deployment)

     def create_deployment_token(self, project_id: str) -> DeploymentAuthToken:
         '''Creates a deployment token for the specified project.
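
Usage note: a sketch of the widened client signatures, with placeholder IDs and a hypothetical S3 location. The diff adds startPrefix/untilPrefix to the request body without documenting them, so the key-range interpretation in the comments is an assumption:

    from abacusai import ApiClient

    client = ApiClient(api_key='YOUR_API_KEY')  # placeholder key
    dataset = client.create_dataset_from_file_connector(
        name='Sales Logs',                     # hypothetical dataset name
        table_name='sales_logs',
        location='s3://example-bucket/logs/',  # hypothetical bucket
        file_format='CSV',
        start_prefix='logs/2021-01-',          # assumed: lower bound on object keys
        until_prefix='logs/2021-06-',          # assumed: upper bound on object keys
    )

    # create_deployment now also accepts project_id (all IDs are placeholders)
    deployment = client.create_deployment(name='sales-deployment', model_id='MODEL_ID', project_id='PROJECT_ID')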

abacusai/dataset.py

Lines changed: 5 additions & 2 deletions
@@ -1,6 +1,6 @@
-from .dataset_version import DatasetVersion
-from .dataset_column import DatasetColumn
 from .refresh_schedule import RefreshSchedule
+from .dataset_column import DatasetColumn
+from .dataset_version import DatasetVersion


 class Dataset():
@@ -64,6 +64,9 @@ def set_column_data_type(self, column, data_type):
     def set_streaming_retention_policy(self, retention_hours=None, retention_row_count=None):
         return self.client.set_streaming_retention_policy(self.dataset_id, retention_hours, retention_row_count)

+    def set_column_native_data_type(self, column, native_data_type):
+        return self.client.set_dataset_column_native_data_type(self.dataset_id, column, native_data_type)
+
     def refresh(self):
         self.__dict__.update(self.describe().__dict__)
         return self
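
Usage note: the new Dataset.set_column_native_data_type wrapper forwards to set_dataset_column_native_data_type on the client and returns a Schema. A sketch assuming the client's describe_dataset endpoint (not part of this diff); the ID, column name, and type string are placeholders:

    dataset = client.describe_dataset('DATASET_ID')  # endpoint assumed; placeholder ID
    schema = dataset.set_column_native_data_type('price', 'FLOAT')  # 'FLOAT' is an assumed type string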

abacusai/feature_column.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
-from .point_in_time_feature import PointInTimeFeature
 from .nested_column import NestedColumn
+from .point_in_time_feature import PointInTimeFeature


 class FeatureColumn():

abacusai/feature_group.py

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
11
from .feature_group_version import FeatureGroupVersion
22
from .feature_column import FeatureColumn
3+
from .modification_lock_info import ModificationLockInfo
34

45

56
class FeatureGroup():

abacusai/feature_group_version.py

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
-from concurrent.futures import ThreadPoolExecutor
-import time
 import io
 from .feature_column import FeatureColumn
+import time
+from concurrent.futures import ThreadPoolExecutor


 class FeatureGroupVersion():
@@ -32,7 +32,7 @@ def to_dict(self):
     def export_to_file_connector(self, location, export_file_format):
         return self.client.export_feature_group_version_to_file_connector(self.feature_group_version, location, export_file_format)

-    def export_to_database_connector(self, database_connector_id, object_name, write_mode, database_feature_mapping, id_column):
+    def export_to_database_connector(self, database_connector_id, object_name, write_mode, database_feature_mapping, id_column=None):
         return self.client.export_feature_group_version_to_database_connector(self.feature_group_version, database_connector_id, object_name, write_mode, database_feature_mapping, id_column)

     def refresh(self):
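
Usage note: with id_column now defaulting to None, the export call can omit it. A sketch with placeholder IDs, assuming the client's describe_feature_group_version endpoint (not part of this diff) and an assumed write-mode string:

    fg_version = client.describe_feature_group_version('FEATURE_GROUP_VERSION_ID')
    fg_version.export_to_database_connector(
        database_connector_id='DATABASE_CONNECTOR_ID',       # placeholder
        object_name='exported_table',                        # hypothetical target table
        write_mode='insert',                                 # assumed mode string
        database_feature_mapping={'feature_a': 'column_a'},  # hypothetical mapping
    )  # id_column is no longer required as of this change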

abacusai/model.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
-from .model_location import ModelLocation
 from .refresh_schedule import RefreshSchedule
-import time
 from .model_version import ModelVersion
+from .model_location import ModelLocation
+import time


 class Model():

abacusai/nested_column.py

Lines changed: 5 additions & 3 deletions
@@ -5,21 +5,23 @@ class NestedColumn():

     '''

-    def __init__(self, client, name=None, selectClause=None, dataType=None, dataUse=None, sourceTable=None, originalName=None):
+    def __init__(self, client, name=None, selectClause=None, columnDataType=None, columnMapping=None, dataType=None, dataUse=None, sourceTable=None, originalName=None):
         self.client = client
         self.id = None
         self.name = name
         self.select_clause = selectClause
+        self.column_data_type = columnDataType
+        self.column_mapping = columnMapping
         self.data_type = dataType
         self.data_use = dataUse
         self.source_table = sourceTable
         self.original_name = originalName

     def __repr__(self):
-        return f"NestedColumn(name={repr(self.name)}, select_clause={repr(self.select_clause)}, data_type={repr(self.data_type)}, data_use={repr(self.data_use)}, source_table={repr(self.source_table)}, original_name={repr(self.original_name)})"
+        return f"NestedColumn(name={repr(self.name)}, select_clause={repr(self.select_clause)}, column_data_type={repr(self.column_data_type)}, column_mapping={repr(self.column_mapping)}, data_type={repr(self.data_type)}, data_use={repr(self.data_use)}, source_table={repr(self.source_table)}, original_name={repr(self.original_name)})"

     def __eq__(self, other):
         return self.__class__ == other.__class__ and self.id == other.id

     def to_dict(self):
-        return {'name': self.name, 'select_clause': self.select_clause, 'data_type': self.data_type, 'data_use': self.data_use, 'source_table': self.source_table, 'original_name': self.original_name}
+        return {'name': self.name, 'select_clause': self.select_clause, 'column_data_type': self.column_data_type, 'column_mapping': self.column_mapping, 'data_type': self.data_type, 'data_use': self.data_use, 'source_table': self.source_table, 'original_name': self.original_name}
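
Usage note: the two new NestedColumn fields round-trip through __repr__ and to_dict. A minimal sketch constructing the object directly with made-up values (the constructor only stores client, so None is safe here):

    col = NestedColumn(client=None, name='items', selectClause='item_id',
                       columnDataType='STRING', columnMapping='ITEM_ID')
    print(col.to_dict()['column_data_type'])  # 'STRING'
    print(col.to_dict()['column_mapping'])    # 'ITEM_ID'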
