| Package | Description |
|---|---|
| com.logicalclocks.hsfs | |
| com.logicalclocks.hsfs.constructor | |
| com.logicalclocks.hsfs.engine | |
| com.logicalclocks.hsfs.engine.hudi | |
| com.logicalclocks.hsfs.metadata | |
| Modifier and Type | Method and Description |
|---|---|
void |
TrainingDataset.addTag(String name,
Object value)
Add name/value tag to the training dataset.
|
void |
FeatureView.addTag(String name,
Object value)
Add name/value tag to the feature view.
|
void |
FeatureView.addTrainingDatasetTag(Integer version,
String name,
Object value)
Add name/value tag to the training dataset.
|
Query |
FeatureGroup.asOf(String wallclockTime)
Get Query object to retrieve all features of the group at a point in the past.
|
Query |
StreamFeatureGroup.asOf(String wallclockTime)
Get Query object to retrieve all features of the group at a point in the past.
|
Query |
FeatureGroup.asOf(String wallclockTime,
String excludeUntil)
Get Query object to retrieve all features of the group at a point in the past.
|
Query |
StreamFeatureGroup.asOf(String wallclockTime,
String excludeUntil)
Get Query object to retrieve all features of the group at a point in the past.
|
TrainingDataset |
TrainingDataset.TrainingDatasetBuilder.build() |
Feature |
Feature.FeatureBuilder.build() |
HopsworksConnection |
HopsworksConnection.HopsworksConnectionBuilder.build() |
FeatureView |
FeatureView.FeatureViewBuilder.build() |
static void |
FeatureView.clean(FeatureStore featureStore,
String featureViewName,
Integer featureViewVersion) |
void |
FeatureGroup.commitDeleteRecord(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData) |
void |
FeatureGroup.commitDeleteRecord(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Map<String,String> writeOptions) |
<S> void |
StreamFeatureGroup.commitDeleteRecord(S featureData) |
<S> void |
StreamFeatureGroup.commitDeleteRecord(S featureData,
Map<String,String> writeOptions) |
Map<Long,Map<String,String>> |
FeatureGroup.commitDetails()
Return commit details.
|
Map<Long,Map<String,String>> |
StreamFeatureGroup.commitDetails()
Return commit details.
|
Map<Long,Map<String,String>> |
FeatureGroup.commitDetails(Integer limit)
Return commit details.
|
Map<Long,Map<String,String>> |
StreamFeatureGroup.commitDetails(Integer limit)
Return commit details.
|
Map<Long,Map<String,String>> |
FeatureGroup.commitDetails(String wallclockTime)
Return commit details.
|
Map<Long,Map<String,String>> |
StreamFeatureGroup.commitDetails(String wallclockTime)
Return commit details.
|
Map<Long,Map<String,String>> |
FeatureGroup.commitDetails(String wallclockTime,
Integer limit)
Return commit details.
|
Map<Long,Map<String,String>> |
StreamFeatureGroup.commitDetails(String wallclockTime,
Integer limit)
Return commit details.
|
Statistics |
TrainingDataset.computeStatistics()
Recompute the statistics for the entire training dataset and save them to the feature store.
|
Statistics |
FeatureGroup.computeStatistics(String wallclockTime)
Recompute the statistics for the feature group and save them to the feature store.
|
Integer |
FeatureView.createTrainingData(String startTime,
String endTime,
String description,
DataFormat dataFormat) |
Integer |
FeatureView.createTrainingData(String startTime,
String endTime,
String description,
DataFormat dataFormat,
Boolean coalesce,
StorageConnector storageConnector,
String location,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> writeOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
Integer |
FeatureView.createTrainTestSplit(Float testSize,
String trainStart,
String trainEnd,
String testStart,
String testEnd,
String description,
DataFormat dataFormat) |
Integer |
FeatureView.createTrainTestSplit(Float testSize,
String trainStart,
String trainEnd,
String testStart,
String testEnd,
String description,
DataFormat dataFormat,
Boolean coalesce,
StorageConnector storageConnector,
String location,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> writeOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
Integer |
FeatureView.createTrainValidationTestSplit(Float validationSize,
Float testSize,
String trainStart,
String trainEnd,
String validationStart,
String validationEnd,
String testStart,
String testEnd,
String description,
DataFormat dataFormat) |
Integer |
FeatureView.createTrainValidationTestSplit(Float validationSize,
Float testSize,
String trainStart,
String trainEnd,
String validationStart,
String validationEnd,
String testStart,
String testEnd,
String description,
DataFormat dataFormat,
Boolean coalesce,
StorageConnector storageConnector,
String location,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> writeOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
void |
TrainingDataset.delete()
Delete training dataset and all associated metadata.
|
void |
FeatureView.delete() |
void |
FeatureView.deleteAllTrainingDatasets() |
void |
TrainingDataset.deleteTag(String name)
Delete a tag of the training dataset.
|
void |
FeatureView.deleteTag(String name)
Delete a tag of the feature view.
|
void |
FeatureView.deleteTrainingDataset(Integer version) |
void |
FeatureView.deleteTrainingDatasetTag(Integer version,
String name)
Delete a tag of the training dataset.
|
StorageConnector.AdlsConnector |
FeatureStore.getAdlsConnector(String name) |
String |
FeatureGroup.getAvroSchema() |
String |
StreamFeatureGroup.getAvroSchema() |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureView.getBatchData(String startTime,
String endTime) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureView.getBatchData(String startTime,
String endTime,
Map<String,String> readOptions) |
String |
FeatureView.getBatchQuery() |
String |
FeatureView.getBatchQuery(String startTime,
String endTime) |
StorageConnector.BigqueryConnector |
FeatureStore.getBigqueryConnector(String name) |
org.apache.avro.Schema |
FeatureGroup.getDeserializedAvroSchema() |
org.apache.avro.Schema |
StreamFeatureGroup.getDeserializedAvroSchema() |
String |
FeatureGroup.getEncodedAvroSchema() |
String |
StreamFeatureGroup.getEncodedAvroSchema() |
ExternalFeatureGroup |
FeatureStore.getExternalFeatureGroup(String name)
Get an external feature group object with default version `1` from the feature store.
|
ExternalFeatureGroup |
FeatureStore.getExternalFeatureGroup(@NonNull String name,
@NonNull Integer version)
Get an external feature group object from the feature store.
|
scala.collection.Seq<ExternalFeatureGroup> |
FeatureStore.getExternalFeatureGroups(@NonNull String name)
Get a list of all versions of an external feature group from the feature store.
|
String |
FeatureGroup.getFeatureAvroSchema(String featureName) |
String |
StreamFeatureGroup.getFeatureAvroSchema(String featureName) |
FeatureGroup |
FeatureStore.getFeatureGroup(String name)
Get a feature group object with default version `1` from the feature store.
|
FeatureGroup |
FeatureStore.getFeatureGroup(@NonNull String name,
@NonNull Integer version)
Get a feature group object from the feature store.
|
scala.collection.Seq<FeatureGroup> |
FeatureStore.getFeatureGroups(@NonNull String name)
Get a list of all versions of a feature group from the feature store.
|
FeatureStore |
HopsworksConnection.getFeatureStore()
Retrieve the project feature store.
|
FeatureStore |
HopsworksConnection.getFeatureStore(String name)
Retrieve a feature store based on name.
|
List<Object> |
FeatureView.getFeatureVector(Map<String,Object> entry) |
List<Object> |
FeatureView.getFeatureVector(Map<String,Object> entry,
boolean external) |
List<List<Object>> |
FeatureView.getFeatureVectors(Map<String,List<Object>> entry) |
List<List<Object>> |
FeatureView.getFeatureVectors(Map<String,List<Object>> entry,
boolean external) |
FeatureView |
FeatureStore.getFeatureView(String name)
Get a feature view object with the default version `1` from the selected feature store.
|
FeatureView |
FeatureStore.getFeatureView(@NonNull String name,
@NonNull Integer version)
Get a feature view object from the selected feature store.
|
StorageConnector.GcsConnector |
FeatureStore.getGcsConnector(String name) |
StorageConnector.HopsFsConnector |
FeatureStore.getHopsFsConnector(String name) |
StorageConnector.JdbcConnector |
FeatureStore.getJdbcConnector(String name) |
StorageConnector.KafkaConnector |
FeatureStore.getKafkaConnector(String name) |
ExternalFeatureGroup |
FeatureStore.getOnDemandFeatureGroup(String name)
Deprecated.
|
ExternalFeatureGroup |
FeatureStore.getOnDemandFeatureGroup(@NonNull String name,
@NonNull Integer version)
Deprecated.
|
scala.collection.Seq<ExternalFeatureGroup> |
FeatureStore.getOnDemandFeatureGroups(@NonNull String name)
Deprecated.
|
StorageConnector.JdbcConnector |
FeatureStore.getOnlineStorageConnector() |
FeatureGroup |
FeatureStore.getOrCreateFeatureGroup(String name,
Integer version) |
FeatureGroup |
FeatureStore.getOrCreateFeatureGroup(String name,
Integer version,
List<String> primaryKeys,
boolean onlineEnabled,
String eventTime) |
FeatureGroup |
FeatureStore.getOrCreateFeatureGroup(String name,
Integer version,
List<String> primaryKeys,
List<String> partitionKeys,
boolean onlineEnabled,
String eventTime) |
FeatureGroup |
FeatureStore.getOrCreateFeatureGroup(String name,
Integer version,
String description,
List<String> primaryKeys,
List<String> partitionKeys,
String hudiPrecombineKey,
boolean onlineEnabled,
TimeTravelFormat timeTravelFormat,
StatisticsConfig statisticsConfig,
String eventTime) |
FeatureView |
FeatureStore.getOrCreateFeatureView(String name,
Query query,
Integer version)
Get feature view metadata object or create a new one if it doesn't exist.
|
FeatureView |
FeatureStore.getOrCreateFeatureView(String name,
Query query,
Integer version,
String description,
List<String> labels)
Get feature view metadata object or create a new one if it doesn't exist.
|
StreamFeatureGroup |
FeatureStore.getOrCreateStreamFeatureGroup(String name,
Integer version) |
StreamFeatureGroup |
FeatureStore.getOrCreateStreamFeatureGroup(String name,
Integer version,
List<String> primaryKeys,
boolean onlineEnabled,
String eventTime) |
StreamFeatureGroup |
FeatureStore.getOrCreateStreamFeatureGroup(String name,
Integer version,
List<String> primaryKeys,
List<String> partitionKeys,
boolean onlineEnabled,
String eventTime) |
StreamFeatureGroup |
FeatureStore.getOrCreateStreamFeatureGroup(String name,
Integer version,
String description,
List<String> primaryKeys,
List<String> partitionKeys,
String hudiPrecombineKey,
boolean onlineEnabled,
StatisticsConfig statisticsConfig,
String eventTime) |
abstract String |
StorageConnector.getPath(String subPath) |
HashSet<String> |
FeatureView.getPrimaryKeys()
Set of primary key names that are used as keys in the input dict object for the `get_serving_vector` method.
|
String |
TrainingDataset.getQuery() |
String |
TrainingDataset.getQuery(boolean withLabel) |
String |
TrainingDataset.getQuery(Storage storage) |
String |
TrainingDataset.getQuery(Storage storage,
boolean withLabel) |
StorageConnector.RedshiftConnector |
FeatureStore.getRedshiftConnector(String name) |
StorageConnector.S3Connector |
FeatureStore.getS3Connector(String name) |
HashSet<String> |
TrainingDataset.getServingKeys()
Set of primary key names that are used as keys in the input dict object for the `get_serving_vector` method.
|
List<Object> |
TrainingDataset.getServingVector(Map<String,Object> entry)
Retrieve feature vector from online feature store.
|
List<Object> |
TrainingDataset.getServingVector(Map<String,Object> entry,
boolean external)
Retrieve feature vector from online feature store.
|
List<List<Object>> |
TrainingDataset.getServingVectors(Map<String,List<Object>> entry) |
List<List<Object>> |
TrainingDataset.getServingVectors(Map<String,List<Object>> entry,
boolean external) |
StorageConnector.SnowflakeConnector |
FeatureStore.getSnowflakeConnector(String name) |
Statistics |
TrainingDataset.getStatistics()
Get the last statistics commit for the training dataset.
|
Statistics |
TrainingDataset.getStatistics(String commitTime)
Get the statistics of a specific commit time for the training dataset.
|
StorageConnector |
FeatureStore.getStorageConnector(String name) |
StreamFeatureGroup |
FeatureStore.getStreamFeatureGroup(String name)
Get a feature group object with default version `1` from the feature store.
|
StreamFeatureGroup |
FeatureStore.getStreamFeatureGroup(@NonNull String name,
@NonNull Integer version)
Get a feature group object from the feature store.
|
Object |
TrainingDataset.getTag(String name)
Get a single tag value of the training dataset.
|
Object |
FeatureView.getTag(String name)
Get a single tag value of the feature view.
|
Map<String,Object> |
TrainingDataset.getTags()
Get all tags of the training dataset.
|
Map<String,Object> |
FeatureView.getTags()
Get all tags of the feature view.
|
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainingData(Integer version) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainingData(Integer version,
Map<String,String> readOptions) |
TrainingDataset |
FeatureStore.getTrainingDataset(String name)
Get a training dataset object with the default version `1` from the selected feature store.
|
TrainingDataset |
FeatureStore.getTrainingDataset(@NonNull String name,
@NonNull Integer version)
Get a training dataset object from the selected feature store.
|
scala.collection.Seq<TrainingDataset> |
FeatureStore.getTrainingDatasets(@NonNull String name)
Get all versions of a training dataset object from the selected feature store.
|
Object |
FeatureView.getTrainingDatasetTag(Integer version,
String name)
Get a single tag value of the training dataset.
|
Map<String,Object> |
FeatureView.getTrainingDatasetTags(Integer version)
Get all tags of the training dataset.
|
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainTestSplit(Integer version) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainTestSplit(Integer version,
Map<String,String> readOptions) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainValidationTestSplit(Integer version) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.getTrainValidationTestSplit(Integer version,
Map<String,String> readOptions) |
void |
TrainingDataset.initPreparedStatement()
Initialise and cache parametrised prepared statement to retrieve feature vector from online feature store.
|
void |
TrainingDataset.initPreparedStatement(boolean external)
Initialise and cache parametrised prepared statement to retrieve feature vector from online feature store.
|
void |
TrainingDataset.initPreparedStatement(boolean external,
boolean batch)
Initialise and cache parametrised prepared statement to retrieve batch feature vectors from online feature store.
|
void |
FeatureView.initServing() |
void |
FeatureView.initServing(Boolean batch,
Boolean external) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
boolean overwrite) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
boolean overwrite,
Map<String,String> writeOptions) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
HudiOperationType operation)
Commit insert or upsert to time travel enabled Feature group.
|
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Map<String,String> writeOptions) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Storage storage) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Storage storage,
boolean overwrite) |
void |
FeatureGroup.insert(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Storage storage,
boolean overwrite,
HudiOperationType operation,
Map<String,String> writeOptions) |
<S> void |
StreamFeatureGroup.insert(S featureData) |
<S> void |
StreamFeatureGroup.insert(S featureData,
boolean overwrite,
SaveMode saveMode,
JobConfiguration jobConfiguration) |
<S> void |
StreamFeatureGroup.insert(S featureData,
boolean overwrite,
SaveMode saveMode,
Map<String,String> writeOptions) |
<S> void |
StreamFeatureGroup.insert(S featureData,
boolean overwrite,
SaveMode saveMode,
Map<String,String> writeOptions,
JobConfiguration jobConfiguration) |
<S> void |
StreamFeatureGroup.insert(S featureData,
JobConfiguration jobConfiguration) |
<S> void |
StreamFeatureGroup.insert(S featureData,
Map<String,String> writeOptions) |
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName,
String outputMode)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout,
String checkpointLocation,
Map<String,String> writeOptions)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroup.insertStream(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName,
String outputMode,
boolean awaitTermination,
String checkpointLocation)
Deprecated.
The insertStream method is deprecated for FeatureGroups. Full capability insertStream is available for StreamFeatureGroups.
|
<S> Object |
StreamFeatureGroup.insertStream(S featureData) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
Map<String,String> writeOptions) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
Map<String,String> writeOptions) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
String outputMode) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout,
String checkpointLocation) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout,
String checkpointLocation,
Map<String,String> writeOptions) |
<S> Object |
StreamFeatureGroup.insertStream(S featureData,
String queryName,
String outputMode,
String checkpointLocation) |
void |
StorageConnector.GcsConnector.prepareSpark() |
void |
FeatureView.purgeAllTrainingData() |
void |
FeatureView.purgeTrainingData(Integer version) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
ExternalFeatureGroup.read() |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read() |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
TrainingDataset.read()
Read the content of the training dataset.
|
Object |
StreamFeatureGroup.read()
Reads Feature group data.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read(boolean online) |
Object |
StreamFeatureGroup.read(boolean online) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read(boolean online,
Map<String,String> readOptions) |
Object |
StreamFeatureGroup.read(boolean online,
Map<String,String> readOptions) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read(Map<String,String> readOptions) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
TrainingDataset.read(Map<String,String> readOptions)
Read the content of the training dataset.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read(String wallclockTime)
Reads Feature group data at a specific point in time.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
TrainingDataset.read(String split)
Read a single split from the training dataset.
|
Object |
StreamFeatureGroup.read(String wallclockTime)
Reads Feature group data at a specific point in time.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.read(String wallclockTime,
Map<String,String> readOptions)
Reads Feature group data at a specific point in time.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
TrainingDataset.read(String split,
Map<String,String> readOptions)
Read a single split from the training dataset.
|
Object |
StreamFeatureGroup.read(String wallclockTime,
Map<String,String> readOptions) |
Object |
StorageConnector.read(String query,
String dataFormat,
Map<String,String> options,
String path) |
Object |
StorageConnector.S3Connector.read(String query,
String dataFormat,
Map<String,String> options,
String path) |
Object |
StorageConnector.RedshiftConnector.read(String query,
String dataFormat,
Map<String,String> options,
String path) |
Object |
StorageConnector.SnowflakeConnector.read(String query,
String dataFormat,
Map<String,String> options,
String path) |
Object |
StorageConnector.JdbcConnector.read(String query,
String dataFormat,
Map<String,String> options,
String path) |
Object |
StorageConnector.BigqueryConnector.read(String query,
String dataFormat,
Map<String,String> options,
String path)
If Table options are set in the storage connector, set path to table.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.readChanges(String wallclockStartTime,
String wallclockEndTime)
Deprecated.
|
Object |
StreamFeatureGroup.readChanges(String wallclockStartTime,
String wallclockEndTime)
Deprecated.
`readChanges` method is deprecated. Use `asOf(wallclockEndTime, wallclockStartTime).read()` instead.
|
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureGroup.readChanges(String wallclockStartTime,
String wallclockEndTime,
Map<String,String> readOptions)
Deprecated.
|
Object |
StreamFeatureGroup.readChanges(String wallclockStartTime,
String wallclockEndTime,
Map<String,String> readOptions)
Deprecated.
|
Object |
StorageConnector.KafkaConnector.readStream(String topic,
boolean topicPattern,
String messageFormat,
String schema,
Map<String,String> options,
boolean includeMetadata) |
void |
FeatureView.recreateTrainingDataset(Integer version,
Map<String,String> writeOptions) |
StorageConnector |
StorageConnector.refetch() |
void |
ExternalFeatureGroup.save() |
void |
FeatureGroup.save(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData)
Deprecated.
|
void |
FeatureGroup.save(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Map<String,String> writeOptions)
Deprecated.
|
void |
TrainingDataset.save(Query query)
Create the training dataset based on the content of the feature store query.
|
void |
TrainingDataset.save(Query query,
Map<String,String> writeOptions)
Create the training dataset based on the content of the feature store query.
|
<S> void |
StreamFeatureGroup.save(S featureData,
Map<String,String> writeOptions)
Deprecated.
|
<S> void |
StreamFeatureGroup.save(S featureData,
Map<String,String> writeOptions,
JobConfiguration jobConfiguration)
Deprecated.
|
void |
ExternalFeatureGroup.show(int numRows) |
void |
FeatureGroup.show(int numRows) |
void |
TrainingDataset.show(int numRows)
Show numRows from the training dataset (across all splits).
|
void |
FeatureGroup.show(int numRows,
boolean online) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainingData(String startTime,
String endTime,
String description) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainingData(String startTime,
String endTime,
String description,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> readOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainTestSplit(Float testSize,
String trainStart,
String trainEnd,
String testStart,
String testEnd,
String description) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainTestSplit(Float testSize,
String trainStart,
String trainEnd,
String testStart,
String testEnd,
String description,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> readOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainValidationTestSplit(Float validationSize,
Float testSize,
String trainStart,
String trainEnd,
String validationStart,
String validationEnd,
String testStart,
String testEnd,
String description) |
List<org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> |
FeatureView.trainValidationTestSplit(Float validationSize,
Float testSize,
String trainStart,
String trainEnd,
String validationStart,
String validationEnd,
String testStart,
String testEnd,
String description,
Long seed,
StatisticsConfig statisticsConfig,
Map<String,String> readOptions,
FilterLogic extraFilterLogic,
Filter extraFilter) |
void |
StorageConnector.S3Connector.update() |
void |
StorageConnector.RedshiftConnector.update() |
void |
StorageConnector.JdbcConnector.update() |
FeatureView |
FeatureView.update(FeatureView other) |
void |
TrainingDataset.updateStatisticsConfig()
Update the statistics configuration of the training dataset.
|
| Constructor and Description |
|---|
Feature(String name,
String type,
Boolean primary,
Boolean partition) |
Feature(String name,
String type,
String onlineType,
Boolean primary,
Boolean partition,
String defaultValue,
String description) |
HopsworksConnection(String host,
int port,
String project,
software.amazon.awssdk.regions.Region region,
SecretStore secretStore,
boolean hostnameVerification,
String trustStorePath,
String certPath,
String apiKeyFilePath,
String apiKeyValue) |
TrainingDataset(@NonNull String name,
Integer version,
String description,
DataFormat dataFormat,
Boolean coalesce,
StorageConnector storageConnector,
String location,
List<Split> splits,
String trainSplit,
Long seed,
FeatureStore featureStore,
StatisticsConfig statisticsConfig,
List<String> label,
String eventStartTime,
String eventEndTime,
TrainingDatasetType trainingDatasetType,
Float validationSize,
Float testSize,
String trainStart,
String trainEnd,
String validationStart,
String validationEnd,
String testStart,
String testEnd,
Integer timeSplitSize,
FilterLogic extraFilterLogic,
Filter extraFilter) |
| Modifier and Type | Method and Description |
|---|---|
Query |
Query.asOf(String wallclockTime)
Perform time travel on the given Query.
|
Query |
Query.asOf(String wallclockTime,
String excludeUntil)
Perform time travel on the given Query.
|
String |
FsQuery.getStorageQuery(Storage storage) |
Query |
Query.pullChanges(String wallclockStartTime,
String wallclockEndTime)
Deprecated.
use asOf(wallclockEndTime, wallclockStartTime) instead
|
Object |
Query.read() |
Object |
Query.read(boolean online) |
Object |
Query.read(boolean online,
Map<String,String> readOptions) |
void |
FsQuery.registerHudiFeatureGroups(Map<String,String> readOptions) |
void |
FsQuery.registerOnDemandFeatureGroups() |
void |
Query.show(boolean online,
int numRows) |
void |
Query.show(int numRows) |
| Modifier and Type | Method and Description |
|---|---|
void |
FeatureGroupBaseEngine.addTag(FeatureGroupBase featureGroupBase,
String name,
Object value) |
void |
FeatureViewEngine.addTag(FeatureView featureView,
String name,
Object value) |
void |
FeatureViewEngine.addTag(FeatureView featureView,
String name,
Object value,
Integer trainingDataVersion) |
void |
TrainingDatasetEngine.addTag(TrainingDataset trainingDataset,
String name,
Object value) |
<T extends FeatureGroupBase> |
FeatureGroupBaseEngine.appendFeatures(FeatureGroupBase featureGroup,
List<Feature> features,
Class<T> fgClass) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
SparkEngine.castColumnType(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
List<TrainingDatasetFeature> features) |
String |
FeatureGroupUtils.checkpointDirPath(String queryName,
String onlineTopicName) |
<S> FeatureGroupCommit |
FeatureGroupUtils.commitDelete(FeatureGroupBase featureGroupBase,
S genericDataset,
Map<String,String> writeOptions) |
Map<Long,Map<String,String>> |
FeatureGroupUtils.commitDetails(FeatureGroupBase featureGroupBase,
Integer limit) |
Map<Long,Map<String,String>> |
FeatureGroupUtils.commitDetailsByWallclockTime(FeatureGroupBase featureGroup,
String wallclockTime,
Integer limit) |
<S> Statistics |
StatisticsEngine.computeStatistics(FeatureGroupBase featureGroup,
S genericDataFrame,
Long commitId) |
Statistics |
StatisticsEngine.computeStatistics(FeatureView featureView,
TrainingDataset trainingDataset,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataFrame) |
Statistics |
FeatureViewEngine.computeStatistics(FeatureView featureView,
TrainingDataset trainingDataset,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>[] datasets) |
Statistics |
StatisticsEngine.computeStatistics(TrainingDataset trainingDataset,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataFrame) |
String |
FeatureGroupUtils.constructCheckpointPath(FeatureGroup featureGroup,
String queryName,
String queryPrefix) |
<S> S |
FeatureGroupUtils.convertToDefaultDataframe(S datasetGeneric) |
TrainingDatasetBundle |
FeatureViewEngine.createTrainingDataset(FeatureView featureView,
TrainingDataset trainingDataset,
Map<String,String> userWriteOptions) |
void |
FeatureGroupBaseEngine.delete(FeatureGroupBase featureGroupBase) |
void |
FeatureViewEngine.delete(FeatureStore featureStore,
String name) |
void |
FeatureViewEngine.delete(FeatureStore featureStore,
String name,
Integer version) |
void |
TrainingDatasetEngine.delete(TrainingDataset trainingDataset) |
void |
FeatureGroupBaseEngine.deleteTag(FeatureGroupBase featureGroupBase,
String name) |
void |
FeatureViewEngine.deleteTag(FeatureView featureView,
String name) |
void |
FeatureViewEngine.deleteTag(FeatureView featureView,
String name,
Integer trainingDataVersion) |
void |
TrainingDatasetEngine.deleteTag(TrainingDataset trainingDataset,
String name) |
void |
FeatureViewEngine.deleteTrainingData(FeatureView featureView) |
void |
FeatureViewEngine.deleteTrainingData(FeatureView featureView,
Integer trainingDataVersion) |
void |
FeatureViewEngine.deleteTrainingDatasetOnly(FeatureView featureView) |
void |
FeatureViewEngine.deleteTrainingDatasetOnly(FeatureView featureView,
Integer trainingDataVersion) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
SparkEngine.encodeComplexFeatures(FeatureGroupBase featureGroupBase,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset)
Encodes all complex type features to binary using their avro type as schema.
|
Statistics |
StatisticsEngine.get(FeatureGroupBase featureGroup,
String commitTime) |
List<FeatureView> |
FeatureViewEngine.get(FeatureStore featureStore,
String name) |
FeatureView |
FeatureViewEngine.get(FeatureStore featureStore,
String name,
Integer version) |
Statistics |
StatisticsEngine.get(TrainingDataset trainingDataset,
String commitTime) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
FeatureViewEngine.getBatchData(FeatureView featureView,
Date startTime,
Date endTime,
Map<String,String> readOptions,
Integer trainingDataVersion) |
Query |
FeatureViewEngine.getBatchQuery(FeatureView featureView,
Date startTime,
Date endTime,
Boolean withLabels,
Integer trainingDataVersion) |
String |
FeatureViewEngine.getBatchQueryString(FeatureView featureView,
Date startTime,
Date endTime,
Integer trainingDataVersion) |
static Date |
FeatureGroupUtils.getDateFromDateString(String inputDate) |
org.apache.avro.Schema |
FeatureGroupUtils.getDeserializedAvroSchema(String avroSchema) |
String |
FeatureGroupUtils.getEncodedAvroSchema(org.apache.avro.Schema schema,
List<String> complexFeatures) |
String |
FeatureGroupUtils.getFeatureAvroSchema(String featureName,
org.apache.avro.Schema schema) |
List<Object> |
VectorServer.getFeatureVector(FeatureView featureView,
Map<String,Object> entry) |
List<Object> |
VectorServer.getFeatureVector(FeatureView featureView,
Map<String,Object> entry,
boolean external) |
List<Object> |
VectorServer.getFeatureVector(TrainingDataset trainingDataset,
Map<String,Object> entry) |
List<Object> |
VectorServer.getFeatureVector(TrainingDataset trainingDataset,
Map<String,Object> entry,
boolean external) |
List<List<Object>> |
VectorServer.getFeatureVectors(FeatureView featureView,
Map<String,List<Object>> entry) |
List<List<Object>> |
VectorServer.getFeatureVectors(FeatureView featureView,
Map<String,List<Object>> entry,
boolean external) |
List<List<Object>> |
VectorServer.getFeatureVectors(TrainingDataset trainingDataset,
Map<String,List<Object>> entry) |
List<List<Object>> |
VectorServer.getFeatureVectors(TrainingDataset trainingDataset,
Map<String,List<Object>> entry,
boolean external) |
Map<String,String> |
FeatureGroupUtils.getKafkaConfig(FeatureGroupBase featureGroup,
Map<String,String> writeOptions) |
Statistics |
StatisticsEngine.getLast(FeatureGroupBase featureGroup) |
Statistics |
StatisticsEngine.getLast(TrainingDataset trainingDataset) |
FeatureView |
FeatureViewEngine.getOrCreateFeatureView(FeatureStore featureStore,
String name,
Integer version,
Query query,
String description,
List<String> labels) |
String |
TrainingDatasetEngine.getQuery(TrainingDataset trainingDataset,
Storage storage,
boolean withLabel,
boolean isHiveQuery) |
Statistics |
StatisticsEngine.getSplitStatistics(TrainingDataset trainingDataset) |
Statistics |
StatisticsEngine.getSplitStatistics(TrainingDataset trainingDataset,
Map<String,org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> splitDatasets) |
Subject |
FeatureGroupUtils.getSubject(FeatureGroupBase featureGroup) |
Object |
FeatureGroupBaseEngine.getTag(FeatureGroupBase featureGroupBase,
String name) |
Object |
FeatureViewEngine.getTag(FeatureView featureView,
String name) |
Object |
FeatureViewEngine.getTag(FeatureView featureView,
String name,
Integer trainingDataVersion) |
Object |
TrainingDatasetEngine.getTag(TrainingDataset trainingDataset,
String name) |
Map<String,Object> |
FeatureGroupBaseEngine.getTags(FeatureGroupBase featureGroupBase) |
Map<String,Object> |
FeatureViewEngine.getTags(FeatureView featureView) |
Map<String,Object> |
FeatureViewEngine.getTags(FeatureView featureView,
Integer trainingDataVersion) |
Map<String,Object> |
TrainingDatasetEngine.getTags(TrainingDataset trainingDataset) |
static Long |
FeatureGroupUtils.getTimeStampFromDateString(String inputDate) |
TrainingDatasetBundle |
FeatureViewEngine.getTrainingDataset(FeatureView featureView,
Integer trainingDatasetVersion,
List<String> requestedSplits,
Map<String,String> userReadOptions) |
TrainingDatasetBundle |
FeatureViewEngine.getTrainingDataset(FeatureView featureView,
TrainingDataset trainingDataset,
List<String> requestedSplits,
Map<String,String> userReadOptions) |
TrainingDatasetBundle |
FeatureViewEngine.getTrainingDataset(FeatureView featureView,
TrainingDataset trainingDataset,
Map<String,String> userReadOptions) |
void |
VectorServer.initPreparedStatement(FeatureView featureView,
boolean batch) |
void |
VectorServer.initPreparedStatement(FeatureView featureView,
boolean batch,
boolean external) |
void |
VectorServer.initPreparedStatement(TrainingDataset trainingDataset,
boolean batch) |
void |
VectorServer.initPreparedStatement(TrainingDataset trainingDataset,
boolean batch,
boolean external) |
void |
VectorServer.initServing(FeatureView featureView,
boolean batch) |
void |
VectorServer.initServing(FeatureView featureView,
boolean batch,
boolean external) |
void |
VectorServer.initServing(TrainingDataset trainingDataset,
boolean batch) |
void |
VectorServer.initServing(TrainingDataset trainingDataset,
boolean batch,
boolean external) |
void |
FeatureGroupEngine.insert(FeatureGroup featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
Storage storage,
HudiOperationType operation,
org.apache.spark.sql.SaveMode saveMode,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions) |
<S> void |
StreamFeatureGroupEngine.insert(StreamFeatureGroup streamFeatureGroup,
S featureData,
SaveMode saveMode,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions,
JobConfiguration jobConfiguration) |
org.apache.spark.sql.streaming.StreamingQuery |
FeatureGroupEngine.insertStream(FeatureGroup featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout,
String checkpointLocation,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions)
Deprecated.
|
<S> List<Feature> |
SparkEngine.parseFeatureGroupSchema(S datasetGeneric,
TimeTravelFormat timeTravelFormat) |
<S> List<Feature> |
FeatureGroupUtils.parseFeatureGroupSchema(S datasetGeneric,
TimeTravelFormat timeTravelFormat) |
List<TrainingDatasetFeature> |
TrainingDatasetUtils.parseTrainingDatasetSchema(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
SparkEngine.read(StorageConnector storageConnector,
String dataFormat,
Map<String,String> readOptions,
String location) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
TrainingDatasetEngine.read(TrainingDataset trainingDataset,
String split,
Map<String,String> providedOptions) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
SparkEngine.readStream(StorageConnector storageConnector,
String dataFormat,
String messageFormat,
String schema,
Map<String,String> options,
boolean includeMetadata) |
void |
FeatureViewEngine.recreateTrainingDataset(FeatureView featureView,
Integer version,
Map<String,String> userWriteOptions) |
void |
SparkEngine.registerHudiTemporaryTable(HudiFeatureGroupAlias hudiFeatureGroupAlias,
Map<String,String> readOptions) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> |
SparkEngine.registerOnDemandTemporaryTable(ExternalFeatureGroup onDemandFeatureGroup,
String alias) |
Statistics |
StatisticsEngine.registerSplitStatistics(FeatureView featureView,
TrainingDataset trainingDataset,
Map<String,org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>> splitDatasets) |
Statistics |
StatisticsEngine.registerSplitStatistics(TrainingDataset trainingDataset) |
FeatureGroup |
FeatureGroupEngine.save(FeatureGroup featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions)
Create the metadata and write the data to the online/offline feature store.
|
FeatureView |
FeatureViewEngine.save(FeatureView featureView) |
<S> StreamFeatureGroup |
StreamFeatureGroupEngine.save(StreamFeatureGroup featureGroup,
S dataset,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions,
JobConfiguration sparkJobConfiguration) |
TrainingDataset |
TrainingDatasetEngine.save(TrainingDataset trainingDataset,
Query query,
Map<String,String> userWriteOptions)
Make a REST call to Hopsworks to create the metadata and write the data to the file system.
|
void |
CodeEngine.saveCode(FeatureGroupBase featureGroup) |
void |
CodeEngine.saveCode(TrainingDataset trainingDataset) |
void |
FeatureGroupEngine.saveDataframe(FeatureGroup featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
Storage storage,
HudiOperationType operation,
Map<String,String> offlineWriteOptions,
Map<String,String> onlineWriteOptions,
Integer validationId) |
ExternalFeatureGroup |
ExternalFeatureGroupEngine.saveFeatureGroup(ExternalFeatureGroup externalFeatureGroup) |
FeatureGroup |
FeatureGroupEngine.saveFeatureGroupMetaData(FeatureGroup featureGroup,
List<String> partitionKeys,
String hudiPrecombineKey,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> featureData,
boolean saveEmpty) |
<S> StreamFeatureGroup |
StreamFeatureGroupEngine.saveFeatureGroupMetaData(StreamFeatureGroup featureGroup,
List<String> partitionKeys,
String hudiPrecombineKey,
Map<String,String> writeOptions,
JobConfiguration sparkJobConfiguration,
S featureData) |
static void |
TrainingDatasetUtils.setLabelFeature(List<TrainingDatasetFeature> features,
List<String> labels) |
void |
SparkEngine.setupConnectorHadoopConf(StorageConnector storageConnector) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>[] |
SparkEngine.splitDataset(TrainingDataset trainingDataset,
Query query,
Map<String,String> readOptions) |
void |
TrainingDatasetUtils.trainingDatasetSchemaMatch(org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
List<TrainingDatasetFeature> features) |
FeatureView |
FeatureViewEngine.update(FeatureView featureView) |
<T extends FeatureGroupBase> |
FeatureGroupBaseEngine.updateDescription(FeatureGroupBase featureGroup,
String description,
Class<T> fgClass) |
<T extends FeatureGroupBase> |
FeatureGroupBaseEngine.updateFeatures(FeatureGroupBase featureGroup,
List<Feature> features,
Class<T> fgClass) |
<T extends FeatureGroupBase> |
FeatureGroupBaseEngine.updateStatisticsConfig(FeatureGroupBase featureGroup,
Class<T> fgClass) |
void |
TrainingDatasetEngine.updateStatisticsConfig(TrainingDataset trainingDataset) |
void |
SparkEngine.validateSparkConfiguration() |
void |
FeatureGroupUtils.verifyAttributeKeyNames(FeatureGroupBase featureGroup,
List<String> partitionKeyNames,
String precombineKeyName) |
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row>[] |
SparkEngine.write(TrainingDataset trainingDataset,
Query query,
Map<String,String> queryReadOptions,
Map<String,String> writeOptions,
org.apache.spark.sql.SaveMode saveMode)
Setup Spark to write the data on the File System.
|
<S> void |
SparkEngine.writeEmptyDataframe(FeatureGroupBase featureGroup) |
void |
SparkEngine.writeOfflineDataframe(FeatureGroupBase featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
HudiOperationType operation,
Map<String,String> writeOptions,
Integer validationId) |
<S> void |
SparkEngine.writeOfflineDataframe(StreamFeatureGroup streamFeatureGroup,
S genericDataset,
HudiOperationType operation,
Map<String,String> writeOptions,
Integer validationId) |
<S> void |
SparkEngine.writeOnlineDataframe(FeatureGroupBase featureGroupBase,
S dataset,
String onlineTopicName,
Map<String,String> writeOptions) |
<S> org.apache.spark.sql.streaming.StreamingQuery |
SparkEngine.writeStreamDataframe(FeatureGroupBase featureGroupBase,
S datasetGeneric,
String queryName,
String outputMode,
boolean awaitTermination,
Long timeout,
String checkpointLocation,
Map<String,String> writeOptions) |
void |
FeatureViewEngine.writeTrainingDataset(FeatureView featureView,
TrainingDataset trainingDataset,
Map<String,String> userWriteOptions) |
| Modifier and Type | Method and Description |
|---|---|
<S> FeatureGroupCommit |
HudiEngine.deleteRecord(org.apache.spark.sql.SparkSession sparkSession,
FeatureGroupBase featureGroup,
S genericDeleteDF,
Map<String,String> writeOptions) |
void |
HudiEngine.reconcileHudiSchema(org.apache.spark.sql.SparkSession sparkSession,
HudiFeatureGroupAlias hudiFeatureGroupAlias,
Map<String,String> hudiArgs) |
void |
HudiEngine.saveHudiFeatureGroup(org.apache.spark.sql.SparkSession sparkSession,
FeatureGroupBase featureGroup,
org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> dataset,
HudiOperationType operation,
Map<String,String> writeOptions,
Integer validationId) |
| Modifier and Type | Method and Description |
|---|---|
void |
TagsApi.add(FeatureGroupBase featureGroupBase,
String name,
Object value) |
void |
TagsApi.add(FeatureView featureView,
Integer trainingDatasetVersion,
String name,
Object value) |
void |
TagsApi.add(FeatureView featureView,
String name,
Object value) |
void |
TagsApi.add(TrainingDataset trainingDataset,
String name,
Object value) |
void |
FeatureGroupBase.addTag(String name,
Object value)
Add name/value tag to the feature group.
|
void |
FeatureGroupBase.appendFeatures(Feature features)
Append a single feature to the schema of the feature group.
|
void |
FeatureGroupBase.appendFeatures(List<Feature> features)
Append features to the schema of the feature group.
|
Statistics |
FeatureGroupBase.computeStatistics()
Recompute the statistics for the feature group and save them to the feature store.
|
FsQuery |
QueryConstructorApi.constructQuery(FeatureStore featureStore,
Query query) |
TrainingDataset |
FeatureViewApi.createTrainingData(String featureViewName,
Integer featureViewVersion,
TrainingDataset trainingData) |
TrainingDataset |
TrainingDatasetApi.createTrainingDataset(TrainingDataset trainingDataset) |
void |
FeatureGroupBase.delete() |
void |
FeatureGroupApi.delete(FeatureGroupBase featureGroupBase) |
void |
FeatureViewApi.delete(FeatureStore featureStore,
String name) |
void |
FeatureViewApi.delete(FeatureStore featureStore,
String name,
Integer version) |
void |
TrainingDatasetApi.delete(TrainingDataset trainingDataset) |
void |
FeatureGroupApi.deleteContent(FeatureGroupBase featureGroup) |
void |
TagsApi.deleteTag(FeatureGroupBase featureGroup,
String name) |
void |
TagsApi.deleteTag(FeatureView featureView,
Integer trainingDatasetVersion,
String name) |
void |
TagsApi.deleteTag(FeatureView featureView,
String name) |
void |
FeatureGroupBase.deleteTag(String name)
Delete a tag of the feature group.
|
void |
TagsApi.deleteTag(TrainingDataset trainingDataset,
String name) |
void |
FeatureViewApi.deleteTrainingData(FeatureStore featureStore,
String name,
Integer version) |
void |
FeatureViewApi.deleteTrainingData(FeatureStore featureStore,
String featureViewName,
Integer featureViewVersion,
Integer trainingDataVersion) |
void |
FeatureViewApi.deleteTrainingDatasetOnly(FeatureStore featureStore,
String name,
Integer version) |
void |
FeatureViewApi.deleteTrainingDatasetOnly(FeatureStore featureStore,
String name,
Integer version,
Integer trainingDataVersion) |
FeatureGroupCommit |
FeatureGroupApi.featureGroupCommit(FeatureGroupBase featureGroup,
FeatureGroupCommit featureGroupCommit) |
Query |
FeatureGroupBase.filter(Filter filter)
Filter the query based on a condition for a feature or a conjunction of multiple filters.
|
Query |
FeatureGroupBase.filter(FilterLogic filter)
Filter the query based on a condition for a feature or a conjunction of multiple filters.
|
Map<String,Object> |
TagsApi.get(FeatureGroupBase featureGroupBase) |
Statistics |
StatisticsApi.get(FeatureGroupBase featureGroup,
String commitTime) |
Object |
TagsApi.get(FeatureGroupBase featureGroupBase,
String name) |
List<FeatureView> |
FeatureViewApi.get(FeatureStore featureStore,
String name) |
FeatureView |
FeatureViewApi.get(FeatureStore featureStore,
String name,
Integer version) |
List<TrainingDataset> |
TrainingDatasetApi.get(FeatureStore featureStore,
String tdName,
Integer tdVersion) |
Map<String,Object> |
TagsApi.get(FeatureView featureView) |
Map<String,Object> |
TagsApi.get(FeatureView featureView,
Integer trainingDatasetVersion) |
Object |
TagsApi.get(FeatureView featureView,
Integer trainingDatasetVersion,
String name) |
Object |
TagsApi.get(FeatureView featureView,
String name) |
StorageConnector |
StorageConnectorApi.get(Integer featureStoreId,
String name) |
FeatureStore |
FeatureStoreApi.get(int projectId,
String name) |
Project |
ProjectApi.get(String name) |
Map<String,Object> |
TagsApi.get(TrainingDataset trainingDataset) |
Statistics |
StatisticsApi.get(TrainingDataset trainingDataset,
String commitTime) |
Object |
TagsApi.get(TrainingDataset trainingDataset,
String name) |
Query |
FeatureViewApi.getBatchQuery(FeatureStore featureStore,
String name,
Integer version,
Long startTime,
Long endTime,
Boolean withLabels,
Integer trainingDataVersion) |
List<String> |
KafkaApi.getBrokerEndpoints(FeatureStore featureStore) |
StorageConnector |
StorageConnectorApi.getByName(FeatureStore featureStore,
String name) |
List<FeatureGroupCommit> |
FeatureGroupApi.getCommitDetails(FeatureGroupBase featureGroupBase,
Long wallclockTimestamp,
Integer limit) |
org.apache.avro.Schema |
FeatureGroupBase.getDeserializedAvroSchema() |
String |
FeatureGroupBase.getEncodedAvroSchema() |
ExternalFeatureGroup |
FeatureGroupApi.getExternalFeatureGroup(FeatureStore featureStore,
String fgName,
Integer fgVersion) |
List<ExternalFeatureGroup> |
FeatureGroupApi.getExternalFeatureGroups(FeatureStore featureStore,
String fgName) |
Feature |
FeatureGroupBase.getFeature(String name)
Retrieve a feature of the feature group by name.
|
String |
FeatureGroupBase.getFeatureAvroSchema(String featureName) |
FeatureGroup |
FeatureGroupApi.getFeatureGroup(FeatureStore featureStore,
String fgName,
Integer fgVersion) |
List<FeatureGroup> |
FeatureGroupApi.getFeatureGroups(FeatureStore featureStore,
String fgName) |
static HopsworksClient |
HopsworksClient.getInstance() |
Statistics |
StatisticsApi.getLast(FeatureGroupBase featureGroup) |
Statistics |
StatisticsApi.getLast(TrainingDataset trainingDataset) |
StorageConnector.JdbcConnector |
StorageConnectorApi.getOnlineStorageConnector(FeatureStore featureStore) |
String |
FeatureGroupBase.getOnlineTopicName() |
FeatureGroup |
FeatureGroupApi.getOrCreateFeatureGroup(FeatureStore featureStore,
String name,
Integer version,
String description,
List<String> primaryKeys,
List<String> partitionKeys,
String hudiPrecombineKey,
boolean onlineEnabled,
TimeTravelFormat timeTravelFormat,
StatisticsConfig statisticsConfig,
String eventTime) |
StreamFeatureGroup |
FeatureGroupApi.getOrCreateStreamFeatureGroup(FeatureStore featureStore,
String name,
Integer version,
String description,
List<String> primaryKeys,
List<String> partitionKeys,
String hudiPrecombineKey,
boolean onlineEnabled,
StatisticsConfig statisticsConfig,
String eventTime) |
FsQuery |
TrainingDatasetApi.getQuery(TrainingDataset trainingDataset,
boolean withLabel,
boolean isHiveQuery) |
List<ServingPreparedStatement> |
FeatureViewApi.getServingPreparedStatement(FeatureView featureView,
boolean batch) |
List<ServingPreparedStatement> |
TrainingDatasetApi.getServingPreparedStatement(TrainingDataset trainingDataset,
boolean batch) |
Statistics |
FeatureGroupBase.getStatistics()
Get the last statistics commit for the feature group.
|
Statistics |
FeatureGroupBase.getStatistics(String commitTime)
Get the statistics of a specific commit time for the feature group.
|
StreamFeatureGroup |
FeatureGroupApi.getStreamFeatureGroup(FeatureStore featureStore,
String fgName,
Integer fgVersion) |
List<StreamFeatureGroup> |
FeatureGroupApi.getStreamFeatureGroups(FeatureStore featureStore,
String fgName) |
Subject |
FeatureGroupBase.getSubject() |
Object |
FeatureGroupBase.getTag(String name)
Get a single tag value of the feature group.
|
Map<String,Object> |
FeatureGroupBase.getTags()
Get all tags of the feature group.
|
List<PartitionDetails> |
KafkaApi.getTopicDetails(FeatureStore featureStore,
String topicName) |
Subject |
KafkaApi.getTopicSubject(FeatureStore featureStore,
String topicName) |
TrainingDataset |
FeatureViewApi.getTrainingData(FeatureStore featureStore,
String featureViewName,
Integer featureViewVersion,
Integer trainingDataVersion) |
TrainingDataset |
TrainingDatasetApi.getTrainingDataset(FeatureStore featureStore,
String tdName,
Integer tdVersion) |
List<TransformationFunctionAttached> |
FeatureViewApi.getTransformationFunctions(FeatureView featureView) |
List<TransformationFunctionAttached> |
TrainingDatasetApi.getTransformationFunctions(TrainingDataset trainingDataset) |
<T> T |
HopsworksClient.handleRequest(org.apache.http.HttpRequest request) |
<T> T |
HopsworksClient.handleRequest(org.apache.http.HttpRequest request,
Class<T> cls) |
<T> T |
HopsworksInternalClient.handleRequest(org.apache.http.HttpRequest request,
org.apache.http.client.ResponseHandler<T> responseHandler) |
<T> T |
HopsworksClient.handleRequest(org.apache.http.HttpRequest request,
org.apache.http.client.ResponseHandler<T> responseHandler) |
<T> T |
HopsworksHttpClient.handleRequest(org.apache.http.HttpRequest request,
org.apache.http.client.ResponseHandler<T> responseHandler) |
void |
CodeApi.post(FeatureGroupBase featureGroup,
Code code,
String entityId,
Code.RunType type,
String browserHostName) |
Statistics |
StatisticsApi.post(FeatureGroupBase featureGroup,
Statistics statistics) |
Statistics |
StatisticsApi.post(FeatureView featureView,
Integer trainingDataVersion,
Statistics statistics) |
void |
CodeApi.post(TrainingDataset trainingDataset,
Code code,
String entityId,
Code.RunType type,
String browserHostName) |
Statistics |
StatisticsApi.post(TrainingDataset trainingDataset,
Statistics statistics) |
<T> T |
FeatureGroupBase.read() |
String |
HopsworksExternalClient.readApiKey(SecretStore secretStore,
software.amazon.awssdk.regions.Region region,
String apiKeyFilepath)
Read API key.
|
void |
HopsworksInternalClient.refreshJwt() |
ExternalFeatureGroup |
FeatureGroupApi.save(ExternalFeatureGroup externalFeatureGroup) |
FeatureGroup |
FeatureGroupApi.save(FeatureGroup featureGroup) |
FeatureView |
FeatureViewApi.save(FeatureView featureView) |
StreamFeatureGroup |
FeatureGroupApi.save(StreamFeatureGroup featureGroup) |
static HopsworksClient |
HopsworksClient.setupHopsworksClient(String host,
int port,
software.amazon.awssdk.regions.Region region,
SecretStore secretStore,
boolean hostnameVerification,
String trustStorePath,
String apiKeyFilePath,
String apiKeyValue) |
FeatureView |
FeatureViewApi.update(FeatureView featureView) |
void |
FeatureGroupBase.updateDescription(String description)
Update the description of the feature group.
|
void |
FeatureGroupBase.updateFeatureDescription(String featureName,
String description)
Update the description of a single feature.
|
void |
FeatureGroupBase.updateFeatures(Feature feature)
Update the metadata of a single feature.
|
void |
FeatureGroupBase.updateFeatures(List<Feature> features)
Update the metadata of multiple features.
|
<T extends FeatureGroupBase> |
FeatureGroupApi.updateMetadata(FeatureGroupBase featureGroup,
String queryParameter,
Class<T> fgType) |
<T extends FeatureGroupBase> |
FeatureGroupApi.updateMetadata(FeatureGroupBase featureGroup,
String queryParameter,
Object value,
Class<T> fgType) |
TrainingDataset |
TrainingDatasetApi.updateMetadata(TrainingDataset trainingDataset,
String queryParameter) |
void |
FeatureGroupBase.updateStatisticsConfig()
Update the statistics configuration of the feature group.
|
| Constructor and Description |
|---|
HopsworksExternalClient(String host,
int port,
boolean hostnameVerification,
String trustStorePath,
software.amazon.awssdk.regions.Region region,
SecretStore secretStore) |
HopsworksExternalClient(String host,
int port,
boolean hostnameVerification,
String trustStorePath,
String apiKeyValue) |
HopsworksExternalClient(String host,
int port,
String apiKeyFilepath,
boolean hostnameVerification,
String trustStorePath) |
HopsworksInternalClient() |
Copyright © 2023. All rights reserved.