From 1289f3f7eea6bd3b08617606862a75f0224f9f18 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Fri, 24 Mar 2023 15:32:54 -0700 Subject: [PATCH 01/31] fix: Use pyarrow in a way that works across versions (#3562) Signed-off-by: Achal Shah --- sdk/python/feast/infra/offline_stores/file_source.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index e9f3735dee6..d8522fb4456 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -158,7 +158,13 @@ def get_table_column_names_and_types( # Adding support for different file format path # based on S3 filesystem if filesystem is None: - schema = ParquetDataset(path).schema.to_arrow_schema() + schema = ParquetDataset(path).schema + if hasattr(schema, "names") and hasattr(schema, "types"): + # Newer versions of pyarrow doesn't have this method, + # but this field is good enough. 
+ pass + else: + schema = schema.to_arrow_schema() else: schema = ParquetDataset(path, filesystem=filesystem).schema From e7421c11172aaafff34da98fc14cf763c2d70002 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Sat, 25 Mar 2023 11:51:04 -0700 Subject: [PATCH 02/31] fix(cI): Install coreutils in mac github workers for smoke test (#3563) Signed-off-by: Achal Shah --- .github/workflows/build_wheels.yml | 5 ++++- .github/workflows/release.yml | 3 +-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 20a9f57a2a4..9bed2a4282b 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -52,7 +52,7 @@ jobs: echo $HIGHEST_SEMVER_TAG build-python-wheel: - name: Build wheels on ${{ matrix.os }} + name: Build wheels runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -164,6 +164,9 @@ jobs: with: name: wheels path: dist + - name: Install OS X dependencies + if: matrix.os == 'macos-10.15' + run: brew install coreutils - name: Install wheel if: ${{ !matrix.from-source }} # try to install all wheels; only the current platform wheel should be actually installed diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f18ee10cb1a..d064842afcb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,7 +92,7 @@ jobs: publish-web-ui-npm: - if: github.repository == 'feast-dev/feast' + if: github.repository == 'feast-dev/feast' and github.event.inputs.publish_ui == 'true' needs: [validate_version_bumps, get_dry_release_versions] runs-on: ubuntu-latest env: @@ -115,7 +115,6 @@ jobs: working-directory: ./ui run: yarn build:lib - name: Publish UI package - if: github.event.inputs.publish_ui == 'true' working-directory: ./ui run: npm publish env: From 8dd72171526646499351e66768dc2c0befae25e9 Mon Sep 17 00:00:00 2001 From: Achal Shah Date: Sat, 25 Mar 2023 11:59:23 -0700 Subject: [PATCH 03/31] chore: Fix invalid 
github action syntax (#3564) Signed-off-by: Achal Shah --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d064842afcb..ad66deaaf25 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,7 +92,7 @@ jobs: publish-web-ui-npm: - if: github.repository == 'feast-dev/feast' and github.event.inputs.publish_ui == 'true' + if: github.repository == 'feast-dev/feast' && github.event.inputs.publish_ui == 'true' needs: [validate_version_bumps, get_dry_release_versions] runs-on: ubuntu-latest env: From b05d50bcfeb179c2596f96f0d0a714754c516361 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mehmet=20Tokg=C3=B6z?= <56408993+mehmettokgoz@users.noreply.github.com> Date: Sun, 26 Mar 2023 02:35:45 +0300 Subject: [PATCH 04/31] feat: Add Hazelcast as an online store (#3523) * feat: Add Hazelcast as an online store Signed-off-by: mehmettokgoz * Fix template bugs and address review comments. 
Signed-off-by: mehmettokgoz --------- Signed-off-by: mehmettokgoz --- Makefile | 21 ++ docs/SUMMARY.md | 1 + docs/reference/online-stores/README.md | 5 + docs/reference/online-stores/hazelcast.md | 59 ++++ ..._stores.contrib.hazelcast_online_store.rst | 21 ++ .../feast.infra.online_stores.contrib.rst | 9 + sdk/python/feast/cli.py | 1 + .../contrib/hazelcast_online_store/README.md | 89 +++++ .../hazelcast_online_store/__init__.py | 0 .../hazelcast_online_store.py | 321 ++++++++++++++++++ .../contrib/hazelcast_repo_configuration.py | 26 ++ sdk/python/feast/repo_config.py | 1 + .../feast/templates/hazelcast/__init__.py | 0 .../feast/templates/hazelcast/bootstrap.py | 176 ++++++++++ .../hazelcast/feature_repo/__init__.py | 0 .../hazelcast/feature_repo/example_repo.py | 139 ++++++++ .../hazelcast/feature_repo/feature_store.yaml | 14 + .../hazelcast/feature_repo/test_workflow.py | 130 +++++++ .../requirements/py3.10-ci-requirements.txt | 2 + .../requirements/py3.8-ci-requirements.txt | 2 + .../requirements/py3.9-ci-requirements.txt | 2 + .../universal/online_store/hazelcast.py | 48 +++ setup.py | 6 + 23 files changed, 1073 insertions(+) create mode 100644 docs/reference/online-stores/hazelcast.md create mode 100644 sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst create mode 100644 sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md create mode 100644 sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/__init__.py create mode 100644 sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py create mode 100644 sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py create mode 100644 sdk/python/feast/templates/hazelcast/__init__.py create mode 100644 sdk/python/feast/templates/hazelcast/bootstrap.py create mode 100644 sdk/python/feast/templates/hazelcast/feature_repo/__init__.py create mode 100644 
sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py create mode 100644 sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml create mode 100644 sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py create mode 100644 sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py diff --git a/Makefile b/Makefile index e1fd342881d..a104bf9ca5a 100644 --- a/Makefile +++ b/Makefile @@ -259,6 +259,27 @@ test-python-universal-cassandra: python -m pytest -x --integration \ sdk/python/tests +test-python-universal-hazelcast: + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.hazelcast_repo_configuration \ + PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.hazelcast \ + FEAST_USAGE=False \ + IS_TEST=True \ + python -m pytest -n 8 --integration \ + -k "not test_universal_cli and \ + not test_go_feature_server and \ + not test_feature_logging and \ + not test_reorder_columns and \ + not test_logged_features_validation and \ + not test_lambda_materialization_consistency and \ + not test_offline_write and \ + not test_push_features_to_offline_store and \ + not gcs_registry and \ + not s3_registry and \ + not test_universal_types and \ + not test_snowflake" \ + sdk/python/tests + test-python-universal-cassandra-no-cloud-providers: PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.contrib.cassandra_repo_configuration \ diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index cdca6f37844..9b22d1e2865 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -97,6 +97,7 @@ * [Cassandra + Astra DB (contrib)](reference/online-stores/cassandra.md) * [MySQL (contrib)](reference/online-stores/mysql.md) * [Rockset (contrib)](reference/online-stores/rockset.md) + * [Hazelcast (contrib)](reference/online-stores/hazelcast.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index 64b707a7e5d..2fdfd50f7c1 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -46,3 +46,8 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli [rockset.md](rockset.md) {% endcontent-ref %} +{% content-ref url="hazelcast.md" %} +[hazelcast.md](hazelcast.md) +{% endcontent-ref %} + + diff --git a/docs/reference/online-stores/hazelcast.md b/docs/reference/online-stores/hazelcast.md new file mode 100644 index 00000000000..ef65f42b316 --- /dev/null +++ b/docs/reference/online-stores/hazelcast.md @@ -0,0 +1,59 @@ +# Hazelcast online store + +## Description + +Hazelcast online store is in alpha development. + +The [Hazelcast](htpps://hazelcast.com) online store provides support for materializing feature values into a Hazelcast cluster for serving online features in real-time. +In order to use Hazelcast as online store, you need to have a running Hazelcast cluster. You can create a cluster using Hazelcast Viridian Serverless. See this [getting started](https://hazelcast.com/get-started/) page for more details. 
+ +* Each feature view is mapped one-to-one to a specific Hazelcast IMap +* This implementation inherits all strengths of Hazelcast such as high availability, fault-tolerance, and data distribution. +* Secure TSL/SSL connection is supported by Hazelcast online store. +* You can set TTL (Time-To-Live) setting for your features in Hazelcast cluster. + +Each feature view corresponds to an IMap in Hazelcast cluster and the entries in that IMap corresponds to features of entities. +Each feature value stored separately and can be retrieved individually. + +## Getting started + +In order to use Hazelcast online store, you'll need to run `pip install 'feast[hazelcast]'`. You can then get started with the command `feast init REPO_NAME -t hazelcast`. + + +## Examples + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: hazelcast + cluster_name: dev + cluster_members: ["localhost:5701"] + key_ttl_seconds: 36000 +``` + +## Functionality Matrix + +| | Hazelcast | +| :-------------------------------------------------------- |:----------| +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. 
tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | yes | +| support for ttl (time to live) at retrieval | yes | +| support for deleting expired data | yes | +| collocated by feature view | no | +| collocated by feature service | no | +| collocated by entity key | yes | + +To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). + diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst new file mode 100644 index 00000000000..bf3ed9d7d64 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.hazelcast_online_store.rst @@ -0,0 +1,21 @@ +feast.infra.online\_stores.contrib.hazelcast\_online\_store package +=================================================================== + +Submodules +---------- + +feast.infra.online\_stores.contrib.hazelcast\_online\_store.hazelcast\_online\_store module +------------------------------------------------------------------------------------------- + +.. automodule:: feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.infra.online_stores.contrib.hazelcast_online_store + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst index f10ff306f32..b6c8a404ee4 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst @@ -8,6 +8,7 @@ Subpackages :maxdepth: 4 feast.infra.online_stores.contrib.cassandra_online_store + feast.infra.online_stores.contrib.hazelcast_online_store feast.infra.online_stores.contrib.hbase_online_store feast.infra.online_stores.contrib.mysql_online_store feast.infra.online_stores.contrib.rockset_online_store @@ -23,6 +24,14 @@ feast.infra.online\_stores.contrib.cassandra\_repo\_configuration module :undoc-members: :show-inheritance: +feast.infra.online\_stores.contrib.hazelcast\_repo\_configuration module +------------------------------------------------------------------------ + +.. 
automodule:: feast.infra.online_stores.contrib.hazelcast_repo_configuration + :members: + :undoc-members: + :show-inheritance: + feast.infra.online\_stores.contrib.hbase\_repo\_configuration module -------------------------------------------------------------------- diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 8adf1152261..e96ab772a6f 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -641,6 +641,7 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List "hbase", "cassandra", "rockset", + "hazelcast", ], case_sensitive=False, ), diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md new file mode 100644 index 00000000000..8c7f1022399 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/README.md @@ -0,0 +1,89 @@ +# Hazelcast Online Store + +This contribution makes it possible to use [Hazelcast](https://hazelcast.com/) as online store for Feast. + +Once the Hazelcast client configuration is given inside `feature_store.yaml` file, everything else +is handled as with any other online store: schema creation, read/write from/to Hazelcast and remove operations. + +## Quick usage + +The following refers to the [Feast quickstart](https://docs.feast.dev/getting-started/quickstart) page. +Only the Step 2 is different from this tutorial since it requires you to configure your Hazelcast online store. + +### Creating the feature repository + +The easiest way to get started is to use the Feast CLI to initialize a new +feature store. Once Feast is installed, the command + +``` +feast init FEATURE_STORE_NAME -t hazelcast +``` + +will interactively help you create the `feature_store.yaml` with the +required configuration details to access your Hazelcast cluster. 
+ +Alternatively, you can run `feast init -t FEATURE_STORE_NAME`, as described +in the quickstart, and then manually edit the `online_store` section in +the `feature_store.yaml` file as detailed below. + +The following steps (setup of feature definitions, deployment of the store, +generation of training data, materialization, fetching of online/offline +features) proceed exactly as in the general Feast quickstart instructions. + +#### Hazelcast setup + +In order to use [Hazelcast](https://hazelcast.com) as online store, you need to have a running Hazelcast cluster. +You can create a cluster using Hazelcast Viridian Serverless easily or deploy one on your local/remote machine. +See this [getting started](https://hazelcast.com/get-started/) page for more details. + +Hazelcast online store provides capability to connect local/remote or Hazelcast Viridian Serverless cluster. +Following is an example to connect local cluster named "dev" running on port 5701 with TLS/SSL enabled. + +```yaml +[...] +online_store: + type: hazelcast + cluster_name: dev + cluster_members: ["localhost:5701"] + ssl_cafile_path: /path/to/ca/file + ssl_certfile_path: /path/to/cert/file + ssl_keyfile_path: /path/to/key/file + ssl_password: ${SSL_PASSWORD} # The password will be read form the `SSL_PASSWORD` environment variable. + key_ttl_seconds: 86400 # The default is 0 and means infinite. +``` + +If you want to connect your Hazelcast Viridian cluster instead of local/remote one, specify your configuration as follows: + +```yaml +[...] +online_store: + type: hazelcast + cluster_name: YOUR_CLUSTER_ID + discovery_token: YOUR_DISCOVERY_TOKEN + ssl_cafile_path: /path/to/ca/file + ssl_certfile_path: /path/to/cert/file + ssl_keyfile_path: /path/to/key/file + ssl_password: ${SSL_PASSWORD} # The password will be read form the `SSL_PASSWORD` environment variable. + key_ttl_seconds: 86400 # The default is 0 and means infinite. 
+``` + +#### TTL configuration + +TTL is the maximum time in seconds for each feature to stay idle in the map. +It limits the lifetime of the features relative to the time of the last read or write access performed on them. +The features whose idle period exceeds this limit are expired and evicted automatically. +A feature is idle if no get or put is called on it. +Valid values are integers between 0 and Integer.MAX_VALUE. +Its default value is 0, which means infinite. + +```yaml +[...] +online_store: + [...] + key_ttl_seconds: 86400 +``` + +### More info + +You can learn about Hazelcast more from the [Hazelcast Documentation](https://docs.hazelcast.com/home/). + diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/__init__.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py new file mode 100644 index 00000000000..7ec803a69c5 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_online_store/hazelcast_online_store.py @@ -0,0 +1,321 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +""" +Hazelcast online store for Feast. 
+""" +import base64 +import threading +from datetime import datetime, timezone +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple + +import pytz +from hazelcast.client import HazelcastClient +from hazelcast.core import HazelcastJsonValue +from hazelcast.discovery import HazelcastCloudDiscovery +from pydantic import StrictStr + +from feast import Entity, FeatureView, RepoConfig +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import FeastConfigBaseModel +from feast.usage import log_exceptions_and_usage + +# Exception messages +EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS = ( + "Unexpected configuration object (not a HazelcastOnlineStoreConfig instance)" +) + +# Hazelcast schema names for each field +D_FEATURE_NAME = "feature_name" +D_FEATURE_VALUE = "feature_value" +D_ENTITY_KEY = "entity_key" +D_EVENT_TS = "event_ts" +D_CREATED_TS = "created_ts" + + +class HazelcastInvalidConfig(Exception): + def __init__(self, msg: str): + super().__init__(msg) + + +class HazelcastOnlineStoreConfig(FeastConfigBaseModel): + """Online store config for Hazelcast store""" + + type: Literal["hazelcast"] = "hazelcast" + """Online store type selector""" + + cluster_name: StrictStr = "dev" + """Name of the cluster you want to connect. 
The default cluster name is `dev`""" + + cluster_members: Optional[List[str]] = ["localhost:5701"] + """List of member addresses which is connected to your cluster""" + + discovery_token: Optional[StrictStr] = "" + """The discovery token of your Hazelcast Viridian cluster""" + + ssl_cafile_path: Optional[StrictStr] = "" + """Absolute path of CA certificates in PEM format.""" + + ssl_certfile_path: Optional[StrictStr] = "" + """Absolute path of the client certificate in PEM format.""" + + ssl_keyfile_path: Optional[StrictStr] = "" + """Absolute path of the private key file for the client certificate in the PEM format.""" + + ssl_password: Optional[StrictStr] = "" + """Password for decrypting the keyfile if it is encrypted.""" + + key_ttl_seconds: Optional[int] = 0 + """Hazelcast key bin TTL (in seconds) for expiring entities""" + + +class HazelcastOnlineStore(OnlineStore): + """ + Hazelcast online store implementation for Feast + + Attributes: + _client: Hazelcast client connection. + _lock: Prevent race condition while creating the client connection + """ + + _client: Optional[HazelcastClient] = None + _lock = threading.Lock() + + def _get_client(self, config: HazelcastOnlineStoreConfig): + """ + Establish the client connection to Hazelcast cluster, if not yet created, + and return it. + + The established client connection could be Hazelcast Viridian and SSL enabled based on user config. + + Args: + config: The HazelcastOnlineStoreConfig for the online store. 
+ """ + if self._client is None: + with self._lock: + if self._client is None: + if config.discovery_token != "": + HazelcastCloudDiscovery._CLOUD_URL_BASE = ( + "api.viridian.hazelcast.com" + ) + self._client = HazelcastClient( + cluster_name=config.cluster_name, + cloud_discovery_token=config.discovery_token, + statistics_enabled=True, + ssl_enabled=True, + ssl_cafile=config.ssl_cafile_path, + ssl_certfile=config.ssl_certfile_path, + ssl_keyfile=config.ssl_keyfile_path, + ssl_password=config.ssl_password, + ) + elif config.ssl_cafile_path != "": + self._client = HazelcastClient( + cluster_name=config.cluster_name, + statistics_enabled=True, + ssl_enabled=True, + ssl_cafile=config.ssl_cafile_path, + ssl_certfile=config.ssl_certfile_path, + ssl_keyfile=config.ssl_keyfile_path, + ssl_password=config.ssl_password, + ) + else: + self._client = HazelcastClient( + statistics_enabled=True, + cluster_members=config.cluster_members, + cluster_name=config.cluster_name, + ) + return self._client + + @log_exceptions_and_usage(online_store="hazelcast") + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + online_store_config = config.online_store + if not isinstance(online_store_config, HazelcastOnlineStoreConfig): + raise HazelcastInvalidConfig( + EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS + ) + + client = self._get_client(online_store_config) + fv_map = client.get_map(_map_name(config.project, table)) + + for entity_key, values, event_ts, created_ts in data: + entity_key_str = base64.b64encode( + serialize_entity_key( + entity_key, + entity_key_serialization_version=2, + ) + ).decode("utf-8") + event_ts_utc = pytz.utc.localize(event_ts, is_dst=None).timestamp() + created_ts_utc = 0.0 + if created_ts is not None: + created_ts_utc = pytz.utc.localize(created_ts, is_dst=None).timestamp() + for 
feature_name, value in values.items(): + feature_value = base64.b64encode(value.SerializeToString()).decode( + "utf-8" + ) + hz_combined_key = entity_key_str + feature_name + fv_map.put( + hz_combined_key, + HazelcastJsonValue( + { + D_ENTITY_KEY: entity_key_str, + D_FEATURE_NAME: feature_name, + D_FEATURE_VALUE: feature_value, + D_EVENT_TS: event_ts_utc, + D_CREATED_TS: created_ts_utc, + } + ), + online_store_config.key_ttl_seconds, + ) + if progress: + progress(1) + + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: Optional[List[str]] = None, + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + + online_store_config = config.online_store + if not isinstance(online_store_config, HazelcastOnlineStoreConfig): + raise HazelcastInvalidConfig( + EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS + ) + + client = self._get_client(online_store_config) + entries: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + fv_map = client.get_map(_map_name(config.project, table)) + + hz_keys = [] + entity_keys_str = {} + for entity_key in entity_keys: + entity_key_str = base64.b64encode( + serialize_entity_key( + entity_key, + entity_key_serialization_version=2, + ) + ).decode("utf-8") + if requested_features: + feature_keys = [ + entity_key_str + feature for feature in requested_features + ] + else: + feature_keys = [entity_key_str + f.name for f in table.features] + hz_keys.extend(feature_keys) + entity_keys_str[entity_key_str] = feature_keys + + data = fv_map.get_all(hz_keys).result() + entities = [] + for key in hz_keys: + try: + data[key] = data[key].loads() + entities.append(data[key][D_ENTITY_KEY]) + except KeyError: + continue + + for key in entity_keys_str: + if key in entities: + entry = {} + event_ts = None + for f_key in entity_keys_str[key]: + row = data[f_key] + value = ValueProto() + value.ParseFromString(base64.b64decode(row[D_FEATURE_VALUE])) + 
entry[row[D_FEATURE_NAME]] = value + event_ts = datetime.fromtimestamp(row[D_EVENT_TS], tz=timezone.utc) + entries.append((event_ts, entry)) + else: + entries.append((None, None)) + return entries + + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + online_store_config = config.online_store + if not isinstance(online_store_config, HazelcastOnlineStoreConfig): + raise HazelcastInvalidConfig( + EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS + ) + + client = self._get_client(online_store_config) + project = config.project + + for table in tables_to_keep: + client.sql.execute( + f"""CREATE OR REPLACE MAPPING {_map_name(project, table)} ( + __key VARCHAR, + {D_ENTITY_KEY} VARCHAR, + {D_FEATURE_NAME} VARCHAR, + {D_FEATURE_VALUE} VARCHAR, + {D_EVENT_TS} DECIMAL, + {D_CREATED_TS} DECIMAL + ) + TYPE IMap + OPTIONS ( + 'keyFormat' = 'varchar', + 'valueFormat' = 'json-flat' + ) + """ + ).result() + + for table in tables_to_delete: + client.sql.execute( + f"DELETE FROM {_map_name(config.project, table)}" + ).result() + client.sql.execute( + f"DROP MAPPING IF EXISTS {_map_name(config.project, table)}" + ).result() + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + online_store_config = config.online_store + if not isinstance(online_store_config, HazelcastOnlineStoreConfig): + raise HazelcastInvalidConfig( + EXCEPTION_HAZELCAST_UNEXPECTED_CONFIGURATION_CLASS + ) + + client = self._get_client(online_store_config) + project = config.project + + for table in tables: + client.sql.execute(f"DELETE FROM {_map_name(config.project, table)}") + client.sql.execute(f"DROP MAPPING IF EXISTS {_map_name(project, table)}") + + +def _map_name(project: str, table: FeatureView) -> str: + return f"{project}_{table.name}" diff --git 
a/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py b/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py new file mode 100644 index 00000000000..5b3ea6e307b --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hazelcast_repo_configuration.py @@ -0,0 +1,26 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.universal.online_store.hazelcast import ( + HazelcastOnlineStoreCreator, +) + +FULL_REPO_CONFIGS = [ + IntegrationTestRepoConfig(online_store_creator=HazelcastOnlineStoreCreator), +] diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 200f9d284ee..3461ae058bd 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -62,6 +62,7 @@ "cassandra": "feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStore", "mysql": "feast.infra.online_stores.contrib.mysql_online_store.mysql.MySQLOnlineStore", "rockset": "feast.infra.online_stores.contrib.rockset_online_store.rockset.RocksetOnlineStore", + "hazelcast": "feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore", } OFFLINE_STORE_CLASS_FOR_TYPE = { diff --git a/sdk/python/feast/templates/hazelcast/__init__.py 
b/sdk/python/feast/templates/hazelcast/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/templates/hazelcast/bootstrap.py b/sdk/python/feast/templates/hazelcast/bootstrap.py new file mode 100644 index 00000000000..e5018e4fe02 --- /dev/null +++ b/sdk/python/feast/templates/hazelcast/bootstrap.py @@ -0,0 +1,176 @@ +# +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import pathlib +from datetime import datetime, timedelta + +import click + +from feast.file_utils import ( + remove_lines_from_file, + replace_str_in_file, + write_setting_or_remove, +) + + +def collect_hazelcast_online_store_settings(): + c_cluster_name = None + c_members = None + c_ca_path = None + c_cert_path = None + c_key_path = None + c_discovery_token = None + c_ttl_seconds = None + + cluster_type = click.prompt( + "Would you like to connect a [L]ocal cluster or [V]iridian cluster?", + type=click.Choice(["L", "V"]), + show_choices=False, + default="L", + ) + is_viridian = cluster_type == "V" + + if is_viridian: + c_cluster_name = click.prompt("Cluster ID: ") + c_discovery_token = click.prompt("Discovery Token: ") + c_ca_path = click.prompt("CA file path: ") + c_cert_path = click.prompt("CERT file path: ") + c_key_path = click.prompt("Key file path: ") + else: + c_cluster_name = click.prompt( + "Cluster name: ", + default="dev", + ) + c_members = click.prompt( + "Cluster members:", + default="localhost:5701", + ) + 
needs_ssl = click.confirm("Use TLS/SSL?", default=False) + if needs_ssl: + c_ca_path = click.prompt("CA file path: ") + c_cert_path = click.prompt("CERT file path: ") + c_key_path = click.prompt("Key file path: ") + + c_ttl_seconds = click.prompt( + "Key TTL seconds: ", + default=0, + ) + return { + "c_cluster_name": c_cluster_name, + "c_members": c_members, + "c_ca_path": c_ca_path, + "c_cert_path": c_cert_path, + "c_key_path": c_key_path, + "c_discovery_token": c_discovery_token, + "c_ttl_seconds": c_ttl_seconds, + } + + +def apply_hazelcast_store_settings(config_file, settings): + write_setting_or_remove( + config_file, + settings["c_cluster_name"], + "cluster_name", + "c_cluster_name", + ) + # + write_setting_or_remove( + config_file, + settings["c_discovery_token"], + "discovery_token", + "c_discovery_token", + ) + # + if settings["c_members"] is not None: + settings["c_members"] = "[" + settings["c_members"] + "]" + write_setting_or_remove( + config_file, + settings["c_members"], + "cluster_members", + "c_members", + ) + # + write_setting_or_remove( + config_file, + settings["c_ca_path"], + "ssl_cafile_path", + "c_ca_path", + ) + # + write_setting_or_remove( + config_file, + settings["c_cert_path"], + "ssl_certfile_path", + "c_cert_path", + ) + # + write_setting_or_remove( + config_file, + settings["c_key_path"], + "ssl_keyfile_path", + "c_key_path", + ) + if settings["c_ca_path"] is None: + remove_lines_from_file( + config_file, + "ssl_password: ${SSL_PASSWORD}", + True, + ) + # + replace_str_in_file( + config_file, + "c_ttl_seconds", + f"{settings['c_ttl_seconds']}", + ) + + +def bootstrap(): + """ + Bootstrap() will automatically be called + from the init_repo() during `feast init`. 
+ """ + from feast.driver_test_data import create_driver_hourly_stats_df + + repo_path = pathlib.Path(__file__).parent.absolute() / "feature_repo" + config_file = repo_path / "feature_store.yaml" + + data_path = repo_path / "data" + data_path.mkdir(exist_ok=True) + + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + # + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df( + driver_entities, + start_date, + end_date, + ) + # + driver_stats_path = data_path / "driver_stats.parquet" + driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) + + # example_repo.py + example_py_file = repo_path / "example_repo.py" + replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path)) + + # store config yaml, interact with user and then customize file: + settings = collect_hazelcast_online_store_settings() + apply_hazelcast_store_settings(config_file, settings) + + +if __name__ == "__main__": + bootstrap() diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/__init__.py b/sdk/python/feast/templates/hazelcast/feature_repo/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py b/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py new file mode 100644 index 00000000000..131f1bcaa61 --- /dev/null +++ b/sdk/python/feast/templates/hazelcast/feature_repo/example_repo.py @@ -0,0 +1,139 @@ +# This is an example feature definition file + +from datetime import timedelta + +import pandas as pd + +from feast import ( + Entity, + FeatureService, + FeatureView, + Field, + FileSource, + PushSource, + RequestSource, +) +from feast.on_demand_feature_view import on_demand_feature_view +from feast.types import Float32, Float64, Int64 + +# Define an entity for the driver. You can think of an entity as a primary key used to +# fetch features. 
+driver = Entity(name="driver", join_keys=["driver_id"])
+
+# Read data from parquet files. Parquet is convenient for local development mode. For
+# production, you can use your favorite DWH, such as BigQuery. See Feast documentation
+# for more info.
+driver_stats_source = FileSource(
+    name="driver_hourly_stats_source",
+    path="%PARQUET_PATH%",
+    timestamp_field="event_timestamp",
+    created_timestamp_column="created",
+)
+
+# Our parquet files contain sample data that includes a driver_id column, timestamps and
+# three feature columns. Here we define a Feature View that will allow us to serve this
+# data to our model online.
+driver_stats_fv = FeatureView(
+    # The unique name of this feature view. Two feature views in a single
+    # project cannot have the same name
+    name="driver_hourly_stats",
+    entities=[driver],
+    ttl=timedelta(days=1),
+    # The list of features defined below act as a schema to both define features
+    # for both materialization of features into a store, and are used as references
+    # during retrieval for building a training dataset or serving features
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64),
+    ],
+    online=True,
+    source=driver_stats_source,
+    # Tags are user defined key/value pairs that are attached to each
+    # feature view
+    tags={"team": "driver_performance"},
+)
+
+# Define a request data source which encodes features / information only
+# available at request time (e.g.
part of the user initiated HTTP request) +input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], +) + + +# Define an on demand feature view which can generate new features based on +# existing feature views and RequestSource features +@on_demand_feature_view( + sources=[driver_stats_fv, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], +) +def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] + df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] + return df + + +# This groups features into a model version +driver_activity_v1 = FeatureService( + name="driver_activity_v1", + features=[ + driver_stats_fv[["conv_rate"]], # Sub-selects a feature from a feature view + transformed_conv_rate, # Selects all features from the feature view + ], +) +driver_activity_v2 = FeatureService( + name="driver_activity_v2", features=[driver_stats_fv, transformed_conv_rate] +) + +# Defines a way to push data (to be available offline, online or both) into Feast. +driver_stats_push_source = PushSource( + name="driver_stats_push_source", + batch_source=driver_stats_source, +) + +# Defines a slightly modified version of the feature view from above, where the source +# has been changed to the push source. This allows fresh features to be directly pushed +# to the online store for this feature view. 
+driver_stats_fresh_fv = FeatureView(
+    name="driver_hourly_stats_fresh",
+    entities=[driver],
+    ttl=timedelta(days=1),
+    schema=[
+        Field(name="conv_rate", dtype=Float32),
+        Field(name="acc_rate", dtype=Float32),
+        Field(name="avg_daily_trips", dtype=Int64),
+    ],
+    online=True,
+    source=driver_stats_push_source,  # Changed from above
+    tags={"team": "driver_performance"},
+)
+
+
+# Define an on demand feature view which can generate new features based on
+# existing feature views and RequestSource features
+@on_demand_feature_view(
+    sources=[driver_stats_fresh_fv, input_request],  # relies on fresh version of FV
+    schema=[
+        Field(name="conv_rate_plus_val1", dtype=Float64),
+        Field(name="conv_rate_plus_val2", dtype=Float64),
+    ],
+)
+def transformed_conv_rate_fresh(inputs: pd.DataFrame) -> pd.DataFrame:
+    df = pd.DataFrame()
+    df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"]
+    df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"]
+    return df
+
+
+driver_activity_v3 = FeatureService(
+    name="driver_activity_v3",
+    features=[driver_stats_fresh_fv, transformed_conv_rate_fresh],
+)
diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml b/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml
new file mode 100644
index 00000000000..e26d1bf7506
--- /dev/null
+++ b/sdk/python/feast/templates/hazelcast/feature_repo/feature_store.yaml
@@ -0,0 +1,14 @@
+project: my_project
+registry: data/registry.db
+provider: local
+online_store:
+    type: hazelcast
+    cluster_name: c_cluster_name
+    cluster_members: c_members
+    discovery_token: c_discovery_token
+    ssl_cafile_path: c_ca_path
+    ssl_certfile_path: c_cert_path
+    ssl_keyfile_path: c_key_path
+    ssl_password: ${SSL_PASSWORD} # This value will be read from the `SSL_PASSWORD` environment variable.
+ key_ttl_seconds: c_ttl_seconds +entity_key_serialization_version: 2 diff --git a/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py b/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py new file mode 100644 index 00000000000..eebeb113115 --- /dev/null +++ b/sdk/python/feast/templates/hazelcast/feature_repo/test_workflow.py @@ -0,0 +1,130 @@ +import subprocess +from datetime import datetime + +import pandas as pd + +from feast import FeatureStore +from feast.data_source import PushMode + + +def run_demo(): + store = FeatureStore(repo_path=".") + print("\n--- Run feast apply ---") + subprocess.run(["feast", "apply"]) + + print("\n--- Historical features for training ---") + fetch_historical_features_entity_df(store, for_batch_scoring=False) + + print("\n--- Historical features for batch scoring ---") + fetch_historical_features_entity_df(store, for_batch_scoring=True) + + print("\n--- Load features into online store ---") + store.materialize_incremental(end_date=datetime.now()) + + print("\n--- Online features ---") + fetch_online_features(store) + + print("\n--- Online features retrieved (instead) through a feature service---") + fetch_online_features(store, source="feature_service") + + print( + "\n--- Online features retrieved (using feature service v3, which uses a feature view with a push source---" + ) + fetch_online_features(store, source="push") + + print("\n--- Simulate a stream event ingestion of the hourly stats df ---") + event_df = pd.DataFrame.from_dict( + { + "driver_id": [1001], + "event_timestamp": [ + datetime.now(), + ], + "created": [ + datetime.now(), + ], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + } + ) + print(event_df) + store.push("driver_stats_push_source", event_df, to=PushMode.ONLINE_AND_OFFLINE) + + print("\n--- Online features again with updated values from a stream push---") + fetch_online_features(store, source="push") + + print("\n--- Run feast teardown ---") + 
subprocess.run(["feast", "teardown"]) + + +def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): + # Note: see https://docs.feast.dev/getting-started/concepts/feature-retrieval for more details on how to retrieve + # for all entities in the offline store instead + entity_df = pd.DataFrame.from_dict( + { + # entity's join key -> entity values + "driver_id": [1001, 1002, 1003], + # "event_timestamp" (reserved key) -> timestamps + "event_timestamp": [ + datetime(2021, 4, 12, 10, 59, 42), + datetime(2021, 4, 12, 8, 12, 10), + datetime(2021, 4, 12, 16, 40, 26), + ], + # (optional) label name -> label values. Feast does not process these + "label_driver_reported_satisfaction": [1, 5, 3], + # values we're using for an on-demand transformation + "val_to_add": [1, 2, 3], + "val_to_add_2": [10, 20, 30], + } + ) + # For batch scoring, we want the latest timestamps + if for_batch_scoring: + entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) + + training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ], + ).to_df() + print(training_df.head()) + + +def fetch_online_features(store, source: str = ""): + entity_rows = [ + # {join_key: entity_value} + { + "driver_id": 1001, + "val_to_add": 1000, + "val_to_add_2": 2000, + }, + { + "driver_id": 1002, + "val_to_add": 1001, + "val_to_add_2": 2002, + }, + ] + if source == "feature_service": + features_to_fetch = store.get_feature_service("driver_activity_v1") + elif source == "push": + features_to_fetch = store.get_feature_service("driver_activity_v3") + else: + features_to_fetch = [ + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + "transformed_conv_rate:conv_rate_plus_val2", + ] + returned_features = store.get_online_features( + 
features=features_to_fetch, + entity_rows=entity_rows, + ).to_dict() + for key, value in sorted(returned_features.items()): + print(key, " : ", value) + + +if __name__ == "__main__": + run_demo() diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index ae99962f67b..07cd01ab703 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -336,6 +336,8 @@ h11==0.14.0 # uvicorn happybase==1.2.0 # via feast (setup.py) +hazelcast-python-client==5.1 + # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index b1476660b68..a14e731d73f 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -340,6 +340,8 @@ h11==0.14.0 # uvicorn happybase==1.2.0 # via feast (setup.py) +hazelcast-python-client==5.1 + # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 9b8dbf9353d..77308d5ec89 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -336,6 +336,8 @@ h11==0.14.0 # uvicorn happybase==1.2.0 # via feast (setup.py) +hazelcast-python-client==5.1 + # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py new file mode 100644 index 00000000000..65d74135ae9 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py @@ -0,0 +1,48 @@ +import logging +import random +import string +from typing import Any, Dict + +from 
testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class HazelcastOnlineStoreCreator(OnlineStoreCreator): + + cluster_name: str = "" + container: DockerContainer = None + + def __init__(self, project_name: str, **kwargs): + logging.getLogger("hazelcast").setLevel(logging.ERROR) + super().__init__(project_name) + self.cluster_name = "".join( + random.choice(string.ascii_lowercase) for _ in range(5) + ) + self.container = ( + DockerContainer("hazelcast/hazelcast") + .with_env("HZ_CLUSTERNAME", self.cluster_name) + .with_env("HZ_NETWORK_PORT_AUTOINCREMENT", "true") + .with_exposed_ports(5701) + ) + + def create_online_store(self) -> Dict[str, Any]: + self.container.start() + cluster_member = ( + self.container.get_container_host_ip() + + ":" + + self.container.get_exposed_port(5701) + ) + log_string_to_wait_for = r"Cluster name: " + self.cluster_name + wait_for_logs(self.container, predicate=log_string_to_wait_for, timeout=10) + return { + "type": "hazelcast", + "cluster_name": self.cluster_name, + "cluster_members": [cluster_member], + } + + def teardown(self): + self.container.stop() diff --git a/setup.py b/setup.py index 09a02479cc9..3f361f82c36 100644 --- a/setup.py +++ b/setup.py @@ -137,6 +137,10 @@ "rockset>=1.0.3", ] +HAZELCAST_REQUIRED = [ + "hazelcast-python-client>=5.1", +] + CI_REQUIRED = ( [ "build", @@ -195,6 +199,7 @@ + CASSANDRA_REQUIRED + AZURE_REQUIRED + ROCKSET_REQUIRED + + HAZELCAST_REQUIRED ) @@ -361,6 +366,7 @@ def run(self): "hbase": HBASE_REQUIRED, "docs": DOCS_REQUIRED, "cassandra": CASSANDRA_REQUIRED, + "hazelcast": HAZELCAST_REQUIRED, }, include_package_data=True, license="Apache", From 3cefd6cf806997be4ea8427bcf4aa9852d6ce038 Mon Sep 17 00:00:00 2001 From: Kevin Loftis Date: Sun, 26 Mar 2023 09:32:46 -0700 Subject: [PATCH 05/31] fix: Add StreamFeatureViewSpec to 
FeastObjectSpecProto convenience type (#3550) * add streamfeatureview to convenience type Signed-off-by: loftiskg * lint Signed-off-by: loftiskg --------- Signed-off-by: loftiskg --- sdk/python/feast/feast_object.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sdk/python/feast/feast_object.py b/sdk/python/feast/feast_object.py index 38109f5d8cf..7cccf26455f 100644 --- a/sdk/python/feast/feast_object.py +++ b/sdk/python/feast/feast_object.py @@ -12,6 +12,7 @@ from .protos.feast.core.FeatureView_pb2 import FeatureViewSpec from .protos.feast.core.OnDemandFeatureView_pb2 import OnDemandFeatureViewSpec from .protos.feast.core.RequestFeatureView_pb2 import RequestFeatureViewSpec +from .protos.feast.core.StreamFeatureView_pb2 import StreamFeatureViewSpec from .protos.feast.core.ValidationProfile_pb2 import ( ValidationReference as ValidationReferenceProto, ) @@ -36,6 +37,7 @@ FeatureViewSpec, OnDemandFeatureViewSpec, RequestFeatureViewSpec, + StreamFeatureViewSpec, EntitySpecV2, FeatureServiceSpec, DataSourceProto, From bf86bd0b3e197a1591d20b5596f37f30febb5815 Mon Sep 17 00:00:00 2001 From: Madhavan Date: Sun, 26 Mar 2023 12:44:40 -0400 Subject: [PATCH 06/31] docs: Trivial updates to documentation for better clarity (#3533) * Trivial URL addition Signed-off-by: Madhavan Sridharan * Trivial URL addition Signed-off-by: Madhavan Sridharan --------- Signed-off-by: Madhavan Sridharan --- docs/reference/online-stores/cassandra.md | 2 +- docs/reference/online-stores/overview.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/online-stores/cassandra.md b/docs/reference/online-stores/cassandra.md index 30514305b6a..61659ba7a2d 100644 --- a/docs/reference/online-stores/cassandra.md +++ b/docs/reference/online-stores/cassandra.md @@ -2,7 +2,7 @@ ## Description -The [Cassandra / Astra DB] online store provides support for materializing feature values into an Apache Cassandra / Astra DB database for online features. 
+The [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] online store provides support for materializing feature values into an Apache Cassandra / Astra DB database for online features. * The whole project is contained within a Cassandra keyspace * Each feature view is mapped one-to-one to a specific Cassandra table diff --git a/docs/reference/online-stores/overview.md b/docs/reference/online-stores/overview.md index 981a1aeeed0..7a51a9a4687 100644 --- a/docs/reference/online-stores/overview.md +++ b/docs/reference/online-stores/overview.md @@ -34,7 +34,7 @@ Details for each specific online store, such as how to configure it in a `featur Below is a matrix indicating which online stores support what functionality. -| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | Cassandra | +| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] | | :-------------------------------------------------------- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | | write feature values to the online store | yes | yes | yes | yes | yes | yes | yes | yes | | read feature values from the online store | yes | yes | yes | yes | yes | yes | yes | yes | From 89247953b08bf179ee8ebc2470f080b9e9a8f3f8 Mon Sep 17 00:00:00 2001 From: Dan Lin Date: Tue, 28 Mar 2023 14:14:36 -0700 Subject: [PATCH 07/31] docs: Rockset Docs typos (#3566) fix: Rockset Docs typos Fix up malformed feature_store.yaml example. 
Signed-off-by: Daniel Lin --- docs/reference/online-stores/rockset.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/online-stores/rockset.md b/docs/reference/online-stores/rockset.md index 523bf9a9a12..082bddf37b9 100644 --- a/docs/reference/online-stores/rockset.md +++ b/docs/reference/online-stores/rockset.md @@ -31,14 +31,14 @@ Data Model Used Per Doc project: my_feature_app registry: data/registry.db provider: local -online_stores +online_store: ## Basic Configs ## # If apikey or host is left blank the driver will try to pull # these values from environment variables ROCKSET_APIKEY and # ROCKSET_APISERVER respectively. type: rockset - apikey: + api_key: host: ## Advanced Configs ## From 8dc64f7b67656024a83295551a96dc19b3d70bca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 14:20:34 -0700 Subject: [PATCH 08/31] chore: Bump redis from 4.2.2 to 4.5.3 in /sdk/python/requirements (#3570) Bumps [redis](https://github.com/redis/redis-py) from 4.2.2 to 4.5.3. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.2.2...v4.5.3) --- updated-dependencies: - dependency-name: redis dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 07cd01ab703..67a9de4aa24 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -782,7 +782,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.2.2 +redis==4.5.3 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index a14e731d73f..842359f89ea 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -795,7 +795,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.2.2 +redis==4.5.3 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 77308d5ec89..4966a59d7ed 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -786,7 +786,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.2.2 +redis==4.5.3 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) From 8b90e2ff044143518870712996f820de10ac5e16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Apr 2023 21:02:26 -0700 Subject: [PATCH 09/31] chore: Bump redis from 4.5.3 to 4.5.4 in /sdk/python/requirements (#3579) Bumps [redis](https://github.com/redis/redis-py) from 4.5.3 to 4.5.4. 
- [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.5.3...v4.5.4) --- updated-dependencies: - dependency-name: redis dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- sdk/python/requirements/py3.10-ci-requirements.txt | 2 +- sdk/python/requirements/py3.8-ci-requirements.txt | 2 +- sdk/python/requirements/py3.9-ci-requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 67a9de4aa24..0f75a6b2b27 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -782,7 +782,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.5.3 +redis==4.5.4 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 842359f89ea..8d0db8dc4b4 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -795,7 +795,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.5.3 +redis==4.5.4 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index 4966a59d7ed..b9f8ea97077 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -786,7 +786,7 @@ pyzmq==25.0.0 # jupyter-server # nbclassic # notebook -redis==4.5.3 +redis==4.5.4 # via feast (setup.py) regex==2022.10.31 # via feast (setup.py) From f8d3890f9f049c4b9190456b071e0fdb29aae69e Mon Sep 17 00:00:00 2001 
From: Adam Schmidt Date: Tue, 4 Apr 2023 01:58:46 +1000 Subject: [PATCH 10/31] fix: Snowflake remote storage (#3574) * fix: Snowflake remote storage Signed-off-by: adamschmidt * fix: lint Signed-off-by: adamschmidt * fix: field string build Signed-off-by: adamschmidt * fix: join typo Signed-off-by: adamschmidt * fix: formatting Signed-off-by: adamschmidt --------- Signed-off-by: adamschmidt --- .../feast/infra/offline_stores/snowflake.py | 30 +++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 404927146a7..1dc18256fa4 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -109,6 +109,9 @@ class SnowflakeOfflineStoreConfig(FeastConfigBaseModel): blob_export_location: Optional[str] = None """ Location (in S3, Google storage or Azure storage) where data is offloaded """ + convert_timestamp_columns: Optional[bool] = None + """ Convert timestamp columns on export to a Parquet-supported format """ + class Config: allow_population_by_field_name = True @@ -152,6 +155,29 @@ def pull_latest_from_table_or_query( + '"' ) + if config.offline_store.convert_timestamp_columns: + select_fields = list( + map( + lambda field_name: f'"{field_name}"', + join_key_columns + feature_name_columns, + ) + ) + select_timestamps = list( + map( + lambda field_name: f"to_varchar({field_name}, 'YYYY-MM-DD\"T\"HH24:MI:SS.FFTZH:TZM') as {field_name}", + timestamp_columns, + ) + ) + inner_field_string = ", ".join(select_fields + select_timestamps) + else: + select_fields = list( + map( + lambda field_name: f'"{field_name}"', + join_key_columns + feature_name_columns + timestamp_columns, + ) + ) + inner_field_string = ", ".join(select_fields) + if data_source.snowflake_options.warehouse: config.offline_store.warehouse = data_source.snowflake_options.warehouse @@ -166,7 +192,7 @@ def 
pull_latest_from_table_or_query( {field_string} {f''', TRIM({repr(DUMMY_ENTITY_VAL)}::VARIANT,'"') AS "{DUMMY_ENTITY_ID}"''' if not join_key_columns else ""} FROM ( - SELECT {field_string}, + SELECT {inner_field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS "_feast_row" FROM {from_expression} WHERE "{timestamp_field}" BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' @@ -533,7 +559,7 @@ def to_remote_storage(self) -> List[str]: self.to_snowflake(table) query = f""" - COPY INTO '{self.config.offline_store.blob_export_location}/{table}' FROM "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n + COPY INTO '{self.export_path}/{table}' FROM "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n STORAGE_INTEGRATION = {self.config.offline_store.storage_integration_name}\n FILE_FORMAT = (TYPE = PARQUET) DETAILED_OUTPUT = TRUE From 09f0e7e1011fc451b3bfb94c4b7764007fc69836 Mon Sep 17 00:00:00 2001 From: Jiwon Park Date: Tue, 4 Apr 2023 02:34:58 +0900 Subject: [PATCH 11/31] fix: Wrap the bigquery table name with backtick. 
(#3577) fix: Add escape the table name Signed-off-by: Jiwon Park --- sdk/python/feast/infra/offline_stores/bigquery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 7871cea02c8..973eddc7fb7 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -503,7 +503,7 @@ def to_bigquery( temp_dest_table = f"{tmp_dest['projectId']}.{tmp_dest['datasetId']}.{tmp_dest['tableId']}" # persist temp table - sql = f"CREATE TABLE {dest} AS SELECT * FROM {temp_dest_table}" + sql = f"CREATE TABLE `{dest}` AS SELECT * FROM {temp_dest_table}" self._execute_query(sql, timeout=timeout) print(f"Done writing to '{dest}'.") @@ -663,7 +663,7 @@ def _upload_entity_df( job: Union[bigquery.job.query.QueryJob, bigquery.job.load.LoadJob] if isinstance(entity_df, str): - job = client.query(f"CREATE TABLE {table_name} AS ({entity_df})") + job = client.query(f"CREATE TABLE `{table_name}` AS ({entity_df})") elif isinstance(entity_df, pd.DataFrame): # Drop the index so that we don't have unnecessary columns From 2c04ec175f9155c906f90502bffe1bd5a5619ddb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Diego=20Fern=C3=A1ndez=20Gil?= <33923877+diefergil@users.noreply.github.com> Date: Mon, 3 Apr 2023 23:45:13 +0200 Subject: [PATCH 12/31] docs: Fix the documentation to run the tutorial Validating historical features with Great Expectations (#3540) Fix the documentation to run the tutorial Signed-off-by: Diego <33923877+diefergil@users.noreply.github.com> --- .../validating-historical-features.md | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/tutorials/validating-historical-features.md b/docs/tutorials/validating-historical-features.md index 70be38eced2..03baccfbc9e 100644 --- a/docs/tutorials/validating-historical-features.md +++ 
b/docs/tutorials/validating-historical-features.md @@ -136,8 +136,8 @@ taxi_entity = Entity(name='taxi', join_keys=['taxi_id']) ```python trips_stats_fv = BatchFeatureView( name='trip_stats', - entities=['taxi'], - features=[ + entities=[taxi_entity], + schema=[ Field(name="total_miles_travelled", dtype=Float64), Field(name="total_trip_seconds", dtype=Float64), Field(name="total_earned", dtype=Float64), @@ -154,17 +154,17 @@ trips_stats_fv = BatchFeatureView( ```python @on_demand_feature_view( - schema=[ - Field("avg_fare", Float64), - Field("avg_speed", Float64), - Field("avg_trip_seconds", Float64), - Field("earned_per_hour", Float64), - ], sources=[ trips_stats_fv, + ], + schema=[ + Field(name="avg_fare", dtype=Float64), + Field(name="avg_speed", dtype=Float64), + Field(name="avg_trip_seconds", dtype=Float64), + Field(name="earned_per_hour", dtype=Float64), ] ) -def on_demand_stats(inp): +def on_demand_stats(inp: pd.DataFrame) -> pd.DataFrame: out = pd.DataFrame() out["avg_fare"] = inp["total_earned"] / inp["trip_count"] out["avg_speed"] = 3600 * inp["total_miles_travelled"] / inp["total_trip_seconds"] @@ -647,7 +647,7 @@ Now we can create validation reference from dataset and profiler function: ```python -validation_reference = ds.as_reference(profiler=stats_profiler) +validation_reference = ds.as_reference(name="validation_reference_dataset", profiler=stats_profiler) ``` and test it against our existing retrieval job From 6c09c39b64e31dc6e84be566524d6126683f3013 Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Tue, 11 Apr 2023 16:18:30 -0700 Subject: [PATCH 13/31] fix: Fix Snowflake template (#3584) Signed-off-by: Miles Adkins --- sdk/python/feast/templates/snowflake/test_workflow.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/templates/snowflake/test_workflow.py b/sdk/python/feast/templates/snowflake/test_workflow.py index b121f229802..3c443428811 100644 --- 
a/sdk/python/feast/templates/snowflake/test_workflow.py +++ b/sdk/python/feast/templates/snowflake/test_workflow.py @@ -11,11 +11,12 @@ def run_demo(): - store = FeatureStore(repo_path="./feature_repo") print("\n--- Run feast apply to setup feature store on Snowflake ---") command = "cd feature_repo; feast apply" subprocess.run(command, shell=True) + store = FeatureStore(repo_path="./feature_repo") + print("\n--- Historical features for training ---") fetch_historical_features_entity_df(store, for_batch_scoring=False) From 6dc1368afb66a4231b7513939a7cbf204ab4d46f Mon Sep 17 00:00:00 2001 From: davidschuler-8451 <83785852+davidschuler-8451@users.noreply.github.com> Date: Fri, 21 Apr 2023 11:19:59 -0400 Subject: [PATCH 14/31] fix: Fix bug with no SqlRegistryConfig class (#3586) * adding SqlRegistryConfig class Signed-off-by: davidschuler-8451 * refactor: move SqlRegistryConfig class to sql.py Signed-off-by: davidschuler-8451 * enabling SqlRegistry to accept RegistryConfig or SqlRegistryConfig Signed-off-by: davidschuler-8451 --------- Signed-off-by: davidschuler-8451 --- sdk/python/feast/infra/registry/sql.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/registry/sql.py b/sdk/python/feast/infra/registry/sql.py index 628b6d1e656..14a85e9ad9e 100644 --- a/sdk/python/feast/infra/registry/sql.py +++ b/sdk/python/feast/infra/registry/sql.py @@ -5,6 +5,7 @@ from threading import Lock from typing import Any, Callable, List, Optional, Set, Union +from pydantic import StrictStr from sqlalchemy import ( # type: ignore BigInteger, Column, @@ -178,10 +179,19 @@ class FeastMetadataKeys(Enum): ) +class SqlRegistryConfig(RegistryConfig): + registry_type: StrictStr = "sql" + """ str: Provider name or a class name that implements Registry.""" + + path: StrictStr = "" + """ str: Path to metadata store. 
+ If registry_type is 'sql', then this is a database URL as expected by SQLAlchemy """ + + class SqlRegistry(BaseRegistry): def __init__( self, - registry_config: Optional[RegistryConfig], + registry_config: Optional[Union[RegistryConfig, SqlRegistryConfig]], project: str, repo_path: Optional[Path], ): From 67acc0153da7f2e780b23d43b190d41808e0dc53 Mon Sep 17 00:00:00 2001 From: Dan Lin Date: Fri, 21 Apr 2023 08:20:53 -0700 Subject: [PATCH 15/31] ci: Add 'rockset' as feast pip install extra (#3578) Signed-off-by: Daniel Lin --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 3f361f82c36..a9155c8189a 100644 --- a/setup.py +++ b/setup.py @@ -367,6 +367,7 @@ def run(self): "docs": DOCS_REQUIRED, "cassandra": CASSANDRA_REQUIRED, "hazelcast": HAZELCAST_REQUIRED, + "rockset": ROCKSET_REQUIRED, }, include_package_data=True, license="Apache", From 6794338d0c9405a5a9ba7ef2b47de98cd905474e Mon Sep 17 00:00:00 2001 From: Adam Schmidt Date: Sat, 22 Apr 2023 01:23:13 +1000 Subject: [PATCH 16/31] fix: Bytewax materializer security context (#3573) * fix: Bytewax materializer security context Signed-off-by: adamschmidt * fix: lint Signed-off-by: adamschmidt * chore: docs Signed-off-by: adamschmidt --------- Signed-off-by: adamschmidt --- .../batch-materialization/bytewax.md | 6 ++-- .../bytewax/bytewax_materialization_engine.py | 29 ++++++++++++++----- 2 files changed, 25 insertions(+), 10 deletions(-) diff --git a/docs/reference/batch-materialization/bytewax.md b/docs/reference/batch-materialization/bytewax.md index 2e28937f50e..6a97bd391db 100644 --- a/docs/reference/batch-materialization/bytewax.md +++ b/docs/reference/batch-materialization/bytewax.md @@ -58,6 +58,7 @@ batch_engine: image_pull_secrets: - my_container_secret service_account_name: my-k8s-service-account + include_security_context_capabilities: false annotations: # example annotation you might include if running on AWS EKS iam.amazonaws.com/role: 
arn:aws:iam:::role/MyBytewaxPlatformRole @@ -73,8 +74,9 @@ batch_engine: **Notes:** * The `namespace` configuration directive specifies which Kubernetes [namespace](https://kubernetes.io/docs/concepts/overview/working-with-objects/namespaces/) jobs, services and configuration maps will be created in. -* The `image_pull_secrets` configuration directive specifies the pre-configured secret to use when pulling the image container from your registry -* The `service_account_name` specifies which Kubernetes service account to run the job under +* The `image_pull_secrets` configuration directive specifies the pre-configured secret to use when pulling the image container from your registry. +* The `service_account_name` specifies which Kubernetes service account to run the job under. +* The `include_security_context_capabilities` flag indicates whether or not `"add": ["NET_BIND_SERVICE"]` and `"drop": ["ALL"]` are included in the job & pod security context capabilities. * `annotations` allows you to include additional Kubernetes annotations to the job. This is particularly useful for IAM roles which grant the running pod access to cloud platform resources (for example). * The `resources` configuration directive sets the standard Kubernetes [resource requests](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) for the job containers to utilise when materializing data. diff --git a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py index 991eafa641c..b222128bbbe 100644 --- a/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py +++ b/sdk/python/feast/infra/materialization/contrib/bytewax/bytewax_materialization_engine.py @@ -58,6 +58,9 @@ class BytewaxMaterializationEngineConfig(FeastConfigBaseModel): annotations: dict = {} """ (optional) Annotations to apply to the job container. 
Useful for linking the service account to IAM roles, operational metadata, etc """ + include_security_context_capabilities: bool = True + """ (optional) Include security context capabilities in the init and job container spec """ + class BytewaxMaterializationEngine(BatchMaterializationEngine): def __init__( @@ -198,6 +201,9 @@ def _create_configuration_map(self, job_id, paths, feature_view, namespace): "apiVersion": "v1", "metadata": { "name": f"feast-{job_id}", + "labels": { + "feast-bytewax-materializer": "configmap", + }, }, "data": { "feature_store.yaml": feature_store_configuration, @@ -247,12 +253,22 @@ def _create_job_definition(self, job_id, namespace, pods, env): # Add any Feast configured environment variables job_env.extend(env) + securityContextCapabilities = None + if self.batch_engine_config.include_security_context_capabilities: + securityContextCapabilities = { + "add": ["NET_BIND_SERVICE"], + "drop": ["ALL"], + } + job_definition = { "apiVersion": "batch/v1", "kind": "Job", "metadata": { "name": f"dataflow-{job_id}", "namespace": namespace, + "labels": { + "feast-bytewax-materializer": "job", + }, }, "spec": { "ttlSecondsAfterFinished": 3600, @@ -262,6 +278,9 @@ def _create_job_definition(self, job_id, namespace, pods, env): "template": { "metadata": { "annotations": self.batch_engine_config.annotations, + "labels": { + "feast-bytewax-materializer": "pod", + }, }, "spec": { "restartPolicy": "Never", @@ -282,10 +301,7 @@ def _create_job_definition(self, job_id, namespace, pods, env): "resources": {}, "securityContext": { "allowPrivilegeEscalation": False, - "capabilities": { - "add": ["NET_BIND_SERVICE"], - "drop": ["ALL"], - }, + "capabilities": securityContextCapabilities, "readOnlyRootFilesystem": True, }, "terminationMessagePath": "/dev/termination-log", @@ -320,10 +336,7 @@ def _create_job_definition(self, job_id, namespace, pods, env): "resources": self.batch_engine_config.resources, "securityContext": { "allowPrivilegeEscalation": False, - 
"capabilities": { - "add": ["NET_BIND_SERVICE"], - "drop": ["ALL"], - }, + "capabilities": securityContextCapabilities, "readOnlyRootFilesystem": False, }, "terminationMessagePath": "/dev/termination-log", From 7854f637160d4d1f4758b83e6c396fe49447e7b7 Mon Sep 17 00:00:00 2001 From: Kevin Loftis Date: Fri, 21 Apr 2023 08:23:46 -0700 Subject: [PATCH 17/31] fix: Add Stream Feature Views to helper that collect Feature View names (#3582) add sfv to helper that collects fvs Signed-off-by: Kevin Loftis --- sdk/python/feast/infra/registry/registry.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/registry/registry.py b/sdk/python/feast/infra/registry/registry.py index d2cf6a54ec0..6f571a13b0c 100644 --- a/sdk/python/feast/infra/registry/registry.py +++ b/sdk/python/feast/infra/registry/registry.py @@ -884,4 +884,7 @@ def _existing_feature_view_names_to_fvs(self) -> Dict[str, Message]: request_fvs = { fv.spec.name: fv for fv in self.cached_registry_proto.request_feature_views } - return {**odfvs, **fvs, **request_fvs} + sfv = { + fv.spec.name: fv for fv in self.cached_registry_proto.stream_feature_views + } + return {**odfvs, **fvs, **request_fvs, **sfv} From e2b03fd93d87701dd4467df7376bd1203ac4c904 Mon Sep 17 00:00:00 2001 From: Kevin Loftis Date: Fri, 21 Apr 2023 08:27:48 -0700 Subject: [PATCH 18/31] docs: Fix proto docstring typo (#3461) fix type Signed-off-by: Kevin Loftis Signed-off-by: Danny Chiao --- protos/feast/serving/ServingService.proto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto index a940b725025..0eef3cd883c 100644 --- a/protos/feast/serving/ServingService.proto +++ b/protos/feast/serving/ServingService.proto @@ -119,7 +119,7 @@ enum FieldStatus { PRESENT = 1; // Values could be found for entity key and age is within max age, but - // this field value is assigned a value on ingestion into feast. 
+ // this field value is not assigned a value on ingestion into feast. NULL_VALUE = 2; // Entity key did not return any values as they do not exist in Feast. From ffd50fd04b93f631091f433bb2c6f86c37534e21 Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 21 Apr 2023 09:35:22 -0700 Subject: [PATCH 19/31] docs: Add Snowflake registry boilerplate (#3583) Signed-off-by: Miles Adkins --- docs/reference/offline-stores/snowflake.md | 7 ++--- docs/reference/registry/snowflake.md | 30 ++++++++++++++++++++++ 2 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 docs/reference/registry/snowflake.md diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md index 4ac7f164363..77a144c5c47 100644 --- a/docs/reference/offline-stores/snowflake.md +++ b/docs/reference/offline-stores/snowflake.md @@ -7,7 +7,7 @@ The [Snowflake](https://trial.snowflake.com) offline store provides support for * Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. A Pandas dataframes will be uploaded to Snowflake as a temporary table in order to complete join operations. ## Getting started -In order to use this offline store, you'll need to run `pip install 'feast[snowflake]'`. 
If you're using a file based registry, then you'll also need to install the relevant cloud extra (`pip install 'feast[snowflake, CLOUD]'` where `CLOUD` is one of `aws`, `gcp`, `azure`) @@ -25,9 +25,10 @@ offline_store: account: snowflake_deployment.us-east-1 user: user_login password: user_password - role: sysadmin - warehouse: demo_wh + role: SYSADMIN + warehouse: COMPUTE_WH database: FEAST + schema: PUBLIC ``` {% endcode %} diff --git a/docs/reference/registry/snowflake.md b/docs/reference/registry/snowflake.md new file mode 100644 index 00000000000..31b0db95824 --- /dev/null +++ b/docs/reference/registry/snowflake.md @@ -0,0 +1,30 @@ +# Snowflake registry + +## Description + +The [Snowflake](https://trial.snowflake.com) registry provides support for storing the protobuf representation of your feature store objects (data sources, feature views, feature services, etc.). Because Snowflake is an ACID compliant database, this allows for changes to individual objects atomically. + +An example of how to configure this would be: + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +provider: local +registry: + registry_type: snowflake.registry + account: snowflake_deployment.us-east-1 + user: user_login + password: user_password + role: SYSADMIN + warehouse: COMPUTE_WH + database: FEAST + schema: PUBLIC + cache_ttl_seconds: 60 +offline_store: + ... +``` +{% endcode %} + +The full set of configuration options is available in [SnowflakeRegistryConfig](https://rtd.feast.dev/en/latest/#feast.infra.registry.snowflake.SnowflakeRegistryConfig). 
From ad4814643abd28d5b2e119b8ef46ddfdce77424a Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 21 Apr 2023 09:36:00 -0700 Subject: [PATCH 20/31] fix: Make snowflake to remote tables temporary (#3588) Signed-off-by: Miles Adkins --- sdk/python/feast/infra/offline_stores/snowflake.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 1dc18256fa4..847e0733810 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -556,7 +556,7 @@ def to_remote_storage(self) -> List[str]: ) table = f"temporary_{uuid.uuid4().hex}" - self.to_snowflake(table) + self.to_snowflake(table, temporary=True) query = f""" COPY INTO '{self.export_path}/{table}' FROM "{self.config.offline_store.database}"."{self.config.offline_store.schema_}"."{table}"\n From f9862b565b6c9019ec146871d2fb45590eb31576 Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 21 Apr 2023 11:56:38 -0500 Subject: [PATCH 21/31] =?UTF-8?q?fix:=20Batch=20Snowflake=20materializatio?= =?UTF-8?q?n=20queries=20to=20obey=20Snowpark=20100=20fea=E2=80=A6=20(#340?= =?UTF-8?q?6)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fix: Batch Snowflake materialization queries to obey Snowpark 100 feature limit Signed-off-by: miles.adkins Signed-off-by: miles.adkins --- .../infra/materialization/snowflake_engine.py | 96 ++++++++++++------- 1 file changed, 61 insertions(+), 35 deletions(-) diff --git a/sdk/python/feast/infra/materialization/snowflake_engine.py b/sdk/python/feast/infra/materialization/snowflake_engine.py index 8a63e008911..36c42cd390c 100644 --- a/sdk/python/feast/infra/materialization/snowflake_engine.py +++ b/sdk/python/feast/infra/materialization/snowflake_engine.py @@ -276,32 +276,65 
@@ def _materialize_one( fv_latest_values_sql = offline_job.to_sql() + if feature_view.entity_columns: + join_keys = [entity.name for entity in feature_view.entity_columns] + unique_entities = '"' + '", "'.join(join_keys) + '"' + + query = f""" + SELECT + COUNT(DISTINCT {unique_entities}) + FROM + {feature_view.batch_source.get_table_query_string()} + """ + + with GetSnowflakeConnection(self.repo_config.offline_store) as conn: + entities_to_write = conn.cursor().execute(query).fetchall()[0][0] + else: + entities_to_write = ( + 1 # entityless feature view has a placeholder entity + ) + if feature_view.batch_source.field_mapping is not None: fv_latest_mapped_values_sql = _run_snowflake_field_mapping( fv_latest_values_sql, feature_view.batch_source.field_mapping ) - fv_to_proto_sql = self.generate_snowflake_materialization_query( - self.repo_config, - fv_latest_mapped_values_sql, - feature_view, - project, - ) + features_full_list = feature_view.features + feature_batches = [ + features_full_list[i : i + 100] + for i in range(0, len(features_full_list), 100) + ] if self.repo_config.online_store.type == "snowflake.online": - self.materialize_to_snowflake_online_store( - self.repo_config, - fv_to_proto_sql, - feature_view, - project, - ) + rows_to_write = entities_to_write * len(features_full_list) else: - self.materialize_to_external_online_store( - self.repo_config, - fv_to_proto_sql, - feature_view, - tqdm_builder, - ) + rows_to_write = entities_to_write * len(feature_batches) + + with tqdm_builder(rows_to_write) as pbar: + for i, feature_batch in enumerate(feature_batches): + fv_to_proto_sql = self.generate_snowflake_materialization_query( + self.repo_config, + fv_latest_mapped_values_sql, + feature_view, + feature_batch, + project, + ) + + if self.repo_config.online_store.type == "snowflake.online": + self.materialize_to_snowflake_online_store( + self.repo_config, + fv_to_proto_sql, + feature_view, + project, + ) + pbar.update(entities_to_write * 
len(feature_batch)) + else: + self.materialize_to_external_online_store( + self.repo_config, + fv_to_proto_sql, + feature_view, + pbar, + ) return SnowflakeMaterializationJob( job_id=job_id, status=MaterializationJobStatus.SUCCEEDED @@ -316,6 +349,7 @@ def generate_snowflake_materialization_query( repo_config: RepoConfig, fv_latest_mapped_values_sql: str, feature_view: Union[BatchFeatureView, FeatureView], + feature_batch: list, project: str, ) -> str: @@ -338,7 +372,7 @@ def generate_snowflake_materialization_query( UDF serialization function. """ feature_sql_list = [] - for feature in feature_view.features: + for feature in feature_batch: feature_value_type_name = feature.dtype.to_value_type().name feature_sql = _convert_value_name_to_snowflake_udf( @@ -434,11 +468,8 @@ def materialize_to_snowflake_online_store( """ with GetSnowflakeConnection(repo_config.batch_engine) as conn: - query_id = execute_snowflake_statement(conn, query).sfqid + execute_snowflake_statement(conn, query).sfqid - click.echo( - f"Snowflake Query ID: {Style.BRIGHT + Fore.GREEN}{query_id}{Style.RESET_ALL}" - ) return None def materialize_to_external_online_store( @@ -446,7 +477,7 @@ def materialize_to_external_online_store( repo_config: RepoConfig, materialization_sql: str, feature_view: Union[StreamFeatureView, FeatureView], - tqdm_builder: Callable[[int], tqdm], + pbar: tqdm, ) -> None: feature_names = [feature.name for feature in feature_view.features] @@ -455,10 +486,6 @@ def materialize_to_external_online_store( query = materialization_sql cursor = execute_snowflake_statement(conn, query) for i, df in enumerate(cursor.fetch_pandas_batches()): - click.echo( - f"Snowflake: Processing Materialization ResultSet Batch #{i+1}" - ) - entity_keys = ( df["entity_key"].apply(EntityKeyProto.FromString).to_numpy() ) @@ -494,11 +521,10 @@ def materialize_to_external_online_store( ) ) - with tqdm_builder(len(rows_to_write)) as pbar: - self.online_store.online_write_batch( - repo_config, - 
feature_view, - rows_to_write, - lambda x: pbar.update(x), - ) + self.online_store.online_write_batch( + repo_config, + feature_view, + rows_to_write, + lambda x: pbar.update(x), + ) return None From 7e77382c6b75f514e18b683fef1495fa1fa87308 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Fri, 21 Apr 2023 12:57:48 -0400 Subject: [PATCH 22/31] feat: Relax aws extras requirements (#3585) * relax aws extras requirements boto3 1.20.23 was released in late 2021. There have been many releases since that downstream projects would reasonably want to use. s3fs is a particularly thorny dependency because each version of s3fs depends on a very narrow range of aiobotocore versions. s3fs (as opposed to pyarrow._s3fs) does not appear to be used directly for AWS related code. If users want to avail themself of pandas<-->s3 integration with s3fs they can still install it directly. Signed-off-by: Chris Burroughs * fixup: regen requirements Signed-off-by: Chris Burroughs --------- Signed-off-by: Chris Burroughs --- .../requirements/py3.10-ci-requirements.txt | 173 +++++++++--------- .../requirements/py3.8-ci-requirements.txt | 172 ++++++++--------- .../requirements/py3.9-ci-requirements.txt | 172 ++++++++--------- setup.py | 2 +- 4 files changed, 245 insertions(+), 274 deletions(-) diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 0f75a6b2b27..4cb250531f7 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -10,16 +10,10 @@ adal==1.2.7 # msrestazure adlfs==0.5.9 # via feast (setup.py) -aiobotocore==2.1.2 - # via s3fs aiohttp==3.8.4 # via # adlfs - # aiobotocore # gcsfs - # s3fs -aioitertools==0.11.0 - # via aiobotocore aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 @@ -83,19 +77,18 @@ babel==2.12.1 # via sphinx backcall==0.2.0 # via ipython -beautifulsoup4==4.11.2 +beautifulsoup4==4.12.1 # via nbconvert black==22.12.0 # via feast (setup.py) 
bleach==6.0.0 # via nbconvert -boto3==1.20.23 +boto3==1.26.106 # via # feast (setup.py) # moto -botocore==1.23.24 +botocore==1.29.106 # via - # aiobotocore # boto3 # moto # s3transfer @@ -105,13 +98,13 @@ build==0.10.0 # via # feast (setup.py) # pip-tools -bytewax==0.13.1 +bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.12.11 # via firebase-admin cachetools==5.3.0 # via google-auth -cassandra-driver==3.25.0 +cassandra-driver==3.26.0 # via feast (setup.py) certifi==2022.12.7 # via @@ -152,9 +145,9 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.1.2 +comm==0.1.3 # via ipykernel -coverage[toml]==7.2.1 +coverage[toml]==7.2.2 # via pytest-cov cryptography==35.0.0 # via @@ -170,9 +163,9 @@ cryptography==35.0.0 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2023.3.0 +dask==2023.3.2 # via feast (setup.py) -db-dtypes==1.0.5 +db-dtypes==1.1.1 # via google-cloud-bigquery debugpy==1.6.6 # via ipykernel @@ -201,13 +194,13 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.0 +exceptiongroup==1.1.1 # via pytest execnet==1.9.0 # via pytest-xdist executing==1.2.0 # via stack-data -fastapi==0.93.0 +fastapi==0.95.0 # via feast (setup.py) fastavro==1.7.3 # via @@ -215,7 +208,7 @@ fastavro==1.7.3 # pandavro fastjsonschema==2.16.3 # via nbformat -filelock==3.9.0 +filelock==3.10.7 # via # snowflake-connector-python # virtualenv @@ -236,7 +229,6 @@ fsspec==2022.1.0 # adlfs # dask # gcsfs - # s3fs gcsfs==2022.1.0 # via feast (setup.py) geojson==2.5.0 @@ -255,9 +247,9 @@ google-api-core[grpc]==2.11.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.80.0 +google-api-python-client==2.84.0 # via firebase-admin -google-auth==2.16.2 +google-auth==2.17.2 # via # gcsfs # google-api-core @@ -271,9 +263,9 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==1.0.0 # via gcsfs -google-cloud-bigquery[pandas]==3.6.0 
+google-cloud-bigquery[pandas]==3.9.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.19.0 +google-cloud-bigquery-storage==2.19.1 # via feast (setup.py) google-cloud-bigtable==2.17.0 # via feast (setup.py) @@ -284,11 +276,11 @@ google-cloud-core==2.3.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.15.0 +google-cloud-datastore==2.15.1 # via feast (setup.py) -google-cloud-firestore==2.10.0 +google-cloud-firestore==2.11.0 # via firebase-admin -google-cloud-storage==2.7.0 +google-cloud-storage==2.8.0 # via # feast (setup.py) # firebase-admin @@ -299,7 +291,7 @@ google-resumable-media==2.4.1 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.58.0 +googleapis-common-protos[grpc]==1.59.0 # via # feast (setup.py) # google-api-core @@ -311,7 +303,7 @@ greenlet==2.0.2 # via sqlalchemy grpc-google-iam-v1==0.12.6 # via google-cloud-bigtable -grpcio==1.51.3 +grpcio==1.53.0 # via # feast (setup.py) # google-api-core @@ -322,13 +314,13 @@ grpcio==1.51.3 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.51.3 +grpcio-reflection==1.53.0 # via feast (setup.py) -grpcio-status==1.51.3 +grpcio-status==1.53.0 # via google-api-core -grpcio-testing==1.51.3 +grpcio-testing==1.53.0 # via feast (setup.py) -grpcio-tools==1.51.3 +grpcio-tools==1.53.0 # via feast (setup.py) h11==0.14.0 # via @@ -336,13 +328,13 @@ h11==0.14.0 # uvicorn happybase==1.2.0 # via feast (setup.py) -hazelcast-python-client==5.1 +hazelcast-python-client==5.2.0 # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 # via httpx -httplib2==0.21.0 +httplib2==0.22.0 # via # google-api-python-client # google-auth-httplib2 @@ -350,7 +342,7 @@ httptools==0.5.0 # via uvicorn httpx==0.23.3 # via feast (setup.py) -identify==2.5.19 +identify==2.5.22 # via pre-commit idna==3.4 # via @@ -362,16 +354,18 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==6.0.0 - # via 
great-expectations +importlib-metadata==6.1.0 + # via + # dask + # great-expectations iniconfig==2.0.0 # via pytest -ipykernel==6.21.3 +ipykernel==6.22.0 # via # ipywidgets # nbclassic # notebook -ipython==8.11.0 +ipython==8.12.0 # via # great-expectations # ipykernel @@ -380,7 +374,7 @@ ipython-genutils==0.2.0 # via # nbclassic # notebook -ipywidgets==8.0.4 +ipywidgets==8.0.6 # via great-expectations isodate==0.6.1 # via @@ -403,7 +397,7 @@ jinja2==3.1.2 # nbconvert # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.1 # via # boto3 # botocore @@ -420,14 +414,14 @@ jsonschema[format-nongpl]==4.17.3 # great-expectations # jupyter-events # nbformat -jupyter-client==8.0.3 +jupyter-client==8.1.0 # via # ipykernel # jupyter-server # nbclassic # nbclient # notebook -jupyter-core==5.2.0 +jupyter-core==5.3.0 # via # ipykernel # jupyter-client @@ -439,7 +433,7 @@ jupyter-core==5.2.0 # notebook jupyter-events==0.6.3 # via jupyter-server -jupyter-server==2.4.0 +jupyter-server==2.5.0 # via # nbclassic # notebook-shim @@ -447,7 +441,7 @@ jupyter-server-terminals==0.4.4 # via jupyter-server jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-widgets==3.0.5 +jupyterlab-widgets==3.0.7 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -474,13 +468,13 @@ mistune==2.0.5 # via # great-expectations # nbconvert -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==4.1.4 +moto==4.1.6 # via feast (setup.py) msal==1.21.0 # via @@ -512,16 +506,16 @@ mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 # via feast (setup.py) -nbclassic==0.5.3 +nbclassic==0.5.5 # via notebook -nbclient==0.7.2 +nbclient==0.7.3 # via nbconvert -nbconvert==7.2.9 +nbconvert==7.3.0 # via # jupyter-server # nbclassic # notebook -nbformat==5.7.3 +nbformat==5.8.0 # via # great-expectations # jupyter-server @@ -587,7 +581,7 @@ parso==0.8.3 # via jedi partd==1.3.0 # via dask -pathspec==0.11.0 +pathspec==0.11.1 # via black 
pbr==5.11.1 # via mock @@ -597,7 +591,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.12.3 # via feast (setup.py) -platformdirs==3.1.1 +platformdirs==3.2.0 # via # black # jupyter-core @@ -608,7 +602,7 @@ ply==3.11 # via thriftpy2 portalocker==2.7.0 # via msal-extensions -pre-commit==3.1.1 +pre-commit==3.2.2 # via feast (setup.py) prometheus-client==0.16.0 # via @@ -646,7 +640,7 @@ psutil==5.9.0 # via # feast (setup.py) # ipykernel -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.6 # via feast (setup.py) ptyprocess==0.7.0 # via @@ -680,7 +674,7 @@ pycparser==2.21 # via cffi pycryptodomex==3.17 # via snowflake-connector-python -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) @@ -700,7 +694,7 @@ pyjwt[crypto]==2.6.0 # snowflake-connector-python pymssql==2.2.7 # via feast (setup.py) -pymysql==1.0.2 +pymysql==1.0.3 # via feast (setup.py) pyodbc==4.0.35 # via feast (setup.py) @@ -740,7 +734,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.2.0 +pytest-xdist==3.2.1 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -758,7 +752,7 @@ python-dotenv==1.0.0 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2022.7.1 +pytz==2023.3 # via # great-expectations # pandas @@ -775,16 +769,16 @@ pyyaml==6.0 # pre-commit # responses # uvicorn -pyzmq==25.0.0 +pyzmq==25.0.2 # via # ipykernel # jupyter-client # jupyter-server # nbclassic # notebook -redis==4.5.4 +redis==4.2.2 # via feast (setup.py) -regex==2022.10.31 +regex==2023.3.23 # via feast (setup.py) requests==2.28.2 # via @@ -814,7 +808,7 @@ requests-oauthlib==1.3.1 # google-auth-oauthlib # kubernetes # msrest -responses==0.23.0 +responses==0.23.1 # via moto rfc3339-validator==0.1.4 # via @@ -832,9 +826,7 @@ rsa==4.9 # via google-auth ruamel-yaml==0.17.17 # via great-expectations -s3fs==2022.1.0 - # via feast (setup.py) -s3transfer==0.5.2 +s3transfer==0.6.0 # via boto3 scipy==1.10.1 # via great-expectations @@ -887,13 +879,13 @@ 
sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a33 # via sqlalchemy stack-data==0.6.2 # via ipython -starlette==0.25.0 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -962,27 +954,27 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-pymysql==1.0.19.5 +types-pymysql==1.0.19.6 # via feast (setup.py) -types-pyopenssl==23.0.0.4 +types-pyopenssl==23.1.0.1 # via types-redis -types-python-dateutil==2.8.19.10 +types-python-dateutil==2.8.19.12 # via feast (setup.py) -types-pytz==2022.7.1.2 +types-pytz==2023.3.0.0 # via feast (setup.py) -types-pyyaml==6.0.12.8 +types-pyyaml==6.0.12.9 # via # feast (setup.py) # responses -types-redis==4.5.1.4 +types-redis==4.5.4.1 # via feast (setup.py) -types-requests==2.28.11.15 +types-requests==2.28.11.17 # via feast (setup.py) -types-setuptools==67.6.0.0 +types-setuptools==67.6.0.7 # via feast (setup.py) -types-tabulate==0.9.0.1 +types-tabulate==0.9.0.2 # via feast (setup.py) -types-urllib3==1.26.25.8 +types-urllib3==1.26.25.10 # via types-requests typing-extensions==4.5.0 # via @@ -993,9 +985,9 @@ typing-extensions==4.5.0 # pydantic # snowflake-connector-python # sqlalchemy2-stubs -tzdata==2022.7 +tzdata==2023.3 # via pytz-deprecation-shim -tzlocal==4.2 +tzlocal==4.3 # via # great-expectations # trino @@ -1003,7 +995,7 @@ uri-template==1.2.0 # via jsonschema uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.14 +urllib3==1.26.15 # via # botocore # docker @@ -1015,19 +1007,19 @@ urllib3==1.26.14 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn -virtualenv==20.20.0 +virtualenv==20.21.0 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn wcwidth==0.2.6 
# via prompt-toolkit -webcolors==1.12 +webcolors==1.13 # via jsonschema webencodings==0.5.1 # via @@ -1038,17 +1030,16 @@ websocket-client==1.5.1 # docker # jupyter-server # kubernetes -websockets==10.4 +websockets==11.0 # via uvicorn werkzeug==2.2.3 # via moto -wheel==0.38.4 +wheel==0.40.0 # via pip-tools -widgetsnbextension==4.0.5 +widgetsnbextension==4.0.7 # via ipywidgets wrapt==1.15.0 # via - # aiobotocore # deprecated # testcontainers xmltodict==0.13.0 diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 8d0db8dc4b4..0f2fe05be8e 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -10,16 +10,10 @@ adal==1.2.7 # msrestazure adlfs==0.5.9 # via feast (setup.py) -aiobotocore==2.1.2 - # via s3fs aiohttp==3.8.4 # via # adlfs - # aiobotocore # gcsfs - # s3fs -aioitertools==0.11.0 - # via aiobotocore aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 @@ -87,19 +81,18 @@ backports-zoneinfo==0.2.1 # via # pytz-deprecation-shim # tzlocal -beautifulsoup4==4.11.2 +beautifulsoup4==4.12.1 # via nbconvert black==22.12.0 # via feast (setup.py) bleach==6.0.0 # via nbconvert -boto3==1.20.23 +boto3==1.26.106 # via # feast (setup.py) # moto -botocore==1.23.24 +botocore==1.29.106 # via - # aiobotocore # boto3 # moto # s3transfer @@ -109,13 +102,13 @@ build==0.10.0 # via # feast (setup.py) # pip-tools -bytewax==0.13.1 +bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.12.11 # via firebase-admin cachetools==5.3.0 # via google-auth -cassandra-driver==3.25.0 +cassandra-driver==3.26.0 # via feast (setup.py) certifi==2022.12.7 # via @@ -156,9 +149,9 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.1.2 +comm==0.1.3 # via ipykernel -coverage[toml]==7.2.1 +coverage[toml]==7.2.2 # via pytest-cov cryptography==35.0.0 # via @@ -174,9 +167,9 @@ cryptography==35.0.0 # snowflake-connector-python # types-pyopenssl # types-redis 
-dask==2023.3.0 +dask==2023.3.2 # via feast (setup.py) -db-dtypes==1.0.5 +db-dtypes==1.1.1 # via google-cloud-bigquery debugpy==1.6.6 # via ipykernel @@ -205,13 +198,13 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.0 +exceptiongroup==1.1.1 # via pytest execnet==1.9.0 # via pytest-xdist executing==1.2.0 # via stack-data -fastapi==0.94.0 +fastapi==0.95.0 # via feast (setup.py) fastavro==1.7.3 # via @@ -219,7 +212,7 @@ fastavro==1.7.3 # pandavro fastjsonschema==2.16.3 # via nbformat -filelock==3.9.0 +filelock==3.10.7 # via # snowflake-connector-python # virtualenv @@ -240,7 +233,6 @@ fsspec==2022.1.0 # adlfs # dask # gcsfs - # s3fs gcsfs==2022.1.0 # via feast (setup.py) geojson==2.5.0 @@ -259,9 +251,9 @@ google-api-core[grpc]==2.11.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.80.0 +google-api-python-client==2.84.0 # via firebase-admin -google-auth==2.16.2 +google-auth==2.17.2 # via # gcsfs # google-api-core @@ -275,9 +267,9 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==1.0.0 # via gcsfs -google-cloud-bigquery[pandas]==3.6.0 +google-cloud-bigquery[pandas]==3.9.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.19.0 +google-cloud-bigquery-storage==2.19.1 # via feast (setup.py) google-cloud-bigtable==2.17.0 # via feast (setup.py) @@ -288,11 +280,11 @@ google-cloud-core==2.3.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.15.0 +google-cloud-datastore==2.15.1 # via feast (setup.py) -google-cloud-firestore==2.10.0 +google-cloud-firestore==2.11.0 # via firebase-admin -google-cloud-storage==2.7.0 +google-cloud-storage==2.8.0 # via # feast (setup.py) # firebase-admin @@ -303,7 +295,7 @@ google-resumable-media==2.4.1 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.58.0 +googleapis-common-protos[grpc]==1.59.0 # via # feast (setup.py) # 
google-api-core @@ -315,7 +307,7 @@ greenlet==2.0.2 # via sqlalchemy grpc-google-iam-v1==0.12.6 # via google-cloud-bigtable -grpcio==1.51.3 +grpcio==1.53.0 # via # feast (setup.py) # google-api-core @@ -326,13 +318,13 @@ grpcio==1.51.3 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.51.3 +grpcio-reflection==1.53.0 # via feast (setup.py) -grpcio-status==1.51.3 +grpcio-status==1.53.0 # via google-api-core -grpcio-testing==1.51.3 +grpcio-testing==1.53.0 # via feast (setup.py) -grpcio-tools==1.51.3 +grpcio-tools==1.53.0 # via feast (setup.py) h11==0.14.0 # via @@ -340,13 +332,13 @@ h11==0.14.0 # uvicorn happybase==1.2.0 # via feast (setup.py) -hazelcast-python-client==5.1 +hazelcast-python-client==5.2.0 # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 # via httpx -httplib2==0.21.0 +httplib2==0.22.0 # via # google-api-python-client # google-auth-httplib2 @@ -354,7 +346,7 @@ httptools==0.5.0 # via uvicorn httpx==0.23.3 # via feast (setup.py) -identify==2.5.19 +identify==2.5.22 # via pre-commit idna==3.4 # via @@ -366,8 +358,9 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==6.0.0 +importlib-metadata==6.1.0 # via + # dask # great-expectations # jupyter-client # nbconvert @@ -376,12 +369,12 @@ importlib-resources==5.12.0 # via jsonschema iniconfig==2.0.0 # via pytest -ipykernel==6.21.3 +ipykernel==6.22.0 # via # ipywidgets # nbclassic # notebook -ipython==8.11.0 +ipython==8.12.0 # via # great-expectations # ipykernel @@ -390,7 +383,7 @@ ipython-genutils==0.2.0 # via # nbclassic # notebook -ipywidgets==8.0.4 +ipywidgets==8.0.6 # via great-expectations isodate==0.6.1 # via @@ -413,7 +406,7 @@ jinja2==3.1.2 # nbconvert # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.1 # via # boto3 # botocore @@ -430,14 +423,14 @@ jsonschema[format-nongpl]==4.17.3 # great-expectations # jupyter-events # nbformat -jupyter-client==8.0.3 +jupyter-client==8.1.0 # via # ipykernel # jupyter-server # nbclassic # nbclient # 
notebook -jupyter-core==5.2.0 +jupyter-core==5.3.0 # via # ipykernel # jupyter-client @@ -449,7 +442,7 @@ jupyter-core==5.2.0 # notebook jupyter-events==0.6.3 # via jupyter-server -jupyter-server==2.4.0 +jupyter-server==2.5.0 # via # nbclassic # notebook-shim @@ -457,7 +450,7 @@ jupyter-server-terminals==0.4.4 # via jupyter-server jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-widgets==3.0.5 +jupyterlab-widgets==3.0.7 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -484,13 +477,13 @@ mistune==2.0.5 # via # great-expectations # nbconvert -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -moto==4.1.4 +moto==4.1.6 # via feast (setup.py) msal==1.21.0 # via @@ -522,16 +515,16 @@ mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 # via feast (setup.py) -nbclassic==0.5.3 +nbclassic==0.5.5 # via notebook -nbclient==0.7.2 +nbclient==0.7.3 # via nbconvert -nbconvert==7.2.9 +nbconvert==7.3.0 # via # jupyter-server # nbclassic # notebook -nbformat==5.7.3 +nbformat==5.8.0 # via # great-expectations # jupyter-server @@ -597,7 +590,7 @@ parso==0.8.3 # via jedi partd==1.3.0 # via dask -pathspec==0.11.0 +pathspec==0.11.1 # via black pbr==5.11.1 # via mock @@ -609,7 +602,7 @@ pip-tools==6.12.3 # via feast (setup.py) pkgutil-resolve-name==1.3.10 # via jsonschema -platformdirs==3.1.1 +platformdirs==3.2.0 # via # black # jupyter-core @@ -620,7 +613,7 @@ ply==3.11 # via thriftpy2 portalocker==2.7.0 # via msal-extensions -pre-commit==3.1.1 +pre-commit==3.2.2 # via feast (setup.py) prometheus-client==0.16.0 # via @@ -658,7 +651,7 @@ psutil==5.9.0 # via # feast (setup.py) # ipykernel -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.6 # via feast (setup.py) ptyprocess==0.7.0 # via @@ -692,7 +685,7 @@ pycparser==2.21 # via cffi pycryptodomex==3.17 # via snowflake-connector-python -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) @@ -712,7 +705,7 @@ pyjwt[crypto]==2.6.0 # 
snowflake-connector-python pymssql==2.2.7 # via feast (setup.py) -pymysql==1.0.2 +pymysql==1.0.3 # via feast (setup.py) pyodbc==4.0.35 # via feast (setup.py) @@ -752,7 +745,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.2.0 +pytest-xdist==3.2.1 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -770,7 +763,7 @@ python-dotenv==1.0.0 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2022.7.1 +pytz==2023.3 # via # babel # great-expectations @@ -788,16 +781,16 @@ pyyaml==6.0 # pre-commit # responses # uvicorn -pyzmq==25.0.0 +pyzmq==25.0.2 # via # ipykernel # jupyter-client # jupyter-server # nbclassic # notebook -redis==4.5.4 +redis==4.2.2 # via feast (setup.py) -regex==2022.10.31 +regex==2023.3.23 # via feast (setup.py) requests==2.28.2 # via @@ -827,7 +820,7 @@ requests-oauthlib==1.3.1 # google-auth-oauthlib # kubernetes # msrest -responses==0.23.0 +responses==0.23.1 # via moto rfc3339-validator==0.1.4 # via @@ -847,9 +840,7 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.7 # via ruamel-yaml -s3fs==2022.1.0 - # via feast (setup.py) -s3transfer==0.5.2 +s3transfer==0.6.0 # via boto3 scipy==1.10.1 # via great-expectations @@ -902,13 +893,13 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a33 # via sqlalchemy stack-data==0.6.2 # via ipython -starlette==0.26.0.post1 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -977,43 +968,43 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-pymysql==1.0.19.5 +types-pymysql==1.0.19.6 # via feast (setup.py) -types-pyopenssl==23.0.0.4 +types-pyopenssl==23.1.0.1 # via types-redis -types-python-dateutil==2.8.19.10 +types-python-dateutil==2.8.19.12 # via feast (setup.py) -types-pytz==2022.7.1.2 
+types-pytz==2023.3.0.0 # via feast (setup.py) -types-pyyaml==6.0.12.8 +types-pyyaml==6.0.12.9 # via # feast (setup.py) # responses -types-redis==4.5.1.4 +types-redis==4.5.4.1 # via feast (setup.py) -types-requests==2.28.11.15 +types-requests==2.28.11.17 # via feast (setup.py) -types-setuptools==67.6.0.0 +types-setuptools==67.6.0.7 # via feast (setup.py) -types-tabulate==0.9.0.1 +types-tabulate==0.9.0.2 # via feast (setup.py) -types-urllib3==1.26.25.8 +types-urllib3==1.26.25.10 # via types-requests typing-extensions==4.5.0 # via - # aioitertools # azure-core # azure-storage-blob # black # great-expectations + # ipython # mypy # pydantic # snowflake-connector-python # sqlalchemy2-stubs # starlette -tzdata==2022.7 +tzdata==2023.3 # via pytz-deprecation-shim -tzlocal==4.2 +tzlocal==4.3 # via # great-expectations # trino @@ -1021,7 +1012,7 @@ uri-template==1.2.0 # via jsonschema uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.14 +urllib3==1.26.15 # via # botocore # docker @@ -1033,19 +1024,19 @@ urllib3==1.26.14 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn -virtualenv==20.20.0 +virtualenv==20.21.0 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn wcwidth==0.2.6 # via prompt-toolkit -webcolors==1.12 +webcolors==1.13 # via jsonschema webencodings==0.5.1 # via @@ -1056,17 +1047,16 @@ websocket-client==1.5.1 # docker # jupyter-server # kubernetes -websockets==10.4 +websockets==11.0 # via uvicorn werkzeug==2.2.3 # via moto -wheel==0.38.4 +wheel==0.40.0 # via pip-tools -widgetsnbextension==4.0.5 +widgetsnbextension==4.0.7 # via ipywidgets wrapt==1.15.0 # via - # aiobotocore # deprecated # testcontainers xmltodict==0.13.0 diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index b9f8ea97077..76d6c2dae46 100644 --- 
a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -10,16 +10,10 @@ adal==1.2.7 # msrestazure adlfs==0.5.9 # via feast (setup.py) -aiobotocore==2.1.2 - # via s3fs aiohttp==3.8.4 # via # adlfs - # aiobotocore # gcsfs - # s3fs -aioitertools==0.11.0 - # via aiobotocore aiosignal==1.3.1 # via aiohttp alabaster==0.7.13 @@ -83,19 +77,18 @@ babel==2.12.1 # via sphinx backcall==0.2.0 # via ipython -beautifulsoup4==4.11.2 +beautifulsoup4==4.12.1 # via nbconvert black==22.12.0 # via feast (setup.py) bleach==6.0.0 # via nbconvert -boto3==1.20.23 +boto3==1.26.106 # via # feast (setup.py) # moto -botocore==1.23.24 +botocore==1.29.106 # via - # aiobotocore # boto3 # moto # s3transfer @@ -105,13 +98,13 @@ build==0.10.0 # via # feast (setup.py) # pip-tools -bytewax==0.13.1 +bytewax==0.15.1 # via feast (setup.py) cachecontrol==0.12.11 # via firebase-admin cachetools==5.3.0 # via google-auth -cassandra-driver==3.25.0 +cassandra-driver==3.26.0 # via feast (setup.py) certifi==2022.12.7 # via @@ -152,9 +145,9 @@ colorama==0.4.6 # via # feast (setup.py) # great-expectations -comm==0.1.2 +comm==0.1.3 # via ipykernel -coverage[toml]==7.2.1 +coverage[toml]==7.2.2 # via pytest-cov cryptography==35.0.0 # via @@ -170,9 +163,9 @@ cryptography==35.0.0 # snowflake-connector-python # types-pyopenssl # types-redis -dask==2023.3.0 +dask==2023.3.2 # via feast (setup.py) -db-dtypes==1.0.5 +db-dtypes==1.1.1 # via google-cloud-bigquery debugpy==1.6.6 # via ipykernel @@ -201,13 +194,13 @@ docutils==0.19 # via sphinx entrypoints==0.4 # via altair -exceptiongroup==1.1.0 +exceptiongroup==1.1.1 # via pytest execnet==1.9.0 # via pytest-xdist executing==1.2.0 # via stack-data -fastapi==0.93.0 +fastapi==0.95.0 # via feast (setup.py) fastavro==1.7.3 # via @@ -215,7 +208,7 @@ fastavro==1.7.3 # pandavro fastjsonschema==2.16.3 # via nbformat -filelock==3.9.0 +filelock==3.10.7 # via # snowflake-connector-python # virtualenv @@ -236,7 +229,6 @@ 
fsspec==2022.1.0 # adlfs # dask # gcsfs - # s3fs gcsfs==2022.1.0 # via feast (setup.py) geojson==2.5.0 @@ -255,9 +247,9 @@ google-api-core[grpc]==2.11.0 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-api-python-client==2.80.0 +google-api-python-client==2.84.0 # via firebase-admin -google-auth==2.16.2 +google-auth==2.17.2 # via # gcsfs # google-api-core @@ -271,9 +263,9 @@ google-auth-httplib2==0.1.0 # via google-api-python-client google-auth-oauthlib==1.0.0 # via gcsfs -google-cloud-bigquery[pandas]==3.6.0 +google-cloud-bigquery[pandas]==3.9.0 # via feast (setup.py) -google-cloud-bigquery-storage==2.19.0 +google-cloud-bigquery-storage==2.19.1 # via feast (setup.py) google-cloud-bigtable==2.17.0 # via feast (setup.py) @@ -284,11 +276,11 @@ google-cloud-core==2.3.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.15.0 +google-cloud-datastore==2.15.1 # via feast (setup.py) -google-cloud-firestore==2.10.0 +google-cloud-firestore==2.11.0 # via firebase-admin -google-cloud-storage==2.7.0 +google-cloud-storage==2.8.0 # via # feast (setup.py) # firebase-admin @@ -299,7 +291,7 @@ google-resumable-media==2.4.1 # via # google-cloud-bigquery # google-cloud-storage -googleapis-common-protos[grpc]==1.58.0 +googleapis-common-protos[grpc]==1.59.0 # via # feast (setup.py) # google-api-core @@ -311,7 +303,7 @@ greenlet==2.0.2 # via sqlalchemy grpc-google-iam-v1==0.12.6 # via google-cloud-bigtable -grpcio==1.51.3 +grpcio==1.53.0 # via # feast (setup.py) # google-api-core @@ -322,13 +314,13 @@ grpcio==1.51.3 # grpcio-status # grpcio-testing # grpcio-tools -grpcio-reflection==1.51.3 +grpcio-reflection==1.53.0 # via feast (setup.py) -grpcio-status==1.51.3 +grpcio-status==1.53.0 # via google-api-core -grpcio-testing==1.51.3 +grpcio-testing==1.53.0 # via feast (setup.py) -grpcio-tools==1.51.3 +grpcio-tools==1.53.0 # via feast (setup.py) h11==0.14.0 # via @@ -336,13 +328,13 @@ h11==0.14.0 # uvicorn 
happybase==1.2.0 # via feast (setup.py) -hazelcast-python-client==5.1 +hazelcast-python-client==5.2.0 # via feast (setup.py) hiredis==2.2.2 # via feast (setup.py) httpcore==0.16.3 # via httpx -httplib2==0.21.0 +httplib2==0.22.0 # via # google-api-python-client # google-auth-httplib2 @@ -350,7 +342,7 @@ httptools==0.5.0 # via uvicorn httpx==0.23.3 # via feast (setup.py) -identify==2.5.19 +identify==2.5.22 # via pre-commit idna==3.4 # via @@ -362,20 +354,21 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==6.0.0 +importlib-metadata==6.1.0 # via + # dask # great-expectations # jupyter-client # nbconvert # sphinx iniconfig==2.0.0 # via pytest -ipykernel==6.21.3 +ipykernel==6.22.0 # via # ipywidgets # nbclassic # notebook -ipython==8.11.0 +ipython==8.12.0 # via # great-expectations # ipykernel @@ -384,7 +377,7 @@ ipython-genutils==0.2.0 # via # nbclassic # notebook -ipywidgets==8.0.4 +ipywidgets==8.0.6 # via great-expectations isodate==0.6.1 # via @@ -407,7 +400,7 @@ jinja2==3.1.2 # nbconvert # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.1 # via # boto3 # botocore @@ -424,14 +417,14 @@ jsonschema[format-nongpl]==4.17.3 # great-expectations # jupyter-events # nbformat -jupyter-client==8.0.3 +jupyter-client==8.1.0 # via # ipykernel # jupyter-server # nbclassic # nbclient # notebook -jupyter-core==5.2.0 +jupyter-core==5.3.0 # via # ipykernel # jupyter-client @@ -443,7 +436,7 @@ jupyter-core==5.2.0 # notebook jupyter-events==0.6.3 # via jupyter-server -jupyter-server==2.4.0 +jupyter-server==2.5.0 # via # nbclassic # notebook-shim @@ -451,7 +444,7 @@ jupyter-server-terminals==0.4.4 # via jupyter-server jupyterlab-pygments==0.2.2 # via nbconvert -jupyterlab-widgets==3.0.5 +jupyterlab-widgets==3.0.7 # via ipywidgets kubernetes==20.13.0 # via feast (setup.py) @@ -478,13 +471,13 @@ mistune==2.0.5 # via # great-expectations # nbconvert -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) moreorless==0.4.0 # via bowler 
-moto==4.1.4 +moto==4.1.6 # via feast (setup.py) msal==1.21.0 # via @@ -516,16 +509,16 @@ mypy-protobuf==3.1 # via feast (setup.py) mysqlclient==2.1.1 # via feast (setup.py) -nbclassic==0.5.3 +nbclassic==0.5.5 # via notebook -nbclient==0.7.2 +nbclient==0.7.3 # via nbconvert -nbconvert==7.2.9 +nbconvert==7.3.0 # via # jupyter-server # nbclassic # notebook -nbformat==5.7.3 +nbformat==5.8.0 # via # great-expectations # jupyter-server @@ -591,7 +584,7 @@ parso==0.8.3 # via jedi partd==1.3.0 # via dask -pathspec==0.11.0 +pathspec==0.11.1 # via black pbr==5.11.1 # via mock @@ -601,7 +594,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.12.3 # via feast (setup.py) -platformdirs==3.1.1 +platformdirs==3.2.0 # via # black # jupyter-core @@ -612,7 +605,7 @@ ply==3.11 # via thriftpy2 portalocker==2.7.0 # via msal-extensions -pre-commit==3.1.1 +pre-commit==3.2.2 # via feast (setup.py) prometheus-client==0.16.0 # via @@ -650,7 +643,7 @@ psutil==5.9.0 # via # feast (setup.py) # ipykernel -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.6 # via feast (setup.py) ptyprocess==0.7.0 # via @@ -684,7 +677,7 @@ pycparser==2.21 # via cffi pycryptodomex==3.17 # via snowflake-connector-python -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) @@ -704,7 +697,7 @@ pyjwt[crypto]==2.6.0 # snowflake-connector-python pymssql==2.2.7 # via feast (setup.py) -pymysql==1.0.2 +pymysql==1.0.3 # via feast (setup.py) pyodbc==4.0.35 # via feast (setup.py) @@ -744,7 +737,7 @@ pytest-ordering==0.6 # via feast (setup.py) pytest-timeout==1.4.2 # via feast (setup.py) -pytest-xdist==3.2.0 +pytest-xdist==3.2.1 # via feast (setup.py) python-dateutil==2.8.2 # via @@ -762,7 +755,7 @@ python-dotenv==1.0.0 # via uvicorn python-json-logger==2.0.7 # via jupyter-events -pytz==2022.7.1 +pytz==2023.3 # via # great-expectations # pandas @@ -779,16 +772,16 @@ pyyaml==6.0 # pre-commit # responses # uvicorn -pyzmq==25.0.0 +pyzmq==25.0.2 # via # ipykernel # jupyter-client # jupyter-server # nbclassic # 
notebook -redis==4.5.4 +redis==4.2.2 # via feast (setup.py) -regex==2022.10.31 +regex==2023.3.23 # via feast (setup.py) requests==2.28.2 # via @@ -818,7 +811,7 @@ requests-oauthlib==1.3.1 # google-auth-oauthlib # kubernetes # msrest -responses==0.23.0 +responses==0.23.1 # via moto rfc3339-validator==0.1.4 # via @@ -838,9 +831,7 @@ ruamel-yaml==0.17.17 # via great-expectations ruamel-yaml-clib==0.2.7 # via ruamel-yaml -s3fs==2022.1.0 - # via feast (setup.py) -s3transfer==0.5.2 +s3transfer==0.6.0 # via boto3 scipy==1.10.1 # via great-expectations @@ -893,13 +884,13 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a33 # via sqlalchemy stack-data==0.6.2 # via ipython -starlette==0.25.0 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -968,43 +959,43 @@ types-protobuf==3.19.22 # via # feast (setup.py) # mypy-protobuf -types-pymysql==1.0.19.5 +types-pymysql==1.0.19.6 # via feast (setup.py) -types-pyopenssl==23.0.0.4 +types-pyopenssl==23.1.0.1 # via types-redis -types-python-dateutil==2.8.19.10 +types-python-dateutil==2.8.19.12 # via feast (setup.py) -types-pytz==2022.7.1.2 +types-pytz==2023.3.0.0 # via feast (setup.py) -types-pyyaml==6.0.12.8 +types-pyyaml==6.0.12.9 # via # feast (setup.py) # responses -types-redis==4.5.1.4 +types-redis==4.5.4.1 # via feast (setup.py) -types-requests==2.28.11.15 +types-requests==2.28.11.17 # via feast (setup.py) -types-setuptools==67.6.0.0 +types-setuptools==67.6.0.7 # via feast (setup.py) -types-tabulate==0.9.0.1 +types-tabulate==0.9.0.2 # via feast (setup.py) -types-urllib3==1.26.25.8 +types-urllib3==1.26.25.10 # via types-requests typing-extensions==4.5.0 # via - # aioitertools # azure-core # azure-storage-blob # black # great-expectations + # ipython # mypy # pydantic # snowflake-connector-python # sqlalchemy2-stubs # starlette 
-tzdata==2022.7 +tzdata==2023.3 # via pytz-deprecation-shim -tzlocal==4.2 +tzlocal==4.3 # via # great-expectations # trino @@ -1012,7 +1003,7 @@ uri-template==1.2.0 # via jsonschema uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.14 +urllib3==1.26.15 # via # botocore # docker @@ -1024,19 +1015,19 @@ urllib3==1.26.14 # responses # rockset # snowflake-connector-python -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn -virtualenv==20.20.0 +virtualenv==20.21.0 # via pre-commit volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn wcwidth==0.2.6 # via prompt-toolkit -webcolors==1.12 +webcolors==1.13 # via jsonschema webencodings==0.5.1 # via @@ -1047,17 +1038,16 @@ websocket-client==1.5.1 # docker # jupyter-server # kubernetes -websockets==10.4 +websockets==11.0 # via uvicorn werkzeug==2.2.3 # via moto -wheel==0.38.4 +wheel==0.40.0 # via pip-tools -widgetsnbextension==4.0.5 +widgetsnbextension==4.0.7 # via ipywidgets wrapt==1.15.0 # via - # aiobotocore # deprecated # testcontainers xmltodict==0.13.0 diff --git a/setup.py b/setup.py index a9155c8189a..7218c1de04a 100644 --- a/setup.py +++ b/setup.py @@ -89,7 +89,7 @@ "hiredis>=2.0.0,<3", ] -AWS_REQUIRED = ["boto3>=1.17.0,<=1.20.23", "docker>=5.0.2", "s3fs>=0.4.0,<=2022.01.0"] +AWS_REQUIRED = ["boto3>=1.17.0,<2", "docker>=5.0.2"] BYTEWAX_REQUIRED = ["bytewax==0.15.1", "docker>=5.0.2", "kubernetes<=20.13.0"] From 01a98f08e9e6d0aebf41188f2644f49111ea4ca9 Mon Sep 17 00:00:00 2001 From: "Hai Nguyen (Harry)" Date: Fri, 21 Apr 2023 23:58:45 +0700 Subject: [PATCH 23/31] fix: Support param timeout when persisting (#3593) * fix: Support param timeout when persisting Signed-off-by: Hai Nguyen * fix: Revert default timeout value in `to_bigquery` Signed-off-by: Hai Nguyen --------- Signed-off-by: Hai Nguyen --- sdk/python/feast/infra/offline_stores/bigquery.py | 14 ++++++++++---- .../contrib/athena_offline_store/athena.py | 7 ++++++- 
.../contrib/mssql_offline_store/mssql.py | 7 ++++++- .../contrib/postgres_offline_store/postgres.py | 7 ++++++- .../contrib/spark_offline_store/spark.py | 7 ++++++- .../contrib/trino_offline_store/trino.py | 7 ++++++- sdk/python/feast/infra/offline_stores/file.py | 7 ++++++- .../feast/infra/offline_stores/offline_store.py | 7 ++++++- sdk/python/feast/infra/offline_stores/redshift.py | 7 ++++++- sdk/python/feast/infra/offline_stores/snowflake.py | 7 ++++++- .../infra/offline_stores/test_offline_store.py | 7 ++++++- 11 files changed, 70 insertions(+), 14 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 973eddc7fb7..770477f2515 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -456,8 +456,8 @@ def to_sql(self) -> str: def to_bigquery( self, job_config: Optional[bigquery.QueryJobConfig] = None, - timeout: int = 1800, - retry_cadence: int = 10, + timeout: Optional[int] = 1800, + retry_cadence: Optional[int] = 10, ) -> str: """ Synchronously executes the underlying query and exports the result to a BigQuery table. 
The @@ -530,11 +530,17 @@ def _execute_query( block_until_done(client=self.client, bq_job=bq_job, timeout=timeout or 1800) return bq_job - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetBigQueryStorage) self.to_bigquery( - bigquery.QueryJobConfig(destination=storage.bigquery_options.table) + bigquery.QueryJobConfig(destination=storage.bigquery_options.table), + timeout=timeout, ) @property diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py index 2e1fc0d983d..85a61106aaf 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/athena.py @@ -412,7 +412,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table: def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetAthenaStorage) self.to_athena(table_name=storage.athena_options.table) diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py index 5849105869a..849d5cc797f 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/mssql.py @@ -356,7 +356,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: ## Implements persist in Feast 0.18 - This persists to filestorage 
## ToDo: Persist to Azure Storage - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetFileStorage) filesystem, path = FileSource.create_filesystem_and_path( diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py index 837b9091e72..c2e95a8648e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -302,7 +302,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table: def metadata(self) -> Optional[RetrievalMetadata]: return self._metadata - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetPostgreSQLStorage) df_to_postgres_table( diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index f51bd810ea1..7574ac4865c 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -344,7 +344,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: """Return dataset as pyarrow Table synchronously""" return pyarrow.Table.from_pandas(self._to_df_internal(timeout=timeout)) - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: 
Optional[int] = None, + ): """ Run the retrieval and persist the results in the same offline store used for read. Please note the persisting is done only within the scope of the spark session. diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index 7a7afa1665b..e0f73404ebe 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -126,7 +126,12 @@ def to_trino( self._client.execute_query(query_text=query) return destination_table - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): """ Run the retrieval and persist the results in the same offline store used for read. """ diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index d6cce78bd4c..5e4107545f0 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -88,7 +88,12 @@ def _to_arrow_internal(self, timeout: Optional[int] = None): df = self.evaluation_function().compute() return pyarrow.Table.from_pandas(df) - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetFileStorage) # Check if the specified location already exists. 
diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index 27a98a120f4..6141e3c435b 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -189,7 +189,12 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]: pass @abstractmethod - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: bool = False, + timeout: Optional[int] = None, + ): """ Synchronously executes the underlying query and persists the result in the same offline store at the specified destination. diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index ffa30ba015e..35fd49f7460 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -476,7 +476,12 @@ def to_redshift(self, table_name: str) -> None: query, ) - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, + timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetRedshiftStorage) self.to_redshift(table_name=storage.redshift_options.table) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 847e0733810..d0bd9bd30a8 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -531,7 +531,12 @@ def to_spark_df(self, spark_session: "SparkSession") -> "DataFrame": else: raise InvalidSparkSessionException(spark_session) - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: Optional[bool] = False, 
+ timeout: Optional[int] = None, + ): assert isinstance(storage, SavedDatasetSnowflakeStorage) self.to_snowflake(table_name=storage.snowflake_options.table) diff --git a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py index 53e9d061ade..ef0cce04707 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_offline_store.py @@ -67,7 +67,12 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]: """Returns a list containing all the on demand feature views to be handled.""" pass - def persist(self, storage: SavedDatasetStorage, allow_overwrite: bool = False): + def persist( + self, + storage: SavedDatasetStorage, + allow_overwrite: bool = False, + timeout: Optional[int] = None, + ): """ Synchronously executes the underlying query and persists the result in the same offline store at the specified destination. From 58d63f7e6b1dde3dcd8893e4448940ea34e671cf Mon Sep 17 00:00:00 2001 From: "Hai Nguyen (Harry)" Date: Fri, 21 Apr 2023 23:59:39 +0700 Subject: [PATCH 24/31] feat: Show bigquery datasource table and query on UI (#3600) Signed-off-by: Hai Nguyen --- ui/package.json | 1 + .../BatchSourcePropertiesView.tsx | 19 + ui/yarn.lock | 350 +++++++++++++++++- 3 files changed, 364 insertions(+), 6 deletions(-) diff --git a/ui/package.json b/ui/package.json index 35cb893959e..f5d27a2f2b1 100644 --- a/ui/package.json +++ b/ui/package.json @@ -43,6 +43,7 @@ "prop-types": "^15.8.1", "protobufjs": "^7.1.1", "query-string": "^7.1.1", + "react-code-blocks": "^0.0.9-0", "react-query": "^3.34.12", "react-router-dom": "6", "react-scripts": "^5.0.0", diff --git a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx index c19e4ff50f8..b7cd3c90fc8 100644 --- a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx +++ 
b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx @@ -6,6 +6,7 @@ import { EuiFlexGroup, EuiFlexItem, } from "@elastic/eui"; +import { CopyBlock, atomOneDark } from "react-code-blocks"; import { feast } from "../../protos"; import { toDate } from "../../utils/timestamp"; @@ -61,6 +62,24 @@ const BatchSourcePropertiesView = (props: BatchSourcePropertiesViewProps) => { )} + {batchSource.bigqueryOptions && ( + + Source {batchSource.bigqueryOptions.table ? "Table" : "Query"} + {batchSource.bigqueryOptions.table ? ( + + {batchSource.bigqueryOptions.table} + + ) : + } + + + )} {batchSource.meta?.latestEventTimestamp && ( Latest Event diff --git a/ui/yarn.lock b/ui/yarn.lock index 72c8835ce2c..db6dacc906e 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -25,6 +25,13 @@ dependencies: "@babel/highlight" "^7.16.7" +"@babel/code-frame@^7.18.6", "@babel/code-frame@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39" + integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g== + dependencies: + "@babel/highlight" "^7.18.6" + "@babel/compat-data@^7.13.11", "@babel/compat-data@^7.16.4", "@babel/compat-data@^7.16.8": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.16.8.tgz#31560f9f29fdf1868de8cb55049538a1b9732a60" @@ -99,6 +106,23 @@ jsesc "^2.5.1" source-map "^0.5.0" +"@babel/generator@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.4.tgz#64a94b7448989f421f919d5239ef553b37bb26bc" + integrity sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA== + dependencies: + "@babel/types" "^7.21.4" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.16.0": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + "@babel/helper-annotate-as-pure@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862" @@ -166,6 +190,11 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + "@babel/helper-explode-assignable-expression@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a" @@ -182,6 +211,14 @@ "@babel/template" "^7.16.7" "@babel/types" "^7.16.7" +"@babel/helper-function-name@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz#d552829b10ea9f120969304023cd0645fa00b1b4" + integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== + dependencies: + "@babel/template" "^7.20.7" + "@babel/types" "^7.21.0" + "@babel/helper-get-function-arity@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz#ea08ac753117a669f1508ba06ebcc49156387419" @@ -196,6 +233,13 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + "@babel/helper-member-expression-to-functions@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.7.tgz#42b9ca4b2b200123c3b7e726b0ae5153924905b0" @@ -203,6 +247,13 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.16.0": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.21.4.tgz#ac88b2f76093637489e718a90cec6cf8a9b029af" + integrity sha512-orajc5T2PsRYUN3ZryCEFeMDYwyw09c/pZeaQEZPH0MpKzSvn3e0uXsDBu3k03VI+9DBiRo+l22BfKTpKwa/Wg== + dependencies: + "@babel/types" "^7.21.4" + "@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" @@ -277,11 +328,28 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" + integrity 
sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== + "@babel/helper-validator-identifier@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + "@babel/helper-validator-option@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" @@ -324,6 +392,15 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + "@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.10", "@babel/parser@^7.16.12", "@babel/parser@^7.16.7": version "7.16.12" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.12.tgz#9474794f9a650cf5e2f892444227f98e28cdf8b6" @@ -334,6 +411,11 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.3.tgz#b07702b982990bf6fdc1da5049a23fece4c5c3d0" integrity sha512-7yJPvPV+ESz2IUTPbOL+YkIGyCqOyNIzdguKQuJGnH7bg1WTIifuM21YqokFt/THWh1AkCRn9IgoykTRCBVpzA== +"@babel/parser@^7.20.7", 
"@babel/parser@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.4.tgz#94003fdfc520bbe2875d4ae557b43ddb6d880f17" + integrity sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw== + "@babel/parser@^7.9.4": version "7.19.0" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.19.0.tgz#497fcafb1d5b61376959c1c338745ef0577aa02c" @@ -1078,6 +1160,13 @@ dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.10.4", "@babel/runtime@^7.3.1": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.21.0.tgz#5b55c9d394e5fcf304909a8b00c07dc217b56673" + integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw== + dependencies: + regenerator-runtime "^0.13.11" + "@babel/template@^7.16.7", "@babel/template@^7.3.3": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" @@ -1087,6 +1176,15 @@ "@babel/parser" "^7.16.7" "@babel/types" "^7.16.7" +"@babel/template@^7.20.7": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" + integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.20.7" + "@babel/types" "^7.20.7" + "@babel/traverse@^7.13.0", "@babel/traverse@^7.16.10", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", "@babel/traverse@^7.7.2": version "7.16.10" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.16.10.tgz#448f940defbe95b5a8029975b051f75993e8239f" @@ -1119,6 +1217,22 @@ debug "^4.1.0" globals "^11.1.0" +"@babel/traverse@^7.4.5": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.4.tgz#a836aca7b116634e97a6ed99976236b3282c9d36" + integrity 
sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q== + dependencies: + "@babel/code-frame" "^7.21.4" + "@babel/generator" "^7.21.4" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.21.4" + "@babel/types" "^7.21.4" + debug "^4.1.0" + globals "^11.1.0" + "@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.16.0", "@babel/types@^7.16.7", "@babel/types@^7.16.8", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.16.8.tgz#0ba5da91dd71e0a4e7781a30f22770831062e3c1" @@ -1135,6 +1249,15 @@ "@babel/helper-validator-identifier" "^7.16.7" to-fast-properties "^2.0.0" +"@babel/types@^7.18.6", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.4.tgz#2d5d6bb7908699b3b416409ffd3b5daa25b030d4" + integrity sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA== + dependencies: + "@babel/helper-string-parser" "^7.19.4" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + "@base2/pretty-print-object@1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz#371ba8be66d556812dc7fb169ebc3c08378f69d4" @@ -1218,11 +1341,23 @@ resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== +"@emotion/is-prop-valid@^1.1.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.2.0.tgz#7f2d35c97891669f7e276eb71c83376a5dc44c83" + integrity 
sha512-3aDpDprjM0AwaxGE09bOPkNxHpBd+kA6jty3RnaEXdweX1DF1U3VQpPYb0g1IStAuK7SVQ1cy+bNBBKp4W3Fjg== + dependencies: + "@emotion/memoize" "^0.8.0" + "@emotion/memoize@^0.7.4": version "0.7.5" resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== +"@emotion/memoize@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.8.0.tgz#f580f9beb67176fa57aae70b08ed510e1b18980f" + integrity sha512-G/YwXTkv7Den9mXDO7AhLWkE3q+I92B+VqAE+dYG4NGPaHZGvt3G8Q0p9vmE+sq7rTGphUbAvmQ9YpbfMQGGlA== + "@emotion/react@^11.7.1": version "11.7.1" resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.7.1.tgz#3f800ce9b20317c13e77b8489ac4a0b922b2fe07" @@ -1252,7 +1387,12 @@ resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.0.tgz#56d99c41f0a1cda2726a05aa6a20afd4c63e58d2" integrity sha512-u0AX4aSo25sMAygCuQTzS+HsImZFuS8llY8O7b9MDRzbJM0kVJlAz6KNDqcG7pOuQZJmj/8X/rAW+66kMnMW+g== -"@emotion/unitless@^0.7.5": +"@emotion/stylis@^0.8.4": + version "0.8.5" + resolved "https://registry.yarnpkg.com/@emotion/stylis/-/stylis-0.8.5.tgz#deacb389bd6ee77d1e7fcaccce9e16c5c7e78e04" + integrity sha512-h6KtPihKFn3T9fuIrwvXXUOwlx3rfUvfZIcP5a6rh8Y7zjE3O06hT5Ss4S/YI1AYhuZ1kjaE/5EaOOI2NqSylQ== + +"@emotion/unitless@^0.7.4", "@emotion/unitless@^0.7.5": version "0.7.5" resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== @@ -1490,7 +1630,16 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@^3.0.3": +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.3" + resolved 
"https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": version "3.1.0" resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== @@ -1508,7 +1657,7 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/sourcemap-codec@^1.4.10": +"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== @@ -1521,6 +1670,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.17": + version "0.3.18" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + "@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -3285,6 +3442,22 @@ babel-plugin-polyfill-regenerator@^0.3.0: dependencies: "@babel/helper-define-polyfill-provider" "^0.3.1" +"babel-plugin-styled-components@>= 
1.12.0": + version "2.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-styled-components/-/babel-plugin-styled-components-2.1.1.tgz#cd977cc0ff8410d5cbfdd142e42576e9c8794b87" + integrity sha512-c8lJlszObVQPguHkI+akXv8+Jgb9Ccujx0EetL7oIvwU100LxO6XAGe45qry37wUL40a5U9f23SYrivro2XKhA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.0" + "@babel/helper-module-imports" "^7.16.0" + babel-plugin-syntax-jsx "^6.18.0" + lodash "^4.17.21" + picomatch "^2.3.0" + +babel-plugin-syntax-jsx@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" + integrity sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw== + babel-plugin-transform-react-remove-prop-types@^0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" @@ -3560,6 +3733,11 @@ camelcase@^6.2.0, camelcase@^6.2.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== +camelize@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.1.tgz#89b7e16884056331a35d6b5ad064332c91daa6c3" + integrity sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ== + caniuse-api@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" @@ -3734,6 +3912,15 @@ cli-width@^3.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== 
+clipboard@^2.0.0: + version "2.0.11" + resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.11.tgz#62180360b97dd668b6b3a84ec226975762a70be5" + integrity sha512-C+0bbOqkezLIsmWSvlsXS0Q0bmkugu7jcfMIACB+RDEntIzQIkdr148we28AfSloQLRdZlYL/QYyrq05j/3Faw== + dependencies: + good-listener "^1.2.2" + select "^1.1.2" + tiny-emitter "^2.0.0" + cliui@^7.0.2: version "7.0.4" resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" @@ -3995,6 +4182,11 @@ css-box-model@^1.2.0: dependencies: tiny-invariant "^1.0.6" +css-color-keywords@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/css-color-keywords/-/css-color-keywords-1.0.0.tgz#fea2616dc676b2962686b3af8dbdbe180b244e05" + integrity sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg== + css-declaration-sorter@^6.0.3: version "6.1.4" resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.1.4.tgz#b9bfb4ed9a41f8dcca9bf7184d849ea94a8294b4" @@ -4066,6 +4258,15 @@ css-select@^4.1.3: domutils "^2.8.0" nth-check "^2.0.1" +css-to-react-native@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/css-to-react-native/-/css-to-react-native-3.2.0.tgz#cdd8099f71024e149e4f6fe17a7d46ecd55f1e32" + integrity sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ== + dependencies: + camelize "^1.0.0" + css-color-keywords "^1.0.0" + postcss-value-parser "^4.0.2" + css-tree@1.0.0-alpha.29: version "1.0.0-alpha.29" resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.29.tgz#3fa9d4ef3142cbd1c301e7664c1f352bd82f5a39" @@ -4590,6 +4791,11 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= +delegate@^3.1.2: + version "3.2.0" + resolved 
"https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166" + integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw== + depd@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" @@ -5360,6 +5566,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fault@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/fault/-/fault-1.0.4.tgz#eafcfc0a6d214fc94601e170df29954a4f842f13" + integrity sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA== + dependencies: + format "^0.2.0" + faye-websocket@^0.11.3: version "0.11.4" resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" @@ -5532,6 +5745,11 @@ form-data@^3.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" +format@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" + integrity sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww== + forwarded@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" @@ -5752,6 +5970,13 @@ globby@^11.0.1, globby@^11.0.4: merge2 "^1.4.1" slash "^3.0.0" +good-listener@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50" + integrity sha512-goW1b+d9q/HIwbVYZzZ6SsTr4IgE+WA44A0GmPIQstuOrgsFcT7VEJ48nmr9GaRtNu0XTKacFLGnBPAM6Afouw== + dependencies: + delegate "^3.1.2" + graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.9" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" @@ 
-5907,6 +6132,16 @@ hast-util-whitespace@^1.0.0: resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz#e4fe77c4a9ae1cb2e6c25e02df0043d0164f6e41" integrity sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A== +hastscript@^5.0.0: + version "5.1.2" + resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-5.1.2.tgz#bde2c2e56d04c62dd24e8c5df288d050a355fb8a" + integrity sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ== + dependencies: + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + hastscript@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-6.0.0.tgz#e8768d7eac56c3fdeac8a92830d58e811e5bf640" @@ -5928,6 +6163,11 @@ headers-utils@^3.0.2: resolved "https://registry.yarnpkg.com/headers-utils/-/headers-utils-3.0.2.tgz#dfc65feae4b0e34357308aefbcafa99c895e59ef" integrity sha512-xAxZkM1dRyGV2Ou5bzMxBPNLoRCjcX+ya7KSWybQD2KwLphxsapUVK6x/02o7f4VU6GPSXch9vNY2+gkU8tYWQ== +highlight.js@~9.15.0, highlight.js@~9.15.1: + version "9.15.10" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-9.15.10.tgz#7b18ed75c90348c045eef9ed08ca1319a2219ad2" + integrity sha512-RoV7OkQm0T3os3Dd2VHLNMoaoDVx77Wygln3n9l5YV172XonWG6rgQD3XnF/BuFFZw9A0TJgmMSO8FEWQgvcXw== + history@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/history/-/history-5.2.0.tgz#7cdd31cf9bac3c5d31f09c231c9928fad0007b7c" @@ -5935,7 +6175,7 @@ history@^5.2.0: dependencies: "@babel/runtime" "^7.7.6" -hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: +hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: version "3.3.2" resolved 
"https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== @@ -7360,6 +7600,14 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" +lowlight@1.12.1: + version "1.12.1" + resolved "https://registry.yarnpkg.com/lowlight/-/lowlight-1.12.1.tgz#014acf8dd73a370e02ff1cc61debcde3bb1681eb" + integrity sha512-OqaVxMGIESnawn+TU/QMV5BJLbUghUfjDWPAtFqDYDmDtr4FnB+op8xM+pR7nKlauHNUHXGt0VgWatFB8voS5w== + dependencies: + fault "^1.0.2" + highlight.js "~9.15.0" + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -8051,6 +8299,18 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" +parse-entities@^1.1.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-1.2.2.tgz#c31bf0f653b6661354f8973559cb86dd1d5edf50" + integrity sha512-NzfpbxW/NPrzZ/yYSoQxyqUZMZXIdCfE0OIN4ESsnptHJECoUk3FZktxNuzQf4tjt5UEopnxpYJbvYuxIFDdsg== + dependencies: + character-entities "^1.0.0" + character-entities-legacy "^1.0.0" + character-reference-invalid "^1.0.0" + is-alphanumerical "^1.0.0" + is-decimal "^1.0.0" + is-hexadecimal "^1.0.0" + parse-entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" @@ -8146,7 +8406,7 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.0: version "2.3.1" resolved 
"https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -8718,6 +8978,18 @@ pretty-format@^27.0.0, pretty-format@^27.0.2, pretty-format@^27.4.6: ansi-styles "^5.0.0" react-is "^17.0.1" +prismjs@^1.8.4: + version "1.29.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12" + integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== + +prismjs@~1.17.0: + version "1.17.1" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.17.1.tgz#e669fcbd4cdd873c35102881c33b14d0d68519be" + integrity sha512-PrEDJAFdUGbOP6xK/UsfkC5ghJsPJviKgnQOoxaDbBjwc8op68Quupwt1DeAFoG8GImPhiKXAvvsH7wDSLsu1Q== + optionalDependencies: + clipboard "^2.0.0" + prismjs@~1.25.0: version "1.25.0" resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.25.0.tgz#6f822df1bdad965734b310b315a23315cf999756" @@ -8914,6 +9186,16 @@ react-clientside-effect@^1.2.6: dependencies: "@babel/runtime" "^7.12.13" +react-code-blocks@^0.0.9-0: + version "0.0.9-0" + resolved "https://registry.yarnpkg.com/react-code-blocks/-/react-code-blocks-0.0.9-0.tgz#0c6d04d8a40b74cffe95f24f1a8e62a0fda8c014" + integrity sha512-jdYJVZwGtsr6WIUaqILy5fkF1acf57YV5s0V3+w5o9v3omYnqBeO6EuZi1Vf2x1hahkYGEedsp46+ofdkYlqyw== + dependencies: + "@babel/runtime" "^7.10.4" + react-syntax-highlighter "^12.2.1" + styled-components "^5.1.1" + tslib "^2.0.0" + react-dev-utils@^12.0.0: version "12.0.0" resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.0.tgz#4eab12cdb95692a077616770b5988f0adf806526" @@ -9142,6 +9424,17 @@ react-style-singleton@^2.2.0: invariant "^2.2.4" tslib "^2.0.0" +react-syntax-highlighter@^12.2.1: + version "12.2.1" + resolved 
"https://registry.yarnpkg.com/react-syntax-highlighter/-/react-syntax-highlighter-12.2.1.tgz#14d78352da1c1c3f93c6698b70ec7c706b83493e" + integrity sha512-CTsp0ZWijwKRYFg9xhkWD4DSpQqE4vb2NKVMdPAkomnILSmsNBHE0n5GuI5zB+PU3ySVvXvdt9jo+ViD9XibCA== + dependencies: + "@babel/runtime" "^7.3.1" + highlight.js "~9.15.1" + lowlight "1.12.1" + prismjs "^1.8.4" + refractor "^2.4.1" + react-virtualized-auto-sizer@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.6.tgz#66c5b1c9278064c5ef1699ed40a29c11518f97ca" @@ -9214,6 +9507,15 @@ redux@^4.0.0, redux@^4.0.4: dependencies: "@babel/runtime" "^7.9.2" +refractor@^2.4.1: + version "2.10.1" + resolved "https://registry.yarnpkg.com/refractor/-/refractor-2.10.1.tgz#166c32f114ed16fd96190ad21d5193d3afc7d34e" + integrity sha512-Xh9o7hQiQlDbxo5/XkOX6H+x/q8rmlmZKr97Ie1Q8ZM32IRRd3B/UxuA/yXDW79DBSXGWxm2yRTbcTVmAciJRw== + dependencies: + hastscript "^5.0.0" + parse-entities "^1.1.2" + prismjs "~1.17.0" + refractor@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/refractor/-/refractor-3.5.0.tgz#334586f352dda4beaf354099b48c2d18e0819aec" @@ -9235,6 +9537,11 @@ regenerate@^1.4.2: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== +regenerator-runtime@^0.13.11: + version "0.13.11" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" + integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== + regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: version "0.13.9" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" @@ -9662,6 +9969,11 @@ select-hose@^2.0.0: resolved 
"https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= +select@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" + integrity sha512-OwpTSOfy6xSs1+pwcNrv0RBMOzI39Lp3qQKUTPVVPRjCdNa5JH/oPRiqsesIskK8TVgmRiHwO4KXlV2Li9dANA== + selfsigned@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.0.tgz#e927cd5377cbb0a1075302cff8df1042cc2bce5b" @@ -9769,6 +10081,11 @@ setprototypeof@1.2.0: resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== +shallowequal@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" + integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== + shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -10123,6 +10440,22 @@ style-to-object@^0.3.0: dependencies: inline-style-parser "0.1.1" +styled-components@^5.1.1: + version "5.3.9" + resolved "https://registry.yarnpkg.com/styled-components/-/styled-components-5.3.9.tgz#641af2a8bb89904de708c71b439caa9633e8f0ba" + integrity sha512-Aj3kb13B75DQBo2oRwRa/APdB5rSmwUfN5exyarpX+x/tlM/rwZA2vVk2vQgVSP6WKaZJHWwiFrzgHt+CLtB4A== + dependencies: + "@babel/helper-module-imports" "^7.0.0" + "@babel/traverse" "^7.4.5" + "@emotion/is-prop-valid" "^1.1.0" + "@emotion/stylis" "^0.8.4" + "@emotion/unitless" "^0.7.4" + babel-plugin-styled-components ">= 1.12.0" + css-to-react-native "^3.0.0" + hoist-non-react-statics "^3.0.0" + shallowequal "^1.1.0" + supports-color "^5.5.0" + 
stylehacks@^5.0.2: version "5.0.2" resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.0.2.tgz#fa10e5181c6e8dc0bddb4a3fb372e9ac42bba2ad" @@ -10136,7 +10469,7 @@ stylis@4.0.13: resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.13.tgz#f5db332e376d13cc84ecfe5dace9a2a51d954c91" integrity sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag== -supports-color@^5.3.0: +supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== @@ -10360,6 +10693,11 @@ timsort@^0.3.0: resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= +tiny-emitter@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" + integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== + tiny-invariant@^1.0.6: version "1.2.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" From b27472fc1fb42368ffe1556c848dc3b21b2fca0c Mon Sep 17 00:00:00 2001 From: Yusuke Nishioka Date: Sat, 22 Apr 2023 02:13:38 +0900 Subject: [PATCH 25/31] feat: Cache Bigtable client (#3602) * Cache Bigtable client Signed-off-by: Yusuke Nishioka * Run format Signed-off-by: Yusuke Nishioka --------- Signed-off-by: Yusuke Nishioka --- sdk/python/feast/infra/online_stores/bigtable.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sdk/python/feast/infra/online_stores/bigtable.py b/sdk/python/feast/infra/online_stores/bigtable.py index e08bc44bdbe..30561d0840f 100644 --- a/sdk/python/feast/infra/online_stores/bigtable.py +++ 
b/sdk/python/feast/infra/online_stores/bigtable.py @@ -335,4 +335,8 @@ def teardown( def _get_client( self, online_config: BigtableOnlineStoreConfig, admin: bool = False ): - return bigtable.Client(project=online_config.project_id, admin=admin) + if self._client is None: + self._client = bigtable.Client( + project=online_config.project_id, admin=admin + ) + return self._client From 7da058085cd1211fb383ff0a6c5ae8f59999c5f0 Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 21 Apr 2023 12:52:53 -0500 Subject: [PATCH 26/31] fix: Remove snowflake source warehouse tech debt (#3422) Signed-off-by: Miles Adkins --- protos/feast/core/DataSource.proto | 5 ++-- .../feast/infra/offline_stores/snowflake.py | 8 +------ .../infra/offline_stores/snowflake_source.py | 23 ++++++++----------- .../universal/data_sources/snowflake.py | 1 - sdk/python/tests/unit/test_data_sources.py | 1 - 5 files changed, 12 insertions(+), 26 deletions(-) diff --git a/protos/feast/core/DataSource.proto b/protos/feast/core/DataSource.proto index 3992d2c247d..d129086f451 100644 --- a/protos/feast/core/DataSource.proto +++ b/protos/feast/core/DataSource.proto @@ -197,6 +197,8 @@ message DataSource { // Defines options for DataSource that sources features from a Snowflake Query message SnowflakeOptions { + reserved 5; // Snowflake warehouse name + // Snowflake table name string table = 1; @@ -209,9 +211,6 @@ message DataSource { // Snowflake schema name string database = 4; - - // Snowflake warehouse name - string warehouse = 5; } // Defines options for DataSource that sources features from a spark table/query diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index d0bd9bd30a8..1463ea9cc83 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -164,7 +164,7 @@ def pull_latest_from_table_or_query( ) select_timestamps = 
list( map( - lambda field_name: f"to_varchar({field_name}, 'YYYY-MM-DD\"T\"HH24:MI:SS.FFTZH:TZM') as {field_name}", + lambda field_name: f"TO_VARCHAR({field_name}, 'YYYY-MM-DD\"T\"HH24:MI:SS.FFTZH:TZM') AS {field_name}", timestamp_columns, ) ) @@ -178,9 +178,6 @@ def pull_latest_from_table_or_query( ) inner_field_string = ", ".join(select_fields) - if data_source.snowflake_options.warehouse: - config.offline_store.warehouse = data_source.snowflake_options.warehouse - with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn @@ -232,9 +229,6 @@ def pull_all_from_table_or_query( + '"' ) - if data_source.snowflake_options.warehouse: - config.offline_store.warehouse = data_source.snowflake_options.warehouse - with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 63533214ea8..95bd46f1ec1 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -1,3 +1,4 @@ +import warnings from typing import Callable, Dict, Iterable, Optional, Tuple from typeguard import typechecked @@ -45,7 +46,6 @@ def __init__( timestamp_field (optional): Event timestamp field used for point in time joins of feature values. database (optional): Snowflake database where the features are stored. - warehouse (optional): Snowflake warehouse where the database is stored. schema (optional): Snowflake schema in which the table is located. table (optional): Snowflake table where the features are stored. Exactly one of 'table' and 'query' must be specified. @@ -60,6 +60,14 @@ def __init__( owner (optional): The owner of the snowflake source, typically the email of the primary maintainer. """ + + if warehouse: + warnings.warn( + "Specifying a warehouse within a SnowflakeSource is to be deprecated." 
+ "Starting v0.32.0, the warehouse as part of the Snowflake store config will be used.", + RuntimeWarning, + ) + if table is None and query is None: raise ValueError('No "table" or "query" argument provided.') if table and query: @@ -73,7 +81,6 @@ def __init__( schema=_schema, table=table, query=query, - warehouse=warehouse, ) # If no name, use the table as the default name. @@ -109,7 +116,6 @@ def from_proto(data_source: DataSourceProto): database=data_source.snowflake_options.database, schema=data_source.snowflake_options.schema, table=data_source.snowflake_options.table, - warehouse=data_source.snowflake_options.warehouse, created_timestamp_column=data_source.created_timestamp_column, field_mapping=dict(data_source.field_mapping), query=data_source.snowflake_options.query, @@ -134,7 +140,6 @@ def __eq__(self, other): and self.schema == other.schema and self.table == other.table and self.query == other.query - and self.warehouse == other.warehouse ) @property @@ -157,11 +162,6 @@ def query(self): """Returns the snowflake options of this snowflake source.""" return self.snowflake_options.query - @property - def warehouse(self): - """Returns the warehouse of this snowflake source.""" - return self.snowflake_options.warehouse - def to_proto(self) -> DataSourceProto: """ Converts a SnowflakeSource object to its protobuf representation. 
@@ -335,13 +335,11 @@ def __init__( schema: Optional[str], table: Optional[str], query: Optional[str], - warehouse: Optional[str], ): self.database = database or "" self.schema = schema or "" self.table = table or "" self.query = query or "" - self.warehouse = warehouse or "" @classmethod def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions): @@ -359,7 +357,6 @@ def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions): schema=snowflake_options_proto.schema, table=snowflake_options_proto.table, query=snowflake_options_proto.query, - warehouse=snowflake_options_proto.warehouse, ) return snowflake_options @@ -376,7 +373,6 @@ def to_proto(self) -> DataSourceProto.SnowflakeOptions: schema=self.schema, table=self.table, query=self.query, - warehouse=self.warehouse, ) return snowflake_options_proto @@ -393,7 +389,6 @@ def __init__(self, table_ref: str): schema=None, table=table_ref, query=None, - warehouse=None, ) @staticmethod diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index 257e46df197..c7e5961a88a 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -66,7 +66,6 @@ def create_data_source( timestamp_field=timestamp_field, created_timestamp_column=created_timestamp_column, field_mapping=field_mapping or {"ts_1": "ts"}, - warehouse=self.offline_store_config.warehouse, ) def create_saved_dataset_destination(self) -> SavedDatasetSnowflakeStorage: diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py index 1e8fb75c3ed..30b030feb67 100644 --- a/sdk/python/tests/unit/test_data_sources.py +++ b/sdk/python/tests/unit/test_data_sources.py @@ -118,7 +118,6 @@ def test_proto_conversion(): snowflake_source = SnowflakeSource( 
name="test_source", database="test_database", - warehouse="test_warehouse", schema="test_schema", table="test_table", timestamp_field="event_timestamp", From 58ce148401fe578b1727bc42ee6b4b9a558660c7 Mon Sep 17 00:00:00 2001 From: Chris Date: Fri, 21 Apr 2023 12:36:20 -0700 Subject: [PATCH 27/31] feat: Add AWS Redshift Serverless support (#3595) * Rebase master Signed-off-by: Chris Barcroft * Pass optional types to satisfy mypy Signed-off-by: Chris Barcroft * Remove redundant import Signed-off-by: Chris Barcroft * Regenerate python requirements Signed-off-by: Chris Barcroft * Fix casing error on DbUser Redshift kwarg Signed-off-by: Chris Barcroft --------- Signed-off-by: Chris Barcroft Signed-off-by: Chris Barcroft Co-authored-by: Chris Barcroft --- docs/reference/offline-stores/redshift.md | 22 ++++++ .../feast/infra/offline_stores/redshift.py | 49 +++++++++++-- .../infra/offline_stores/redshift_source.py | 27 +++++-- sdk/python/feast/infra/utils/aws_utils.py | 71 ++++++++++++++----- sdk/python/feast/templates/aws/bootstrap.py | 2 + .../requirements/py3.10-requirements.txt | 57 ++++++++------- .../requirements/py3.8-requirements.txt | 60 ++++++++-------- .../requirements/py3.9-requirements.txt | 54 +++++++------- .../universal/data_sources/redshift.py | 2 + .../infra/scaffolding/test_repo_config.py | 4 +- setup.py | 7 +- 11 files changed, 235 insertions(+), 120 deletions(-) diff --git a/docs/reference/offline-stores/redshift.md b/docs/reference/offline-stores/redshift.md index 98092c44bec..e9bcbfeff1a 100644 --- a/docs/reference/offline-stores/redshift.md +++ b/docs/reference/offline-stores/redshift.md @@ -155,3 +155,25 @@ While the following trust relationship is necessary to make sure that Redshift, ] } ``` + + +## Redshift Serverless + +In order to use [AWS Redshift Serverless](https://aws.amazon.com/redshift/redshift-serverless/), specify a workgroup instead of a cluster_id and user. 
+ +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: aws +offline_store: + type: redshift + region: us-west-2 + workgroup: feast-workgroup + database: feast-database + s3_staging_location: s3://feast-bucket/redshift + iam_role: arn:aws:iam::123456789012:role/redshift_s3_access_role +``` +{% endcode %} + +Please note that the IAM policies above will need the [redshift-serverless](https://aws.permissions.cloud/iam/redshift-serverless) version, rather than the standard [redshift](https://aws.permissions.cloud/iam/redshift). \ No newline at end of file diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 35fd49f7460..aba2bda353c 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -19,7 +19,7 @@ import pyarrow import pyarrow as pa from dateutil import parser -from pydantic import StrictStr +from pydantic import StrictStr, root_validator from pydantic.typing import Literal from pytz import utc @@ -51,15 +51,18 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): type: Literal["redshift"] = "redshift" """ Offline store type selector""" - cluster_id: StrictStr - """ Redshift cluster identifier """ + cluster_id: Optional[StrictStr] + """ Redshift cluster identifier, for provisioned clusters """ + + user: Optional[StrictStr] + """ Redshift user name, only required for provisioned clusters """ + + workgroup: Optional[StrictStr] + """ Redshift workgroup identifier, for serverless """ region: StrictStr """ Redshift cluster's AWS region """ - user: StrictStr - """ Redshift user name """ - database: StrictStr """ Redshift database name """ @@ -69,6 +72,26 @@ class RedshiftOfflineStoreConfig(FeastConfigBaseModel): iam_role: StrictStr """ IAM Role for Redshift, granting it access to S3 """ + @root_validator + def require_cluster_and_user_or_workgroup(cls, values): + """ + 
Provisioned Redshift clusters: Require cluster_id and user, ignore workgroup + Serverless Redshift: Require workgroup, ignore cluster_id and user + """ + cluster_id, user, workgroup = ( + values.get("cluster_id"), + values.get("user"), + values.get("workgroup"), + ) + if not (cluster_id and user) and not workgroup: + raise ValueError( + "please specify either cluster_id & user if using provisioned clusters, or workgroup if using serverless" + ) + elif cluster_id and workgroup: + raise ValueError("cannot specify both cluster_id and workgroup") + + return values + class RedshiftOfflineStore(OfflineStore): @staticmethod @@ -248,6 +271,7 @@ def query_generator() -> Iterator[str]: aws_utils.execute_redshift_statement( redshift_client, config.offline_store.cluster_id, + config.offline_store.workgroup, config.offline_store.database, config.offline_store.user, f"DROP TABLE IF EXISTS {table_name}", @@ -294,6 +318,7 @@ def write_logged_features( table=data, redshift_data_client=redshift_client, cluster_id=config.offline_store.cluster_id, + workgroup=config.offline_store.workgroup, database=config.offline_store.database, user=config.offline_store.user, s3_resource=s3_resource, @@ -336,8 +361,10 @@ def offline_write_batch( table=table, redshift_data_client=redshift_client, cluster_id=config.offline_store.cluster_id, + workgroup=config.offline_store.workgroup, database=redshift_options.database - or config.offline_store.database, # Users can define database in the source if needed but it's not required. + # Users can define database in the source if needed but it's not required. 
+ or config.offline_store.database, user=config.offline_store.user, s3_resource=s3_resource, s3_path=f"{config.offline_store.s3_staging_location}/push/{uuid.uuid4()}.parquet", @@ -405,6 +432,7 @@ def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: return aws_utils.unload_redshift_query_to_df( self._redshift_client, self._config.offline_store.cluster_id, + self._config.offline_store.workgroup, self._config.offline_store.database, self._config.offline_store.user, self._s3_resource, @@ -419,6 +447,7 @@ def _to_arrow_internal(self, timeout: Optional[int] = None) -> pa.Table: return aws_utils.unload_redshift_query_to_pa( self._redshift_client, self._config.offline_store.cluster_id, + self._config.offline_store.workgroup, self._config.offline_store.database, self._config.offline_store.user, self._s3_resource, @@ -439,6 +468,7 @@ def to_s3(self) -> str: aws_utils.execute_redshift_query_and_unload_to_s3( self._redshift_client, self._config.offline_store.cluster_id, + self._config.offline_store.workgroup, self._config.offline_store.database, self._config.offline_store.user, self._s3_path, @@ -455,6 +485,7 @@ def to_redshift(self, table_name: str) -> None: aws_utils.upload_df_to_redshift( self._redshift_client, self._config.offline_store.cluster_id, + self._config.offline_store.workgroup, self._config.offline_store.database, self._config.offline_store.user, self._s3_resource, @@ -471,6 +502,7 @@ def to_redshift(self, table_name: str) -> None: aws_utils.execute_redshift_statement( self._redshift_client, self._config.offline_store.cluster_id, + self._config.offline_store.workgroup, self._config.offline_store.database, self._config.offline_store.user, query, @@ -509,6 +541,7 @@ def _upload_entity_df( aws_utils.upload_df_to_redshift( redshift_client, config.offline_store.cluster_id, + config.offline_store.workgroup, config.offline_store.database, config.offline_store.user, s3_resource, @@ -522,6 +555,7 @@ def _upload_entity_df( 
aws_utils.execute_redshift_statement( redshift_client, config.offline_store.cluster_id, + config.offline_store.workgroup, config.offline_store.database, config.offline_store.user, f"CREATE TABLE {table_name} AS ({entity_df})", @@ -577,6 +611,7 @@ def _get_entity_df_event_timestamp_range( statement_id = aws_utils.execute_redshift_statement( redshift_client, config.offline_store.cluster_id, + config.offline_store.workgroup, config.offline_store.database, config.offline_store.user, f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max " diff --git a/sdk/python/feast/infra/offline_stores/redshift_source.py b/sdk/python/feast/infra/offline_stores/redshift_source.py index 4279e6a0687..1f80dede076 100644 --- a/sdk/python/feast/infra/offline_stores/redshift_source.py +++ b/sdk/python/feast/infra/offline_stores/redshift_source.py @@ -207,18 +207,30 @@ def get_table_column_names_and_types( if self.table: try: paginator = client.get_paginator("describe_table") - response_iterator = paginator.paginate( - ClusterIdentifier=config.offline_store.cluster_id, - Database=( + + paginator_kwargs = { + "Database": ( self.database if self.database else config.offline_store.database ), - DbUser=config.offline_store.user, - Table=self.table, - Schema=self.schema, - ) + "Table": self.table, + "Schema": self.schema, + } + + if config.offline_store.cluster_id: + # Provisioned cluster + paginator_kwargs[ + "ClusterIdentifier" + ] = config.offline_store.cluster_id + paginator_kwargs["DbUser"] = config.offline_store.user + elif config.offline_store.workgroup: + # Redshift serverless + paginator_kwargs["WorkgroupName"] = config.offline_store.workgroup + + response_iterator = paginator.paginate(**paginator_kwargs) table = response_iterator.build_full_result() + except ClientError as e: if e.response["Error"]["Code"] == "ValidationException": raise RedshiftCredentialsError() from e @@ -233,6 +245,7 @@ def get_table_column_names_and_types( statement_id = 
aws_utils.execute_redshift_statement( client, config.offline_store.cluster_id, + config.offline_store.workgroup, self.database if self.database else config.offline_store.database, config.offline_store.user, f"SELECT * FROM ({self.query}) LIMIT 1", diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index 7e8335ac92e..5095ef42219 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -74,7 +74,12 @@ def get_bucket_and_key(s3_path: str) -> Tuple[str, str]: reraise=True, ) def execute_redshift_statement_async( - redshift_data_client, cluster_id: str, database: str, user: str, query: str + redshift_data_client, + cluster_id: Optional[str], + workgroup: Optional[str], + database: str, + user: Optional[str], + query: str, ) -> dict: """Execute Redshift statement asynchronously. Does not wait for the query to finish. @@ -83,6 +88,7 @@ def execute_redshift_statement_async( Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier + workgroup: Redshift Serverless Workgroup database: Redshift Database Name user: Redshift username query: The SQL query to execute @@ -91,12 +97,17 @@ def execute_redshift_statement_async( """ try: - return redshift_data_client.execute_statement( - ClusterIdentifier=cluster_id, - Database=database, - DbUser=user, - Sql=query, - ) + rs_kwargs = {"Database": database, "Sql": query} + + # Standard Redshift requires a ClusterId as well as DbUser. RS Serverless instead requires a WorkgroupName. 
+ if cluster_id and user: + rs_kwargs["ClusterIdentifier"] = cluster_id + rs_kwargs["DbUser"] = user + elif workgroup: + rs_kwargs["WorkgroupName"] = workgroup + + return redshift_data_client.execute_statement(**rs_kwargs) + except ClientError as e: if e.response["Error"]["Code"] == "ValidationException": raise RedshiftCredentialsError() from e @@ -133,7 +144,12 @@ def wait_for_redshift_statement(redshift_data_client, statement: dict) -> None: def execute_redshift_statement( - redshift_data_client, cluster_id: str, database: str, user: str, query: str + redshift_data_client, + cluster_id: Optional[str], + workgroup: Optional[str], + database: str, + user: Optional[str], + query: str, ) -> str: """Execute Redshift statement synchronously. Waits for the query to finish. @@ -144,6 +160,7 @@ def execute_redshift_statement( Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier + workgroup: Redshift Serverless Workgroup database: Redshift Database Name user: Redshift username query: The SQL query to execute @@ -152,7 +169,7 @@ def execute_redshift_statement( """ statement = execute_redshift_statement_async( - redshift_data_client, cluster_id, database, user, query + redshift_data_client, cluster_id, workgroup, database, user, query ) wait_for_redshift_statement(redshift_data_client, statement) return statement["Id"] @@ -193,9 +210,10 @@ def upload_df_to_s3( def upload_df_to_redshift( redshift_data_client, - cluster_id: str, + cluster_id: Optional[str], + workgroup: Optional[str], database: str, - user: str, + user: Optional[str], s3_resource, s3_path: str, iam_role: str, @@ -209,6 +227,7 @@ def upload_df_to_redshift( Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier + workgroup: Redshift Serverless Workgroup database: Redshift Database Name user: Redshift username s3_resource: S3 Resource object @@ -236,6 +255,7 @@ def upload_df_to_redshift( table, redshift_data_client, 
cluster_id=cluster_id, + workgroup=workgroup, database=database, user=user, s3_resource=s3_resource, @@ -248,6 +268,7 @@ def upload_df_to_redshift( def delete_redshift_table( redshift_data_client, cluster_id: str, + workgroup: str, database: str, user: str, table_name: str, @@ -256,6 +277,7 @@ def delete_redshift_table( execute_redshift_statement( redshift_data_client, cluster_id, + workgroup, database, user, drop_query, @@ -265,9 +287,10 @@ def delete_redshift_table( def upload_arrow_table_to_redshift( table: Union[pyarrow.Table, Path], redshift_data_client, - cluster_id: str, + cluster_id: Optional[str], + workgroup: Optional[str], database: str, - user: str, + user: Optional[str], s3_resource, iam_role: str, s3_path: str, @@ -286,6 +309,7 @@ def upload_arrow_table_to_redshift( Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier + workgroup: Redshift Serverless Workgroup database: Redshift Database Name user: Redshift username s3_resource: S3 Resource object @@ -345,6 +369,7 @@ def upload_arrow_table_to_redshift( execute_redshift_statement( redshift_data_client, cluster_id, + workgroup, database, user, f"{create_query}; {copy_query};", @@ -359,6 +384,7 @@ def upload_arrow_table_to_redshift( def temporarily_upload_df_to_redshift( redshift_data_client, cluster_id: str, + workgroup: str, database: str, user: str, s3_resource, @@ -381,6 +407,7 @@ def temporarily_upload_df_to_redshift( upload_df_to_redshift( redshift_data_client, cluster_id, + workgroup, database, user, s3_resource, @@ -396,6 +423,7 @@ def temporarily_upload_df_to_redshift( execute_redshift_statement( redshift_data_client, cluster_id, + workgroup, database, user, f"DROP TABLE {table_name}", @@ -407,6 +435,7 @@ def temporarily_upload_arrow_table_to_redshift( table: Union[pyarrow.Table, Path], redshift_data_client, cluster_id: str, + workgroup: str, database: str, user: str, s3_resource, @@ -431,6 +460,7 @@ def temporarily_upload_arrow_table_to_redshift( 
table, redshift_data_client, cluster_id, + workgroup, database, user, s3_resource, @@ -447,6 +477,7 @@ def temporarily_upload_arrow_table_to_redshift( execute_redshift_statement( redshift_data_client, cluster_id, + workgroup, database, user, f"DROP TABLE {table_name}", @@ -476,9 +507,10 @@ def delete_s3_directory(s3_resource, bucket: str, key: str): def execute_redshift_query_and_unload_to_s3( redshift_data_client, - cluster_id: str, + cluster_id: Optional[str], + workgroup: Optional[str], database: str, - user: str, + user: Optional[str], s3_path: str, iam_role: str, query: str, @@ -488,6 +520,7 @@ def execute_redshift_query_and_unload_to_s3( Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier + workgroup: Redshift Serverless workgroup name database: Redshift Database Name user: Redshift username s3_path: S3 directory where the unloaded data is written @@ -500,12 +533,15 @@ def execute_redshift_query_and_unload_to_s3( unique_table_name = "_" + str(uuid.uuid4()).replace("-", "") query = f"CREATE TEMPORARY TABLE {unique_table_name} AS ({query});\n" query += f"UNLOAD ('SELECT * FROM {unique_table_name}') TO '{s3_path}/' IAM_ROLE '{iam_role}' FORMAT AS PARQUET" - execute_redshift_statement(redshift_data_client, cluster_id, database, user, query) + execute_redshift_statement( + redshift_data_client, cluster_id, workgroup, database, user, query + ) def unload_redshift_query_to_pa( redshift_data_client, cluster_id: str, + workgroup: str, database: str, user: str, s3_resource, @@ -519,6 +555,7 @@ def unload_redshift_query_to_pa( execute_redshift_query_and_unload_to_s3( redshift_data_client, cluster_id, + workgroup, database, user, s3_path, @@ -535,6 +572,7 @@ def unload_redshift_query_to_pa( def unload_redshift_query_to_df( redshift_data_client, cluster_id: str, + workgroup: str, database: str, user: str, s3_resource, @@ -546,6 +584,7 @@ def unload_redshift_query_to_df( table = unload_redshift_query_to_pa( 
redshift_data_client, cluster_id, + workgroup, database, user, s3_resource, diff --git a/sdk/python/feast/templates/aws/bootstrap.py b/sdk/python/feast/templates/aws/bootstrap.py index dcabadd358b..63e5b50203b 100644 --- a/sdk/python/feast/templates/aws/bootstrap.py +++ b/sdk/python/feast/templates/aws/bootstrap.py @@ -35,6 +35,7 @@ def bootstrap(): aws_utils.execute_redshift_statement( client, cluster_id, + None, database, user, "DROP TABLE IF EXISTS feast_driver_hourly_stats", @@ -43,6 +44,7 @@ def bootstrap(): aws_utils.upload_df_to_redshift( client, cluster_id, + None, database, user, s3, diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 63d21e7fa27..f9702505e38 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt @@ -11,7 +11,7 @@ anyio==3.6.2 # watchfiles appdirs==1.4.4 # via fissix -attrs==22.2.0 +attrs==23.1.0 # via # bowler # jsonschema @@ -35,11 +35,11 @@ cloudpickle==2.2.1 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.3.0 +dask==2023.4.0 # via feast (setup.py) dill==0.3.6 # via feast (setup.py) -fastapi==0.93.0 +fastapi==0.95.1 # via feast (setup.py) fastavro==1.7.3 # via @@ -47,31 +47,33 @@ fastavro==1.7.3 # pandavro fissix==21.11.13 # via bowler -fsspec==2023.3.0 +fsspec==2023.4.0 # via dask greenlet==2.0.2 # via sqlalchemy -grpcio==1.51.3 +grpcio==1.54.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.51.3 +grpcio-reflection==1.54.0 # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn -httpcore==0.16.3 +httpcore==0.17.0 # via httpx httptools==0.5.0 # via uvicorn -httpx==0.23.3 +httpx==0.24.0 # via feast (setup.py) idna==3.4 # via # anyio + 
# httpx # requests - # rfc3986 +importlib-metadata==6.5.0 + # via dask jinja2==3.1.2 # via feast (setup.py) jsonschema==4.17.3 @@ -80,11 +82,11 @@ locket==1.0.0 # via partd markupsafe==2.1.2 # via jinja2 -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.1.1 +mypy==1.2.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -94,7 +96,7 @@ numpy==1.24.2 # pandas # pandavro # pyarrow -packaging==23.0 +packaging==23.1 # via dask pandas==1.5.3 # via @@ -102,22 +104,22 @@ pandas==1.5.3 # pandavro pandavro==1.5.2 # via feast (setup.py) -partd==1.3.0 +partd==1.4.0 # via dask proto-plus==1.22.2 # via feast (setup.py) -protobuf==4.22.1 +protobuf==4.22.3 # via # feast (setup.py) # grpcio-reflection # proto-plus pyarrow==11.0.0 # via feast (setup.py) -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) -pygments==2.14.0 +pygments==2.15.1 # via feast (setup.py) pyrsistent==0.19.3 # via jsonschema @@ -125,7 +127,7 @@ python-dateutil==2.8.2 # via pandas python-dotenv==1.0.0 # via uvicorn -pytz==2022.7.1 +pytz==2023.3 # via pandas pyyaml==6.0 # via @@ -134,8 +136,6 @@ pyyaml==6.0 # uvicorn requests==2.28.2 # via feast (setup.py) -rfc3986[idna2008]==1.5.0 - # via httpx six==1.16.0 # via # pandavro @@ -145,11 +145,11 @@ sniffio==1.3.0 # anyio # httpcore # httpx -sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a34 # via sqlalchemy -starlette==0.25.0 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -172,15 +172,18 @@ typing-extensions==4.5.0 # mypy # pydantic # sqlalchemy2-stubs -urllib3==1.26.14 + # starlette +urllib3==1.26.15 # via requests -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn -websockets==10.4 +websockets==11.0.2 # via uvicorn +zipp==3.15.0 + # via importlib-metadata diff 
--git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index ca09b953c83..ac37f3a75db 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.9 # by the following command: # # pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt @@ -11,7 +11,7 @@ anyio==3.6.2 # watchfiles appdirs==1.4.4 # via fissix -attrs==22.2.0 +attrs==23.1.0 # via # bowler # jsonschema @@ -35,11 +35,11 @@ cloudpickle==2.2.1 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.3.0 +dask==2023.4.0 # via feast (setup.py) dill==0.3.6 # via feast (setup.py) -fastapi==0.94.0 +fastapi==0.95.1 # via feast (setup.py) fastavro==1.7.3 # via @@ -47,33 +47,33 @@ fastavro==1.7.3 # pandavro fissix==21.11.13 # via bowler -fsspec==2023.3.0 +fsspec==2023.4.0 # via dask greenlet==2.0.2 # via sqlalchemy -grpcio==1.51.3 +grpcio==1.54.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.51.3 +grpcio-reflection==1.54.0 # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn -httpcore==0.16.3 +httpcore==0.17.0 # via httpx httptools==0.5.0 # via uvicorn -httpx==0.23.3 +httpx==0.24.0 # via feast (setup.py) idna==3.4 # via # anyio + # httpx # requests - # rfc3986 -importlib-resources==5.12.0 - # via jsonschema +importlib-metadata==6.5.0 + # via dask jinja2==3.1.2 # via feast (setup.py) jsonschema==4.17.3 @@ -82,11 +82,11 @@ locket==1.0.0 # via partd markupsafe==2.1.2 # via jinja2 -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.1.1 +mypy==1.2.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -96,7 +96,7 @@ numpy==1.24.2 # pandas # pandavro # pyarrow -packaging==23.0 +packaging==23.1 # via dask pandas==1.5.3 # via @@ -104,24 +104,22 @@ pandas==1.5.3 # pandavro pandavro==1.5.2 # via feast 
(setup.py) -partd==1.3.0 +partd==1.4.0 # via dask -pkgutil-resolve-name==1.3.10 - # via jsonschema proto-plus==1.22.2 # via feast (setup.py) -protobuf==4.22.1 +protobuf==4.22.3 # via # feast (setup.py) # grpcio-reflection # proto-plus pyarrow==11.0.0 # via feast (setup.py) -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) -pygments==2.14.0 +pygments==2.15.1 # via feast (setup.py) pyrsistent==0.19.3 # via jsonschema @@ -129,7 +127,7 @@ python-dateutil==2.8.2 # via pandas python-dotenv==1.0.0 # via uvicorn -pytz==2022.7.1 +pytz==2023.3 # via pandas pyyaml==6.0 # via @@ -138,8 +136,6 @@ pyyaml==6.0 # uvicorn requests==2.28.2 # via feast (setup.py) -rfc3986[idna2008]==1.5.0 - # via httpx six==1.16.0 # via # pandavro @@ -149,11 +145,11 @@ sniffio==1.3.0 # anyio # httpcore # httpx -sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a34 # via sqlalchemy -starlette==0.26.0.post1 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -177,17 +173,17 @@ typing-extensions==4.5.0 # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.14 +urllib3==1.26.15 # via requests -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn -websockets==10.4 +websockets==11.0.2 # via uvicorn zipp==3.15.0 - # via importlib-resources + # via importlib-metadata diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 9235d8fdf2b..ce9dc085eaa 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -11,7 +11,7 @@ anyio==3.6.2 # watchfiles appdirs==1.4.4 # via fissix -attrs==22.2.0 +attrs==23.1.0 # via # bowler # jsonschema @@ -35,11 +35,11 @@ cloudpickle==2.2.1 # via dask colorama==0.4.6 # via feast (setup.py) -dask==2023.3.0 
+dask==2023.4.0 # via feast (setup.py) dill==0.3.6 # via feast (setup.py) -fastapi==0.93.0 +fastapi==0.95.1 # via feast (setup.py) fastavro==1.7.3 # via @@ -47,31 +47,33 @@ fastavro==1.7.3 # pandavro fissix==21.11.13 # via bowler -fsspec==2023.3.0 +fsspec==2023.4.0 # via dask greenlet==2.0.2 # via sqlalchemy -grpcio==1.51.3 +grpcio==1.54.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.51.3 +grpcio-reflection==1.54.0 # via feast (setup.py) h11==0.14.0 # via # httpcore # uvicorn -httpcore==0.16.3 +httpcore==0.17.0 # via httpx httptools==0.5.0 # via uvicorn -httpx==0.23.3 +httpx==0.24.0 # via feast (setup.py) idna==3.4 # via # anyio + # httpx # requests - # rfc3986 +importlib-metadata==6.5.0 + # via dask jinja2==3.1.2 # via feast (setup.py) jsonschema==4.17.3 @@ -80,11 +82,11 @@ locket==1.0.0 # via partd markupsafe==2.1.2 # via jinja2 -mmh3==3.0.0 +mmh3==3.1.0 # via feast (setup.py) moreorless==0.4.0 # via bowler -mypy==1.1.1 +mypy==1.2.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy @@ -94,7 +96,7 @@ numpy==1.24.2 # pandas # pandavro # pyarrow -packaging==23.0 +packaging==23.1 # via dask pandas==1.5.3 # via @@ -102,22 +104,22 @@ pandas==1.5.3 # pandavro pandavro==1.5.2 # via feast (setup.py) -partd==1.3.0 +partd==1.4.0 # via dask proto-plus==1.22.2 # via feast (setup.py) -protobuf==4.22.1 +protobuf==4.22.3 # via # feast (setup.py) # grpcio-reflection # proto-plus pyarrow==11.0.0 # via feast (setup.py) -pydantic==1.10.6 +pydantic==1.10.7 # via # fastapi # feast (setup.py) -pygments==2.14.0 +pygments==2.15.1 # via feast (setup.py) pyrsistent==0.19.3 # via jsonschema @@ -125,7 +127,7 @@ python-dateutil==2.8.2 # via pandas python-dotenv==1.0.0 # via uvicorn -pytz==2022.7.1 +pytz==2023.3 # via pandas pyyaml==6.0 # via @@ -134,8 +136,6 @@ pyyaml==6.0 # uvicorn requests==2.28.2 # via feast (setup.py) -rfc3986[idna2008]==1.5.0 - # via httpx six==1.16.0 # via # pandavro @@ -145,11 +145,11 @@ sniffio==1.3.0 # anyio # httpcore # httpx 
-sqlalchemy[mypy]==1.4.46 +sqlalchemy[mypy]==1.4.47 # via feast (setup.py) -sqlalchemy2-stubs==0.0.2a32 +sqlalchemy2-stubs==0.0.2a34 # via sqlalchemy -starlette==0.25.0 +starlette==0.26.1 # via fastapi tabulate==0.9.0 # via feast (setup.py) @@ -173,15 +173,17 @@ typing-extensions==4.5.0 # pydantic # sqlalchemy2-stubs # starlette -urllib3==1.26.14 +urllib3==1.26.15 # via requests -uvicorn[standard]==0.21.0 +uvicorn[standard]==0.21.1 # via feast (setup.py) uvloop==0.17.0 # via uvicorn volatile==2.1.0 # via bowler -watchfiles==0.18.1 +watchfiles==0.19.0 # via uvicorn -websockets==10.4 +websockets==11.0.2 # via uvicorn +zipp==3.15.0 + # via importlib-metadata diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index c92a413616b..dfe8e3d33bf 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -59,6 +59,7 @@ def create_data_source( aws_utils.upload_df_to_redshift( self.client, self.offline_store_config.cluster_id, + self.offline_store_config.workgroup, self.offline_store_config.database, self.offline_store_config.user, self.s3, @@ -105,6 +106,7 @@ def teardown(self): aws_utils.execute_redshift_statement( self.client, self.offline_store_config.cluster_id, + self.offline_store_config.workgroup, self.offline_store_config.database, self.offline_store_config.user, f"DROP TABLE IF EXISTS {table}", diff --git a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py index 22fd1e696f3..42229f8683f 100644 --- a/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py +++ b/sdk/python/tests/unit/infra/scaffolding/test_repo_config.py @@ -45,8 +45,8 @@ def test_nullable_online_store_aws(): entity_key_serialization_version: 2 """ ), - expect_error="__root__ -> offline_store 
-> cluster_id\n" - " field required (type=value_error.missing)", + expect_error="__root__ -> offline_store -> __root__\n" + " please specify either cluster_id & user if using provisioned clusters, or workgroup if using serverless (type=value_error)", ) diff --git a/setup.py b/setup.py index 7218c1de04a..bb9907dadca 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,8 @@ "mmh3", "numpy>=1.22,<3", "pandas>=1.4.3,<2", - "pandavro~=1.5.0", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. + # For some reason pandavro higher than 1.5.* only support pandas less than 1.3. + "pandavro~=1.5.0", "protobuf<5,>3.20", "proto-plus>=1.20.0,<2", "pyarrow>=4,<12", @@ -71,7 +72,8 @@ "uvicorn[standard]>=0.14.0,<1", "dask>=2021.1.0", "bowler", # Needed for automatic repo upgrades - "httpx>=0.23.3", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). + # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). 
+ "httpx>=0.23.3", ] GCP_REQUIRED = [ @@ -313,7 +315,6 @@ def run(self): file.write(filedata) - class BuildCommand(build_py): """Custom build command.""" From be3e3491d83e337af42e06f75226919904cb5d86 Mon Sep 17 00:00:00 2001 From: Miles Adkins <82121043+sfc-gh-madkins@users.noreply.github.com> Date: Fri, 21 Apr 2023 14:50:56 -0500 Subject: [PATCH 28/31] feat: Update snowflake offline store job output formats -- added arrow (#3589) Signed-off-by: Miles Adkins --- docs/reference/offline-stores/overview.md | 6 +- docs/reference/offline-stores/snowflake.md | 4 +- .../feast/infra/offline_stores/snowflake.py | 129 +++++++++++------- 3 files changed, 84 insertions(+), 55 deletions(-) diff --git a/docs/reference/offline-stores/overview.md b/docs/reference/offline-stores/overview.md index 10f99813bac..b760a8a6172 100644 --- a/docs/reference/offline-stores/overview.md +++ b/docs/reference/offline-stores/overview.md @@ -46,11 +46,11 @@ Below is a matrix indicating which `RetrievalJob`s support what functionality. | --------------------------------- | --- | --- | --- | --- | --- | --- | --- | | export to dataframe | yes | yes | yes | yes | yes | yes | yes | | export to arrow table | yes | yes | yes | yes | yes | yes | yes | -| export to arrow batches | no | no | no | yes | no | no | no | -| export to SQL | no | yes | no | yes | yes | no | yes | +| export to arrow batches | no | no | yes | yes | no | no | no | +| export to SQL | no | yes | yes | yes | yes | no | yes | | export to data lake (S3, GCS, etc.) 
| no | no | yes | no | yes | no | no | | export to data warehouse | no | yes | yes | yes | yes | no | no | -| export as Spark dataframe | no | no | no | no | no | yes | no | +| export as Spark dataframe | no | no | yes | no | no | yes | no | | local execution of Python-based on-demand transforms | yes | yes | yes | yes | yes | no | yes | | remote execution of Python-based on-demand transforms | no | no | no | no | no | no | no | | persist results in the offline store | yes | yes | yes | yes | yes | yes | no | diff --git a/docs/reference/offline-stores/snowflake.md b/docs/reference/offline-stores/snowflake.md index 77a144c5c47..a3782024a1e 100644 --- a/docs/reference/offline-stores/snowflake.md +++ b/docs/reference/offline-stores/snowflake.md @@ -53,11 +53,11 @@ Below is a matrix indicating which functionality is supported by `SnowflakeRetri | ----------------------------------------------------- | --------- | | export to dataframe | yes | | export to arrow table | yes | -| export to arrow batches | no | +| export to arrow batches | yes | | export to SQL | yes | | export to data lake (S3, GCS, etc.) 
| yes | | export to data warehouse | yes | -| export as Spark dataframe | no | +| export as Spark dataframe | yes | | local execution of Python-based on-demand transforms | yes | | remote execution of Python-based on-demand transforms | no | | persist results in the offline store | yes | diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 1463ea9cc83..4cb525fc6e0 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -436,52 +436,85 @@ def on_demand_feature_views(self) -> List[OnDemandFeatureView]: return self._on_demand_feature_views def _to_df_internal(self, timeout: Optional[int] = None) -> pd.DataFrame: - with self._query_generator() as query: - - df = execute_snowflake_statement( - self.snowflake_conn, query - ).fetch_pandas_all() + df = execute_snowflake_statement( + self.snowflake_conn, self.to_sql() + ).fetch_pandas_all() return df def _to_arrow_internal(self, timeout: Optional[int] = None) -> pyarrow.Table: - with self._query_generator() as query: + pa_table = execute_snowflake_statement( + self.snowflake_conn, self.to_sql() + ).fetch_arrow_all() - pa_table = execute_snowflake_statement( - self.snowflake_conn, query - ).fetch_arrow_all() + if pa_table: + return pa_table + else: + empty_result = execute_snowflake_statement( + self.snowflake_conn, self.to_sql() + ) - if pa_table: - return pa_table - else: - empty_result = execute_snowflake_statement(self.snowflake_conn, query) + return pyarrow.Table.from_pandas( + pd.DataFrame(columns=[md.name for md in empty_result.description]) + ) - return pyarrow.Table.from_pandas( - pd.DataFrame(columns=[md.name for md in empty_result.description]) - ) + def to_sql(self) -> str: + """ + Returns the SQL query that will be executed in Snowflake to build the historical feature table. 
+ """ + with self._query_generator() as query: + return query - def to_snowflake(self, table_name: str, temporary=False) -> None: + def to_snowflake( + self, table_name: str, allow_overwrite: bool = False, temporary: bool = False + ) -> None: """Save dataset as a new Snowflake table""" if self.on_demand_feature_views: transformed_df = self.to_df() + if allow_overwrite: + query = f'DROP TABLE IF EXISTS "{table_name}"' + execute_snowflake_statement(self.snowflake_conn, query) + write_pandas( - self.snowflake_conn, transformed_df, table_name, auto_create_table=True + self.snowflake_conn, + transformed_df, + table_name, + auto_create_table=True, + create_temp_table=temporary, ) - return None + else: + query = f'CREATE {"OR REPLACE" if allow_overwrite else ""} {"TEMPORARY" if temporary else ""} TABLE {"IF NOT EXISTS" if not allow_overwrite else ""} "{table_name}" AS ({self.to_sql()});\n' + execute_snowflake_statement(self.snowflake_conn, query) - with self._query_generator() as query: - query = f'CREATE {"TEMPORARY" if temporary else ""} TABLE IF NOT EXISTS "{table_name}" AS ({query});\n' + return None - execute_snowflake_statement(self.snowflake_conn, query) + def to_arrow_batches(self) -> Iterator[pyarrow.Table]: - def to_sql(self) -> str: - """ - Returns the SQL query that will be executed in Snowflake to build the historical feature table. 
- """ - with self._query_generator() as query: - return query + table_name = "temp_arrow_batches_" + uuid.uuid4().hex + + self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True) + + query = f'SELECT * FROM "{table_name}"' + arrow_batches = execute_snowflake_statement( + self.snowflake_conn, query + ).fetch_arrow_batches() + + return arrow_batches + + def to_pandas_batches(self) -> Iterator[pd.DataFrame]: + + table_name = "temp_pandas_batches_" + uuid.uuid4().hex + + self.to_snowflake(table_name=table_name, allow_overwrite=True, temporary=True) + + query = f'SELECT * FROM "{table_name}"' + arrow_batches = execute_snowflake_statement( + self.snowflake_conn, query + ).fetch_pandas_batches() + + return arrow_batches def to_spark_df(self, spark_session: "SparkSession") -> "DataFrame": """ @@ -502,37 +535,33 @@ def to_spark_df(self, spark_session: "SparkSession") -> "DataFrame": raise FeastExtrasDependencyImportError("spark", str(e)) if isinstance(spark_session, SparkSession): - with self._query_generator() as query: - - arrow_batches = execute_snowflake_statement( - self.snowflake_conn, query - ).fetch_arrow_batches() - - if arrow_batches: - spark_df = reduce( - DataFrame.unionAll, - [ - spark_session.createDataFrame(batch.to_pandas()) - for batch in arrow_batches - ], - ) - - return spark_df - - else: - raise EntitySQLEmptyResults(query) - + arrow_batches = self.to_arrow_batches() + + if arrow_batches: + spark_df = reduce( + DataFrame.unionAll, + [ + spark_session.createDataFrame(batch.to_pandas()) + for batch in arrow_batches + ], + ) + return spark_df + else: + raise EntitySQLEmptyResults(self.to_sql()) else: raise InvalidSparkSessionException(spark_session) def persist( self, storage: SavedDatasetStorage, - allow_overwrite: Optional[bool] = False, + allow_overwrite: bool = False, timeout: Optional[int] = None, ): assert isinstance(storage, SavedDatasetSnowflakeStorage) - self.to_snowflake(table_name=storage.snowflake_options.table) + + 
self.to_snowflake( + table_name=storage.snowflake_options.table, allow_overwrite=allow_overwrite + ) @property def metadata(self) -> Optional[RetrievalMetadata]: From 3ee0b9ee825171e9adaa79ba326ba5628911125d Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Fri, 21 Apr 2023 14:10:48 -0700 Subject: [PATCH 29/31] ci: Add dry run capability to web UI release (#3605) Add skip web ui flag Signed-off-by: Felix Wang --- .github/workflows/release.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ad66deaaf25..37a09c57363 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -116,6 +116,7 @@ jobs: run: yarn build:lib - name: Publish UI package working-directory: ./ui + if: github.event.inputs.dry_run == 'false' run: npm publish env: # This publish is working using an NPM automation token to bypass 2FA From 61a3be62f2499ffcb606f6831f1a9aace41ef467 Mon Sep 17 00:00:00 2001 From: Felix Wang Date: Fri, 21 Apr 2023 14:58:12 -0700 Subject: [PATCH 30/31] ci: Move `publish_ui` flag to correct location (#3606) Move `publish_ui` flag to correct location Signed-off-by: Felix Wang --- .github/workflows/release.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 37a09c57363..da16c5f8f1c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -92,7 +92,6 @@ jobs: publish-web-ui-npm: - if: github.repository == 'feast-dev/feast' && github.event.inputs.publish_ui == 'true' needs: [validate_version_bumps, get_dry_release_versions] runs-on: ubuntu-latest env: @@ -116,7 +115,7 @@ jobs: run: yarn build:lib - name: Publish UI package working-directory: ./ui - if: github.event.inputs.dry_run == 'false' + if: github.event.inputs.dry_run == 'false' && github.event.inputs.publish_ui == 'true' run: npm publish env: # This publish is working using an NPM automation token to bypass 2FA From 
902f23f5403c601238aa9f4fcbdd1359f004fb1c Mon Sep 17 00:00:00 2001 From: feast-ci-bot Date: Fri, 21 Apr 2023 22:07:40 +0000 Subject: [PATCH 31/31] chore(release): release 0.31.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit # [0.31.0](https://github.com/feast-dev/feast/compare/v0.30.0...v0.31.0) (2023-04-21) ### Bug Fixes * Add Stream Feature Views to helper that collect Feature View names ([#3582](https://github.com/feast-dev/feast/issues/3582)) ([7854f63](https://github.com/feast-dev/feast/commit/7854f637160d4d1f4758b83e6c396fe49447e7b7)) * Add StreamFeatureViewSpec to FeastObjectSpecProto convenience type ([#3550](https://github.com/feast-dev/feast/issues/3550)) ([3cefd6c](https://github.com/feast-dev/feast/commit/3cefd6cf806997be4ea8427bcf4aa9852d6ce038)) * Batch Snowflake materialization queries to obey Snowpark 100 fea… ([#3406](https://github.com/feast-dev/feast/issues/3406)) ([f9862b5](https://github.com/feast-dev/feast/commit/f9862b565b6c9019ec146871d2fb45590eb31576)) * Bytewax materializer security context ([#3573](https://github.com/feast-dev/feast/issues/3573)) ([6794338](https://github.com/feast-dev/feast/commit/6794338d0c9405a5a9ba7ef2b47de98cd905474e)) * **cI:** Install coreutils in mac github workers for smoke test ([#3563](https://github.com/feast-dev/feast/issues/3563)) ([e7421c1](https://github.com/feast-dev/feast/commit/e7421c11172aaafff34da98fc14cf763c2d70002)) * Fix bug with no SqlRegistryConfig class ([#3586](https://github.com/feast-dev/feast/issues/3586)) ([6dc1368](https://github.com/feast-dev/feast/commit/6dc1368afb66a4231b7513939a7cbf204ab4d46f)) * Fix Snowflake template ([#3584](https://github.com/feast-dev/feast/issues/3584)) ([6c09c39](https://github.com/feast-dev/feast/commit/6c09c39b64e31dc6e84be566524d6126683f3013)) * Make snowflake to remote tables temporary ([#3588](https://github.com/feast-dev/feast/issues/3588)) 
([ad48146](https://github.com/feast-dev/feast/commit/ad4814643abd28d5b2e119b8ef46ddfdce77424a)) * Remove snowflake source warehouse tech debt ([#3422](https://github.com/feast-dev/feast/issues/3422)) ([7da0580](https://github.com/feast-dev/feast/commit/7da058085cd1211fb383ff0a6c5ae8f59999c5f0)) * Snowflake remote storage ([#3574](https://github.com/feast-dev/feast/issues/3574)) ([f8d3890](https://github.com/feast-dev/feast/commit/f8d3890f9f049c4b9190456b071e0fdb29aae69e)) * Support param timeout when persisting ([#3593](https://github.com/feast-dev/feast/issues/3593)) ([01a98f0](https://github.com/feast-dev/feast/commit/01a98f08e9e6d0aebf41188f2644f49111ea4ca9)) * Use pyarrow in a way that works across versions ([#3562](https://github.com/feast-dev/feast/issues/3562)) ([1289f3f](https://github.com/feast-dev/feast/commit/1289f3f7eea6bd3b08617606862a75f0224f9f18)) * Wrap the bigquery table name with backtick. ([#3577](https://github.com/feast-dev/feast/issues/3577)) ([09f0e7e](https://github.com/feast-dev/feast/commit/09f0e7e1011fc451b3bfb94c4b7764007fc69836)) ### Features * Add AWS Redshift Serverless support ([#3595](https://github.com/feast-dev/feast/issues/3595)) ([58ce148](https://github.com/feast-dev/feast/commit/58ce148401fe578b1727bc42ee6b4b9a558660c7)) * Add Hazelcast as an online store ([#3523](https://github.com/feast-dev/feast/issues/3523)) ([b05d50b](https://github.com/feast-dev/feast/commit/b05d50bcfeb179c2596f96f0d0a714754c516361)) * Cache Bigtable client ([#3602](https://github.com/feast-dev/feast/issues/3602)) ([b27472f](https://github.com/feast-dev/feast/commit/b27472fc1fb42368ffe1556c848dc3b21b2fca0c)) * Relax aws extras requirements ([#3585](https://github.com/feast-dev/feast/issues/3585)) ([7e77382](https://github.com/feast-dev/feast/commit/7e77382c6b75f514e18b683fef1495fa1fa87308)) * Show bigquery datasource table and query on UI ([#3600](https://github.com/feast-dev/feast/issues/3600)) 
([58d63f7](https://github.com/feast-dev/feast/commit/58d63f7e6b1dde3dcd8893e4448940ea34e671cf)) * Update snowflake offline store job output formats -- added arrow ([#3589](https://github.com/feast-dev/feast/issues/3589)) ([be3e349](https://github.com/feast-dev/feast/commit/be3e3491d83e337af42e06f75226919904cb5d86)) --- CHANGELOG.md | 29 +++++++++++++++++++ infra/charts/feast-feature-server/Chart.yaml | 2 +- infra/charts/feast-feature-server/README.md | 4 +-- infra/charts/feast-feature-server/values.yaml | 2 +- infra/charts/feast/Chart.yaml | 2 +- infra/charts/feast/README.md | 6 ++-- .../feast/charts/feature-server/Chart.yaml | 4 +-- .../feast/charts/feature-server/README.md | 4 +-- .../feast/charts/feature-server/values.yaml | 2 +- .../charts/transformation-service/Chart.yaml | 4 +-- .../charts/transformation-service/README.md | 4 +-- .../charts/transformation-service/values.yaml | 2 +- infra/charts/feast/requirements.yaml | 4 +-- java/pom.xml | 2 +- sdk/python/feast/ui/package.json | 2 +- sdk/python/feast/ui/yarn.lock | 8 ++--- ui/package.json | 2 +- 17 files changed, 56 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc3ebe2457b..ad30d312ee4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +# [0.31.0](https://github.com/feast-dev/feast/compare/v0.30.0...v0.31.0) (2023-04-21) + + +### Bug Fixes + +* Add Stream Feature Views to helper that collect Feature View names ([#3582](https://github.com/feast-dev/feast/issues/3582)) ([7854f63](https://github.com/feast-dev/feast/commit/7854f637160d4d1f4758b83e6c396fe49447e7b7)) +* Add StreamFeatureViewSpec to FeastObjectSpecProto convenience type ([#3550](https://github.com/feast-dev/feast/issues/3550)) ([3cefd6c](https://github.com/feast-dev/feast/commit/3cefd6cf806997be4ea8427bcf4aa9852d6ce038)) +* Batch Snowflake materialization queries to obey Snowpark 100 fea… ([#3406](https://github.com/feast-dev/feast/issues/3406)) 
([f9862b5](https://github.com/feast-dev/feast/commit/f9862b565b6c9019ec146871d2fb45590eb31576)) +* Bytewax materializer security context ([#3573](https://github.com/feast-dev/feast/issues/3573)) ([6794338](https://github.com/feast-dev/feast/commit/6794338d0c9405a5a9ba7ef2b47de98cd905474e)) +* **cI:** Install coreutils in mac github workers for smoke test ([#3563](https://github.com/feast-dev/feast/issues/3563)) ([e7421c1](https://github.com/feast-dev/feast/commit/e7421c11172aaafff34da98fc14cf763c2d70002)) +* Fix bug with no SqlRegistryConfig class ([#3586](https://github.com/feast-dev/feast/issues/3586)) ([6dc1368](https://github.com/feast-dev/feast/commit/6dc1368afb66a4231b7513939a7cbf204ab4d46f)) +* Fix Snowflake template ([#3584](https://github.com/feast-dev/feast/issues/3584)) ([6c09c39](https://github.com/feast-dev/feast/commit/6c09c39b64e31dc6e84be566524d6126683f3013)) +* Make snowflake to remote tables temporary ([#3588](https://github.com/feast-dev/feast/issues/3588)) ([ad48146](https://github.com/feast-dev/feast/commit/ad4814643abd28d5b2e119b8ef46ddfdce77424a)) +* Remove snowflake source warehouse tech debt ([#3422](https://github.com/feast-dev/feast/issues/3422)) ([7da0580](https://github.com/feast-dev/feast/commit/7da058085cd1211fb383ff0a6c5ae8f59999c5f0)) +* Snowflake remote storage ([#3574](https://github.com/feast-dev/feast/issues/3574)) ([f8d3890](https://github.com/feast-dev/feast/commit/f8d3890f9f049c4b9190456b071e0fdb29aae69e)) +* Support param timeout when persisting ([#3593](https://github.com/feast-dev/feast/issues/3593)) ([01a98f0](https://github.com/feast-dev/feast/commit/01a98f08e9e6d0aebf41188f2644f49111ea4ca9)) +* Use pyarrow in a way that works across versions ([#3562](https://github.com/feast-dev/feast/issues/3562)) ([1289f3f](https://github.com/feast-dev/feast/commit/1289f3f7eea6bd3b08617606862a75f0224f9f18)) +* Wrap the bigquery table name with backtick. 
([#3577](https://github.com/feast-dev/feast/issues/3577)) ([09f0e7e](https://github.com/feast-dev/feast/commit/09f0e7e1011fc451b3bfb94c4b7764007fc69836)) + + +### Features + +* Add AWS Redshift Serverless support ([#3595](https://github.com/feast-dev/feast/issues/3595)) ([58ce148](https://github.com/feast-dev/feast/commit/58ce148401fe578b1727bc42ee6b4b9a558660c7)) +* Add Hazelcast as an online store ([#3523](https://github.com/feast-dev/feast/issues/3523)) ([b05d50b](https://github.com/feast-dev/feast/commit/b05d50bcfeb179c2596f96f0d0a714754c516361)) +* Cache Bigtable client ([#3602](https://github.com/feast-dev/feast/issues/3602)) ([b27472f](https://github.com/feast-dev/feast/commit/b27472fc1fb42368ffe1556c848dc3b21b2fca0c)) +* Relax aws extras requirements ([#3585](https://github.com/feast-dev/feast/issues/3585)) ([7e77382](https://github.com/feast-dev/feast/commit/7e77382c6b75f514e18b683fef1495fa1fa87308)) +* Show bigquery datasource table and query on UI ([#3600](https://github.com/feast-dev/feast/issues/3600)) ([58d63f7](https://github.com/feast-dev/feast/commit/58d63f7e6b1dde3dcd8893e4448940ea34e671cf)) +* Update snowflake offline store job output formats -- added arrow ([#3589](https://github.com/feast-dev/feast/issues/3589)) ([be3e349](https://github.com/feast-dev/feast/commit/be3e3491d83e337af42e06f75226919904cb5d86)) + # [0.30.0](https://github.com/feast-dev/feast/compare/v0.29.0...v0.30.0) (2023-03-24) diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml index bdd0d6b0eb2..65dea258e09 100644 --- a/infra/charts/feast-feature-server/Chart.yaml +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-feature-server description: Feast Feature Server in Go or Python type: application -version: 0.30.0 +version: 0.31.0 keywords: - machine learning - big data diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md index 
63d103075db..e3515b10f8f 100644 --- a/infra/charts/feast-feature-server/README.md +++ b/infra/charts/feast-feature-server/README.md @@ -1,6 +1,6 @@ # Feast Python / Go Feature Server Helm Charts -Current chart version is `0.30.0` +Current chart version is `0.31.0` ## Installation @@ -30,7 +30,7 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d | fullnameOverride | string | `""` | | | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"feastdev/feature-server"` | Docker image for Feature Server repository | -| image.tag | string | `"0.30.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | +| image.tag | string | `"0.31.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | | imagePullSecrets | list | `[]` | | | livenessProbe.initialDelaySeconds | int | `30` | | | livenessProbe.periodSeconds | int | `30` | | diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 56d4f724e31..8a66052861c 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -9,7 +9,7 @@ image: repository: feastdev/feature-server pullPolicy: IfNotPresent # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) - tag: 0.30.0 + tag: 0.31.0 imagePullSecrets: [] nameOverride: "" diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index 99efef8723d..0daded67ed6 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.30.0 +version: 0.31.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index 36a6561c45a..17ba86b1d1a 100644 --- 
a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast Java components that are being installe ## Chart: Feast -Feature store for machine learning Current chart version is `0.30.0` +Feature store for machine learning Current chart version is `0.31.0` ## Installation @@ -65,8 +65,8 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/java-demo) fo | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.30.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.30.0 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.31.0 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.31.0 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index 0d7100b545e..78b83bb4f23 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: feature-server -version: 0.30.0 -appVersion: v0.30.0 +version: 0.31.0 +appVersion: v0.31.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 3b1df0e9396..237ed7b67ac 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.30.0](https://img.shields.io/badge/Version-0.30.0-informational?style=flat-square) ![AppVersion: v0.30.0](https://img.shields.io/badge/AppVersion-v0.30.0-informational?style=flat-square) 
+![Version: 0.31.0](https://img.shields.io/badge/Version-0.31.0-informational?style=flat-square) ![AppVersion: v0.31.0](https://img.shields.io/badge/AppVersion-v0.31.0-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.30.0"` | Image tag | +| image.tag | string | `"0.31.0"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index 45eba9a96d1..054729f4784 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: feastdev/feature-server-java # image.tag -- Image tag - tag: 0.30.0 + tag: 0.31.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 59ef9e5b7cc..ecb02682f13 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.30.0 -appVersion: v0.30.0 +version: 0.31.0 +appVersion: v0.31.0 keywords: - machine learning - big data diff 
--git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 96d2a1cc284..64ebff9e9d4 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.30.0](https://img.shields.io/badge/Version-0.30.0-informational?style=flat-square) ![AppVersion: v0.30.0](https://img.shields.io/badge/AppVersion-v0.30.0-informational?style=flat-square) +![Version: 0.31.0](https://img.shields.io/badge/Version-0.31.0-informational?style=flat-square) ![AppVersion: v0.31.0](https://img.shields.io/badge/AppVersion-v0.31.0-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.30.0"` | Image tag | +| image.tag | string | `"0.31.0"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 2178f0dece2..ecd9221eebb 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.30.0 + tag: 0.31.0 # image.pullPolicy -- Image pull policy pullPolicy: 
IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 34f3c885422..4d611f6fc8f 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.30.0 + version: 0.31.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.30.0 + version: 0.31.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/java/pom.xml b/java/pom.xml index b7e8b99dea5..d4523524c99 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -35,7 +35,7 @@ - 0.30.0 + 0.31.0 https://github.com/feast-dev/feast UTF-8 diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json index 75eebe275b8..547e21b690e 100644 --- a/sdk/python/feast/ui/package.json +++ b/sdk/python/feast/ui/package.json @@ -6,7 +6,7 @@ "@elastic/datemath": "^5.0.3", "@elastic/eui": "^55.0.1", "@emotion/react": "^11.9.0", - "@feast-dev/feast-ui": "0.30.0", + "@feast-dev/feast-ui": "0.30.2", "@testing-library/jest-dom": "^5.16.4", "@testing-library/react": "^13.2.0", "@testing-library/user-event": "^13.5.0", diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock index b43ef7d0871..b375ef68813 100644 --- a/sdk/python/feast/ui/yarn.lock +++ b/sdk/python/feast/ui/yarn.lock @@ -1300,10 +1300,10 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@feast-dev/feast-ui@0.30.0": - version "0.30.0" - resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.30.0.tgz#6c68b243d65f8a3a1df029a39f4c382d17a4b272" - integrity sha512-o6YOAhSAHS8nCTZOB8ZTflM8HzJSWMngx4Ruy2EpO7vpMfjoHZu6OnV+ezX7GGqkpsxpTKxykqDNm0M8rtTKPw== +"@feast-dev/feast-ui@0.30.2": + version "0.30.2" + resolved 
"https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.30.2.tgz#867db94daba3dcb7d91f767bf104e489174dd666" + integrity sha512-qfsJYQb9eGaTLuk5tqi24nCE6YaP8SU8uf6ukENjj5Y8yRJhVy6IeZrIl/qpUsvAQd1PmmX0ZDr/zndD4QQsPA== dependencies: "@elastic/datemath" "^5.0.3" "@elastic/eui" "^55.0.1" diff --git a/ui/package.json b/ui/package.json index f5d27a2f2b1..298efeffab7 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.30.0", + "version": "0.31.0", "private": false, "files": [ "dist"