From a1259b6c6e9e80294032e2e7f057b1d2218a1cd5 Mon Sep 17 00:00:00 2001
From: Michal Chrobok
Date: Wed, 18 Jun 2025 13:09:15 +0200
Subject: [PATCH 01/23] Modified SDK client to use backend endpoints

---
 local/local_test.py               |  98 ++++++
 poetry.lock                       | 565 ++++++++++++++++++++++++------
 pyproject.toml                    |   6 +-
 src/cvec/__init__.py              |   4 +-
 src/cvec/cvec.py                  | 256 +++++++-------
 src/cvec/metric.py                |  31 --
 src/cvec/models/__init__.py       |   4 +
 src/cvec/models/metric.py         |  38 ++
 src/cvec/models/span.py           |  17 +
 src/cvec/span.py                  |  42 ---
 src/cvec/utils/__init__.py        |   3 +
 src/cvec/utils/arrow_converter.py |  89 +++++
 12 files changed, 837 insertions(+), 316 deletions(-)
 create mode 100644 local/local_test.py
 delete mode 100644 src/cvec/metric.py
 create mode 100644 src/cvec/models/__init__.py
 create mode 100644 src/cvec/models/metric.py
 create mode 100644 src/cvec/models/span.py
 delete mode 100644 src/cvec/span.py
 create mode 100644 src/cvec/utils/__init__.py
 create mode 100644 src/cvec/utils/arrow_converter.py

diff --git a/local/local_test.py b/local/local_test.py
new file mode 100644
index 0000000..6e871b7
--- /dev/null
+++ b/local/local_test.py
@@ -0,0 +1,98 @@
+from datetime import datetime, timedelta, timezone
+from cvec import CVec
+from cvec.models.metric import MetricDataPoint
+import random
+
+def test_cvec():
+    # Initialize CVec client
+    cvec = CVec(
+        host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io",  # Replace with your API host
+        tenant="test",  # Replace with your tenant
+        api_key="your-api-key",  # Replace with your API key
+    )
+
+    # Set default time range for queries
+    end_at = datetime.now(timezone.utc)
+    start_at = end_at - timedelta(hours=1)
+
+    test_metric_name = "python-sdk/test"
+
+    # # Example 1: Get available metrics
+    # print("\nGetting available metrics...")
+    # metrics = cvec.get_metrics()
+    # print(f"Found {len(metrics)} metrics")
+    # for metric in metrics[:5]:  # Print first 5 metrics
+    #     print(f"- {metric.name}")
+
+    # # Example 2: Get metric data as DataFrame (using JSON)
+    # print("\nGetting metric data as DataFrame (JSON)...")
+    # df_json = cvec.get_metric_dataframe(names=[test_metric_name], use_arrow=False)
+    # print(f"DataFrame shape: {df_json.shape}")
+    # print("\nFirst few rows:")
+    # print(df_json.head())
+
+    # # Example 3: Get metric data as DataFrame (using Arrow)
+    # print("\nGetting metric data as DataFrame (Arrow)...")
+    # df_arrow = cvec.get_metric_dataframe(names=[test_metric_name], use_arrow=True)
+    # print(f"DataFrame shape: {df_arrow.shape}")
+    # print("\nFirst few rows:")
+    # print(df_arrow.head())
+
+    # # Example 4: Get metric data as objects
+    # print("\nGetting metric data as objects...")
+    # data_points = cvec.get_metric_data(names=[test_metric_name], use_arrow=True)
+    # print(f"Found {len(data_points)} data points")
+    # for point in data_points[:3]:  # Print first 3 data points
+    #     print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}")
+
+    # # Example 5: Get spans for a specific metric
+    # if metrics:
+    #     metric_name = metrics[0].name
+    #     print(f"\nGetting spans for metric '{metric_name}'...")
+    #     spans = cvec.get_spans(metric_name, limit=5)
+    #     print(f"Found {len(spans)} spans")
+    #     for span in spans:
+    #         print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}")
+
+    # Example 6: Add new metric data
+    print("\nAdding new metric data...")
+    new_data = [
+        MetricDataPoint(
+            name=test_metric_name,
+            time=datetime.now(timezone.utc),
+            value_double=random.random() * 100.0,
+            value_string=None,
+        ),
+        MetricDataPoint(
+            name=test_metric_name,
+            time=datetime.now(timezone.utc),
+            value_double=random.random() * 100.0,
+            value_string=None,
+        ),
+    ]
+    cvec.add_metric_data(new_data, use_arrow=False)
+    print("Data added successfully")
+
+    # Example 7: Add new metric data using Arrow
+    print("\nAdding new metric data using Arrow...")
+    new_data = [
+        MetricDataPoint(
+            name=test_metric_name,
+            time=datetime.now(timezone.utc),
+            value_double=random.random() * 100.0,
+            value_string=None,
+        ),
+        MetricDataPoint(
+            name=test_metric_name,
+            time=datetime.now(timezone.utc),
+            value_double=random.random() * 100.0,
+            value_string=None,
+        ),
+    ]
+    cvec.add_metric_data(new_data, use_arrow=True)
+    print("Data added successfully")
+
+
+
+if __name__ == "__main__":
+    test_cvec()
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 2cd0855..924da4d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,131 @@
 # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
 
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
+[[package]]
+name = "certifi"
+version = "2025.6.15"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057"},
+    {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.2"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = 
"charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = 
"sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -32,6 +158,21 @@ typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.1.0" @@ -112,67 +253,48 @@ files = [ [[package]] name = "numpy" -version = "2.2.5" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.10" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"}, - {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"}, - {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"}, - {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"}, - {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"}, - {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = 
"sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"}, - {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"}, - {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"}, - {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"}, - {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"}, - {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"}, - {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"}, - {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"}, - {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"}, - {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"}, - {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"}, - {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"}, - {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"}, - {file = "numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"}, - {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"}, - {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"}, - {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"}, - {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"}, - {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"}, - {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"}, - {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"}, - {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"}, - {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"}, - {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"}, - {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"}, - {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"}, - {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -189,54 +311,54 @@ files = [ [[package]] name = "pandas" -version = "2.2.3" +version = "2.3.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = 
"pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, + {file = "pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634"}, + {file = "pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675"}, + {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2"}, + {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e"}, + {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1"}, + {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6"}, + {file = "pandas-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:40cecc4ea5abd2921682b57532baea5588cc5f80f0231c624056b146887274d2"}, + {file = "pandas-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8adff9f138fc614347ff33812046787f7d43b3cef7c0f0171b3340cae333f6ca"}, + {file = "pandas-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5f08eb9a445d07720776df6e641975665c9ea12c9d8a331e0f6890f2dcd76ef"}, + {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d"}, + {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46"}, + {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33"}, + {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c"}, + {file = "pandas-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa07e138b3f6c04addfeaf56cc7fdb96c3b68a3fe5e5401251f231fce40a0d7a"}, + {file = "pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf"}, + {file = "pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027"}, + {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09"}, + {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d"}, + {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20"}, + {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b"}, + {file = "pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be"}, + {file = "pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983"}, + {file = "pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd"}, + {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f"}, + {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3"}, + {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8"}, + {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9"}, + {file = "pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390"}, + {file = "pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575"}, + {file = "pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042"}, + {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c"}, + {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67"}, + {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f"}, + {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249"}, + {file = "pandas-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9efc0acbbffb5236fbdf0409c04edce96bec4bdaa649d49985427bd1ec73e085"}, + {file = "pandas-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75651c14fde635e680496148a8526b328e09fe0572d9ae9b638648c46a544ba3"}, + {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14"}, + {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324"}, + {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34"}, + {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb"}, + {file = "pandas-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b198687ca9c8529662213538a9bb1e60fa0bf0f6af89292eb68fea28743fcd5a"}, + {file = "pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133"}, ] [package.dependencies] @@ -307,28 +429,187 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "psycopg" -version = "3.2.9" -description = "PostgreSQL database adapter for Python" +name = "pyarrow" +version = "15.0.2" +description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6"}, - {file = "psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, + {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, + {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, + {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, + {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, + {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, + {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, + {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, + 
{file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, + {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, + {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, + {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, + {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, + {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, + {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, + {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, + {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, + {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pydantic" +version = "2.11.7" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, ] [package.dependencies] -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} -tzdata = {version = "*", markers = "sys_platform == \"win32\""} +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" 
+typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] -binary = ["psycopg-binary (==3.2.9) ; implementation_name != \"pypy\""] -c = ["psycopg-c (==3.2.9) ; implementation_name != \"pypy\""] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "types-shapely (>=2.0)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = 
"sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pytest" @@ -380,6 +661,28 @@ files = [ {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] +[[package]] +name = "requests" +version = "2.32.4" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "ruff" version = "0.11.10" @@ -486,7 +789,21 @@ files = [ {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] -markers = {main = "python_version < \"3.13\""} + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" [[package]] name = "tzdata" @@ -500,7 +817,25 @@ files = [ {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [metadata] lock-version = "2.1" python-versions = ">=3.10" -content-hash = "88c626506f796301947928a0229fda5f5656dffdd6d3e79a8426a85643e65002" +content-hash = "ddf669df11b5f3c5c8824be51339b0aa79338aa7299f32ac7f1e64bb282b381f" diff --git a/pyproject.toml b/pyproject.toml index 8a34276..35d01e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,8 +8,10 @@ authors = [ readme = "README.md" requires-python = ">=3.10" dependencies = [ - "pandas (>=2.2.3,<3.0.0)", - "psycopg (>=3.1.0,<4.0.0)" # Assuming a recent version of psycopg3 + "requests (>=2.32.3,<3.0.0)", + "pydantic>=2.7.0,<3.0.0", + "pyarrow>=15.0.0,<16.0.0", + "pandas>=2.2.0,<3.0.0" ] [tool.poetry] diff --git a/src/cvec/__init__.py b/src/cvec/__init__.py index 30c8e17..e25fa13 100644 --- a/src/cvec/__init__.py +++ b/src/cvec/__init__.py @@ -1,5 +1,3 @@ from .cvec import CVec -from .span import Span -from .metric import Metric -__all__ = ["CVec", "Span", "Metric"] +__all__ = ["CVec"] diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 062a143..07f15d6 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -1,12 +1,14 @@ import os from datetime import datetime from typing import Any, List, Optional +from 
urllib.parse import urljoin import pandas as pd -import psycopg +import requests -from .span import Span -from .metric import Metric +from cvec.models.metric import Metric, MetricDataPoint +from cvec.models.span import Span +from cvec.utils.arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow, arrow_to_dataframe class CVec: @@ -53,14 +55,37 @@ def __init__( "CVEC_API_KEY must be set either as an argument or environment variable" ) - def _get_db_connection(self) -> psycopg.Connection: - """Helper method to establish a database connection.""" - return psycopg.connect( - user=self.tenant, - password=self.api_key, - host=self.host, - dbname=self.tenant, + def _get_headers(self) -> dict[str, str]: + """Helper method to get request headers.""" + return { + "Authorization": f"Bearer {self.api_key}", + "X-Tenant": self.tenant, + "Content-Type": "application/json", + } + + def _make_request( + self, method: str, endpoint: str, params: Optional[dict] = None, json: Optional[dict] = None, + data: Optional[bytes] = None, headers: Optional[dict] = None + ) -> Any: + """Helper method to make HTTP requests.""" + url = urljoin(self.host, endpoint) + request_headers = self._get_headers() + if headers: + request_headers.update(headers) + + response = requests.request( + method=method, + url=url, + headers=request_headers, + params=params, + json=json, + data=data, ) + response.raise_for_status() + + if response.headers.get('content-type') == 'application/vnd.apache.arrow.stream': + return response.content + return response.json() def get_spans( self, @@ -93,102 +118,91 @@ def get_spans( _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at - with self._get_db_connection() as conn: - with conn.cursor() as cur: - query_params = { - "metric": name, - "start_at": _start_at, - "end_at": _end_at, - # Fetch up to 'limit' points. If limit is None, then the `LIMIT NULL` clause - # has no effect (in PostgreSQL). - "limit": limit, - } - - combined_query = """ - SELECT - time, - value_double, - value_string - FROM metric_data - WHERE metric = %(metric)s - AND (time >= %(start_at)s OR %(start_at)s IS NULL) - AND (time < %(end_at)s OR %(end_at)s IS NULL) - ORDER BY time DESC - LIMIT %(limit)s - """ - cur.execute(combined_query, query_params) - db_rows = cur.fetchall() - spans = [] - - # None indicates that the end time is not known; the span extends beyond - # the query period. - raw_end_at = None - for time, value_double, value_string in db_rows: - raw_start_at = time - value = value_double if value_double is not None else value_string - spans.append( - Span( - id=None, - name=name, - value=value, - raw_start_at=raw_start_at, - raw_end_at=raw_end_at, - metadata=None, - ) - ) - raw_end_at = raw_start_at - - return spans + params = { + "start_at": _start_at.isoformat() if _start_at else None, + "end_at": _end_at.isoformat() if _end_at else None, + "limit": limit, + } + + response_data = self._make_request("GET", f"/api/metrics/spans/{name}", params=params) + return [Span.model_validate(span_data) for span_data in response_data] def get_metric_data( self, names: Optional[List[str]] = None, start_at: Optional[datetime] = None, end_at: Optional[datetime] = None, - ) -> pd.DataFrame: + use_arrow: bool = False, + ) -> List[MetricDataPoint]: """ Return all data-points within a given [start_at, end_at) interval, optionally selecting a given list of metric names. - The return value is a Pandas DataFrame with four columns: name, time, value_double, value_string. 
- One row is returned for each metric value transition. + Returns a list of MetricDataPoint objects, one for each metric value transition. + + Args: + names: Optional list of metric names to filter by + start_at: Optional start time for the query + end_at: Optional end time for the query + use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at params = { - "start_at": _start_at, - "end_at": _end_at, - "tag_names_is_null": names is None, - # Pass an empty tuple if names is None or empty, otherwise the tuple of names. - # ANY(%(empty_tuple)s) will correctly result in no matches if names is empty. - # If names is None, the tag_names_is_null condition handles it. - "tag_names_list": names if names else [], + "start_at": _start_at.isoformat() if _start_at else None, + "end_at": _end_at.isoformat() if _end_at else None, + "names": ",".join(names) if names else None, } - sql_query = """ - SELECT metric AS name, time, value_double, value_string - FROM metric_data - WHERE (time >= %(start_at)s OR %(start_at)s IS NULL) - AND (time < %(end_at)s OR %(end_at)s IS NULL) - AND (%(tag_names_is_null)s IS TRUE OR metric = ANY(%(tag_names_list)s)) - ORDER BY name, time ASC + endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + response_data = self._make_request("GET", endpoint, params=params) + + if use_arrow: + return arrow_to_metric_data_points(response_data) + return [MetricDataPoint.model_validate(point_data) for point_data in response_data] + + def get_metric_dataframe( + self, + names: Optional[List[str]] = None, + start_at: Optional[datetime] = None, + end_at: Optional[datetime] = None, + use_arrow: bool = False, + ) -> pd.DataFrame: + """ + Return all data-points within a given [start_at, end_at) interval, + optionally selecting a given list of metric names. + The return value is a Pandas DataFrame with four columns: name, time, value_double, value_string. + One row is returned for each metric value transition. + + Args: + names: Optional list of metric names to filter by + start_at: Optional start time for the query + end_at: Optional end time for the query + use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ + _start_at = start_at or self.default_start_at + _end_at = end_at or self.default_end_at - with self._get_db_connection() as conn: - with conn.cursor() as cur: - cur.execute(sql_query, params) - rows = cur.fetchall() + params = { + "start_at": _start_at.isoformat() if _start_at else None, + "end_at": _end_at.isoformat() if _end_at else None, + "names": ",".join(names) if names else None, + } - if not rows: + endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + response_data = self._make_request("GET", endpoint, params=params) + + if not response_data: return pd.DataFrame( columns=["name", "time", "value_double", "value_string"] ) + + if use_arrow: + return arrow_to_dataframe(response_data) - # Create DataFrame from fetched rows - df = pd.DataFrame( - rows, columns=["name", "time", "value_double", "value_string"] - ) - + # Create DataFrame from response data + df = pd.DataFrame(response_data) + # Return the DataFrame with the required columns return df[["name", "time", "value_double", "value_string"]] @@ -199,44 +213,40 @@ def get_metrics( Return a list of metrics that had at least one transition in the given [start_at, end_at) interval. 
All metrics are returned if no start_at and end_at are given. """ - sql_query: str - params: Optional[dict[str, Any]] - - if start_at is None and end_at is None: - # No time interval specified by arguments, return all tags - sql_query = """ - SELECT id, normalized_name AS name, birth_at, death_at - FROM tag_names - ORDER BY name ASC; - """ - params = None - else: - # Time interval specified, find tags with transitions in the interval - _start_at = start_at or self.default_start_at - _end_at = end_at or self.default_end_at - - params = {"start_at_param": _start_at, "end_at_param": _end_at} - sql_query = f""" - SELECT DISTINCT metric_id AS id, metric AS name, birth_at, death_at - FROM {self.tenant}.metric_data - WHERE (time >= %(start_at_param)s OR %(start_at_param)s IS NULL) - AND (time < %(end_at_param)s OR %(end_at_param)s IS NULL) - ORDER BY name ASC; - """ - - with self._get_db_connection() as conn: - with conn.cursor() as cur: - cur.execute(sql_query, params) - rows = cur.fetchall() - - # Format rows into list of Metric objects - metrics_list = [ - Metric( - id=row[0], - name=row[1], - birth_at=row[2], - death_at=row[3], + _start_at = start_at or self.default_start_at + _end_at = end_at or self.default_end_at + + params = { + "start_at": _start_at.isoformat() if _start_at else None, + "end_at": _end_at.isoformat() if _end_at else None, + } + + response_data = self._make_request("GET", "/api/metrics", params=params) + return [Metric.model_validate(metric_data) for metric_data in response_data] + + def add_metric_data( + self, + data_points: List[MetricDataPoint], + use_arrow: bool = False, + ) -> None: + """ + Add multiple metric data points to the database. + + Args: + data_points: List of MetricDataPoint objects to add + use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) + """ + endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + + if use_arrow: + arrow_data = metric_data_points_to_arrow(data_points) + self._make_request( + "POST", + endpoint, + data=arrow_data, + headers={"Content-Type": "application/vnd.apache.arrow.stream"} ) - for row in rows - ] - return metrics_list + else: + data_dicts = [point.model_dump(mode='json') for point in data_points] + print(data_dicts) + self._make_request("POST", endpoint, json=data_dicts) diff --git a/src/cvec/metric.py b/src/cvec/metric.py deleted file mode 100644 index 5a392b8..0000000 --- a/src/cvec/metric.py +++ /dev/null @@ -1,31 +0,0 @@ -from datetime import datetime -from typing import Optional - - -class Metric: - """ - Represents metadata for a metric. 
- """ - - id: int - name: str - birth_at: Optional[datetime] - death_at: Optional[datetime] - - def __init__( - self, - id: int, - name: str, - birth_at: Optional[datetime], - death_at: Optional[datetime], - ): - self.id = id - self.name = name - self.birth_at = birth_at - self.death_at = death_at - - def __repr__(self) -> str: - return ( - f"Metric(id={self.id!r}, name={self.name!r}, " - f"birth_at={self.birth_at!r}, death_at={self.death_at!r})" - ) diff --git a/src/cvec/models/__init__.py b/src/cvec/models/__init__.py new file mode 100644 index 0000000..c69c7c0 --- /dev/null +++ b/src/cvec/models/__init__.py @@ -0,0 +1,4 @@ +from .metric import Metric, MetricDataPoint +from .span import Span + +__all__ = ["Metric", "MetricDataPoint", "Span"] \ No newline at end of file diff --git a/src/cvec/models/metric.py b/src/cvec/models/metric.py new file mode 100644 index 0000000..559339b --- /dev/null +++ b/src/cvec/models/metric.py @@ -0,0 +1,38 @@ +from datetime import datetime +from typing import Optional + +from pydantic import BaseModel, ConfigDict + + +class MetricDataPoint(BaseModel): + """ + Represents a single data point for a metric. + """ + + name: str + time: datetime + value_double: Optional[float] = None + value_string: Optional[str] = None + + model_config = ConfigDict( + json_encoders={ + datetime: lambda dt: dt.isoformat() + } + ) + + +class Metric(BaseModel): + """ + Represents metadata for a metric. + """ + + id: int + name: str + birth_at: Optional[datetime] = None + death_at: Optional[datetime] = None + + model_config = ConfigDict( + json_encoders={ + datetime: lambda dt: dt.isoformat() + } + ) diff --git a/src/cvec/models/span.py b/src/cvec/models/span.py new file mode 100644 index 0000000..bdff552 --- /dev/null +++ b/src/cvec/models/span.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import Any, Optional, Union + +from pydantic import BaseModel + + +class Span(BaseModel): + """ + Represents a time span where a metric has a constant value. + """ + + id: Optional[Any] = None + name: str + value: Optional[Union[float, str]] + raw_start_at: datetime + raw_end_at: Optional[datetime] + metadata: Optional[Any] = None diff --git a/src/cvec/span.py b/src/cvec/span.py deleted file mode 100644 index 55078bc..0000000 --- a/src/cvec/span.py +++ /dev/null @@ -1,42 +0,0 @@ -from datetime import datetime -from typing import Any, Optional, Union - - -class Span: - """ - Represents a time span where a metric has a constant value. 
- """ - - id: Optional[Any] - name: str - value: Optional[Union[float, str]] - raw_start_at: datetime - raw_end_at: Optional[datetime] - metadata: Optional[Any] - - def __init__( - self, - id: Optional[Any], - name: str, - value: Optional[Union[float, str]], - raw_start_at: datetime, - raw_end_at: Optional[datetime], - metadata: Optional[Any], - ): - self.id = id - self.name = name - self.value = value - self.raw_start_at = raw_start_at - self.raw_end_at = raw_end_at - self.metadata = metadata - - def __repr__(self) -> str: - raw_start_at_repr = ( - self.raw_start_at.isoformat() if self.raw_start_at else "None" - ) - raw_end_at_repr = self.raw_end_at.isoformat() if self.raw_end_at else "None" - return ( - f"Span(id={self.id!r}, name={self.name!r}, value={self.value!r}, " - f"raw_start_at={raw_start_at_repr}, raw_end_at={raw_end_at_repr}, " - f"metadata={self.metadata!r})" - ) diff --git a/src/cvec/utils/__init__.py b/src/cvec/utils/__init__.py new file mode 100644 index 0000000..65e2c34 --- /dev/null +++ b/src/cvec/utils/__init__.py @@ -0,0 +1,3 @@ +from .arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow, arrow_to_dataframe + +__all__ = ["arrow_to_metric_data_points", "metric_data_points_to_arrow", "arrow_to_dataframe"] \ No newline at end of file diff --git a/src/cvec/utils/arrow_converter.py b/src/cvec/utils/arrow_converter.py new file mode 100644 index 0000000..cd486ea --- /dev/null +++ b/src/cvec/utils/arrow_converter.py @@ -0,0 +1,89 @@ +import io +from typing import List + +import pyarrow as pa +import pyarrow.ipc as ipc +import pandas as pd + +from cvec.models.metric import MetricDataPoint + + +def metric_data_points_to_arrow(data_points: List[MetricDataPoint]) -> bytes: + """ + Convert metric data points to Arrow format. + + Args: + data_points: List of MetricDataPoint objects to convert + + Returns: + bytes: Arrow IPC format data + """ + # Create arrays for each field + names = [point.name for point in data_points] + times = [point.time for point in data_points] + value_doubles = [point.value_double for point in data_points] + value_strings = [point.value_string for point in data_points] + + # Create Arrow arrays + names_array = pa.array(names) + times_array = pa.array(times, type=pa.timestamp('us', tz='UTC')) + value_doubles_array = pa.array(value_doubles) + value_strings_array = pa.array(value_strings) + + # Create Arrow table + table = pa.table({ + 'name': names_array, + 'time': times_array, + 'value_double': value_doubles_array, + 'value_string': value_strings_array + }) + + # Convert to Arrow IPC format + sink = pa.BufferOutputStream() + with ipc.new_file(sink, table.schema) as writer: + writer.write_table(table) + return sink.getvalue().to_pybytes() + + +def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: + """ + Convert Arrow format to metric data points. + + Args: + arrow_data: Arrow IPC format data + + Returns: + List[MetricDataPoint]: List of converted metric data points + """ + # Read Arrow data + reader = ipc.open_file(io.BytesIO(arrow_data)) + table = reader.read_all() + + # Convert to list of MetricDataPoint + data_points = [] + for i in range(len(table)): + data_points.append(MetricDataPoint( + name=table['name'][i].as_py(), + time=table['time'][i].as_py(), + value_double=table['value_double'][i].as_py(), + value_string=table['value_string'][i].as_py() + )) + return data_points + + +def arrow_to_dataframe(arrow_data: bytes) -> pd.DataFrame: + """ + Convert Arrow format directly to a pandas DataFrame. 
+ + Args: + arrow_data: Arrow IPC format data + + Returns: + pd.DataFrame: DataFrame with columns [name, time, value_double, value_string] + """ + # Read Arrow data + reader = ipc.open_file(io.BytesIO(arrow_data)) + table = reader.read_all() + + # Convert directly to DataFrame + return table.to_pandas() \ No newline at end of file From 313cdb87f424599258634ea23d32db842a98aaa7 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Wed, 18 Jun 2025 13:32:37 +0200 Subject: [PATCH 02/23] Removed debug print --- src/cvec/cvec.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 07f15d6..e87c2cd 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -248,5 +248,4 @@ def add_metric_data( ) else: data_dicts = [point.model_dump(mode='json') for point in data_points] - print(data_dicts) self._make_request("POST", endpoint, json=data_dicts) From 94d7e0fc28a4d1bcc1c29bb70f40025e68c6aee7 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Wed, 18 Jun 2025 17:23:41 +0200 Subject: [PATCH 03/23] Removed pandas dependency --- local/local_test.py | 75 ++++--- poetry.lock | 329 +++++++++--------------------- pyproject.toml | 4 +- src/cvec/cvec.py | 30 +-- src/cvec/utils/__init__.py | 4 +- src/cvec/utils/arrow_converter.py | 21 +- 6 files changed, 142 insertions(+), 321 deletions(-) diff --git a/local/local_test.py b/local/local_test.py index 6e871b7..3741158 100644 --- a/local/local_test.py +++ b/local/local_test.py @@ -1,4 +1,6 @@ from datetime import datetime, timedelta, timezone +import io +import pyarrow.ipc as ipc from cvec import CVec from cvec.models.metric import MetricDataPoint import random @@ -11,50 +13,45 @@ def test_cvec(): api_key="your-api-key", # Replace with your API key ) - # Set default time range for queries - end_at = datetime.utcnow() - start_at = end_at - timedelta(hours=1) - test_metric_name = "python-sdk/test" - # # Example 1: Get available metrics - # print("\nGetting available metrics...") - # metrics = cvec.get_metrics() - # print(f"Found {len(metrics)} metrics") - # for metric in metrics: # Print first 5 metrics - # print(f"- {metric.name}") - - # # Example 2: Get metric data as DataFrame (using JSON) - # print("\nGetting metric data as DataFrame (JSON)...") - # df_json = cvec.get_metric_dataframe(names=[test_metric_name], use_arrow=False) - # print(f"DataFrame shape: {df_json.shape}") - # print("\nFirst few rows:") - # print(df_json.head()) + # Example 1: Get available metrics + print("\nGetting available metrics...") + metrics = cvec.get_metrics() + print(f"Found {len(metrics)} metrics") + for metric in metrics: # Print first 5 metrics + print(f"- {metric.name}") - # # Example 3: Get metric data as DataFrame (using Arrow) - # print("\nGetting metric data as DataFrame (Arrow)...") - # df_arrow = cvec.get_metric_dataframe(names=[test_metric_name], use_arrow=True) - # print(f"DataFrame shape: {df_arrow.shape}") - # print("\nFirst few rows:") - # print(df_arrow.head()) + # Example 2: Get metric data as Arrow + print("\nGetting metric data as Arrow...") + arrow_data = cvec.get_metric_arrow(names=[test_metric_name]) + + # Read the Arrow data + reader = ipc.open_file(io.BytesIO(arrow_data)) + table = reader.read_all() + + print(f"Arrow table shape: {len(table)} rows") + print("\nFirst few rows:") + for i in range(min(5, len(table))): + print(f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}") - # # Example 4: Get metric data as objects - # print("\nGetting metric 
data as objects...") - # data_points = cvec.get_metric_data(names=[test_metric_name], use_arrow=True) - # print(f"Found {len(data_points)} data points") - # for point in data_points[:3]: # Print first 3 data points - # print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}") + # Example 3: Get metric data as objects + print("\nGetting metric data as objects...") + data_points = cvec.get_metric_data(names=[test_metric_name]) + print(f"Found {len(data_points)} data points") + for point in data_points[:3]: # Print first 3 data points + print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}") - # # Example 5: Get spans for a specific metric - # if metrics: - # metric_name = metrics[0].name - # print(f"\nGetting spans for metric '{metric_name}'...") - # spans = cvec.get_spans(metric_name, limit=5) - # print(f"Found {len(spans)} spans") - # for span in spans: - # print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") + # Example 4: Get spans for a specific metric + if metrics: + metric_name = metrics[0].name + print(f"\nGetting spans for metric '{metric_name}'...") + spans = cvec.get_spans(metric_name, limit=5) + print(f"Found {len(spans)} spans") + for span in spans: + print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") - # Example 6: Add new metric data + # Example 5: Add new metric data print("\nAdding new metric data...") new_data = [ MetricDataPoint( @@ -73,7 +70,7 @@ def test_cvec(): cvec.add_metric_data(new_data, use_arrow=False) print("Data added successfully") - # Example 7: Add new metric data + # Example 6: Add new metric data using Arrow print("\nAdding new metric data using Arrow...") new_data = [ MetricDataPoint( diff --git a/poetry.lock b/poetry.lock index 924da4d..58b87ce 100644 --- a/poetry.lock +++ b/poetry.lock @@ -187,48 +187,49 @@ files = [ [[package]] name = "mypy" -version = "1.15.0" +version = "1.16.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = 
"mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, + {file = 
"mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"}, + {file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"}, + {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"}, + {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"}, + {file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"}, + {file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"}, + {file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"}, + {file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"}, + {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"}, + {file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"}, + {file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"}, + {file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"}, + {file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"}, + {file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"}, + {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"}, + {file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"}, + {file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"}, + {file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"}, + {file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"}, + {file = "mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"}, + {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"}, + {file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"}, + {file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", 
hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"}, + {file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"}, + {file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"}, + {file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"}, + {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"}, + {file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"}, + {file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"}, + {file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"}, + {file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"}, + {file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" @@ -257,7 +258,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -310,123 +311,32 @@ files = [ ] [[package]] -name = "pandas" -version = "2.3.0" -description = "Powerful data structures for data analysis, time series, and statistics" +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634"}, - {file = "pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675"}, - {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2"}, - {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e"}, - {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1"}, - {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6"}, - {file = "pandas-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:40cecc4ea5abd2921682b57532baea5588cc5f80f0231c624056b146887274d2"}, - {file = "pandas-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8adff9f138fc614347ff33812046787f7d43b3cef7c0f0171b3340cae333f6ca"}, - {file = "pandas-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5f08eb9a445d07720776df6e641975665c9ea12c9d8a331e0f6890f2dcd76ef"}, - {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d"}, - {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46"}, - {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33"}, - {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c"}, - {file = "pandas-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa07e138b3f6c04addfeaf56cc7fdb96c3b68a3fe5e5401251f231fce40a0d7a"}, - {file = "pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf"}, - {file = "pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027"}, - {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09"}, - {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d"}, - {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20"}, - {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b"}, - {file = "pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be"}, - {file = "pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983"}, - {file = "pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd"}, - {file = 
"pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f"}, - {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3"}, - {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8"}, - {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9"}, - {file = "pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390"}, - {file = "pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575"}, - {file = "pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042"}, - {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c"}, - {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67"}, - {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f"}, - {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249"}, - {file = "pandas-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9efc0acbbffb5236fbdf0409c04edce96bec4bdaa649d49985427bd1ec73e085"}, - {file = "pandas-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75651c14fde635e680496148a8526b328e09fe0572d9ae9b638648c46a544ba3"}, - {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14"}, - {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324"}, - {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34"}, - {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb"}, - {file = "pandas-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b198687ca9c8529662213538a9bb1e60fa0bf0f6af89292eb68fea28743fcd5a"}, - {file = "pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", 
"numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandas-stubs" -version = "2.2.3.250308" -description = "Type annotations for pandas" -optional = false -python-versions = ">=3.10" +python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pandas_stubs-2.2.3.250308-py3-none-any.whl", hash = "sha256:a377edff3b61f8b268c82499fdbe7c00fdeed13235b8b71d6a1dc347aeddc74d"}, - {file = "pandas_stubs-2.2.3.250308.tar.gz", hash = "sha256:3a6e9daf161f00b85c83772ed3d5cff9522028f07a94817472c07b91f46710fd"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[package.dependencies] -numpy = ">=1.23.5" -types-pytz = ">=2022.1.1" - [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pyarrow" @@ -612,54 +522,43 @@ files = [ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] -name = "pytest" -version = "8.3.5" 
-description = "pytest: simple powerful testing with Python" +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" +name = "pytest" +version = "8.4.1" +description = "pytest: simple powerful testing with Python" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -six = ">=1.5" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} -[[package]] -name = "pytz" -version = "2025.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, -] +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "requests" @@ -685,42 +584,30 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.11.10" +version = "0.11.13" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"}, - {file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"}, - {file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"}, - {file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"}, - {file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"}, - {file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"}, - {file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"}, - {file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"}, - {file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"}, - {file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"}, - {file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"}, - {file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"}, -] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main"] -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, + {file = "ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46"}, + {file = "ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48"}, + {file = "ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492"}, + {file = "ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250"}, + {file = "ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3"}, + {file = "ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b"}, + {file = "ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514"}, ] [[package]] @@ -766,28 +653,16 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[[package]] -name = "types-pytz" -version = "2025.2.0.20250326" -description = "Typing stubs for pytz" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "types_pytz-2025.2.0.20250326-py3-none-any.whl", hash = "sha256:3c397fd1b845cd2b3adc9398607764ced9e578a98a5d1fbb4a9bc9253edfb162"}, - {file = "types_pytz-2025.2.0.20250326.tar.gz", hash = "sha256:deda02de24f527066fc8d6a19e284ab3f3ae716a42b4adb6b40e75e408c08d36"}, -] - [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = 
"typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] [[package]] @@ -805,28 +680,16 @@ files = [ [package.dependencies] typing-extensions = ">=4.12.0" -[[package]] -name = "tzdata" -version = "2025.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -groups = ["main"] -files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, -] - [[package]] name = "urllib3" -version = "2.4.0" +version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, - {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] @@ -838,4 +701,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.1" python-versions = ">=3.10" -content-hash = "ddf669df11b5f3c5c8824be51339b0aa79338aa7299f32ac7f1e64bb282b381f" +content-hash = "d92cb048b51985bc2bf1909d1c0965e65fd945b5655d9fc77ae832792df5a1cb" diff --git a/pyproject.toml b/pyproject.toml index 35d01e5..0bc3bcf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,8 +10,7 @@ requires-python = ">=3.10" dependencies = [ "requests (>=2.32.3,<3.0.0)", "pydantic>=2.7.0,<3.0.0", - "pyarrow>=15.0.0,<16.0.0", - "pandas>=2.2.0,<3.0.0" + "pyarrow>=15.0.0,<16.0.0" ] [tool.poetry] @@ -21,7 +20,6 @@ packages = [{include = "cvec", from = "src"}] [tool.poetry.group.dev.dependencies] pytest = "^8.3.5" mypy = "^1.15.0" -pandas-stubs = "^2.2.3.250308" ruff = "^0.11.10" [build-system] diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index e87c2cd..7f6b720 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -3,12 +3,11 @@ from typing import Any, List, Optional from urllib.parse import urljoin -import pandas as pd import requests from cvec.models.metric import Metric, MetricDataPoint from cvec.models.span import Span -from cvec.utils.arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow, arrow_to_dataframe +from cvec.utils.arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow class CVec: @@ -161,24 +160,21 @@ def get_metric_data( return arrow_to_metric_data_points(response_data) return [MetricDataPoint.model_validate(point_data) for point_data in response_data] - def get_metric_dataframe( + def get_metric_arrow( self, names: Optional[List[str]] = None, start_at: Optional[datetime] = None, end_at: Optional[datetime] = None, - use_arrow: bool = False, - ) -> pd.DataFrame: + ) -> bytes: """ Return all data-points within a given [start_at, end_at) interval, optionally selecting a given list of metric names. - The return value is a Pandas DataFrame with four columns: name, time, value_double, value_string. 
- One row is returned for each metric value transition. + Returns Arrow IPC format data that can be read using pyarrow.ipc.open_file. Args: names: Optional list of metric names to filter by start_at: Optional start time for the query end_at: Optional end time for the query - use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at @@ -189,22 +185,8 @@ def get_metric_dataframe( "names": ",".join(names) if names else None, } - endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" - response_data = self._make_request("GET", endpoint, params=params) - - if not response_data: - return pd.DataFrame( - columns=["name", "time", "value_double", "value_string"] - ) - - if use_arrow: - return arrow_to_dataframe(response_data) - - # Create DataFrame from response data - df = pd.DataFrame(response_data) - - # Return the DataFrame with the required columns - return df[["name", "time", "value_double", "value_string"]] + endpoint = "/api/metrics/data/arrow" + return self._make_request("GET", endpoint, params=params) def get_metrics( self, start_at: Optional[datetime] = None, end_at: Optional[datetime] = None diff --git a/src/cvec/utils/__init__.py b/src/cvec/utils/__init__.py index 65e2c34..f929365 100644 --- a/src/cvec/utils/__init__.py +++ b/src/cvec/utils/__init__.py @@ -1,3 +1,3 @@ -from .arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow, arrow_to_dataframe +from .arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow -__all__ = ["arrow_to_metric_data_points", "metric_data_points_to_arrow", "arrow_to_dataframe"] \ No newline at end of file +__all__ = ["arrow_to_metric_data_points", "metric_data_points_to_arrow"] \ No newline at end of file diff --git a/src/cvec/utils/arrow_converter.py b/src/cvec/utils/arrow_converter.py index cd486ea..5f4b784 100644 --- a/src/cvec/utils/arrow_converter.py +++ b/src/cvec/utils/arrow_converter.py @@ -3,7 +3,6 @@ import pyarrow as pa import pyarrow.ipc as ipc -import pandas as pd from cvec.models.metric import MetricDataPoint @@ -68,22 +67,4 @@ def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: value_double=table['value_double'][i].as_py(), value_string=table['value_string'][i].as_py() )) - return data_points - - -def arrow_to_dataframe(arrow_data: bytes) -> pd.DataFrame: - """ - Convert Arrow format directly to a pandas DataFrame. 
- - Args: - arrow_data: Arrow IPC format data - - Returns: - pd.DataFrame: DataFrame with columns [name, time, value_double, value_string] - """ - # Read Arrow data - reader = ipc.open_file(io.BytesIO(arrow_data)) - table = reader.read_all() - - # Convert directly to DataFrame - return table.to_pandas() \ No newline at end of file + return data_points \ No newline at end of file From 4c7f47dc8737e4f790a54896f1c7b277e16e3979 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Wed, 18 Jun 2025 17:26:30 +0200 Subject: [PATCH 04/23] Updated pyarrow to 19.0.1 --- poetry.lock | 134 +++++++++++++++++-------------------------------- pyproject.toml | 2 +- 2 files changed, 48 insertions(+), 88 deletions(-) diff --git a/poetry.lock b/poetry.lock index 58b87ce..4414d06 100644 --- a/poetry.lock +++ b/poetry.lock @@ -252,52 +252,6 @@ files = [ {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - [[package]] name = "packaging" version = "25.0" @@ -340,52 +294,58 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "pyarrow" -version = "15.0.2" +version = "19.0.1" description = "Python library for Apache Arrow" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, - {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, - {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, - {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, - {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, - {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, - {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, - {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, - {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, - {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, + {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:fc28912a2dc924dddc2087679cc8b7263accc71b9ff025a1362b004711661a69"}, + {file = "pyarrow-19.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fca15aabbe9b8355800d923cc2e82c8ef514af321e18b437c3d782aa884eaeec"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad76aef7f5f7e4a757fddcdcf010a8290958f09e3470ea458c80d26f4316ae89"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03c9d6f2a3dffbd62671ca070f13fc527bb1867b4ec2b98c7eeed381d4f389a"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:65cf9feebab489b19cdfcfe4aa82f62147218558d8d3f0fc1e9dea0ab8e7905a"}, + {file = "pyarrow-19.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:41f9706fbe505e0abc10e84bf3a906a1338905cbbcf1177b71486b03e6ea6608"}, + {file = "pyarrow-19.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6cb2335a411b713fdf1e82a752162f72d4a7b5dbc588e32aa18383318b05866"}, + {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc55d71898ea30dc95900297d191377caba257612f384207fe9f8293b5850f90"}, + {file = "pyarrow-19.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:7a544ec12de66769612b2d6988c36adc96fb9767ecc8ee0a4d270b10b1c51e00"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0148bb4fc158bfbc3d6dfe5001d93ebeed253793fff4435167f6ce1dc4bddeae"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f24faab6ed18f216a37870d8c5623f9c044566d75ec586ef884e13a02a9d62c5"}, + {file = "pyarrow-19.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:4982f8e2b7afd6dae8608d70ba5bd91699077323f812a0448d8b7abdff6cb5d3"}, + {file = 
"pyarrow-19.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:49a3aecb62c1be1d822f8bf629226d4a96418228a42f5b40835c1f10d42e4db6"}, + {file = "pyarrow-19.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:008a4009efdb4ea3d2e18f05cd31f9d43c388aad29c636112c2966605ba33466"}, + {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b"}, + {file = "pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6"}, + {file = "pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832"}, + {file = "pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960"}, + {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c"}, + {file = "pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6"}, + {file = "pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136"}, + {file = "pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef"}, + {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0"}, + {file = "pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a"}, + {file = "pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8"}, + {file = "pyarrow-19.0.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b9766a47a9cb56fefe95cb27f535038b5a195707a08bf61b180e642324963b46"}, + {file = 
"pyarrow-19.0.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:6c5941c1aac89a6c2f2b16cd64fe76bcdb94b2b1e99ca6459de4e6f07638d755"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd44d66093a239358d07c42a91eebf5015aa54fccba959db899f932218ac9cc8"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:335d170e050bcc7da867a1ed8ffb8b44c57aaa6e0843b156a501298657b1e972"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:1c7556165bd38cf0cd992df2636f8bcdd2d4b26916c6b7e646101aff3c16f76f"}, + {file = "pyarrow-19.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:699799f9c80bebcf1da0983ba86d7f289c5a2a5c04b945e2f2bcf7e874a91911"}, + {file = "pyarrow-19.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8464c9fbe6d94a7fe1599e7e8965f350fd233532868232ab2596a71586c5a429"}, + {file = "pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e"}, ] -[package.dependencies] -numpy = ">=1.16.6,<2" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] [[package]] name = "pydantic" @@ -701,4 +661,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.1" python-versions = ">=3.10" -content-hash = "d92cb048b51985bc2bf1909d1c0965e65fd945b5655d9fc77ae832792df5a1cb" +content-hash = "b286a33795fe9f764f5da9cc1b9569bd5100251997017f8e85ebc67261707f59" diff --git a/pyproject.toml b/pyproject.toml index 0bc3bcf..0f2a119 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ requires-python = ">=3.10" dependencies = [ "requests (>=2.32.3,<3.0.0)", "pydantic>=2.7.0,<3.0.0", - "pyarrow>=15.0.0,<16.0.0" + "pyarrow>=18.0.0,<20.0.0" ] [tool.poetry] From 87a6cec357c78172e9979457db5481bb66deef41 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 10:32:11 +0200 Subject: [PATCH 05/23] Split examples into multiple scripts --- examples/add_metric_data_arrow_example.py | 32 +++++++ examples/add_metric_data_example.py | 32 +++++++ examples/get_metric_arrow_example.py | 22 +++++ examples/get_metric_data_objects_example.py | 17 ++++ examples/get_metrics_example.py | 16 ++++ examples/get_spans_example.py | 21 +++++ local/local_test.py | 95 --------------------- 7 files changed, 140 insertions(+), 95 deletions(-) create mode 100644 examples/add_metric_data_arrow_example.py create mode 100644 examples/add_metric_data_example.py create mode 100644 examples/get_metric_arrow_example.py create mode 100644 examples/get_metric_data_objects_example.py create mode 100644 examples/get_metrics_example.py create mode 100644 examples/get_spans_example.py delete mode 100644 local/local_test.py diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py new file mode 100644 index 0000000..caa12b8 --- /dev/null +++ b/examples/add_metric_data_arrow_example.py @@ -0,0 +1,32 @@ +from cvec import CVec +from cvec.models.metric import MetricDataPoint +from datetime import datetime, timezone +import random + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", + tenant="test", + api_key="your-api-key", + ) + test_metric_name = "python-sdk/test" + print("\nAdding new metric data using Arrow...") + new_data = [ + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=random.random() * 100.0, + value_string=None, + ), + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + 
value_double=random.random() * 100.0, + value_string=None, + ), + ] + cvec.add_metric_data(new_data, use_arrow=True) + print("Data added successfully") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py new file mode 100644 index 0000000..cd79f0f --- /dev/null +++ b/examples/add_metric_data_example.py @@ -0,0 +1,32 @@ +from cvec import CVec +from cvec.models.metric import MetricDataPoint +from datetime import datetime, timezone +import random + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", + tenant="test", + api_key="your-api-key", + ) + test_metric_name = "python-sdk/test" + print("\nAdding new metric data...") + new_data = [ + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=random.random() * 100.0, + value_string=None, + ), + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=random.random() * 100.0, + value_string=None, + ), + ] + cvec.add_metric_data(new_data, use_arrow=False) + print("Data added successfully") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py new file mode 100644 index 0000000..ca83f65 --- /dev/null +++ b/examples/get_metric_arrow_example.py @@ -0,0 +1,22 @@ +from cvec import CVec +import io +import pyarrow.ipc as ipc + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", + tenant="test", + api_key="your-api-key", + ) + test_metric_name = "python-sdk/test" + print("\nGetting metric data as Arrow...") + arrow_data = cvec.get_metric_arrow(names=[test_metric_name]) + reader = ipc.open_file(io.BytesIO(arrow_data)) + table = reader.read_all() + print(f"Arrow table shape: {len(table)} rows") + print("\nFirst few rows:") + for i in range(min(5, len(table))): + print(f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py new file mode 100644 index 0000000..3f93752 --- /dev/null +++ b/examples/get_metric_data_objects_example.py @@ -0,0 +1,17 @@ +from cvec import CVec + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", + tenant="test", + api_key="your-api-key", + ) + test_metric_name = "python-sdk/test" + print("\nGetting metric data as objects...") + data_points = cvec.get_metric_data(names=[test_metric_name]) + print(f"Found {len(data_points)} data points") + for point in data_points[:3]: + print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py new file mode 100644 index 0000000..151515e --- /dev/null +++ b/examples/get_metrics_example.py @@ -0,0 +1,16 @@ +from cvec import CVec + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", # Replace with your API host + tenant="test", # Replace with your tenant + api_key="your-api-key", # Replace with your API key + ) + print("\nGetting available metrics...") + metrics = cvec.get_metrics() + print(f"Found {len(metrics)} metrics") + for 
metric in metrics: + print(f"- {metric.name}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py new file mode 100644 index 0000000..fad32cc --- /dev/null +++ b/examples/get_spans_example.py @@ -0,0 +1,21 @@ +from cvec import CVec + +def main(): + cvec = CVec( + host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", + tenant="test", + api_key="your-api-key", + ) + metrics = cvec.get_metrics() + if metrics: + metric_name = "python-sdk/test" + print(f"\nGetting spans for metric '{metric_name}'...") + spans = cvec.get_spans(metric_name, limit=5) + print(f"Found {len(spans)} spans") + for span in spans: + print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") + else: + print("No metrics found to get spans.") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/local/local_test.py b/local/local_test.py deleted file mode 100644 index 3741158..0000000 --- a/local/local_test.py +++ /dev/null @@ -1,95 +0,0 @@ -from datetime import datetime, timedelta, timezone -import io -import pyarrow.ipc as ipc -from cvec import CVec -from cvec.models.metric import MetricDataPoint -import random - -def test_cvec(): - # Initialize CVec client - cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", # Replace with your API host - tenant="test", # Replace with your tenant - api_key="your-api-key", # Replace with your API key - ) - - test_metric_name = "python-sdk/test" - - # Example 1: Get available metrics - print("\nGetting available metrics...") - metrics = cvec.get_metrics() - print(f"Found {len(metrics)} metrics") - for metric in metrics: # Print first 5 metrics - print(f"- {metric.name}") - - # Example 2: Get metric data as Arrow - print("\nGetting metric data as Arrow...") - arrow_data = cvec.get_metric_arrow(names=[test_metric_name]) - - # Read the Arrow data - reader = ipc.open_file(io.BytesIO(arrow_data)) - table = reader.read_all() - - print(f"Arrow table shape: {len(table)} rows") - print("\nFirst few rows:") - for i in range(min(5, len(table))): - print(f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}") - - # Example 3: Get metric data as objects - print("\nGetting metric data as objects...") - data_points = cvec.get_metric_data(names=[test_metric_name]) - print(f"Found {len(data_points)} data points") - for point in data_points[:3]: # Print first 3 data points - print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}") - - # Example 4: Get spans for a specific metric - if metrics: - metric_name = metrics[0].name - print(f"\nGetting spans for metric '{metric_name}'...") - spans = cvec.get_spans(metric_name, limit=5) - print(f"Found {len(spans)} spans") - for span in spans: - print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") - - # Example 5: Add new metric data - print("\nAdding new metric data...") - new_data = [ - MetricDataPoint( - name=test_metric_name, - time=datetime.now(timezone.utc), - value_double=random.random() * 100.0, - value_string=None, - ), - MetricDataPoint( - name=test_metric_name, - time=datetime.now(timezone.utc), - value_double=random.random() * 100.0, - value_string=None, - ), - ] - cvec.add_metric_data(new_data, use_arrow=False) - print("Data added successfully") - - # Example 6: Add new metric data using Arrow - print("\nAdding new metric data using Arrow...") - 
new_data = [ - MetricDataPoint( - name=test_metric_name, - time=datetime.now(timezone.utc), - value_double=random.random() * 100.0, - value_string=None, - ), - MetricDataPoint( - name=test_metric_name, - time=datetime.now(timezone.utc), - value_double=random.random() * 100.0, - value_string=None, - ), - ] - cvec.add_metric_data(new_data, use_arrow=True) - print("Data added successfully") - - - -if __name__ == "__main__": - test_cvec() \ No newline at end of file From 95ddad955b351db0a27e042cd738242abc22c0d5 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 15:46:07 +0200 Subject: [PATCH 06/23] Fixed unit tests --- tests/test_cvec.py | 408 +++++++++++---------------------------------- 1 file changed, 95 insertions(+), 313 deletions(-) diff --git a/tests/test_cvec.py b/tests/test_cvec.py index eb31985..ababc8e 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -2,10 +2,11 @@ import os from unittest.mock import patch, MagicMock from datetime import datetime -import pandas as pd -import numpy as np -from pandas.testing import assert_frame_equal -from cvec import CVec, Metric +from cvec import CVec +from cvec.models.metric import Metric +import pyarrow as pa +import pyarrow.ipc as ipc +import io class TestCVecConstructor: @@ -115,350 +116,131 @@ def test_constructor_args_override_env_vars(self) -> None: class TestCVecGetSpans: - @patch("cvec.cvec.psycopg.connect") - def test_get_spans_basic_case(self, mock_connect: MagicMock) -> None: - """Test get_spans with a few data points.""" - # Setup mock connection and cursor - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - # Sample data (time, value_double, value_string) - newest first - time1 = datetime(2023, 1, 1, 10, 0, 0) - time2 = datetime(2023, 1, 1, 11, 0, 0) - time3 = datetime(2023, 1, 1, 12, 0, 0) - mock_db_rows = [ - (time3, 30.0, None), # Newest - (time2, None, "val2"), - (time1, 10.0, None), # Oldest + def test_get_spans_basic_case(self): + # Simulate backend response + response_data = [ + {"name": "test_tag", "value": 30.0, "raw_start_at": datetime(2023, 1, 1, 12, 0, 0), "raw_end_at": None}, + {"name": "test_tag", "value": "val2", "raw_start_at": datetime(2023, 1, 1, 11, 0, 0), "raw_end_at": datetime(2023, 1, 1, 12, 0, 0)}, + {"name": "test_tag", "value": 10.0, "raw_start_at": datetime(2023, 1, 1, 10, 0, 0), "raw_end_at": datetime(2023, 1, 1, 11, 0, 0)}, ] - mock_cur.fetchall.return_value = mock_db_rows - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - tag_name = "test_tag" - spans = client.get_spans(name=tag_name) - + client._make_request = lambda *args, **kwargs: response_data + spans = client.get_spans(name="test_tag") assert len(spans) == 3 - mock_cur.execute.assert_called_once() - - # Verify psycopg query parameters - (_sql, params), _kwargs = mock_cur.execute.call_args - assert params["metric"] == tag_name - assert params["end_at"] is None # Default end_at - assert params["limit"] is None # Default limit - - # Span 1 (from newest data point: time3) - # The raw_end_at is None for the newest span, because the span is still open. 
- assert spans[0].name == tag_name + assert spans[0].name == "test_tag" assert spans[0].value == 30.0 - assert spans[0].raw_start_at == time3 + assert spans[0].raw_start_at == datetime(2023, 1, 1, 12, 0, 0) assert spans[0].raw_end_at is None - - # Span 2 (from data point: time2) - assert spans[1].name == tag_name assert spans[1].value == "val2" - assert spans[1].raw_start_at == time2 - assert spans[1].raw_end_at == time3 - - # Span 3 (from oldest data point: time1) - assert spans[2].name == tag_name assert spans[2].value == 10.0 - assert spans[2].raw_start_at == time1 - assert spans[2].raw_end_at == time2 class TestCVecGetMetrics: - @patch("cvec.cvec.psycopg.connect") - def test_get_metrics_no_interval(self, mock_connect: MagicMock) -> None: - """Test get_metrics when no start_at or end_at is provided (fetches all metrics).""" - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - time_birth1 = datetime(2023, 1, 1, 0, 0, 0) - time_death1 = datetime(2023, 1, 10, 0, 0, 0) - time_birth2 = datetime(2023, 2, 1, 0, 0, 0) - mock_db_rows = [ - (1, "metric1", time_birth1, time_death1), - (2, "metric2", time_birth2, None), + def test_get_metrics_no_interval(self): + response_data = [ + {"id": 1, "name": "metric1", "birth_at": datetime(2023, 1, 1, 0, 0, 0), "death_at": datetime(2023, 1, 10, 0, 0, 0)}, + {"id": 2, "name": "metric2", "birth_at": datetime(2023, 2, 1, 0, 0, 0), "death_at": None}, ] - mock_cur.fetchall.return_value = mock_db_rows - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client._make_request = lambda *args, **kwargs: response_data metrics = client.get_metrics() - - mock_cur.execute.assert_called_once() - sql_query, params = mock_cur.execute.call_args.args - assert "SELECT id, normalized_name AS name, birth_at, death_at" in sql_query - assert "FROM tag_names" in sql_query - assert "ORDER BY name ASC" in sql_query - assert params is None # No params when fetching all - assert len(metrics) == 2 assert isinstance(metrics[0], Metric) assert metrics[0].id == 1 assert metrics[0].name == "metric1" - assert metrics[0].birth_at == time_birth1 - assert metrics[0].death_at == time_death1 - - assert isinstance(metrics[1], Metric) assert metrics[1].id == 2 assert metrics[1].name == "metric2" - assert metrics[1].birth_at == time_birth2 - assert metrics[1].death_at is None - @patch("cvec.cvec.psycopg.connect") - def test_get_metrics_with_interval(self, mock_connect: MagicMock) -> None: - """Test get_metrics when a start_at and end_at interval is provided.""" - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - time_birth1 = datetime(2023, 1, 1, 0, 0, 0) - mock_db_rows = [ - (1, "metric_in_interval", time_birth1, None), + def test_get_metrics_with_interval(self): + response_data = [ + {"id": 1, "name": "metric_in_interval", "birth_at": datetime(2023, 1, 1, 0, 0, 0), "death_at": None}, ] - mock_cur.fetchall.return_value = mock_db_rows - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - start_query = datetime(2023, 1, 5, 0, 0, 0) - end_query = datetime(2023, 1, 15, 0, 0, 0) - metrics = client.get_metrics(start_at=start_query, end_at=end_query) - - mock_cur.execute.assert_called_once() - sql_query, params = mock_cur.execute.call_args.args - assert ( - "SELECT DISTINCT metric_id AS id, 
metric AS name, birth_at, death_at" - in sql_query - ) - assert f"FROM {client.tenant}.metric_data" in sql_query - assert ( - "WHERE (time >= %(start_at_param)s OR %(start_at_param)s IS NULL)" - in sql_query - ) - assert "AND (time < %(end_at_param)s OR %(end_at_param)s IS NULL)" in sql_query - assert params is not None - assert params["start_at_param"] == start_query - assert params["end_at_param"] == end_query - + client._make_request = lambda *args, **kwargs: response_data + metrics = client.get_metrics(start_at=datetime(2023, 1, 5, 0, 0, 0), end_at=datetime(2023, 1, 15, 0, 0, 0)) assert len(metrics) == 1 - assert isinstance(metrics[0], Metric) - assert metrics[0].id == 1 assert metrics[0].name == "metric_in_interval" - assert metrics[0].birth_at == time_birth1 - assert metrics[0].death_at is None - - @patch("cvec.cvec.psycopg.connect") - def test_get_metrics_no_data_found(self, mock_connect: MagicMock) -> None: - """Test get_metrics when no metrics are found for the given criteria.""" - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - mock_cur.fetchall.return_value = [] # No rows returned + def test_get_metrics_no_data_found(self): client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - metrics = client.get_metrics( - start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2) - ) - - mock_cur.execute.assert_called_once() + client._make_request = lambda *args, **kwargs: [] + metrics = client.get_metrics(start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2)) assert len(metrics) == 0 class TestCVecGetMetricData: - @patch("cvec.cvec.psycopg.connect") - def test_get_metric_data_basic_case(self, mock_connect: MagicMock) -> None: - """Test get_metric_data with a few data points for multiple tags.""" - # Setup mock connection and cursor - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - # Sample data (metric, time, value_double, value_string) + def test_get_metric_data_basic_case(self): + # Simulate backend response time1 = datetime(2023, 1, 1, 10, 0, 0) time2 = datetime(2023, 1, 1, 11, 0, 0) time3 = datetime(2023, 1, 1, 12, 0, 0) - mock_db_rows = [ - ("tag1", time1, 10.0, None), - ("tag1", time2, 20.0, None), - ("tag2", time3, None, "val_str"), + response_data = [ + {"name": "tag1", "time": time1, "value_double": 10.0, "value_string": None}, + {"name": "tag1", "time": time2, "value_double": 20.0, "value_string": None}, + {"name": "tag2", "time": time3, "value_double": None, "value_string": "val_str"}, ] - mock_cur.fetchall.return_value = mock_db_rows - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - names_to_query = ["tag1", "tag2"] - df = client.get_metric_data(names=names_to_query) - - mock_cur.execute.assert_called_once() - (_sql, params), _kwargs = mock_cur.execute.call_args - assert params["tag_names_is_null"] is False - assert params["tag_names_list"] == names_to_query - assert params["start_at"] is None # Default start_at - assert params["end_at"] is None # Default end_at - - expected_data = { - "name": ["tag1", "tag1", "tag2"], - "time": [time1, time2, time3], - "value_double": [10.0, 20.0, np.nan], # Use np.nan for missing float - "value_string": [None, None, "val_str"], # Use None for missing string - } - expected_df = pd.DataFrame(expected_data) - - # Ensure 
correct dtypes for comparison, especially for NA handling - expected_df = expected_df.astype( - {"value_double": "float64", "value_string": "object"} - ) - df = df.astype({"value_double": "float64", "value_string": "object"}) - - assert_frame_equal(df, expected_df, check_dtype=True) - - @patch("cvec.cvec.psycopg.connect") - def test_get_metric_data_no_data_points(self, mock_connect: MagicMock) -> None: - """Test get_metric_data when no data points are returned.""" - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - mock_cur.fetchall.return_value = [] - + client._make_request = lambda *args, **kwargs: response_data + data_points = client.get_metric_data(names=["tag1", "tag2"]) + assert len(data_points) == 3 + assert data_points[0].name == "tag1" + assert data_points[0].time == time1 + assert data_points[0].value_double == 10.0 + assert data_points[0].value_string is None + assert data_points[2].name == "tag2" + assert data_points[2].time == time3 + assert data_points[2].value_double is None + assert data_points[2].value_string == "val_str" + + def test_get_metric_data_no_data_points(self): client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - df = client.get_metric_data(names=["non_existent_tag"]) - - mock_cur.execute.assert_called_once() - expected_df = pd.DataFrame( - columns=["name", "time", "value_double", "value_string"] - ) - # Ensure correct dtypes for empty DataFrame comparison - expected_df = expected_df.astype( - { - "name": "object", - "time": "datetime64[ns]", - "value_double": "float64", - "value_string": "object", - } - ) - df = df.astype( - { - "name": "object", - "time": "datetime64[ns]", - "value_double": "float64", - "value_string": "object", - } - ) - assert_frame_equal(df, expected_df, check_dtype=True) - - @patch("cvec.cvec.psycopg.connect") - def test_get_spans_no_data_points(self, mock_connect: MagicMock) -> None: - """Test get_spans when no data points are returned from the database.""" - # Setup mock connection and cursor - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - mock_cur.fetchall.return_value = [] # No data points - + client._make_request = lambda *args, **kwargs: [] + data_points = client.get_metric_data(names=["non_existent_tag"]) + assert data_points == [] + + def test_get_metric_arrow_basic_case(self): + # Prepare Arrow table + names = ["tag1", "tag1", "tag2"] + times = [datetime(2023, 1, 1, 10, 0, 0), datetime(2023, 1, 1, 11, 0, 0), datetime(2023, 1, 1, 12, 0, 0)] + value_doubles = [10.0, 20.0, None] + value_strings = [None, None, "val_str"] + table = pa.table({ + "name": pa.array(names), + "time": pa.array(times, type=pa.timestamp('us', tz=None)), + "value_double": pa.array(value_doubles, type=pa.float64()), + "value_string": pa.array(value_strings, type=pa.string()), + }) + sink = pa.BufferOutputStream() + with ipc.new_file(sink, table.schema) as writer: + writer.write_table(table) + arrow_bytes = sink.getvalue().to_pybytes() client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - tag_name = "test_tag_no_data" - spans = client.get_spans(name=tag_name) - - assert len(spans) == 0 - mock_cur.execute.assert_called_once() - - # Verify psycopg query parameters - (_sql, params) = mock_cur.execute.call_args.args - assert params["metric"] == 
tag_name - assert params["end_at"] is None # Default end_at - assert params["limit"] is None # Default limit - - @patch("cvec.cvec.psycopg.connect") - def test_get_spans_with_limit_parameter(self, mock_connect: MagicMock) -> None: - """Test get_spans when a limit parameter is provided.""" - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - # Sample data (time, value_double, value_string) - newest first - time1 = datetime(2023, 1, 1, 10, 0, 0) - time2 = datetime(2023, 1, 1, 11, 0, 0) - mock_db_rows = [ - (time2, None, "val2"), # Newest - (time1, 10.0, None), # Oldest - ] - mock_cur.fetchall.return_value = mock_db_rows - + client._make_request = lambda *args, **kwargs: arrow_bytes + result = client.get_metric_arrow(names=["tag1", "tag2"]) + reader = ipc.open_file(io.BytesIO(result)) + result_table = reader.read_all() + assert result_table.num_rows == 3 + assert result_table.column("name").to_pylist() == names + assert result_table.column("value_double").to_pylist() == [10.0, 20.0, None] + assert result_table.column("value_string").to_pylist() == [None, None, "val_str"] + + def test_get_metric_arrow_empty(self): + table = pa.table({ + "name": pa.array([], type=pa.string()), + "time": pa.array([], type=pa.timestamp('us', tz=None)), + "value_double": pa.array([], type=pa.float64()), + "value_string": pa.array([], type=pa.string()), + }) + sink = pa.BufferOutputStream() + with ipc.new_file(sink, table.schema) as writer: + writer.write_table(table) + arrow_bytes = sink.getvalue().to_pybytes() client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - tag_name = "test_tag_limited" - query_limit = 2 - spans = client.get_spans(name=tag_name, limit=query_limit) - - mock_cur.execute.assert_called_once() - - # Verify psycopg query parameters - (_sql, params), _kwargs = mock_cur.execute.call_args - assert params["metric"] == tag_name - assert params["limit"] == query_limit - - assert len(spans) == 2 - - @patch("cvec.cvec.psycopg.connect") - def test_get_spans_with_end_at_parameter(self, mock_connect: MagicMock) -> None: - """Test get_spans when an end_at parameter is provided.""" - # Setup mock connection and cursor - mock_conn = MagicMock() - mock_cur = MagicMock() - mock_connect.return_value.__enter__.return_value = mock_conn - mock_conn.cursor.return_value.__enter__.return_value = mock_cur - - # Sample data (time, value_double, value_string) - newest first - time1 = datetime(2023, 1, 1, 10, 0, 0) - time2 = datetime(2023, 1, 1, 11, 0, 0) - time3 = datetime(2023, 1, 1, 12, 0, 0) - mock_db_rows = [ - (time3, 30.0, None), # Newest - (time2, None, "val2"), - (time1, 10.0, None), # Oldest - ] - mock_cur.fetchall.return_value = mock_db_rows - - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - tag_name = "test_tag" - # Provide an end_at time that is after all sample data points - query_end_at = datetime(2023, 1, 1, 13, 0, 0) - spans = client.get_spans(name=tag_name, end_at=query_end_at) - - assert len(spans) == 3 - mock_cur.execute.assert_called_once() - - # Verify psycopg query parameters - (_sql, params), _kwargs = mock_cur.execute.call_args - assert params["metric"] == tag_name - assert params["end_at"] == query_end_at - assert params["limit"] is None # Default limit - - # Span 1 (from newest data point: time3) - # The raw_end_at is None for the newest span, regardless of the _end_at query parameter. 
- assert spans[0].name == tag_name - assert spans[0].value == 30.0 - assert spans[0].raw_start_at == time3 - assert spans[0].raw_end_at is None - - # Span 2 (from data point: time2) - assert spans[1].name == tag_name - assert spans[1].value == "val2" - assert spans[1].raw_start_at == time2 - assert spans[1].raw_end_at == time3 - - # Span 3 (from oldest data point: time1) - assert spans[2].name == tag_name - assert spans[2].value == 10.0 - assert spans[2].raw_start_at == time1 - assert spans[2].raw_end_at == time2 + client._make_request = lambda *args, **kwargs: arrow_bytes + result = client.get_metric_arrow(names=["non_existent_tag"]) + reader = ipc.open_file(io.BytesIO(result)) + result_table = reader.read_all() + assert result_table.num_rows == 0 + assert result_table.column("name").to_pylist() == [] + assert result_table.column("value_double").to_pylist() == [] + assert result_table.column("value_string").to_pylist() == [] From 9f2db8b0972f72d50ecc03c37d2508deae98b648 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 15:57:27 +0200 Subject: [PATCH 07/23] Fixed lint issues --- tests/test_cvec.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cvec.py b/tests/test_cvec.py index ababc8e..14569d2 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -1,6 +1,6 @@ import pytest import os -from unittest.mock import patch, MagicMock +from unittest.mock import patch from datetime import datetime from cvec import CVec from cvec.models.metric import Metric From 53d2f3c9444be9126a1deea769b254f10ced90e3 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 15:59:28 +0200 Subject: [PATCH 08/23] Ruff format --- examples/add_metric_data_arrow_example.py | 4 +- examples/add_metric_data_example.py | 4 +- examples/get_metric_arrow_example.py | 8 +- examples/get_metric_data_objects_example.py | 8 +- examples/get_metrics_example.py | 4 +- examples/get_spans_example.py | 8 +- src/cvec/cvec.py | 51 +++++++---- src/cvec/models/__init__.py | 2 +- src/cvec/models/metric.py | 12 +-- src/cvec/utils/__init__.py | 2 +- src/cvec/utils/arrow_converter.py | 40 +++++---- tests/test_cvec.py | 98 ++++++++++++++++----- 12 files changed, 161 insertions(+), 80 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index caa12b8..0882a8d 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -3,6 +3,7 @@ from datetime import datetime, timezone import random + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", @@ -28,5 +29,6 @@ def main(): cvec.add_metric_data(new_data, use_arrow=True) print("Data added successfully") + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index cd79f0f..bc7bfd1 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -3,6 +3,7 @@ from datetime import datetime, timezone import random + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", @@ -28,5 +29,6 @@ def main(): cvec.add_metric_data(new_data, use_arrow=False) print("Data added successfully") + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index ca83f65..418b770 100644 --- a/examples/get_metric_arrow_example.py +++ 
b/examples/get_metric_arrow_example.py @@ -2,6 +2,7 @@ import io import pyarrow.ipc as ipc + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", @@ -16,7 +17,10 @@ def main(): print(f"Arrow table shape: {len(table)} rows") print("\nFirst few rows:") for i in range(min(5, len(table))): - print(f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}") + print( + f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}" + ) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 3f93752..2f6d8ba 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -1,5 +1,6 @@ from cvec import CVec + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", @@ -11,7 +12,10 @@ def main(): data_points = cvec.get_metric_data(names=[test_metric_name]) print(f"Found {len(data_points)} data points") for point in data_points[:3]: - print(f"- {point.name}: {point.value_double or point.value_string} at {point.time}") + print( + f"- {point.name}: {point.value_double or point.value_string} at {point.time}" + ) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index 151515e..70044da 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -1,5 +1,6 @@ from cvec import CVec + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", # Replace with your API host @@ -12,5 +13,6 @@ def main(): for metric in metrics: print(f"- {metric.name}") + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index fad32cc..e322a37 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -1,5 +1,6 @@ from cvec import CVec + def main(): cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", @@ -13,9 +14,12 @@ def main(): spans = cvec.get_spans(metric_name, limit=5) print(f"Found {len(spans)} spans") for span in spans: - print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") + print( + f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}" + ) else: print("No metrics found to get spans.") + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 7f6b720..540114a 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -7,7 +7,10 @@ from cvec.models.metric import Metric, MetricDataPoint from cvec.models.span import Span -from cvec.utils.arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow +from cvec.utils.arrow_converter import ( + arrow_to_metric_data_points, + metric_data_points_to_arrow, +) class CVec: @@ -63,15 +66,20 @@ def _get_headers(self) -> dict[str, str]: } def _make_request( - self, method: str, endpoint: str, params: Optional[dict] = None, json: Optional[dict] = None, - data: Optional[bytes] = None, headers: Optional[dict] = None + self, + method: str, + endpoint: str, + params: Optional[dict] = None, + json: Optional[dict] = None, + data: Optional[bytes] = None, + headers: 
Optional[dict] = None, ) -> Any: """Helper method to make HTTP requests.""" url = urljoin(self.host, endpoint) request_headers = self._get_headers() if headers: request_headers.update(headers) - + response = requests.request( method=method, url=url, @@ -81,8 +89,11 @@ def _make_request( data=data, ) response.raise_for_status() - - if response.headers.get('content-type') == 'application/vnd.apache.arrow.stream': + + if ( + response.headers.get("content-type") + == "application/vnd.apache.arrow.stream" + ): return response.content return response.json() @@ -123,7 +134,9 @@ def get_spans( "limit": limit, } - response_data = self._make_request("GET", f"/api/metrics/spans/{name}", params=params) + response_data = self._make_request( + "GET", f"/api/metrics/spans/{name}", params=params + ) return [Span.model_validate(span_data) for span_data in response_data] def get_metric_data( @@ -137,7 +150,7 @@ def get_metric_data( Return all data-points within a given [start_at, end_at) interval, optionally selecting a given list of metric names. Returns a list of MetricDataPoint objects, one for each metric value transition. - + Args: names: Optional list of metric names to filter by start_at: Optional start time for the query @@ -155,10 +168,12 @@ def get_metric_data( endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" response_data = self._make_request("GET", endpoint, params=params) - + if use_arrow: return arrow_to_metric_data_points(response_data) - return [MetricDataPoint.model_validate(point_data) for point_data in response_data] + return [ + MetricDataPoint.model_validate(point_data) for point_data in response_data + ] def get_metric_arrow( self, @@ -170,7 +185,7 @@ def get_metric_arrow( Return all data-points within a given [start_at, end_at) interval, optionally selecting a given list of metric names. Returns Arrow IPC format data that can be read using pyarrow.ipc.open_file. - + Args: names: Optional list of metric names to filter by start_at: Optional start time for the query @@ -207,27 +222,27 @@ def get_metrics( return [Metric.model_validate(metric_data) for metric_data in response_data] def add_metric_data( - self, + self, data_points: List[MetricDataPoint], use_arrow: bool = False, ) -> None: """ Add multiple metric data points to the database. 
- + Args: data_points: List of MetricDataPoint objects to add use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" - + if use_arrow: arrow_data = metric_data_points_to_arrow(data_points) self._make_request( - "POST", - endpoint, + "POST", + endpoint, data=arrow_data, - headers={"Content-Type": "application/vnd.apache.arrow.stream"} + headers={"Content-Type": "application/vnd.apache.arrow.stream"}, ) else: - data_dicts = [point.model_dump(mode='json') for point in data_points] + data_dicts = [point.model_dump(mode="json") for point in data_points] self._make_request("POST", endpoint, json=data_dicts) diff --git a/src/cvec/models/__init__.py b/src/cvec/models/__init__.py index c69c7c0..d986a8d 100644 --- a/src/cvec/models/__init__.py +++ b/src/cvec/models/__init__.py @@ -1,4 +1,4 @@ from .metric import Metric, MetricDataPoint from .span import Span -__all__ = ["Metric", "MetricDataPoint", "Span"] \ No newline at end of file +__all__ = ["Metric", "MetricDataPoint", "Span"] diff --git a/src/cvec/models/metric.py b/src/cvec/models/metric.py index 559339b..2154e2b 100644 --- a/src/cvec/models/metric.py +++ b/src/cvec/models/metric.py @@ -14,11 +14,7 @@ class MetricDataPoint(BaseModel): value_double: Optional[float] = None value_string: Optional[str] = None - model_config = ConfigDict( - json_encoders={ - datetime: lambda dt: dt.isoformat() - } - ) + model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()}) class Metric(BaseModel): @@ -31,8 +27,4 @@ class Metric(BaseModel): birth_at: Optional[datetime] = None death_at: Optional[datetime] = None - model_config = ConfigDict( - json_encoders={ - datetime: lambda dt: dt.isoformat() - } - ) + model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.isoformat()}) diff --git a/src/cvec/utils/__init__.py b/src/cvec/utils/__init__.py index f929365..6c29dd0 100644 --- a/src/cvec/utils/__init__.py +++ b/src/cvec/utils/__init__.py @@ -1,3 +1,3 @@ from .arrow_converter import arrow_to_metric_data_points, metric_data_points_to_arrow -__all__ = ["arrow_to_metric_data_points", "metric_data_points_to_arrow"] \ No newline at end of file +__all__ = ["arrow_to_metric_data_points", "metric_data_points_to_arrow"] diff --git a/src/cvec/utils/arrow_converter.py b/src/cvec/utils/arrow_converter.py index 5f4b784..f819ed1 100644 --- a/src/cvec/utils/arrow_converter.py +++ b/src/cvec/utils/arrow_converter.py @@ -10,10 +10,10 @@ def metric_data_points_to_arrow(data_points: List[MetricDataPoint]) -> bytes: """ Convert metric data points to Arrow format. 
- + Args: data_points: List of MetricDataPoint objects to convert - + Returns: bytes: Arrow IPC format data """ @@ -25,17 +25,19 @@ def metric_data_points_to_arrow(data_points: List[MetricDataPoint]) -> bytes: # Create Arrow arrays names_array = pa.array(names) - times_array = pa.array(times, type=pa.timestamp('us', tz='UTC')) + times_array = pa.array(times, type=pa.timestamp("us", tz="UTC")) value_doubles_array = pa.array(value_doubles) value_strings_array = pa.array(value_strings) # Create Arrow table - table = pa.table({ - 'name': names_array, - 'time': times_array, - 'value_double': value_doubles_array, - 'value_string': value_strings_array - }) + table = pa.table( + { + "name": names_array, + "time": times_array, + "value_double": value_doubles_array, + "value_string": value_strings_array, + } + ) # Convert to Arrow IPC format sink = pa.BufferOutputStream() @@ -47,10 +49,10 @@ def metric_data_points_to_arrow(data_points: List[MetricDataPoint]) -> bytes: def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: """ Convert Arrow format to metric data points. - + Args: arrow_data: Arrow IPC format data - + Returns: List[MetricDataPoint]: List of converted metric data points """ @@ -61,10 +63,12 @@ def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: # Convert to list of MetricDataPoint data_points = [] for i in range(len(table)): - data_points.append(MetricDataPoint( - name=table['name'][i].as_py(), - time=table['time'][i].as_py(), - value_double=table['value_double'][i].as_py(), - value_string=table['value_string'][i].as_py() - )) - return data_points \ No newline at end of file + data_points.append( + MetricDataPoint( + name=table["name"][i].as_py(), + time=table["time"][i].as_py(), + value_double=table["value_double"][i].as_py(), + value_string=table["value_string"][i].as_py(), + ) + ) + return data_points diff --git a/tests/test_cvec.py b/tests/test_cvec.py index 14569d2..9918af7 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -119,9 +119,24 @@ class TestCVecGetSpans: def test_get_spans_basic_case(self): # Simulate backend response response_data = [ - {"name": "test_tag", "value": 30.0, "raw_start_at": datetime(2023, 1, 1, 12, 0, 0), "raw_end_at": None}, - {"name": "test_tag", "value": "val2", "raw_start_at": datetime(2023, 1, 1, 11, 0, 0), "raw_end_at": datetime(2023, 1, 1, 12, 0, 0)}, - {"name": "test_tag", "value": 10.0, "raw_start_at": datetime(2023, 1, 1, 10, 0, 0), "raw_end_at": datetime(2023, 1, 1, 11, 0, 0)}, + { + "name": "test_tag", + "value": 30.0, + "raw_start_at": datetime(2023, 1, 1, 12, 0, 0), + "raw_end_at": None, + }, + { + "name": "test_tag", + "value": "val2", + "raw_start_at": datetime(2023, 1, 1, 11, 0, 0), + "raw_end_at": datetime(2023, 1, 1, 12, 0, 0), + }, + { + "name": "test_tag", + "value": 10.0, + "raw_start_at": datetime(2023, 1, 1, 10, 0, 0), + "raw_end_at": datetime(2023, 1, 1, 11, 0, 0), + }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") client._make_request = lambda *args, **kwargs: response_data @@ -138,8 +153,18 @@ def test_get_spans_basic_case(self): class TestCVecGetMetrics: def test_get_metrics_no_interval(self): response_data = [ - {"id": 1, "name": "metric1", "birth_at": datetime(2023, 1, 1, 0, 0, 0), "death_at": datetime(2023, 1, 10, 0, 0, 0)}, - {"id": 2, "name": "metric2", "birth_at": datetime(2023, 2, 1, 0, 0, 0), "death_at": None}, + { + "id": 1, + "name": "metric1", + "birth_at": datetime(2023, 1, 1, 0, 0, 0), + "death_at": datetime(2023, 1, 10, 0, 0, 
0), + }, + { + "id": 2, + "name": "metric2", + "birth_at": datetime(2023, 2, 1, 0, 0, 0), + "death_at": None, + }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") client._make_request = lambda *args, **kwargs: response_data @@ -153,18 +178,28 @@ def test_get_metrics_no_interval(self): def test_get_metrics_with_interval(self): response_data = [ - {"id": 1, "name": "metric_in_interval", "birth_at": datetime(2023, 1, 1, 0, 0, 0), "death_at": None}, + { + "id": 1, + "name": "metric_in_interval", + "birth_at": datetime(2023, 1, 1, 0, 0, 0), + "death_at": None, + }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") client._make_request = lambda *args, **kwargs: response_data - metrics = client.get_metrics(start_at=datetime(2023, 1, 5, 0, 0, 0), end_at=datetime(2023, 1, 15, 0, 0, 0)) + metrics = client.get_metrics( + start_at=datetime(2023, 1, 5, 0, 0, 0), + end_at=datetime(2023, 1, 15, 0, 0, 0), + ) assert len(metrics) == 1 assert metrics[0].name == "metric_in_interval" def test_get_metrics_no_data_found(self): client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") client._make_request = lambda *args, **kwargs: [] - metrics = client.get_metrics(start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2)) + metrics = client.get_metrics( + start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2) + ) assert len(metrics) == 0 @@ -177,7 +212,12 @@ def test_get_metric_data_basic_case(self): response_data = [ {"name": "tag1", "time": time1, "value_double": 10.0, "value_string": None}, {"name": "tag1", "time": time2, "value_double": 20.0, "value_string": None}, - {"name": "tag2", "time": time3, "value_double": None, "value_string": "val_str"}, + { + "name": "tag2", + "time": time3, + "value_double": None, + "value_string": "val_str", + }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") client._make_request = lambda *args, **kwargs: response_data @@ -201,15 +241,21 @@ def test_get_metric_data_no_data_points(self): def test_get_metric_arrow_basic_case(self): # Prepare Arrow table names = ["tag1", "tag1", "tag2"] - times = [datetime(2023, 1, 1, 10, 0, 0), datetime(2023, 1, 1, 11, 0, 0), datetime(2023, 1, 1, 12, 0, 0)] + times = [ + datetime(2023, 1, 1, 10, 0, 0), + datetime(2023, 1, 1, 11, 0, 0), + datetime(2023, 1, 1, 12, 0, 0), + ] value_doubles = [10.0, 20.0, None] value_strings = [None, None, "val_str"] - table = pa.table({ - "name": pa.array(names), - "time": pa.array(times, type=pa.timestamp('us', tz=None)), - "value_double": pa.array(value_doubles, type=pa.float64()), - "value_string": pa.array(value_strings, type=pa.string()), - }) + table = pa.table( + { + "name": pa.array(names), + "time": pa.array(times, type=pa.timestamp("us", tz=None)), + "value_double": pa.array(value_doubles, type=pa.float64()), + "value_string": pa.array(value_strings, type=pa.string()), + } + ) sink = pa.BufferOutputStream() with ipc.new_file(sink, table.schema) as writer: writer.write_table(table) @@ -222,15 +268,21 @@ def test_get_metric_arrow_basic_case(self): assert result_table.num_rows == 3 assert result_table.column("name").to_pylist() == names assert result_table.column("value_double").to_pylist() == [10.0, 20.0, None] - assert result_table.column("value_string").to_pylist() == [None, None, "val_str"] + assert result_table.column("value_string").to_pylist() == [ + None, + None, + "val_str", + ] def test_get_metric_arrow_empty(self): - table = pa.table({ - "name": pa.array([], type=pa.string()), - 
"time": pa.array([], type=pa.timestamp('us', tz=None)), - "value_double": pa.array([], type=pa.float64()), - "value_string": pa.array([], type=pa.string()), - }) + table = pa.table( + { + "name": pa.array([], type=pa.string()), + "time": pa.array([], type=pa.timestamp("us", tz=None)), + "value_double": pa.array([], type=pa.float64()), + "value_string": pa.array([], type=pa.string()), + } + ) sink = pa.BufferOutputStream() with ipc.new_file(sink, table.schema) as writer: writer.write_table(table) From 33eeb5706b361a4c35a3eb2c73740727c3e73d07 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 16:24:14 +0200 Subject: [PATCH 09/23] mypy fixes --- examples/add_metric_data_arrow_example.py | 2 +- examples/add_metric_data_example.py | 2 +- examples/get_metric_arrow_example.py | 4 +-- examples/get_metric_data_objects_example.py | 2 +- examples/get_metrics_example.py | 2 +- examples/get_spans_example.py | 4 +-- src/cvec/cvec.py | 33 ++++++++++--------- src/cvec/utils/arrow_converter.py | 8 ++--- tests/test_cvec.py | 36 ++++++++++----------- 9 files changed, 48 insertions(+), 45 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index 0882a8d..23f4d61 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -4,7 +4,7 @@ import random -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", tenant="test", diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index bc7bfd1..12f95cb 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -4,7 +4,7 @@ import random -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", tenant="test", diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index 418b770..8a1685b 100644 --- a/examples/get_metric_arrow_example.py +++ b/examples/get_metric_arrow_example.py @@ -1,9 +1,9 @@ from cvec import CVec import io -import pyarrow.ipc as ipc +import pyarrow.ipc as ipc # type: ignore[import-untyped] -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", tenant="test", diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 2f6d8ba..cbf0041 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -1,7 +1,7 @@ from cvec import CVec -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", tenant="test", diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index 70044da..ae737af 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -1,7 +1,7 @@ from cvec import CVec -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", # Replace with your API host tenant="test", # Replace with your tenant diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index e322a37..330a5b2 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -1,7 +1,7 @@ from cvec import CVec -def main(): +def main() -> None: cvec = CVec( host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", tenant="test", @@ -9,7 +9,7 @@ def main(): ) metrics = cvec.get_metrics() if 
metrics: - metric_name = "python-sdk/test" + metric_name = metrics[0].name print(f"\nGetting spans for metric '{metric_name}'...") spans = cvec.get_spans(metric_name, limit=5) print(f"Found {len(spans)} spans") diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 540114a..6cbade4 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -1,9 +1,10 @@ import os from datetime import datetime -from typing import Any, List, Optional +from typing import Any, List, Optional, Dict from urllib.parse import urljoin -import requests +import requests # type: ignore[import-untyped] +import pyarrow as pa # type: ignore[import-untyped] from cvec.models.metric import Metric, MetricDataPoint from cvec.models.span import Span @@ -57,11 +58,11 @@ def __init__( "CVEC_API_KEY must be set either as an argument or environment variable" ) - def _get_headers(self) -> dict[str, str]: + def _get_headers(self) -> Dict[str, str]: """Helper method to get request headers.""" return { "Authorization": f"Bearer {self.api_key}", - "X-Tenant": self.tenant, + "X-Tenant": self.tenant or "", "Content-Type": "application/json", } @@ -69,13 +70,13 @@ def _make_request( self, method: str, endpoint: str, - params: Optional[dict] = None, - json: Optional[dict] = None, + params: Optional[Dict[str, Any]] = None, + json: Optional[Dict[str, Any]] = None, data: Optional[bytes] = None, - headers: Optional[dict] = None, + headers: Optional[Dict[str, str]] = None, ) -> Any: """Helper method to make HTTP requests.""" - url = urljoin(self.host, endpoint) + url = urljoin(self.host or "", endpoint) request_headers = self._get_headers() if headers: request_headers.update(headers) @@ -128,7 +129,7 @@ def get_spans( _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at - params = { + params: Dict[str, Any] = { "start_at": _start_at.isoformat() if _start_at else None, "end_at": _end_at.isoformat() if _end_at else None, "limit": limit, @@ -160,7 +161,7 @@ def get_metric_data( _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at - params = { + params: Dict[str, Any] = { "start_at": _start_at.isoformat() if _start_at else None, "end_at": _end_at.isoformat() if _end_at else None, "names": ",".join(names) if names else None, @@ -194,14 +195,16 @@ def get_metric_arrow( _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at - params = { + params: Dict[str, Any] = { "start_at": _start_at.isoformat() if _start_at else None, "end_at": _end_at.isoformat() if _end_at else None, "names": ",".join(names) if names else None, } endpoint = "/api/metrics/data/arrow" - return self._make_request("GET", endpoint, params=params) + result = self._make_request("GET", endpoint, params=params) + assert isinstance(result, bytes) + return result def get_metrics( self, start_at: Optional[datetime] = None, end_at: Optional[datetime] = None @@ -213,7 +216,7 @@ def get_metrics( _start_at = start_at or self.default_start_at _end_at = end_at or self.default_end_at - params = { + params: Dict[str, Any] = { "start_at": _start_at.isoformat() if _start_at else None, "end_at": _end_at.isoformat() if _end_at else None, } @@ -244,5 +247,5 @@ def add_metric_data( headers={"Content-Type": "application/vnd.apache.arrow.stream"}, ) else: - data_dicts = [point.model_dump(mode="json") for point in data_points] - self._make_request("POST", endpoint, json=data_dicts) + data_dicts: List[Dict[str, Any]] = [point.model_dump(mode='json') for point in data_points] + self._make_request("POST", endpoint, 
json=data_dicts) # type: ignore[arg-type] diff --git a/src/cvec/utils/arrow_converter.py b/src/cvec/utils/arrow_converter.py index f819ed1..89a287c 100644 --- a/src/cvec/utils/arrow_converter.py +++ b/src/cvec/utils/arrow_converter.py @@ -1,8 +1,8 @@ import io from typing import List -import pyarrow as pa -import pyarrow.ipc as ipc +import pyarrow as pa # type: ignore[import-untyped] +import pyarrow.ipc as ipc # type: ignore[import-untyped] from cvec.models.metric import MetricDataPoint @@ -43,7 +43,7 @@ def metric_data_points_to_arrow(data_points: List[MetricDataPoint]) -> bytes: sink = pa.BufferOutputStream() with ipc.new_file(sink, table.schema) as writer: writer.write_table(table) - return sink.getvalue().to_pybytes() + return bytes(sink.getvalue().to_pybytes()) def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: @@ -61,7 +61,7 @@ def arrow_to_metric_data_points(arrow_data: bytes) -> List[MetricDataPoint]: table = reader.read_all() # Convert to list of MetricDataPoint - data_points = [] + data_points: List[MetricDataPoint] = [] for i in range(len(table)): data_points.append( MetricDataPoint( diff --git a/tests/test_cvec.py b/tests/test_cvec.py index 9918af7..b4f99b2 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -4,8 +4,8 @@ from datetime import datetime from cvec import CVec from cvec.models.metric import Metric -import pyarrow as pa -import pyarrow.ipc as ipc +import pyarrow as pa # type: ignore[import-untyped] +import pyarrow.ipc as ipc # type: ignore[import-untyped] import io @@ -116,7 +116,7 @@ def test_constructor_args_override_env_vars(self) -> None: class TestCVecGetSpans: - def test_get_spans_basic_case(self): + def test_get_spans_basic_case(self) -> None: # Simulate backend response response_data = [ { @@ -139,7 +139,7 @@ def test_get_spans_basic_case(self): }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: response_data + client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] spans = client.get_spans(name="test_tag") assert len(spans) == 3 assert spans[0].name == "test_tag" @@ -151,7 +151,7 @@ def test_get_spans_basic_case(self): class TestCVecGetMetrics: - def test_get_metrics_no_interval(self): + def test_get_metrics_no_interval(self) -> None: response_data = [ { "id": 1, @@ -167,7 +167,7 @@ def test_get_metrics_no_interval(self): }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: response_data + client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics() assert len(metrics) == 2 assert isinstance(metrics[0], Metric) @@ -176,7 +176,7 @@ def test_get_metrics_no_interval(self): assert metrics[1].id == 2 assert metrics[1].name == "metric2" - def test_get_metrics_with_interval(self): + def test_get_metrics_with_interval(self) -> None: response_data = [ { "id": 1, @@ -186,7 +186,7 @@ def test_get_metrics_with_interval(self): }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: response_data + client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics( start_at=datetime(2023, 1, 5, 0, 0, 0), end_at=datetime(2023, 1, 15, 0, 0, 0), @@ -194,9 +194,9 @@ def test_get_metrics_with_interval(self): assert len(metrics) == 1 assert metrics[0].name == 
"metric_in_interval" - def test_get_metrics_no_data_found(self): + def test_get_metrics_no_data_found(self) -> None: client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: [] + client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] metrics = client.get_metrics( start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2) ) @@ -204,7 +204,7 @@ def test_get_metrics_no_data_found(self): class TestCVecGetMetricData: - def test_get_metric_data_basic_case(self): + def test_get_metric_data_basic_case(self) -> None: # Simulate backend response time1 = datetime(2023, 1, 1, 10, 0, 0) time2 = datetime(2023, 1, 1, 11, 0, 0) @@ -220,7 +220,7 @@ def test_get_metric_data_basic_case(self): }, ] client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: response_data + client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] data_points = client.get_metric_data(names=["tag1", "tag2"]) assert len(data_points) == 3 assert data_points[0].name == "tag1" @@ -232,13 +232,13 @@ def test_get_metric_data_basic_case(self): assert data_points[2].value_double is None assert data_points[2].value_string == "val_str" - def test_get_metric_data_no_data_points(self): + def test_get_metric_data_no_data_points(self) -> None: client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: [] + client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] data_points = client.get_metric_data(names=["non_existent_tag"]) assert data_points == [] - def test_get_metric_arrow_basic_case(self): + def test_get_metric_arrow_basic_case(self) -> None: # Prepare Arrow table names = ["tag1", "tag1", "tag2"] times = [ @@ -261,7 +261,7 @@ def test_get_metric_arrow_basic_case(self): writer.write_table(table) arrow_bytes = sink.getvalue().to_pybytes() client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: arrow_bytes + client._make_request = lambda *args, **kwargs: arrow_bytes # type: ignore[method-assign] result = client.get_metric_arrow(names=["tag1", "tag2"]) reader = ipc.open_file(io.BytesIO(result)) result_table = reader.read_all() @@ -274,7 +274,7 @@ def test_get_metric_arrow_basic_case(self): "val_str", ] - def test_get_metric_arrow_empty(self): + def test_get_metric_arrow_empty(self) -> None: table = pa.table( { "name": pa.array([], type=pa.string()), @@ -288,7 +288,7 @@ def test_get_metric_arrow_empty(self): writer.write_table(table) arrow_bytes = sink.getvalue().to_pybytes() client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") - client._make_request = lambda *args, **kwargs: arrow_bytes + client._make_request = lambda *args, **kwargs: arrow_bytes # type: ignore[method-assign] result = client.get_metric_arrow(names=["non_existent_tag"]) reader = ipc.open_file(io.BytesIO(result)) result_table = reader.read_all() From 7962fddd759fe64301dd07c5070c76cba06cb3e0 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Mon, 23 Jun 2025 16:26:22 +0200 Subject: [PATCH 10/23] Another ruff fix --- src/cvec/cvec.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 6cbade4..8251ca0 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -4,7 +4,6 @@ from urllib.parse import urljoin import requests # type: 
ignore[import-untyped] -import pyarrow as pa # type: ignore[import-untyped] from cvec.models.metric import Metric, MetricDataPoint from cvec.models.span import Span @@ -247,5 +246,7 @@ def add_metric_data( headers={"Content-Type": "application/vnd.apache.arrow.stream"}, ) else: - data_dicts: List[Dict[str, Any]] = [point.model_dump(mode='json') for point in data_points] + data_dicts: List[Dict[str, Any]] = [ + point.model_dump(mode="json") for point in data_points + ] self._make_request("POST", endpoint, json=data_dicts) # type: ignore[arg-type] From f2956e3448dd4251d7fe10aa1738bc1e30d88666 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Tue, 1 Jul 2025 02:34:34 +0200 Subject: [PATCH 11/23] Added authentication to SDK. Modified examples. --- examples/add_metric_data_arrow_example.py | 8 +- examples/add_metric_data_example.py | 8 +- examples/get_metric_arrow_example.py | 8 +- examples/get_metric_data_objects_example.py | 8 +- examples/get_metrics_example.py | 8 +- examples/get_spans_example.py | 8 +- src/cvec/cvec.py | 122 +++++++++++++++++--- 7 files changed, 133 insertions(+), 37 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index 23f4d61..a2fb3d5 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -2,13 +2,15 @@ from cvec.models.metric import MetricDataPoint from datetime import datetime, timezone import random +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", - tenant="test", - api_key="your-api-key", + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data using Arrow...") diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index 12f95cb..ecd21bf 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -2,13 +2,15 @@ from cvec.models.metric import MetricDataPoint from datetime import datetime, timezone import random +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", - tenant="test", - api_key="your-api-key", + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data...") diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index 8a1685b..08a8201 100644 --- a/examples/get_metric_arrow_example.py +++ b/examples/get_metric_arrow_example.py @@ -1,13 +1,15 @@ from cvec import CVec import io import pyarrow.ipc as ipc # type: ignore[import-untyped] +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", - tenant="test", - api_key="your-api-key", + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", 
"your-supabase-publishable-key"), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as Arrow...") diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index cbf0041..3b44a74 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -1,11 +1,13 @@ from cvec import CVec +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", - tenant="test", - api_key="your-api-key", + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as objects...") diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index ae737af..acd44a4 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -1,11 +1,13 @@ from cvec import CVec +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", # Replace with your API host - tenant="test", # Replace with your tenant - api_key="your-api-key", # Replace with your API key + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), # Replace with your API host + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), ) print("\nGetting available metrics...") metrics = cvec.get_metrics() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index 330a5b2..a699c86 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -1,11 +1,13 @@ from cvec import CVec +import os def main() -> None: cvec = CVec( - host="https://cvec-backend-rzhang-cvec-sandbox.deployments.quix.io", - tenant="test", - api_key="your-api-key", + host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + password=os.environ.get("CVEC_PASSWORD", "your-password"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), ) metrics = cvec.get_metrics() if metrics: diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 8251ca0..e5ac5f1 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -19,49 +19,56 @@ class CVec: """ host: Optional[str] - tenant: Optional[str] - api_key: Optional[str] default_start_at: Optional[datetime] default_end_at: Optional[datetime] + # Supabase authentication + _access_token: Optional[str] + _refresh_token: Optional[str] + _publishable_key: Optional[str] def __init__( self, host: Optional[str] = None, - tenant: Optional[str] = None, - api_key: Optional[str] = None, default_start_at: Optional[datetime] = None, default_end_at: Optional[datetime] = None, + email: Optional[str] = None, + password: Optional[str] = None, + publishable_key: Optional[str] = None, ) -> None: - """ - Setup the SDK with the given host and API Key. - The host and API key are loaded from environment variables CVEC_HOST, - CVEC_TENANT, CVEC_API_KEY, if they are not given as arguments to the constructor. - The default_start_at and default_end_at can provide a default query time interval for API methods. 
- """ + self.host = host or os.environ.get("CVEC_HOST") - self.tenant = tenant or os.environ.get("CVEC_TENANT") - self.api_key = api_key or os.environ.get("CVEC_API_KEY") self.default_start_at = default_start_at self.default_end_at = default_end_at + + # Supabase authentication + self._access_token = None + self._refresh_token = None + self._publishable_key = publishable_key or os.environ.get("CVEC_PUBLISHABLE_KEY") if not self.host: raise ValueError( "CVEC_HOST must be set either as an argument or environment variable" ) - if not self.tenant: + if not self._publishable_key: raise ValueError( - "CVEC_TENANT must be set either as an argument or environment variable" + "CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable" ) - if not self.api_key: + + # Handle authentication + if email and password: + self._login_with_supabase(email, password) + else: raise ValueError( - "CVEC_API_KEY must be set either as an argument or environment variable" + "Email and password must be provided for Supabase authentication" ) def _get_headers(self) -> Dict[str, str]: """Helper method to get request headers.""" + if not self._access_token: + raise ValueError("No access token available. Please login first.") + return { - "Authorization": f"Bearer {self.api_key}", - "X-Tenant": self.tenant or "", + "Authorization": f"Bearer {self._access_token}", "Content-Type": "application/json", } @@ -88,6 +95,30 @@ def _make_request( json=json, data=data, ) + + # If we get a 401 and we have Supabase tokens, try to refresh and retry + if response.status_code == 401 and self._access_token and self._refresh_token: + try: + self._refresh_supabase_token() + # Update headers with new token + request_headers = self._get_headers() + if headers: + request_headers.update(headers) + + # Retry the request + response = requests.request( + method=method, + url=url, + headers=request_headers, + params=params, + json=json, + data=data, + ) + except Exception: + print("Token refresh failed") + # If refresh fails, continue with the original error + pass + response.raise_for_status() if ( @@ -220,7 +251,7 @@ def get_metrics( "end_at": _end_at.isoformat() if _end_at else None, } - response_data = self._make_request("GET", "/api/metrics", params=params) + response_data = self._make_request("GET", "/api/metrics/", params=params) return [Metric.model_validate(metric_data) for metric_data in response_data] def add_metric_data( @@ -250,3 +281,56 @@ def add_metric_data( point.model_dump(mode="json") for point in data_points ] self._make_request("POST", endpoint, json=data_dicts) # type: ignore[arg-type] + + def _login_with_supabase(self, email: str, password: str) -> None: + """ + Login to Supabase and get access/refresh tokens. + + Args: + email: User email + password: User password + """ + supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=password" + + payload = { + "email": email, + "password": password + } + + headers = { + "Content-Type": "application/json", + "apikey": self._publishable_key + } + + response = requests.post(supabase_url, json=payload, headers=headers) + response.raise_for_status() + + data = response.json() + + self._access_token = data["access_token"] + self._refresh_token = data["refresh_token"] + + def _refresh_supabase_token(self) -> None: + """ + Refresh the Supabase access token using the refresh token. 
+        """
+        if not self._refresh_token:
+            raise ValueError("No refresh token available")
+
+        supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token"
+
+        payload = {
+            "refresh_token": self._refresh_token
+        }
+
+        headers = {
+            "Content-Type": "application/json",
+            "apikey": self._publishable_key
+        }
+
+        response = requests.post(supabase_url, json=payload, headers=headers)
+        response.raise_for_status()
+
+        data = response.json()
+        self._access_token = data["access_token"]
+        self._refresh_token = data["refresh_token"]

From 7625e6e00942eede1dba3710761b233651c9de1e Mon Sep 17 00:00:00 2001
From: Michal Chrobok
Date: Tue, 1 Jul 2025 12:19:59 +0200
Subject: [PATCH 12/23] Small changes in example scripts

---
 examples/add_metric_data_arrow_example.py   | 6 +++---
 examples/add_metric_data_example.py         | 6 +++---
 examples/get_metric_arrow_example.py        | 6 +++---
 examples/get_metric_data_objects_example.py | 6 +++---
 examples/get_metrics_example.py             | 6 +++---
 examples/get_spans_example.py               | 6 +++---
 6 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py
index a2fb3d5..46a4ee6 100644
--- a/examples/add_metric_data_arrow_example.py
+++ b/examples/add_metric_data_arrow_example.py
@@ -7,10 +7,10 @@ def main() -> None:
     cvec = CVec(
-        host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"),
-        email=os.environ.get("CVEC_EMAIL", "your-email@example.com"),
+        host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"),
+        email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"),
         password=os.environ.get("CVEC_PASSWORD", "your-password"),
-        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"),
+        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"),
     )
     test_metric_name = "python-sdk/test"
     print("\nAdding new metric data using Arrow...")
diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py
index ecd21bf..3327a4d 100644
--- a/examples/add_metric_data_example.py
+++ b/examples/add_metric_data_example.py
@@ -7,10 +7,10 @@ def main() -> None:
     cvec = CVec(
-        host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"),
-        email=os.environ.get("CVEC_EMAIL", "your-email@example.com"),
+        host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"),
+        email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"),
         password=os.environ.get("CVEC_PASSWORD", "your-password"),
-        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"),
+        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"),
     )
     test_metric_name = "python-sdk/test"
     print("\nAdding new metric data...")
diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py
index 08a8201..0164fa3 100644
--- a/examples/get_metric_arrow_example.py
+++ b/examples/get_metric_arrow_example.py
@@ -6,10 +6,10 @@ def main() -> None:
     cvec = CVec(
-        host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"),
-        email=os.environ.get("CVEC_EMAIL", "your-email@example.com"),
+        host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"),
+        email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"),
         password=os.environ.get("CVEC_PASSWORD", "your-password"),
-        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"),
+        publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"),
     )
     test_metric_name = 
"python-sdk/test" print("\nGetting metric data as Arrow...") diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 3b44a74..9255b50 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -4,10 +4,10 @@ def main() -> None: cvec = CVec( - host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as objects...") diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index acd44a4..44aebc2 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -4,10 +4,10 @@ def main() -> None: cvec = CVec( - host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), # Replace with your API host - email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), # Replace with your API host + email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), ) print("\nGetting available metrics...") metrics = cvec.get_metrics() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index a699c86..2371aeb 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -4,10 +4,10 @@ def main() -> None: cvec = CVec( - host=os.environ.get("CVEC_HOST", "https://sandbox.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@example.com"), + host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), + email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-supabase-publishable-key"), + publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), ) metrics = cvec.get_metrics() if metrics: From 9a9c696d06d29ebb63788d39dcebac8d0cdf8ac3 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Tue, 1 Jul 2025 14:21:15 +0200 Subject: [PATCH 13/23] Fixed tests and linter issues --- examples/add_metric_data_arrow_example.py | 4 +- examples/add_metric_data_example.py | 4 +- examples/get_metric_arrow_example.py | 4 +- examples/get_metric_data_objects_example.py | 4 +- examples/get_metrics_example.py | 8 +- examples/get_spans_example.py | 4 +- src/cvec/cvec.py | 56 +++---- tests/test_cvec.py | 162 ++++++++++++++------ 8 files changed, 157 insertions(+), 89 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index 46a4ee6..e1cb1a1 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -10,7 +10,9 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", 
"your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data using Arrow...") diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index 3327a4d..e81fe42 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -10,7 +10,9 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data...") diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index 0164fa3..7a5bb52 100644 --- a/examples/get_metric_arrow_example.py +++ b/examples/get_metric_arrow_example.py @@ -9,7 +9,9 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as Arrow...") diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 9255b50..8fabf05 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -7,7 +7,9 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as objects...") diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index 44aebc2..62a6478 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -4,10 +4,14 @@ def main() -> None: cvec = CVec( - host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), # Replace with your API host + host=os.environ.get( + "CVEC_HOST", "https://your-subdomain.cvector.dev" + ), # Replace with your API host email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) print("\nGetting available metrics...") metrics = cvec.get_metrics() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index 2371aeb..3e4c0dd 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -7,7 +7,9 @@ def main() -> None: host=os.environ.get("CVEC_HOST", 
"https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get("CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key"), + publishable_key=os.environ.get( + "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" + ), ) metrics = cvec.get_metrics() if metrics: diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index e5ac5f1..91a2fc8 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -35,15 +35,16 @@ def __init__( password: Optional[str] = None, publishable_key: Optional[str] = None, ) -> None: - self.host = host or os.environ.get("CVEC_HOST") self.default_start_at = default_start_at self.default_end_at = default_end_at - + # Supabase authentication self._access_token = None self._refresh_token = None - self._publishable_key = publishable_key or os.environ.get("CVEC_PUBLISHABLE_KEY") + self._publishable_key = publishable_key or os.environ.get( + "CVEC_PUBLISHABLE_KEY" + ) if not self.host: raise ValueError( @@ -53,7 +54,7 @@ def __init__( raise ValueError( "CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable" ) - + # Handle authentication if email and password: self._login_with_supabase(email, password) @@ -66,7 +67,7 @@ def _get_headers(self) -> Dict[str, str]: """Helper method to get request headers.""" if not self._access_token: raise ValueError("No access token available. Please login first.") - + return { "Authorization": f"Bearer {self._access_token}", "Content-Type": "application/json", @@ -95,7 +96,7 @@ def _make_request( json=json, data=data, ) - + # If we get a 401 and we have Supabase tokens, try to refresh and retry if response.status_code == 401 and self._access_token and self._refresh_token: try: @@ -104,7 +105,7 @@ def _make_request( request_headers = self._get_headers() if headers: request_headers.update(headers) - + # Retry the request response = requests.request( method=method, @@ -118,7 +119,7 @@ def _make_request( print("Token refresh failed") # If refresh fails, continue with the original error pass - + response.raise_for_status() if ( @@ -285,26 +286,20 @@ def add_metric_data( def _login_with_supabase(self, email: str, password: str) -> None: """ Login to Supabase and get access/refresh tokens. 
- + Args: email: User email password: User password """ supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=password" - - payload = { - "email": email, - "password": password - } - - headers = { - "Content-Type": "application/json", - "apikey": self._publishable_key - } - + + payload = {"email": email, "password": password} + + headers = {"Content-Type": "application/json", "apikey": self._publishable_key} + response = requests.post(supabase_url, json=payload, headers=headers) response.raise_for_status() - + data = response.json() self._access_token = data["access_token"] @@ -316,21 +311,16 @@ def _refresh_supabase_token(self) -> None: """ if not self._refresh_token: raise ValueError("No refresh token available") - + supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token" - - payload = { - "refresh_token": self._refresh_token - } - - headers = { - "Content-Type": "application/json", - "apikey": self._publishable_key - } - + + payload = {"refresh_token": self._refresh_token} + + headers = {"Content-Type": "application/json", "apikey": self._publishable_key} + response = requests.post(supabase_url, json=payload, headers=headers) response.raise_for_status() - + data = response.json() self._access_token = data["access_token"] self._refresh_token = data["refresh_token"] diff --git a/tests/test_cvec.py b/tests/test_cvec.py index b4f99b2..de88528 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -7,116 +7,133 @@ import pyarrow as pa # type: ignore[import-untyped] import pyarrow.ipc as ipc # type: ignore[import-untyped] import io +from typing import Any class TestCVecConstructor: - def test_constructor_with_arguments(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_constructor_with_arguments(self, mock_login: Any) -> None: """Test CVec constructor with all arguments provided.""" client = CVec( host="test_host", - tenant="test_tenant", - api_key="test_api_key", default_start_at=datetime(2023, 1, 1, 0, 0, 0), default_end_at=datetime(2023, 1, 2, 0, 0, 0), + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", ) assert client.host == "test_host" - assert client.tenant == "test_tenant" - assert client.api_key == "test_api_key" assert client.default_start_at == datetime(2023, 1, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 1, 2, 0, 0, 0) + assert client._publishable_key == "test_publishable_key" + @patch.object(CVec, "_login_with_supabase", return_value=None) @patch.dict( os.environ, { "CVEC_HOST": "env_host", - "CVEC_TENANT": "env_tenant", - "CVEC_API_KEY": "env_api_key", + "CVEC_PUBLISHABLE_KEY": "env_publishable_key", }, clear=True, ) - def test_constructor_with_env_vars(self) -> None: + def test_constructor_with_env_vars(self, mock_login: Any) -> None: """Test CVec constructor with environment variables.""" client = CVec( default_start_at=datetime(2023, 2, 1, 0, 0, 0), default_end_at=datetime(2023, 2, 2, 0, 0, 0), + email="user@example.com", + password="password123", ) assert client.host == "env_host" - assert client.tenant == "env_tenant" - assert client.api_key == "env_api_key" + assert client._publishable_key == "env_publishable_key" assert client.default_start_at == datetime(2023, 2, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 2, 2, 0, 0, 0) + @patch.object(CVec, "_login_with_supabase", return_value=None) @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_host_raises_value_error(self) -> None: + def 
test_constructor_missing_host_raises_value_error(self, mock_login: Any) -> None: """Test CVec constructor raises ValueError if host is missing.""" with pytest.raises( ValueError, match="CVEC_HOST must be set either as an argument or environment variable", ): - CVec(tenant="test_tenant", api_key="test_api_key") + CVec( + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) + @patch.object(CVec, "_login_with_supabase", return_value=None) @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_tenant_raises_value_error(self) -> None: - """Test CVec constructor raises ValueError if tenant is missing.""" + def test_constructor_missing_publishable_key_raises_value_error( + self, mock_login: Any + ) -> None: + """Test CVec constructor raises ValueError if publishable_key is missing.""" with pytest.raises( ValueError, - match="CVEC_TENANT must be set either as an argument or environment variable", + match="CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable", ): - CVec(host="test_host", api_key="test_api_key") + CVec(host="test_host", email="user@example.com", password="password123") + @patch.object(CVec, "_login_with_supabase", return_value=None) @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_api_key_raises_value_error(self) -> None: - """Test CVec constructor raises ValueError if api_key is missing.""" + def test_constructor_missing_email_password_raises_value_error( + self, mock_login: Any + ) -> None: + """Test CVec constructor raises ValueError if email or password is missing.""" with pytest.raises( ValueError, - match="CVEC_API_KEY must be set either as an argument or environment variable", + match="Email and password must be provided for Supabase authentication", ): - CVec(host="test_host", tenant="test_tenant") + CVec(host="test_host", publishable_key="test_publishable_key") + @patch.object(CVec, "_login_with_supabase", return_value=None) @patch.dict( os.environ, { "CVEC_HOST": "env_host", - # CVEC_TENANT is missing - "CVEC_API_KEY": "env_api_key", + # CVEC_PUBLISHABLE_KEY is missing }, clear=True, ) - def test_constructor_missing_tenant_env_var_raises_value_error(self) -> None: - """Test CVec constructor raises ValueError if CVEC_TENANT env var is missing.""" + def test_constructor_missing_publishable_key_env_var_raises_value_error( + self, mock_login: Any + ) -> None: + """Test CVec constructor raises ValueError if CVEC_PUBLISHABLE_KEY env var is missing.""" with pytest.raises( ValueError, - match="CVEC_TENANT must be set either as an argument or environment variable", + match="CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable", ): - CVec() + CVec(email="user@example.com", password="password123") - def test_constructor_args_override_env_vars(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_constructor_args_override_env_vars(self, mock_login: Any) -> None: """Test CVec constructor arguments override environment variables.""" with patch.dict( os.environ, { "CVEC_HOST": "env_host", - "CVEC_TENANT": "env_tenant", - "CVEC_API_KEY": "env_api_key", + "CVEC_PUBLISHABLE_KEY": "env_publishable_key", }, clear=True, ): client = CVec( host="arg_host", - tenant="arg_tenant", - api_key="arg_api_key", default_start_at=datetime(2023, 3, 1, 0, 0, 0), default_end_at=datetime(2023, 3, 2, 0, 0, 0), + email="user@example.com", + password="password123", + publishable_key="arg_publishable_key", ) assert client.host == "arg_host" - 
assert client.tenant == "arg_tenant" - assert client.api_key == "arg_api_key" + assert client._publishable_key == "arg_publishable_key" assert client.default_start_at == datetime(2023, 3, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 3, 2, 0, 0, 0) class TestCVecGetSpans: - def test_get_spans_basic_case(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_spans_basic_case(self, mock_login: Any) -> None: # Simulate backend response response_data = [ { @@ -138,7 +155,12 @@ def test_get_spans_basic_case(self) -> None: "raw_end_at": datetime(2023, 1, 1, 11, 0, 0), }, ] - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] spans = client.get_spans(name="test_tag") assert len(spans) == 3 @@ -151,7 +173,8 @@ def test_get_spans_basic_case(self) -> None: class TestCVecGetMetrics: - def test_get_metrics_no_interval(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metrics_no_interval(self, mock_login: Any) -> None: response_data = [ { "id": 1, @@ -166,7 +189,12 @@ def test_get_metrics_no_interval(self) -> None: "death_at": None, }, ] - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics() assert len(metrics) == 2 @@ -176,7 +204,8 @@ def test_get_metrics_no_interval(self) -> None: assert metrics[1].id == 2 assert metrics[1].name == "metric2" - def test_get_metrics_with_interval(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metrics_with_interval(self, mock_login: Any) -> None: response_data = [ { "id": 1, @@ -185,7 +214,12 @@ def test_get_metrics_with_interval(self) -> None: "death_at": None, }, ] - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics( start_at=datetime(2023, 1, 5, 0, 0, 0), @@ -194,8 +228,14 @@ def test_get_metrics_with_interval(self) -> None: assert len(metrics) == 1 assert metrics[0].name == "metric_in_interval" - def test_get_metrics_no_data_found(self) -> None: - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metrics_no_data_found(self, mock_login: Any) -> None: + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] metrics = client.get_metrics( start_at=datetime(2024, 1, 1), end_at=datetime(2024, 1, 2) @@ -204,7 +244,8 @@ def test_get_metrics_no_data_found(self) -> None: class TestCVecGetMetricData: - def test_get_metric_data_basic_case(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metric_data_basic_case(self, 
mock_login: Any) -> None: # Simulate backend response time1 = datetime(2023, 1, 1, 10, 0, 0) time2 = datetime(2023, 1, 1, 11, 0, 0) @@ -219,7 +260,12 @@ def test_get_metric_data_basic_case(self) -> None: "value_string": "val_str", }, ] - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] data_points = client.get_metric_data(names=["tag1", "tag2"]) assert len(data_points) == 3 @@ -232,13 +278,20 @@ def test_get_metric_data_basic_case(self) -> None: assert data_points[2].value_double is None assert data_points[2].value_string == "val_str" - def test_get_metric_data_no_data_points(self) -> None: - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metric_data_no_data_points(self, mock_login: Any) -> None: + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] data_points = client.get_metric_data(names=["non_existent_tag"]) assert data_points == [] - def test_get_metric_arrow_basic_case(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metric_arrow_basic_case(self, mock_login: Any) -> None: # Prepare Arrow table names = ["tag1", "tag1", "tag2"] times = [ @@ -260,7 +313,12 @@ def test_get_metric_arrow_basic_case(self) -> None: with ipc.new_file(sink, table.schema) as writer: writer.write_table(table) arrow_bytes = sink.getvalue().to_pybytes() - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: arrow_bytes # type: ignore[method-assign] result = client.get_metric_arrow(names=["tag1", "tag2"]) reader = ipc.open_file(io.BytesIO(result)) @@ -274,7 +332,8 @@ def test_get_metric_arrow_basic_case(self) -> None: "val_str", ] - def test_get_metric_arrow_empty(self) -> None: + @patch.object(CVec, "_login_with_supabase", return_value=None) + def test_get_metric_arrow_empty(self, mock_login: Any) -> None: table = pa.table( { "name": pa.array([], type=pa.string()), @@ -287,7 +346,12 @@ def test_get_metric_arrow_empty(self) -> None: with ipc.new_file(sink, table.schema) as writer: writer.write_table(table) arrow_bytes = sink.getvalue().to_pybytes() - client = CVec(host="test_host", tenant="test_tenant", api_key="test_api_key") + client = CVec( + host="test_host", + email="user@example.com", + password="password123", + publishable_key="test_publishable_key", + ) client._make_request = lambda *args, **kwargs: arrow_bytes # type: ignore[method-assign] result = client.get_metric_arrow(names=["non_existent_tag"]) reader = ipc.open_file(io.BytesIO(result)) From 0916a33079a826df12acc2bef38f4bac05c20f75 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Thu, 3 Jul 2025 00:33:34 +0200 Subject: [PATCH 14/23] Removed publishable key from examples --- examples/add_metric_data_arrow_example.py | 3 --- examples/add_metric_data_example.py | 3 --- examples/get_metric_arrow_example.py | 3 --- examples/get_metric_data_objects_example.py | 3 --- 
examples/get_metrics_example.py | 3 --- examples/get_spans_example.py | 3 --- 6 files changed, 18 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index e1cb1a1..9210495 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -10,9 +10,6 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data using Arrow...") diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index e81fe42..fcd7eb4 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -10,9 +10,6 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data...") diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index 7a5bb52..1b44f09 100644 --- a/examples/get_metric_arrow_example.py +++ b/examples/get_metric_arrow_example.py @@ -9,9 +9,6 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as Arrow...") diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 8fabf05..93046c8 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -7,9 +7,6 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as objects...") diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index 62a6478..9db6345 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -9,9 +9,6 @@ def main() -> None: ), # Replace with your API host email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) print("\nGetting available metrics...") metrics = cvec.get_metrics() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index 3e4c0dd..bccb327 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -7,9 +7,6 @@ def main() -> None: host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), password=os.environ.get("CVEC_PASSWORD", "your-password"), - 
publishable_key=os.environ.get( - "CVEC_PUBLISHABLE_KEY", "your-cvec-publishable-key" - ), ) metrics = cvec.get_metrics() if metrics: From 9930c7e776303be084d152f6938e516b9c3829d0 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Thu, 3 Jul 2025 01:13:33 +0200 Subject: [PATCH 15/23] Removed publishable key and email from cvec class params. Added api call for config and construction of user email for auth. --- examples/add_metric_data_arrow_example.py | 3 +- examples/add_metric_data_example.py | 3 +- examples/get_metric_arrow_example.py | 3 +- examples/get_metric_data_objects_example.py | 3 +- examples/get_metrics_example.py | 3 +- examples/get_spans_example.py | 3 +- src/cvec/cvec.py | 75 ++++++++++++++++----- 7 files changed, 66 insertions(+), 27 deletions(-) diff --git a/examples/add_metric_data_arrow_example.py b/examples/add_metric_data_arrow_example.py index 9210495..efefbbf 100644 --- a/examples/add_metric_data_arrow_example.py +++ b/examples/add_metric_data_arrow_example.py @@ -8,8 +8,7 @@ def main() -> None: cvec = CVec( host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data using Arrow...") diff --git a/examples/add_metric_data_example.py b/examples/add_metric_data_example.py index fcd7eb4..068684a 100644 --- a/examples/add_metric_data_example.py +++ b/examples/add_metric_data_example.py @@ -8,8 +8,7 @@ def main() -> None: cvec = CVec( host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) test_metric_name = "python-sdk/test" print("\nAdding new metric data...") diff --git a/examples/get_metric_arrow_example.py b/examples/get_metric_arrow_example.py index 1b44f09..aa65887 100644 --- a/examples/get_metric_arrow_example.py +++ b/examples/get_metric_arrow_example.py @@ -7,8 +7,7 @@ def main() -> None: cvec = CVec( host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as Arrow...") diff --git a/examples/get_metric_data_objects_example.py b/examples/get_metric_data_objects_example.py index 93046c8..56a2054 100644 --- a/examples/get_metric_data_objects_example.py +++ b/examples/get_metric_data_objects_example.py @@ -5,8 +5,7 @@ def main() -> None: cvec = CVec( host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) test_metric_name = "python-sdk/test" print("\nGetting metric data as objects...") diff --git a/examples/get_metrics_example.py b/examples/get_metrics_example.py index 9db6345..d5bcb79 100644 --- a/examples/get_metrics_example.py +++ b/examples/get_metrics_example.py @@ -7,8 +7,7 @@ def main() -> None: host=os.environ.get( "CVEC_HOST", "https://your-subdomain.cvector.dev" ), # Replace with your API host - email=os.environ.get("CVEC_EMAIL", 
"your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) print("\nGetting available metrics...") metrics = cvec.get_metrics() diff --git a/examples/get_spans_example.py b/examples/get_spans_example.py index bccb327..fb9c010 100644 --- a/examples/get_spans_example.py +++ b/examples/get_spans_example.py @@ -5,8 +5,7 @@ def main() -> None: cvec = CVec( host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), - email=os.environ.get("CVEC_EMAIL", "your-email@cvector.app"), - password=os.environ.get("CVEC_PASSWORD", "your-password"), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) metrics = cvec.get_metrics() if metrics: diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 91a2fc8..a5d6e00 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -25,15 +25,14 @@ class CVec: _access_token: Optional[str] _refresh_token: Optional[str] _publishable_key: Optional[str] + _api_key: Optional[str] def __init__( self, host: Optional[str] = None, default_start_at: Optional[datetime] = None, default_end_at: Optional[datetime] = None, - email: Optional[str] = None, - password: Optional[str] = None, - publishable_key: Optional[str] = None, + api_key: Optional[str] = None, ) -> None: self.host = host or os.environ.get("CVEC_HOST") self.default_start_at = default_start_at @@ -42,26 +41,44 @@ def __init__( # Supabase authentication self._access_token = None self._refresh_token = None - self._publishable_key = publishable_key or os.environ.get( - "CVEC_PUBLISHABLE_KEY" - ) + self._publishable_key = None + self._api_key = api_key or os.environ.get("CVEC_API_KEY") if not self.host: raise ValueError( "CVEC_HOST must be set either as an argument or environment variable" ) - if not self._publishable_key: + if not self._api_key: raise ValueError( - "CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable" + "CVEC_API_KEY must be set either as an argument or environment variable" ) - + + # Fetch publishable key from host config + self._publishable_key = self._fetch_publishable_key() + # Handle authentication - if email and password: - self._login_with_supabase(email, password) - else: - raise ValueError( - "Email and password must be provided for Supabase authentication" - ) + email = self._construct_email_from_api_key() + self._login_with_supabase(email, self._api_key) + + def _construct_email_from_api_key(self) -> str: + """ + Construct email from API key using the pattern cva+@cvector.app + + Returns: + The constructed email address + + Raises: + ValueError: If the API key doesn't match the expected pattern + """ + if not self._api_key.startswith("cva_"): + raise ValueError("API key must start with 'cva_'") + + if len(self._api_key) != 40: # cva_ + 36 62-base encoded symbols + raise ValueError("API key invalid length. Expected cva_ + 36 symbols.") + + # Extract 4 characters after "cva_" + key_id = self._api_key[4:8] + return f"cva+{key_id}@cvector.app" def _get_headers(self) -> Dict[str, str]: """Helper method to get request headers.""" @@ -324,3 +341,31 @@ def _refresh_supabase_token(self) -> None: data = response.json() self._access_token = data["access_token"] self._refresh_token = data["refresh_token"] + + def _fetch_publishable_key(self) -> str: + """ + Fetch the publishable key from the host's config endpoint. 
+ + Returns: + The publishable key from the config response + + Raises: + ValueError: If the config endpoint is not accessible or doesn't contain the key + """ + try: + config_url = f"{self.host}/config" + response = requests.get(config_url) + response.raise_for_status() + + config_data = response.json() + publishable_key = config_data.get("supabasePublishableKey") + + if not publishable_key: + raise ValueError(f"Configuration fetched from {config_url} is invalid") + + return publishable_key + + except requests.RequestException as e: + raise ValueError(f"Failed to fetch config from {self.host}/config: {e}") + except (KeyError, ValueError) as e: + raise ValueError(f"Invalid config response: {e}") From 3000478bc622f88d1c18dba52ec7ffdf73f1dbc7 Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Thu, 3 Jul 2025 01:28:52 +0200 Subject: [PATCH 16/23] Fixed linting and tests. --- src/cvec/cvec.py | 29 ++++--- tests/test_cvec.py | 195 +++++++++++++++++++++++++-------------------- 2 files changed, 124 insertions(+), 100 deletions(-) diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index a5d6e00..86bee13 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -52,10 +52,10 @@ def __init__( raise ValueError( "CVEC_API_KEY must be set either as an argument or environment variable" ) - + # Fetch publishable key from host config self._publishable_key = self._fetch_publishable_key() - + # Handle authentication email = self._construct_email_from_api_key() self._login_with_supabase(email, self._api_key) @@ -63,19 +63,22 @@ def __init__( def _construct_email_from_api_key(self) -> str: """ Construct email from API key using the pattern cva+@cvector.app - + Returns: The constructed email address - + Raises: ValueError: If the API key doesn't match the expected pattern """ + if not self._api_key: + raise ValueError("API key is not set") + if not self._api_key.startswith("cva_"): raise ValueError("API key must start with 'cva_'") - + if len(self._api_key) != 40: # cva_ + 36 62-base encoded symbols raise ValueError("API key invalid length. Expected cva_ + 36 symbols.") - + # Extract 4 characters after "cva_" key_id = self._api_key[4:8] return f"cva+{key_id}@cvector.app" @@ -345,10 +348,10 @@ def _refresh_supabase_token(self) -> None: def _fetch_publishable_key(self) -> str: """ Fetch the publishable key from the host's config endpoint. 
- + Returns: The publishable key from the config response - + Raises: ValueError: If the config endpoint is not accessible or doesn't contain the key """ @@ -356,15 +359,15 @@ def _fetch_publishable_key(self) -> str: config_url = f"{self.host}/config" response = requests.get(config_url) response.raise_for_status() - + config_data = response.json() publishable_key = config_data.get("supabasePublishableKey") - + if not publishable_key: raise ValueError(f"Configuration fetched from {config_url} is invalid") - - return publishable_key - + + return str(publishable_key) + except requests.RequestException as e: raise ValueError(f"Failed to fetch config from {self.host}/config: {e}") except (KeyError, ValueError) as e: diff --git a/tests/test_cvec.py b/tests/test_cvec.py index de88528..31378bd 100644 --- a/tests/test_cvec.py +++ b/tests/test_cvec.py @@ -12,108 +12,84 @@ class TestCVecConstructor: @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_constructor_with_arguments(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_constructor_with_arguments( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: """Test CVec constructor with all arguments provided.""" client = CVec( host="test_host", default_start_at=datetime(2023, 1, 1, 0, 0, 0), default_end_at=datetime(2023, 1, 2, 0, 0, 0), - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) assert client.host == "test_host" assert client.default_start_at == datetime(2023, 1, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 1, 2, 0, 0, 0) assert client._publishable_key == "test_publishable_key" + assert client._api_key == "cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O" @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="env_publishable_key") @patch.dict( os.environ, { "CVEC_HOST": "env_host", - "CVEC_PUBLISHABLE_KEY": "env_publishable_key", + "CVEC_API_KEY": "cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", }, clear=True, ) - def test_constructor_with_env_vars(self, mock_login: Any) -> None: + def test_constructor_with_env_vars( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: """Test CVec constructor with environment variables.""" client = CVec( default_start_at=datetime(2023, 2, 1, 0, 0, 0), default_end_at=datetime(2023, 2, 2, 0, 0, 0), - email="user@example.com", - password="password123", ) assert client.host == "env_host" assert client._publishable_key == "env_publishable_key" + assert client._api_key == "cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O" assert client.default_start_at == datetime(2023, 2, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 2, 2, 0, 0, 0) @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_host_raises_value_error(self, mock_login: Any) -> None: + def test_constructor_missing_host_raises_value_error( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: """Test CVec constructor raises ValueError if host is missing.""" with pytest.raises( ValueError, match="CVEC_HOST must be set either as an argument or environment variable", ): - CVec( - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", - ) + 
CVec(api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O") @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_publishable_key_raises_value_error( - self, mock_login: Any + def test_constructor_missing_api_key_raises_value_error( + self, mock_fetch_key: Any, mock_login: Any ) -> None: - """Test CVec constructor raises ValueError if publishable_key is missing.""" + """Test CVec constructor raises ValueError if api_key is missing.""" with pytest.raises( ValueError, - match="CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable", + match="CVEC_API_KEY must be set either as an argument or environment variable", ): - CVec(host="test_host", email="user@example.com", password="password123") + CVec(host="test_host") @patch.object(CVec, "_login_with_supabase", return_value=None) - @patch.dict(os.environ, {}, clear=True) - def test_constructor_missing_email_password_raises_value_error( - self, mock_login: Any + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_constructor_args_override_env_vars( + self, mock_fetch_key: Any, mock_login: Any ) -> None: - """Test CVec constructor raises ValueError if email or password is missing.""" - with pytest.raises( - ValueError, - match="Email and password must be provided for Supabase authentication", - ): - CVec(host="test_host", publishable_key="test_publishable_key") - - @patch.object(CVec, "_login_with_supabase", return_value=None) - @patch.dict( - os.environ, - { - "CVEC_HOST": "env_host", - # CVEC_PUBLISHABLE_KEY is missing - }, - clear=True, - ) - def test_constructor_missing_publishable_key_env_var_raises_value_error( - self, mock_login: Any - ) -> None: - """Test CVec constructor raises ValueError if CVEC_PUBLISHABLE_KEY env var is missing.""" - with pytest.raises( - ValueError, - match="CVEC_PUBLISHABLE_KEY must be set either as an argument or environment variable", - ): - CVec(email="user@example.com", password="password123") - - @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_constructor_args_override_env_vars(self, mock_login: Any) -> None: """Test CVec constructor arguments override environment variables.""" with patch.dict( os.environ, { "CVEC_HOST": "env_host", - "CVEC_PUBLISHABLE_KEY": "env_publishable_key", + "CVEC_API_KEY": "cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", }, clear=True, ): @@ -121,19 +97,61 @@ def test_constructor_args_override_env_vars(self, mock_login: Any) -> None: host="arg_host", default_start_at=datetime(2023, 3, 1, 0, 0, 0), default_end_at=datetime(2023, 3, 2, 0, 0, 0), - email="user@example.com", - password="password123", - publishable_key="arg_publishable_key", + api_key="cva_differentKeyKALxMnxUdI9hanF0TBPvvvr1", ) assert client.host == "arg_host" - assert client._publishable_key == "arg_publishable_key" + assert client._api_key == "cva_differentKeyKALxMnxUdI9hanF0TBPvvvr1" assert client.default_start_at == datetime(2023, 3, 1, 0, 0, 0) assert client.default_end_at == datetime(2023, 3, 2, 0, 0, 0) + @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_construct_email_from_api_key( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: + """Test email construction from API key.""" + client = CVec( + host="test_host", + 
api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", + ) + email = client._construct_email_from_api_key() + assert email == "cva+hHs0@cvector.app" + + @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_construct_email_from_api_key_invalid_format( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: + """Test email construction with invalid API key format.""" + client = CVec( + host="test_host", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", + ) + client._api_key = "invalid_key" + with pytest.raises(ValueError, match="API key must start with 'cva_'"): + client._construct_email_from_api_key() + + @patch.object(CVec, "_login_with_supabase", return_value=None) + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_construct_email_from_api_key_invalid_length( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: + """Test email construction with invalid API key length.""" + client = CVec( + host="test_host", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", + ) + client._api_key = "cva_short" + with pytest.raises( + ValueError, match="API key invalid length. Expected cva_ \\+ 36 symbols." + ): + client._construct_email_from_api_key() + class TestCVecGetSpans: @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_spans_basic_case(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_spans_basic_case(self, mock_fetch_key: Any, mock_login: Any) -> None: # Simulate backend response response_data = [ { @@ -157,9 +175,7 @@ def test_get_spans_basic_case(self, mock_login: Any) -> None: ] client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] spans = client.get_spans(name="test_tag") @@ -174,7 +190,10 @@ def test_get_spans_basic_case(self, mock_login: Any) -> None: class TestCVecGetMetrics: @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metrics_no_interval(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metrics_no_interval( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: response_data = [ { "id": 1, @@ -191,9 +210,7 @@ def test_get_metrics_no_interval(self, mock_login: Any) -> None: ] client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics() @@ -205,7 +222,10 @@ def test_get_metrics_no_interval(self, mock_login: Any) -> None: assert metrics[1].name == "metric2" @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metrics_with_interval(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metrics_with_interval( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: response_data = [ { "id": 1, @@ -216,9 +236,7 @@ def test_get_metrics_with_interval(self, mock_login: Any) -> None: ] client = CVec( host="test_host", - email="user@example.com", - 
password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] metrics = client.get_metrics( @@ -229,12 +247,13 @@ def test_get_metrics_with_interval(self, mock_login: Any) -> None: assert metrics[0].name == "metric_in_interval" @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metrics_no_data_found(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metrics_no_data_found( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] metrics = client.get_metrics( @@ -245,7 +264,10 @@ def test_get_metrics_no_data_found(self, mock_login: Any) -> None: class TestCVecGetMetricData: @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metric_data_basic_case(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metric_data_basic_case( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: # Simulate backend response time1 = datetime(2023, 1, 1, 10, 0, 0) time2 = datetime(2023, 1, 1, 11, 0, 0) @@ -262,9 +284,7 @@ def test_get_metric_data_basic_case(self, mock_login: Any) -> None: ] client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: response_data # type: ignore[method-assign] data_points = client.get_metric_data(names=["tag1", "tag2"]) @@ -279,19 +299,23 @@ def test_get_metric_data_basic_case(self, mock_login: Any) -> None: assert data_points[2].value_string == "val_str" @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metric_data_no_data_points(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metric_data_no_data_points( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: [] # type: ignore[method-assign] data_points = client.get_metric_data(names=["non_existent_tag"]) assert data_points == [] @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metric_arrow_basic_case(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metric_arrow_basic_case( + self, mock_fetch_key: Any, mock_login: Any + ) -> None: # Prepare Arrow table names = ["tag1", "tag1", "tag2"] times = [ @@ -315,9 +339,7 @@ def test_get_metric_arrow_basic_case(self, mock_login: Any) -> None: arrow_bytes = sink.getvalue().to_pybytes() client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: arrow_bytes # type: 
ignore[method-assign] result = client.get_metric_arrow(names=["tag1", "tag2"]) @@ -333,7 +355,8 @@ def test_get_metric_arrow_basic_case(self, mock_login: Any) -> None: ] @patch.object(CVec, "_login_with_supabase", return_value=None) - def test_get_metric_arrow_empty(self, mock_login: Any) -> None: + @patch.object(CVec, "_fetch_publishable_key", return_value="test_publishable_key") + def test_get_metric_arrow_empty(self, mock_fetch_key: Any, mock_login: Any) -> None: table = pa.table( { "name": pa.array([], type=pa.string()), @@ -348,9 +371,7 @@ def test_get_metric_arrow_empty(self, mock_login: Any) -> None: arrow_bytes = sink.getvalue().to_pybytes() client = CVec( host="test_host", - email="user@example.com", - password="password123", - publishable_key="test_publishable_key", + api_key="cva_hHs0CbkKALxMnxUdI9hanF0TBPvvvr1HjG6O", ) client._make_request = lambda *args, **kwargs: arrow_bytes # type: ignore[method-assign] result = client.get_metric_arrow(names=["non_existent_tag"]) From 7f6c06a98bd35022abbea7fbb2c0ed9df83ab85a Mon Sep 17 00:00:00 2001 From: Michal Chrobok Date: Thu, 3 Jul 2025 01:30:19 +0200 Subject: [PATCH 17/23] Another lint fix --- src/cvec/cvec.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 86bee13..5670a55 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -72,7 +72,7 @@ def _construct_email_from_api_key(self) -> str: """ if not self._api_key: raise ValueError("API key is not set") - + if not self._api_key.startswith("cva_"): raise ValueError("API key must start with 'cva_'") From a17445111b048f3ebe78c86cc0dde95f29fffc26 Mon Sep 17 00:00:00 2001 From: Joshua Napoli Date: Tue, 15 Jul 2025 08:46:15 -0400 Subject: [PATCH 18/23] fix: remove mention of CVEC_TENANT --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2e5413d..1de9c8c 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ import cvec from datetime import datetime ``` -Construct the CVec client. The host, tenant, and api_key can be given through parameters to the constructor or from the environment variables CVEC_HOST, CVEC_TENANT, and CVEC_API_KEY: +Construct the CVec client. The host, tenant, and api_key can be given through parameters to the constructor or from the environment variables CVEC_HOST, and CVEC_API_KEY: ``` cvec = cvec.CVec() @@ -48,7 +48,7 @@ The newest span for a metric does not have an end time, since it has not ended y To get the spans on `my_tag_name` since 2025-05-14 10am, run: ``` -for span in cvec.get_spans("mygroup/myedge/mode", start_at=datetime(2025, 5, 14, 10, 0, 0)): +for span in cvec.get_spans("mygroup/myedge/node", start_at=datetime(2025, 5, 14, 10, 0, 0)): print("%s\t%s" % (span.value, span.raw_start_at)) ``` @@ -121,7 +121,7 @@ The SDK provides an API client class named `CVec` with the following functions. ## `__init__(?host, ?tenant, ?api_key, ?default_start_at, ?default_end_at)` -Setup the SDK with the given host and API Key. The host and API key are loaded from environment variables CVEC_HOST, CVEC_TENANT, CVEC_API_KEY, if they are not given as arguments to the constructor. The `default_start_at` and `default_end_at` can provide a default query time interval for API methods. +Setup the SDK with the given host and API Key. The host and API key are loaded from environment variables CVEC_HOST, CVEC_API_KEY, if they are not given as arguments to the constructor. 
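For example, a minimal sketch with placeholder values (substitute your own host and API key, or rely on the environment variables):

```
import cvec

cvec = cvec.CVec(
    host="https://your-subdomain.cvector.dev",  # optional if CVEC_HOST is set
    api_key="your-api-key",                     # optional if CVEC_API_KEY is set
)
```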
The `default_start_at` and `default_end_at` can provide a default query time interval for API methods. ## `get_spans(name, ?start_at, ?end_at, ?limit)` From 116204e3016de51572fcf66339f099e141ec1431 Mon Sep 17 00:00:00 2001 From: bafo Date: Tue, 29 Jul 2025 17:30:48 +0200 Subject: [PATCH 19/23] feat: integration example with all enpoints --- .gitignore | 2 + ...multiple_metrics_and_get_spans_for_them.py | 135 ++++++++++++++++++ src/cvec/cvec.py | 67 ++++++--- 3 files changed, 187 insertions(+), 17 deletions(-) create mode 100644 examples/add_multiple_metrics_and_get_spans_for_them.py diff --git a/.gitignore b/.gitignore index 17abd6d..48aaab2 100644 --- a/.gitignore +++ b/.gitignore @@ -173,3 +173,5 @@ cython_debug/ # PyPI configuration file .pypirc .aider* + +.vscode/ diff --git a/examples/add_multiple_metrics_and_get_spans_for_them.py b/examples/add_multiple_metrics_and_get_spans_for_them.py new file mode 100644 index 0000000..b000e28 --- /dev/null +++ b/examples/add_multiple_metrics_and_get_spans_for_them.py @@ -0,0 +1,135 @@ +import random +from cvec import CVec +from datetime import datetime, timedelta, timezone +import os +import io +import pyarrow.ipc as ipc # type: ignore[import-untyped] + +from cvec.models.metric import MetricDataPoint + + +def main() -> None: + cvec = CVec( + host=os.environ.get( + "CVEC_HOST", "https://your-subdomain.cvector.dev" + ), + api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), + ) + test_metric_name = "" + + # fetch & pick metrics + metrics = cvec.get_metrics( + start_at=datetime(2025, 7, 14, 10, 0, 0), + end_at=datetime(2025, 7, 14, 11, 0, 0), + ) + print(f"Found {len(metrics)} metrics") + for metric in metrics: + print(f"- {metric.name} - {metric.id}") + if metrics: + found_metric_name = next( + (m.name for m in metrics if "Sensor_" in m.name) + ) + assert found_metric_name, "No suitable metric found" + test_metric_name = found_metric_name + print(f"\nUsing metric: {test_metric_name}") + + # Add metric non-Arrow data + random_number_nonarrow = random.randint(10000, 20000) + print( + f"\nAdding new metric data point with non-Arrow format for metric " + f"'{test_metric_name}' and values {random_number_nonarrow}..." + ) + new_data = [ + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=random_number_nonarrow, + value_string=None, + ), + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=None, + value_string=str(random_number_nonarrow), + ), + ] + cvec.add_metric_data(new_data, use_arrow=False) + print("Non-Arrow Data added successfully") + + # Add metric Arrow data + + random_number_arrow = random.randint(10000, 20000) + print( + f"\nAdding new metric data point with Arrow format for metric " + f"'{test_metric_name}' and value {random_number_arrow}..." 
+ ) + new_data = [ + MetricDataPoint( + name=test_metric_name, + time=datetime.now(timezone.utc), + value_double=random_number_arrow, + value_string=None, + ), + ] + cvec.add_metric_data(new_data, use_arrow=True) + print("Arrow Data added successfully") + + # Fetch and print metric data - non-Arrow + data_points = cvec.get_metric_data( + start_at=datetime.now(timezone.utc) - timedelta(minutes=1), + end_at=datetime.now(timezone.utc), + names=[test_metric_name], + ) + assert len(data_points) > 0, "No data points found for the metric" + assert any( + dp.value_double == random_number_nonarrow for dp in data_points + ), "No data point found with the expected non-Arrow value" + assert any( + dp.value_string == str(random_number_nonarrow) for dp in data_points + ), "No data point found with the expected non-Arrow string value" + assert any( + dp.value_double == random_number_arrow for dp in data_points + ), "No data point found with the expected Arrow value" + print( + f"\nFound {len(data_points)} data points for metric '{test_metric_name}'" + ) + for point in data_points: + print( + f"- {point.name}: {point.value_double or point.value_string} at {point.time}" + ) + + # Fetch and print metric data - Arrow + arrow_data = cvec.get_metric_arrow( + start_at=datetime.now(timezone.utc) - timedelta(minutes=1), + end_at=datetime.now(timezone.utc), + names=[test_metric_name], + ) + reader = ipc.open_file(io.BytesIO(arrow_data)) + table = reader.read_all() + assert len(table) > 0, "No data found in Arrow format" + print(f"Arrow table shape: {len(table)} rows") + print("\nFirst few rows:") + for i in range(min(5, len(table))): + print( + f"- {table['name'][i].as_py()}: {table['value_double'][i].as_py() or table['value_string'][i].as_py()} at {table['time'][i].as_py()}" + ) + + # spans + spans = cvec.get_spans( + start_at=datetime.now(timezone.utc) - timedelta(minutes=1), + end_at=datetime.now(timezone.utc), + name=test_metric_name, + limit=5, + ) + assert len(spans) > 0, "No spans found for the metric" + print(f"Found {len(spans)} spans") + for span in spans: + print( + f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}" + ) + + print("\nAll operations completed successfully.") + + +if __name__ == "__main__": + main() diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 5670a55..b207b98 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -1,6 +1,5 @@ -import os from datetime import datetime -from typing import Any, List, Optional, Dict +from typing import Any, Dict, List, Optional from urllib.parse import urljoin import requests # type: ignore[import-untyped] @@ -77,7 +76,9 @@ def _construct_email_from_api_key(self) -> str: raise ValueError("API key must start with 'cva_'") if len(self._api_key) != 40: # cva_ + 36 62-base encoded symbols - raise ValueError("API key invalid length. Expected cva_ + 36 symbols.") + raise ValueError( + "API key invalid length. Expected cva_ + 36 symbols." 
+ ) # Extract 4 characters after "cva_" key_id = self._api_key[4:8] @@ -118,7 +119,11 @@ def _make_request( ) # If we get a 401 and we have Supabase tokens, try to refresh and retry - if response.status_code == 401 and self._access_token and self._refresh_token: + if ( + response.status_code == 401 + and self._access_token + and self._refresh_token + ): try: self._refresh_supabase_token() # Update headers with new token @@ -218,13 +223,16 @@ def get_metric_data( "names": ",".join(names) if names else None, } - endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + endpoint = ( + "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + ) response_data = self._make_request("GET", endpoint, params=params) if use_arrow: return arrow_to_metric_data_points(response_data) return [ - MetricDataPoint.model_validate(point_data) for point_data in response_data + MetricDataPoint.model_validate(point_data) + for point_data in response_data ] def get_metric_arrow( @@ -258,7 +266,9 @@ def get_metric_arrow( return result def get_metrics( - self, start_at: Optional[datetime] = None, end_at: Optional[datetime] = None + self, + start_at: Optional[datetime] = None, + end_at: Optional[datetime] = None, ) -> List[Metric]: """ Return a list of metrics that had at least one transition in the given [start_at, end_at) interval. @@ -272,8 +282,13 @@ def get_metrics( "end_at": _end_at.isoformat() if _end_at else None, } - response_data = self._make_request("GET", "/api/metrics/", params=params) - return [Metric.model_validate(metric_data) for metric_data in response_data] + response_data = self._make_request( + "GET", "/api/metrics/", params=params + ) + return [ + Metric.model_validate(metric_data) + for metric_data in response_data + ] def add_metric_data( self, @@ -287,7 +302,9 @@ def add_metric_data( data_points: List of MetricDataPoint objects to add use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ - endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + endpoint = ( + "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" + ) if use_arrow: arrow_data = metric_data_points_to_arrow(data_points) @@ -295,7 +312,9 @@ def add_metric_data( "POST", endpoint, data=arrow_data, - headers={"Content-Type": "application/vnd.apache.arrow.stream"}, + headers={ + "Content-Type": "application/vnd.apache.arrow.stream" + }, ) else: data_dicts: List[Dict[str, Any]] = [ @@ -311,11 +330,16 @@ def _login_with_supabase(self, email: str, password: str) -> None: email: User email password: User password """ - supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=password" + supabase_url = ( + f"{self.host}/supabase/auth/v1/token?grant_type=password" + ) payload = {"email": email, "password": password} - headers = {"Content-Type": "application/json", "apikey": self._publishable_key} + headers = { + "Content-Type": "application/json", + "apikey": self._publishable_key, + } response = requests.post(supabase_url, json=payload, headers=headers) response.raise_for_status() @@ -332,11 +356,16 @@ def _refresh_supabase_token(self) -> None: if not self._refresh_token: raise ValueError("No refresh token available") - supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token" + supabase_url = ( + f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token" + ) payload = {"refresh_token": self._refresh_token} - headers = {"Content-Type": "application/json", "apikey": self._publishable_key} + headers = { + 
"Content-Type": "application/json", + "apikey": self._publishable_key, + } response = requests.post(supabase_url, json=payload, headers=headers) response.raise_for_status() @@ -364,11 +393,15 @@ def _fetch_publishable_key(self) -> str: publishable_key = config_data.get("supabasePublishableKey") if not publishable_key: - raise ValueError(f"Configuration fetched from {config_url} is invalid") + raise ValueError( + f"Configuration fetched from {config_url} is invalid" + ) return str(publishable_key) except requests.RequestException as e: - raise ValueError(f"Failed to fetch config from {self.host}/config: {e}") + raise ValueError( + f"Failed to fetch config from {self.host}/config: {e}" + ) except (KeyError, ValueError) as e: raise ValueError(f"Invalid config response: {e}") From 1571ece566d8d9a2c97dee4116af3cb8a758aa1d Mon Sep 17 00:00:00 2001 From: bafo Date: Tue, 29 Jul 2025 17:35:28 +0200 Subject: [PATCH 20/23] fix: add import --- src/cvec/cvec.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index b207b98..62913f0 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -1,3 +1,4 @@ +import os from datetime import datetime from typing import Any, Dict, List, Optional from urllib.parse import urljoin From 9679de9eec2ca094c6a41cd2511496309f7299ea Mon Sep 17 00:00:00 2001 From: bafo Date: Tue, 29 Jul 2025 17:37:59 +0200 Subject: [PATCH 21/23] fix: formatting --- ...multiple_metrics_and_get_spans_for_them.py | 32 +++++------- src/cvec/cvec.py | 50 +++++-------------- 2 files changed, 24 insertions(+), 58 deletions(-) diff --git a/examples/add_multiple_metrics_and_get_spans_for_them.py b/examples/add_multiple_metrics_and_get_spans_for_them.py index b000e28..f666ef1 100644 --- a/examples/add_multiple_metrics_and_get_spans_for_them.py +++ b/examples/add_multiple_metrics_and_get_spans_for_them.py @@ -10,9 +10,7 @@ def main() -> None: cvec = CVec( - host=os.environ.get( - "CVEC_HOST", "https://your-subdomain.cvector.dev" - ), + host=os.environ.get("CVEC_HOST", "https://your-subdomain.cvector.dev"), api_key=os.environ.get("CVEC_API_KEY", "your-api-key"), ) test_metric_name = "" @@ -26,9 +24,7 @@ def main() -> None: for metric in metrics: print(f"- {metric.name} - {metric.id}") if metrics: - found_metric_name = next( - (m.name for m in metrics if "Sensor_" in m.name) - ) + found_metric_name = next((m.name for m in metrics if "Sensor_" in m.name)) assert found_metric_name, "No suitable metric found" test_metric_name = found_metric_name print(f"\nUsing metric: {test_metric_name}") @@ -81,18 +77,16 @@ def main() -> None: names=[test_metric_name], ) assert len(data_points) > 0, "No data points found for the metric" - assert any( - dp.value_double == random_number_nonarrow for dp in data_points - ), "No data point found with the expected non-Arrow value" - assert any( - dp.value_string == str(random_number_nonarrow) for dp in data_points - ), "No data point found with the expected non-Arrow string value" - assert any( - dp.value_double == random_number_arrow for dp in data_points - ), "No data point found with the expected Arrow value" - print( - f"\nFound {len(data_points)} data points for metric '{test_metric_name}'" + assert any(dp.value_double == random_number_nonarrow for dp in data_points), ( + "No data point found with the expected non-Arrow value" + ) + assert any(dp.value_string == str(random_number_nonarrow) for dp in data_points), ( + "No data point found with the expected non-Arrow string value" ) + assert any(dp.value_double == random_number_arrow for 
dp in data_points), ( + "No data point found with the expected Arrow value" + ) + print(f"\nFound {len(data_points)} data points for metric '{test_metric_name}'") for point in data_points: print( f"- {point.name}: {point.value_double or point.value_string} at {point.time}" @@ -124,9 +118,7 @@ def main() -> None: assert len(spans) > 0, "No spans found for the metric" print(f"Found {len(spans)} spans") for span in spans: - print( - f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}" - ) + print(f"- Value: {span.value} from {span.raw_start_at} to {span.raw_end_at}") print("\nAll operations completed successfully.") diff --git a/src/cvec/cvec.py b/src/cvec/cvec.py index 62913f0..5590515 100644 --- a/src/cvec/cvec.py +++ b/src/cvec/cvec.py @@ -77,9 +77,7 @@ def _construct_email_from_api_key(self) -> str: raise ValueError("API key must start with 'cva_'") if len(self._api_key) != 40: # cva_ + 36 62-base encoded symbols - raise ValueError( - "API key invalid length. Expected cva_ + 36 symbols." - ) + raise ValueError("API key invalid length. Expected cva_ + 36 symbols.") # Extract 4 characters after "cva_" key_id = self._api_key[4:8] @@ -120,11 +118,7 @@ def _make_request( ) # If we get a 401 and we have Supabase tokens, try to refresh and retry - if ( - response.status_code == 401 - and self._access_token - and self._refresh_token - ): + if response.status_code == 401 and self._access_token and self._refresh_token: try: self._refresh_supabase_token() # Update headers with new token @@ -224,16 +218,13 @@ def get_metric_data( "names": ",".join(names) if names else None, } - endpoint = ( - "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" - ) + endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" response_data = self._make_request("GET", endpoint, params=params) if use_arrow: return arrow_to_metric_data_points(response_data) return [ - MetricDataPoint.model_validate(point_data) - for point_data in response_data + MetricDataPoint.model_validate(point_data) for point_data in response_data ] def get_metric_arrow( @@ -283,13 +274,8 @@ def get_metrics( "end_at": _end_at.isoformat() if _end_at else None, } - response_data = self._make_request( - "GET", "/api/metrics/", params=params - ) - return [ - Metric.model_validate(metric_data) - for metric_data in response_data - ] + response_data = self._make_request("GET", "/api/metrics/", params=params) + return [Metric.model_validate(metric_data) for metric_data in response_data] def add_metric_data( self, @@ -303,9 +289,7 @@ def add_metric_data( data_points: List of MetricDataPoint objects to add use_arrow: If True, uses Arrow format for data transfer (more efficient for large datasets) """ - endpoint = ( - "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" - ) + endpoint = "/api/metrics/data/arrow" if use_arrow else "/api/metrics/data" if use_arrow: arrow_data = metric_data_points_to_arrow(data_points) @@ -313,9 +297,7 @@ def add_metric_data( "POST", endpoint, data=arrow_data, - headers={ - "Content-Type": "application/vnd.apache.arrow.stream" - }, + headers={"Content-Type": "application/vnd.apache.arrow.stream"}, ) else: data_dicts: List[Dict[str, Any]] = [ @@ -331,9 +313,7 @@ def _login_with_supabase(self, email: str, password: str) -> None: email: User email password: User password """ - supabase_url = ( - f"{self.host}/supabase/auth/v1/token?grant_type=password" - ) + supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=password" payload = {"email": email, "password": password} 
@@ -357,9 +337,7 @@ def _refresh_supabase_token(self) -> None: if not self._refresh_token: raise ValueError("No refresh token available") - supabase_url = ( - f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token" - ) + supabase_url = f"{self.host}/supabase/auth/v1/token?grant_type=refresh_token" payload = {"refresh_token": self._refresh_token} @@ -394,15 +372,11 @@ def _fetch_publishable_key(self) -> str: publishable_key = config_data.get("supabasePublishableKey") if not publishable_key: - raise ValueError( - f"Configuration fetched from {config_url} is invalid" - ) + raise ValueError(f"Configuration fetched from {config_url} is invalid") return str(publishable_key) except requests.RequestException as e: - raise ValueError( - f"Failed to fetch config from {self.host}/config: {e}" - ) + raise ValueError(f"Failed to fetch config from {self.host}/config: {e}") except (KeyError, ValueError) as e: raise ValueError(f"Invalid config response: {e}") From 121774b40f029e4606d3c8d0a26413a8da39f048 Mon Sep 17 00:00:00 2001 From: Joshua Napoli Date: Tue, 29 Jul 2025 11:30:34 -0400 Subject: [PATCH 22/23] docs: Document `add_metric_data` function in README Co-authored-by: aider (gemini/gemini-2.5-pro) --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 1de9c8c..c9dff79 100644 --- a/README.md +++ b/README.md @@ -143,6 +143,13 @@ If no relevant value changes are found, an empty list is returned. Return all data-points within a given [`start_at`, `end_at`) interval, optionally selecting a given list of metric names. The return value is a Pandas DataFrame with four columns: name, time, value_double, value_string. One row is returned for each metric value transition. +## `add_metric_data(data_points, ?use_arrow)` + +Add multiple metric data points to the database. + +- `data_points`: A list of `MetricDataPoint` objects to add. +- `use_arrow`: An optional boolean. If `True`, data is sent to the server using the more efficient Apache Arrow format. This is recommended for large datasets. Defaults to `False`. + ## `get_metrics(?start_at, ?end_at)` Return a list of metrics that had at least one transition in the given [`start_at`, `end_at`) interval. All metrics are returned if no `start_at` and `end_at` are given. From 1b3e6a9fed9c12471b9e71c2850d0f5b4228de5e Mon Sep 17 00:00:00 2001 From: Joshua Napoli Date: Tue, 29 Jul 2025 11:34:39 -0400 Subject: [PATCH 23/23] docs: Add example for `add_metric_data` to README Co-authored-by: aider (gemini/gemini-2.5-pro) --- README.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/README.md b/README.md index c9dff79..4c754d1 100644 --- a/README.md +++ b/README.md @@ -115,6 +115,34 @@ Example output: [46257 rows x 4 columns] ``` +### Adding Metric Data + +To add new metric data points, you create a list of `MetricDataPoint` objects and pass them to `add_metric_data`. Each `MetricDataPoint` should have a `name`, a `time`, and either a `value_double` (for numeric values) or a `value_string` (for string values). 
+ +```python +from datetime import datetime +from cvec.models import MetricDataPoint + +# Assuming 'cvec' client is already initialized + +# Create some data points +data_points = [ + MetricDataPoint( + name="mygroup/myedge/compressor01/stage1/temp_out/c", + time=datetime(2025, 7, 29, 10, 0, 0), + value_double=25.5, + ), + MetricDataPoint( + name="mygroup/myedge/compressor01/status", + time=datetime(2025, 7, 29, 10, 0, 5), + value_string="running", + ), +] + +# Add the data points to CVec +cvec.add_metric_data(data_points) +``` + # CVec Class The SDK provides an API client class named `CVec` with the following functions.
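
The README example above sends the points over the default JSON path; the `use_arrow` flag documented in the preceding patch switches the same call to the Apache Arrow transport. Below is a minimal illustrative sketch (not part of the patch itself) of that variant, assuming a `cvec` client initialized as in the README example, `MetricDataPoint` exported from `cvec.models` as shown above, and a purely illustrative metric name:

```python
from datetime import datetime, timezone

from cvec.models import MetricDataPoint

# Assumes `cvec` is an already-initialized CVec client (see the README example above).
# The metric name is the illustrative one used earlier, not a metric guaranteed to exist.
data_points = [
    MetricDataPoint(
        name="mygroup/myedge/compressor01/stage1/temp_out/c",
        time=datetime.now(timezone.utc),
        value_double=26.1,
    ),
]

# use_arrow=True streams the batch as Apache Arrow rather than JSON,
# which the README recommends for large datasets; the default is use_arrow=False.
cvec.add_metric_data(data_points, use_arrow=True)
```

Per the `add_metric_data` implementation reformatted in PATCH 21, the Arrow path posts to `/api/metrics/data/arrow` with a `Content-Type` of `application/vnd.apache.arrow.stream`, while the JSON path posts the serialized data points to `/api/metrics/data`.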