From 9a38311965858b29d48a6067c3fa174acf1587f2 Mon Sep 17 00:00:00 2001
From: Rafal Jankowski
Date: Wed, 17 Jul 2024 10:24:11 +0200
Subject: [PATCH 1/3] Use ingestion proto buffers from neptune-api

---
 pyproject.toml                                |   2 +-
 src/neptune/api/operations.py                 |  44 +-
 .../api/proto/neptune_pb/ingest/__init__.py   |   0
 .../proto/neptune_pb/ingest/v1/__init__.py    |   0
 .../proto/neptune_pb/ingest/v1/common_pb2.py  |  59 ---
 .../proto/neptune_pb/ingest/v1/common_pb2.pyi | 390 ------------------
 .../neptune_pb/ingest/v1/pub/__init__.py      |   0
 .../neptune_pb/ingest/v1/pub/client_pb2.py    |  26 --
 .../neptune_pb/ingest/v1/pub/client_pb2.pyi   |  30 --
 .../neptune_pb/ingest/v1/pub/ingest_pb2.py    |  27 --
 .../neptune_pb/ingest/v1/pub/ingest_pb2.pyi   |  64 ---
 .../new/api/test_operation_to_api_visitor.py  |  32 +-
 .../new/core/operations/test_operations.py    | 285 +++++--------
 13 files changed, 121 insertions(+), 838 deletions(-)
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.pyi
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/pub/__init__.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.pyi
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.py
 delete mode 100644 src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.pyi

diff --git a/pyproject.toml b/pyproject.toml
index 235e71df2..00791fb7e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,7 +29,7 @@ websocket-client = ">=0.35.0, !=1.0.0"
 urllib3 = "*"
 swagger-spec-validator = ">=2.7.4"
 protobuf = "^4.0.0"
-neptune-api = "0.2.0"
+neptune-api = "0.3.0"
 
 # Built-in integrations
 psutil = "*"
diff --git a/src/neptune/api/operations.py b/src/neptune/api/operations.py
index c4fbbecbe..38ead305b 100644
--- a/src/neptune/api/operations.py
+++ b/src/neptune/api/operations.py
@@ -35,11 +35,10 @@
     Optional,
 )
 
+import neptune_api.proto.neptune_pb.ingest.v1.common_pb2 as common_pb2
+import neptune_api.proto.neptune_pb.ingest.v1.pub.ingest_pb2 as ingest_pb2
 from google.protobuf import timestamp_pb2
 
-import neptune.api.proto.neptune_pb.ingest.v1.common_pb2 as common_pb2
-import neptune.api.proto.neptune_pb.ingest.v1.pub.ingest_pb2 as ingest_pb2
-
 
 class Serializable(abc.ABC):
     @abc.abstractmethod
@@ -59,8 +58,9 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             create=common_pb2.Run(
                 creation_time=timestamp_pb2.Timestamp(seconds=int(self.created_at.timestamp())),
                 run_id=self.custom_id,
+                experiment_id=self.custom_id,
+                family=self.custom_id,
             ),
-            api_key=b"",
         )
 
 
@@ -90,13 +90,9 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
                 step=step,
-                append={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(float64=first_item.value),
-                },
+                append={f"{self.path}": common_pb2.Value(float64=first_item.value)},
                 timestamp=timestamp_pb2.Timestamp(seconds=int(first_item.timestamp)),
             ),
-            api_key=b"",
         )
 
 
@@ -110,12 +106,8 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             project=run_op.project,
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
-                assign={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(int64=self.value),
-                },
+                assign={f"{self.path}": common_pb2.Value(int64=self.value)},
             ),
-            api_key=b"",
         )
 
 
@@ -129,12 +121,8 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             project=run_op.project,
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
-                assign={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(float64=self.value),
-                },
+                assign={f"{self.path}": common_pb2.Value(float64=self.value)},
             ),
-            api_key=b"",
         )
 
 
@@ -148,12 +136,8 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             project=run_op.project,
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
-                assign={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(bool=self.value),
-                },
+                assign={f"{self.path}": common_pb2.Value(bool=self.value)},
             ),
-            api_key=b"",
         )
 
 
@@ -167,12 +151,8 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             project=run_op.project,
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
-                assign={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(string=self.value),
-                },
+                assign={f"{self.path}": common_pb2.Value(string=self.value)},
             ),
-            api_key=b"",
         )
 
 
@@ -187,11 +167,11 @@ def to_proto(self, run_op: "RunOperation") -> ingest_pb2.RunOperation:
             run_id=run_op.run_id,
             update=common_pb2.UpdateRunSnapshot(
                 assign={
-                    "path": common_pb2.Value(string=self.path),
-                    "value": common_pb2.Value(timestamp=timestamp_pb2.Timestamp(seconds=int(self.value.timestamp()))),
+                    f"{self.path}": common_pb2.Value(
+                        timestamp=timestamp_pb2.Timestamp(seconds=int(self.value.timestamp()))
+                    )
                 },
             ),
-            api_key=b"",
         )
diff --git a/src/neptune/api/proto/neptune_pb/ingest/__init__.py b/src/neptune/api/proto/neptune_pb/ingest/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/__init__.py b/src/neptune/api/proto/neptune_pb/ingest/v1/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.py b/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.py
deleted file mode 100644
index 17b135fba..000000000
--- a/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: neptune_pb/ingest/v1/common.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!neptune_pb/ingest/v1/common.proto\x12\x11neptune.ingest.v1\x1a\x1fgoogle/protobuf/timestamp.proto\"$\n\x04Step\x12\r\n\x05whole\x18\x01 \x01(\x04\x12\r\n\x05micro\x18\x02 \x01(\x04\"a\n\tForkPoint\x12\x16\n\x0eparent_project\x18\x01 \x01(\t\x12\x15\n\rparent_run_id\x18\x02 \x01(\t\x12%\n\x04step\x18\x04 \x01(\x0b\x32\x17.neptune.ingest.v1.Step\"\x1b\n\tStringSet\x12\x0e\n\x06values\x18\x01 \x03(\t\"\xbb\x01\n\x05Value\x12\x11\n\x07\x66loat64\x18\x01 \x01(\x01H\x00\x12\x0f\n\x05int64\x18\x03 \x01(\x03H\x00\x12\x0e\n\x04\x62ool\x18\x05 \x01(\x08H\x00\x12\x10\n\x06string\x18\x06 \x01(\tH\x00\x12/\n\ttimestamp\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x32\n\nstring_set\x18\x0c \x01(\x0b\x32\x1c.neptune.ingest.v1.StringSetH\x00\x42\x07\n\x05value\"\xa2\x01\n\x0fModifyStringSet\x12>\n\x06values\x18\x01 \x03(\x0b\x32..neptune.ingest.v1.ModifyStringSet.ValuesEntry\x1aO\n\x0bValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12/\n\x05value\x18\x02 \x01(\x0e\x32 .neptune.ingest.v1.SET_OPERATION:\x02\x38\x01\"I\n\tModifySet\x12\x34\n\x06string\x18\x01 \x01(\x0b\x32\".neptune.ingest.v1.ModifyStringSetH\x00\x42\x06\n\x04type\"\xef\x01\n\x03Run\x12\x13\n\x06run_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1a\n\rexperiment_id\x18\x05 \x01(\tH\x01\x88\x01\x01\x12\x30\n\nfork_point\x18\x02 \x01(\x0b\x32\x1c.neptune.ingest.v1.ForkPoint\x12\x13\n\x06\x66\x61mily\x18\x04 \x01(\tH\x02\x88\x01\x01\x12\x36\n\rcreation_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x03\x88\x01\x01\x42\t\n\x07_run_idB\x10\n\x0e_experiment_idB\t\n\x07_familyB\x10\n\x0e_creation_time\"\x9b\x04\n\x11UpdateRunSnapshot\x12%\n\x04step\x18\x01 \x01(\x0b\x32\x17.neptune.ingest.v1.Step\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x06\x61ssign\x18\x04 \x03(\x0b\x32\x30.neptune.ingest.v1.UpdateRunSnapshot.AssignEntry\x12I\n\x0bmodify_sets\x18\x05 \x03(\x0b\x32\x34.neptune.ingest.v1.UpdateRunSnapshot.ModifySetsEntry\x12@\n\x06\x61ppend\x18\x08 \x03(\x0b\x32\x30.neptune.ingest.v1.UpdateRunSnapshot.AppendEntry\x1aG\n\x0b\x41ssignEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.neptune.ingest.v1.Value:\x02\x38\x01\x1aO\n\x0fModifySetsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.neptune.ingest.v1.ModifySet:\x02\x38\x01\x1aG\n\x0b\x41ppendEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.neptune.ingest.v1.Value:\x02\x38\x01*.\n\rSET_OPERATION\x12\x08\n\x04NOOP\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x42\x35\n$ml.neptune.leaderboard.api.ingest.v1B\x0b\x43ommonProtoP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'neptune_pb.ingest.v1.common_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = 
b'\n$ml.neptune.leaderboard.api.ingest.v1B\013CommonProtoP\001' - _MODIFYSTRINGSET_VALUESENTRY._options = None - _MODIFYSTRINGSET_VALUESENTRY._serialized_options = b'8\001' - _UPDATERUNSNAPSHOT_ASSIGNENTRY._options = None - _UPDATERUNSNAPSHOT_ASSIGNENTRY._serialized_options = b'8\001' - _UPDATERUNSNAPSHOT_MODIFYSETSENTRY._options = None - _UPDATERUNSNAPSHOT_MODIFYSETSENTRY._serialized_options = b'8\001' - _UPDATERUNSNAPSHOT_APPENDENTRY._options = None - _UPDATERUNSNAPSHOT_APPENDENTRY._serialized_options = b'8\001' - _globals['_SET_OPERATION']._serialized_start=1469 - _globals['_SET_OPERATION']._serialized_end=1515 - _globals['_STEP']._serialized_start=89 - _globals['_STEP']._serialized_end=125 - _globals['_FORKPOINT']._serialized_start=127 - _globals['_FORKPOINT']._serialized_end=224 - _globals['_STRINGSET']._serialized_start=226 - _globals['_STRINGSET']._serialized_end=253 - _globals['_VALUE']._serialized_start=256 - _globals['_VALUE']._serialized_end=443 - _globals['_MODIFYSTRINGSET']._serialized_start=446 - _globals['_MODIFYSTRINGSET']._serialized_end=608 - _globals['_MODIFYSTRINGSET_VALUESENTRY']._serialized_start=529 - _globals['_MODIFYSTRINGSET_VALUESENTRY']._serialized_end=608 - _globals['_MODIFYSET']._serialized_start=610 - _globals['_MODIFYSET']._serialized_end=683 - _globals['_RUN']._serialized_start=686 - _globals['_RUN']._serialized_end=925 - _globals['_UPDATERUNSNAPSHOT']._serialized_start=928 - _globals['_UPDATERUNSNAPSHOT']._serialized_end=1467 - _globals['_UPDATERUNSNAPSHOT_ASSIGNENTRY']._serialized_start=1242 - _globals['_UPDATERUNSNAPSHOT_ASSIGNENTRY']._serialized_end=1313 - _globals['_UPDATERUNSNAPSHOT_MODIFYSETSENTRY']._serialized_start=1315 - _globals['_UPDATERUNSNAPSHOT_MODIFYSETSENTRY']._serialized_end=1394 - _globals['_UPDATERUNSNAPSHOT_APPENDENTRY']._serialized_start=1396 - _globals['_UPDATERUNSNAPSHOT_APPENDENTRY']._serialized_end=1467 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.pyi b/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.pyi deleted file mode 100644 index 85078708c..000000000 --- a/src/neptune/api/proto/neptune_pb/ingest/v1/common_pb2.pyi +++ /dev/null @@ -1,390 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! 
-isort:skip_file -""" -import builtins -import collections.abc -import google.protobuf.descriptor -import google.protobuf.internal.containers -import google.protobuf.internal.enum_type_wrapper -import google.protobuf.message -import google.protobuf.timestamp_pb2 -import sys -import typing - -if sys.version_info >= (3, 10): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -class _SET_OPERATION: - ValueType = typing.NewType("ValueType", builtins.int) - V: typing_extensions.TypeAlias = ValueType - -class _SET_OPERATIONEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SET_OPERATION.ValueType], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor - NOOP: _SET_OPERATION.ValueType # 0 - ADD: _SET_OPERATION.ValueType # 1 - REMOVE: _SET_OPERATION.ValueType # 2 - -class SET_OPERATION(_SET_OPERATION, metaclass=_SET_OPERATIONEnumTypeWrapper): - """SET_OPERATION is used to describe the operation to be performed on a set.""" - -NOOP: SET_OPERATION.ValueType # 0 -ADD: SET_OPERATION.ValueType # 1 -REMOVE: SET_OPERATION.ValueType # 2 -global___SET_OPERATION = SET_OPERATION - -@typing_extensions.final -class Step(google.protobuf.message.Message): - """Step is used to measure computational progress of the Run and it's used to stamp its state. - For example, to express Step `3.5`, use `Step{whole: 3, micro: 500_000`}. - """ - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - WHOLE_FIELD_NUMBER: builtins.int - MICRO_FIELD_NUMBER: builtins.int - whole: builtins.int - """Whole step index.""" - micro: builtins.int - """Fractional part of a step expressed as number of micro steps. - Expression `0 <= micro < 1_000_000` must be true at all times. - """ - def __init__( - self, - *, - whole: builtins.int = ..., - micro: builtins.int = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["micro", b"micro", "whole", b"whole"]) -> None: ... - -global___Step = Step - -@typing_extensions.final -class ForkPoint(google.protobuf.message.Message): - """ForkPoint is used to mark the parent and its last inherited state during Forking.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - PARENT_PROJECT_FIELD_NUMBER: builtins.int - PARENT_RUN_ID_FIELD_NUMBER: builtins.int - STEP_FIELD_NUMBER: builtins.int - parent_project: builtins.str - """Optional. Parent project qualified name. If not set, it will default to the context project.""" - parent_run_id: builtins.str - """Required. The id of the parent run within the parent project.""" - @property - def step(self) -> global___Step: - """Fork Step, which is the last step that a new run will inherit from its parent. - If not set, it will default to the last seen step of the parent run by the server at the time of forking. - New run may start numbering steps from the next micro step after the fork step. - """ - def __init__( - self, - *, - parent_project: builtins.str = ..., - parent_run_id: builtins.str = ..., - step: global___Step | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["step", b"step"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["parent_project", b"parent_project", "parent_run_id", b"parent_run_id", "step", b"step"]) -> None: ... - -global___ForkPoint = ForkPoint - -@typing_extensions.final -class StringSet(google.protobuf.message.Message): - """StringSet represents a set of strings. 
The order of strings is irrelevant and duplicates are ignored.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - VALUES_FIELD_NUMBER: builtins.int - @property - def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... - def __init__( - self, - *, - values: collections.abc.Iterable[builtins.str] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... - -global___StringSet = StringSet - -@typing_extensions.final -class Value(google.protobuf.message.Message): - """Value is a union of all supported types that can be used to update a field. - Different types of operations support different subset of this field, so please refer to the documentation. - """ - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - FLOAT64_FIELD_NUMBER: builtins.int - INT64_FIELD_NUMBER: builtins.int - BOOL_FIELD_NUMBER: builtins.int - STRING_FIELD_NUMBER: builtins.int - TIMESTAMP_FIELD_NUMBER: builtins.int - STRING_SET_FIELD_NUMBER: builtins.int - float64: builtins.float - int64: builtins.int - bool: builtins.bool - string: builtins.str - @property - def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... - @property - def string_set(self) -> global___StringSet: ... - def __init__( - self, - *, - float64: builtins.float = ..., - int64: builtins.int = ..., - bool: builtins.bool = ..., - string: builtins.str = ..., - timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., - string_set: global___StringSet | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["bool", b"bool", "float64", b"float64", "int64", b"int64", "string", b"string", "string_set", b"string_set", "timestamp", b"timestamp", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["bool", b"bool", "float64", b"float64", "int64", b"int64", "string", b"string", "string_set", b"string_set", "timestamp", b"timestamp", "value", b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["float64", "int64", "bool", "string", "timestamp", "string_set"] | None: ... - -global___Value = Value - -@typing_extensions.final -class ModifyStringSet(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - @typing_extensions.final - class ValuesEntry(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - KEY_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - value: global___SET_OPERATION.ValueType - def __init__( - self, - *, - key: builtins.str = ..., - value: global___SET_OPERATION.ValueType = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - - VALUES_FIELD_NUMBER: builtins.int - @property - def values(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, global___SET_OPERATION.ValueType]: ... - def __init__( - self, - *, - values: collections.abc.Mapping[builtins.str, global___SET_OPERATION.ValueType] | None = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... 
- -global___ModifyStringSet = ModifyStringSet - -@typing_extensions.final -class ModifySet(google.protobuf.message.Message): - """Allows to update tag values in an incremental way.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - STRING_FIELD_NUMBER: builtins.int - @property - def string(self) -> global___ModifyStringSet: ... - def __init__( - self, - *, - string: global___ModifyStringSet | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["string", b"string", "type", b"type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["string", b"string", "type", b"type"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["type", b"type"]) -> typing_extensions.Literal["string"] | None: ... - -global___ModifySet = ModifySet - -@typing_extensions.final -class Run(google.protobuf.message.Message): - """CreateRun can be used to create a new run. This can be done in two ways: - 1. Create a new run with no inherited state. You may specify a new run family that will be - inherited by future runs forking from this one, otherwise the new family will be selected by the server. - 2. Create a new run that inherits state from the parent run up to a specific step. You may specify - a new run family that will be inherited by future runs forking from this one. By default, the new run - will be in the same Family as the parent run. - CreateRun is idempotent, as long as `fork_point` and `family` parameters are the same. In case pf conflict, - the second operation will fail. - """ - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - RUN_ID_FIELD_NUMBER: builtins.int - EXPERIMENT_ID_FIELD_NUMBER: builtins.int - FORK_POINT_FIELD_NUMBER: builtins.int - FAMILY_FIELD_NUMBER: builtins.int - CREATION_TIME_FIELD_NUMBER: builtins.int - run_id: builtins.str - """Id of the run to be created. Optional if parent context has already specified run_id. If both are set, they - must be equal, otherwise the operation will fail. - """ - experiment_id: builtins.str - """Experiment Id to assign to this Run. If Experiment Id is already assigned to another Run, specifying it in this - field will move it from the previous Run, so that at most one Run in given project has a given Experiment Id. - Note: Experiment Id is currently exposed as "sys/name" field in the Run metadata. - """ - @property - def fork_point(self) -> global___ForkPoint: - """Optional. ForkPoint is used to identify the exact point in the parent history from which the new run continues. - If not specified, the new run will start with no inherited state. - """ - family: builtins.str - """Specifies Family for the new run. Run Family is used to group forking runs that share common ancestry. - By default, the new forking run will be in the same family as the parent run. - """ - @property - def creation_time(self) -> google.protobuf.timestamp_pb2.Timestamp: - """User-specified creation time for the new run. This is especially useful for getting consistent relative timestamps - for series. - If not specified, server will use its current time instead when processing the request. - """ - def __init__( - self, - *, - run_id: builtins.str | None = ..., - experiment_id: builtins.str | None = ..., - fork_point: global___ForkPoint | None = ..., - family: builtins.str | None = ..., - creation_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., - ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["_creation_time", b"_creation_time", "_experiment_id", b"_experiment_id", "_family", b"_family", "_run_id", b"_run_id", "creation_time", b"creation_time", "experiment_id", b"experiment_id", "family", b"family", "fork_point", b"fork_point", "run_id", b"run_id"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["_creation_time", b"_creation_time", "_experiment_id", b"_experiment_id", "_family", b"_family", "_run_id", b"_run_id", "creation_time", b"creation_time", "experiment_id", b"experiment_id", "family", b"family", "fork_point", b"fork_point", "run_id", b"run_id"]) -> None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_creation_time", b"_creation_time"]) -> typing_extensions.Literal["creation_time"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_experiment_id", b"_experiment_id"]) -> typing_extensions.Literal["experiment_id"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_family", b"_family"]) -> typing_extensions.Literal["family"] | None: ... - @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["_run_id", b"_run_id"]) -> typing_extensions.Literal["run_id"] | None: ... - -global___Run = Run - -@typing_extensions.final -class UpdateRunSnapshot(google.protobuf.message.Message): - """Stores Snapshot information about updated fields for a single step. Only passed fields will be updated.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - @typing_extensions.final - class AssignEntry(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - KEY_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - @property - def value(self) -> global___Value: ... - def __init__( - self, - *, - key: builtins.str = ..., - value: global___Value | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - - @typing_extensions.final - class ModifySetsEntry(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - KEY_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - @property - def value(self) -> global___ModifySet: ... - def __init__( - self, - *, - key: builtins.str = ..., - value: global___ModifySet | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... - - @typing_extensions.final - class AppendEntry(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - KEY_FIELD_NUMBER: builtins.int - VALUE_FIELD_NUMBER: builtins.int - key: builtins.str - @property - def value(self) -> global___Value: ... - def __init__( - self, - *, - key: builtins.str = ..., - value: global___Value | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 
- - STEP_FIELD_NUMBER: builtins.int - TIMESTAMP_FIELD_NUMBER: builtins.int - ASSIGN_FIELD_NUMBER: builtins.int - MODIFY_SETS_FIELD_NUMBER: builtins.int - APPEND_FIELD_NUMBER: builtins.int - @property - def step(self) -> global___Step: - """Optional. Step value within the run. If not set, it will default to next full step of the run - (highest step across step values). - """ - @property - def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: - """Timestamp field common to all included operations""" - @property - def assign(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Value]: - """Assigns values for fields. Fields `field_path` is used as a key. Example: - ``` - {assign: { - "parameters/learning_rate": {float64: 0.01}, - "parameters/optimizer": {string: "sgd"}}} - ``` - Note: when using a StringSet value, this action will override the whole Set with the new values. - If you want to add or remove tags individually, use `modify_set.string` instead. - """ - @property - def modify_sets(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___ModifySet]: - """Modify string sets with incremental changes. For example: - ``` - {modify_sets: { - "sys/tags": { - "string": { - values: { - "new_tag1": ADD, - "prev_tag1": REMOVE, - "new_tag2": ADD}}}} - ``` - """ - @property - def append(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Value]: - """Appends values for Series fields. Fields `field_path` is used as a key. Example: - ``` - {assign: { - "metrics/recall": {float64: 0.6}, - "metrics/precision": {float64: 0.72}}} - ``` - Note: when using a StringSet value, this action will override the whole Set with the new values. - If you want to add or remove tags individually, use `modify_set.string` instead. - """ - def __init__( - self, - *, - step: global___Step | None = ..., - timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., - assign: collections.abc.Mapping[builtins.str, global___Value] | None = ..., - modify_sets: collections.abc.Mapping[builtins.str, global___ModifySet] | None = ..., - append: collections.abc.Mapping[builtins.str, global___Value] | None = ..., - ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["step", b"step", "timestamp", b"timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["append", b"append", "assign", b"assign", "modify_sets", b"modify_sets", "step", b"step", "timestamp", b"timestamp"]) -> None: ... - -global___UpdateRunSnapshot = UpdateRunSnapshot diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/__init__.py b/src/neptune/api/proto/neptune_pb/ingest/v1/pub/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.py b/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.py deleted file mode 100644 index 576585663..000000000 --- a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: client.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x63lient.proto\x12\x15neptune.ingest.v1.pub\"\x1a\n\tRequestId\x12\r\n\x05value\x18\x01 \x01(\tB3\n\x1cml.neptune.client.api.modelsB\x11\x43lientIngestProtoP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'client_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\034ml.neptune.client.api.modelsB\021ClientIngestProtoP\001' - _globals['_REQUESTID']._serialized_start=39 - _globals['_REQUESTID']._serialized_end=65 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.pyi b/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.pyi deleted file mode 100644 index 8711af34c..000000000 --- a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/client_pb2.pyi +++ /dev/null @@ -1,30 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import builtins -import google.protobuf.descriptor -import google.protobuf.message -import sys - -if sys.version_info >= (3, 8): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing_extensions.final -class RequestId(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - VALUE_FIELD_NUMBER: builtins.int - value: builtins.str - def __init__( - self, - *, - value: builtins.str = ..., - ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... - -global___RequestId = RequestId diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.py b/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.py deleted file mode 100644 index d81465bfd..000000000 --- a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: neptune_pb/ingest/v1/pub/ingest.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from neptune.api.proto.neptune_pb.ingest.v1 import common_pb2 as neptune__pb_dot_ingest_dot_v1_dot_common__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%neptune_pb/ingest/v1/pub/ingest.proto\x12\x15neptune.ingest.v1.pub\x1a!neptune_pb/ingest/v1/common.proto\"\xcf\x01\n\x0cRunOperation\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0e\n\x06run_id\x18\x02 \x01(\t\x12\x1e\n\x16\x63reate_missing_project\x18\x03 \x01(\x08\x12(\n\x06\x63reate\x18\x06 \x01(\x0b\x32\x16.neptune.ingest.v1.RunH\x00\x12\x36\n\x06update\x18\x07 \x01(\x0b\x32$.neptune.ingest.v1.UpdateRunSnapshotH\x00\x12\x0f\n\x07\x61pi_key\x18\x0c \x01(\x0c\x42\x0b\n\toperationB<\n(ml.neptune.leaderboard.api.ingest.v1.pubB\x0eIngestPubProtoP\x01\x62\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'neptune_pb.ingest.v1.pub.ingest_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n(ml.neptune.leaderboard.api.ingest.v1.pubB\016IngestPubProtoP\001' - _globals['_RUNOPERATION']._serialized_start=100 - _globals['_RUNOPERATION']._serialized_end=307 -# @@protoc_insertion_point(module_scope) diff --git a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.pyi b/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.pyi deleted file mode 100644 index 1963ccaff..000000000 --- a/src/neptune/api/proto/neptune_pb/ingest/v1/pub/ingest_pb2.pyi +++ /dev/null @@ -1,64 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import builtins -import google.protobuf.descriptor -import google.protobuf.message -import neptune.api.proto.neptune_pb.ingest.v1.common_pb2 as common_pb2 -import sys - -if sys.version_info >= (3, 8): - import typing as typing_extensions -else: - import typing_extensions - -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor - -@typing_extensions.final -class RunOperation(google.protobuf.message.Message): - """RunOperation is a message body for the operation to be performed on the run.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - PROJECT_FIELD_NUMBER: builtins.int - RUN_ID_FIELD_NUMBER: builtins.int - CREATE_MISSING_PROJECT_FIELD_NUMBER: builtins.int - CREATE_FIELD_NUMBER: builtins.int - UPDATE_FIELD_NUMBER: builtins.int - API_KEY_FIELD_NUMBER: builtins.int - project: builtins.str - """Qualified name of the context project. This name must include workspace, e.g.: "your-workspace/your-project".""" - run_id: builtins.str - """Required. Subject run id of the operation. In case of `CreateRun` it's the id of the new run. - User must ensure uniqueness of the id within the project. - """ - create_missing_project: builtins.bool - """Optional. Will create project if it doesn't yet exist. This operation is idempotent.""" - @property - def create(self) -> common_pb2.Run: - """Creates a new run. See `CreateRun` for details.""" - @property - def update(self) -> common_pb2.UpdateRunSnapshot: - """All included fields will be aligned to the same step. 
In case the step is not set, it will select the
-        successor of the highest last_step value among metrics currently being updated.
-        """
-    api_key: builtins.bytes
-    """API Key used for authorization of operations.
-    See https://docs.neptune.ai/setup/setting_api_token/ for more information on how to obtain an API Key.
-    """
-    def __init__(
-        self,
-        *,
-        project: builtins.str = ...,
-        run_id: builtins.str = ...,
-        create_missing_project: builtins.bool = ...,
-        create: common_pb2.Run | None = ...,
-        update: common_pb2.UpdateRunSnapshot | None = ...,
-        api_key: builtins.bytes = ...,
-    ) -> None: ...
-    def HasField(self, field_name: typing_extensions.Literal["create", b"create", "operation", b"operation", "update", b"update"]) -> builtins.bool: ...
-    def ClearField(self, field_name: typing_extensions.Literal["api_key", b"api_key", "create", b"create", "create_missing_project", b"create_missing_project", "operation", b"operation", "project", b"project", "run_id", b"run_id", "update", b"update"]) -> None: ...
-    def WhichOneof(self, oneof_group: typing_extensions.Literal["operation", b"operation"]) -> typing_extensions.Literal["create", "update"] | None: ...
-
-global___RunOperation = RunOperation
diff --git a/tests/unit/neptune/new/api/test_operation_to_api_visitor.py b/tests/unit/neptune/new/api/test_operation_to_api_visitor.py
index 837b36382..5df10b191 100644
--- a/tests/unit/neptune/new/api/test_operation_to_api_visitor.py
+++ b/tests/unit/neptune/new/api/test_operation_to_api_visitor.py
@@ -2,17 +2,17 @@
 import pytest
 from google.protobuf import timestamp_pb2
-
-import neptune.core.operations.operation as core_operations
-from neptune.api.operation_to_api import OperationToApiVisitor
-from neptune.api.operations import RunOperation
-from neptune.api.proto.neptune_pb.ingest.v1.common_pb2 import (
+from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import (
     Run,
     Step,
     UpdateRunSnapshot,
     Value,
 )
-from neptune.api.proto.neptune_pb.ingest.v1.pub.ingest_pb2 import RunOperation as ProtoRunOperation
+from neptune_api.proto.neptune_pb.ingest.v1.pub.ingest_pb2 import RunOperation as ProtoRunOperation
+
+import neptune.core.operations.operation as core_operations
+from neptune.api.operation_to_api import OperationToApiVisitor
+from neptune.api.operations import RunOperation
 
 
 @pytest.fixture(scope="module")
@@ -27,12 +27,8 @@ def build_expected_atom_proto_run_operation(path: str, value: Value) -> ProtoRun
         create_missing_project=False,
         create=None,
         update=UpdateRunSnapshot(
-            assign={
-                "path": Value(string=path),
-                "value": value,
-            },
+            assign={f"{path}": value},
         ),
-        api_key=b"",
     )
 
 
@@ -45,10 +41,7 @@ def build_expected_series_proto_run_operation(path: str, value: Value) -> ProtoR
         update=UpdateRunSnapshot(
             step=Step(whole=1, micro=0),
             timestamp=timestamp_pb2.Timestamp(seconds=1),
-            append={
-                "path": Value(string=path),
-                "value": value,
-            },
+            append={f"{path}": value},
         ),
         api_key=b"",
     )
@@ -68,15 +61,12 @@ def test_operation_to_api_visitor_run_creation():
     expected_create = Run(
         run_id="custom_id",
         creation_time=timestamp_pb2.Timestamp(seconds=int(created_at.timestamp())),
+        experiment_id="custom_id",
+        family="custom_id",
     )
 
     expected = ProtoRunOperation(
-        project="project",
-        run_id="run_id",
-        create_missing_project=False,
-        create=expected_create,
-        update=None,
-        api_key=b"",
+        project="project", run_id="run_id", create_missing_project=False, create=expected_create, update=None
     )
 
     assert res == expected
diff --git a/tests/unit/neptune/new/core/operations/test_operations.py 
b/tests/unit/neptune/new/core/operations/test_operations.py index aa1dc3103..6aceab7b7 100644 --- a/tests/unit/neptune/new/core/operations/test_operations.py +++ b/tests/unit/neptune/new/core/operations/test_operations.py @@ -1,224 +1,133 @@ -# -# Copyright (c) 2024, Neptune Labs Sp. z o.o. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import datetime -import random -import string - -import pytest - -from neptune.core.operations import ( +from datetime import datetime + +from google.protobuf import timestamp_pb2 +from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import Run as ProtoRun +from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import ( + Step, + UpdateRunSnapshot, + Value, +) +from neptune_api.proto.neptune_pb.ingest.v1.pub import ingest_pb2 + +from neptune.api.operations import ( AssignBool, AssignDatetime, AssignFloat, - AssignInt, + AssignInteger, AssignString, + FloatValue, LogFloats, + Run, + RunOperation, ) -from neptune.core.operations.operation import FieldOperation -from neptune.exceptions import MalformedOperation - - -class TestAssignInt: - def test__assign_int_operation__to_dict(self): - # given - value = random.randint(int(-1e8), int(1e8)) - assign_int = AssignInt(["test", "path"], value) - # when - data = assign_int.to_dict() - # then - assert data == {"type": "AssignInt", "path": ["test", "path"], "value": value} +def test_assign_float(): + op = AssignFloat("path", 1.0) + run_op = RunOperation("project", "run_id", op) - def test__assign_int_operation__from_dict(self): - # given - value = random.randint(int(-1e8), int(1e8)) - data = {"type": "AssignInt", "path": ["test", "path"], "value": value} - - # when - assign_int = AssignInt.from_dict(data) - - # then - assert assign_int == AssignInt(["test", "path"], value) + serialized = op.to_proto(run_op) + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + assign={"path": Value(float64=1.0)}, + ), + ) -class TestAssignString: - def test__assign_string_operation__to_dict(self): - # given - value = "".join(random.choices(string.ascii_lowercase, k=20)) - assign_string = AssignString(["test", "path"], value) - # when - data = assign_string.to_dict() +def test_assign_int(): + op = AssignInteger("path", 1) + run_op = RunOperation("project", "run_id", op) - # then - assert data == {"type": "AssignString", "path": ["test", "path"], "value": value} + serialized = op.to_proto(run_op) - def test__assign_string_operation__from_dict(self): - # given - value = "".join(random.choices(string.ascii_lowercase, k=20)) - data = {"type": "AssignString", "path": ["test", "path"], "value": value} - assign_string = AssignString.from_dict(data) + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + assign={"path": Value(int64=1)}, + ), + ) - # then - assert assign_string == AssignString(["test", "path"], value) +def test_assign_bool(): + op = AssignBool("path", True) + run_op = RunOperation("project", 
"run_id", op) -class TestAssignBool: - def test__assign_bool_operation__to_dict(self): - # given - value = random.choice([True, False]) - assign_bool = AssignBool(["test", "path"], value) + serialized = op.to_proto(run_op) - # when - data = assign_bool.to_dict() + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + assign={"path": Value(bool=True)}, + ), + ) - # then - assert data == {"type": "AssignBool", "path": ["test", "path"], "value": value} - def test__assign_bool_operation__from_dict(self): - # given - value = random.choice([True, False]) - data = {"type": "AssignBool", "path": ["test", "path"], "value": value} +def test_assign_string(): + op = AssignString("path", "value") + run_op = RunOperation("project", "run_id", op) - # when - assign_bool = AssignBool.from_dict(data) + serialized = op.to_proto(run_op) - # then - assert assign_bool == AssignBool(["test", "path"], value) + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + assign={"path": Value(string="value")}, + ), + ) -class TestAssignFloat: - def test__assign_float_operation__to_dict(self): - # given - value = random.uniform(-1e8, 1e8) - assign_float = AssignFloat(["test", "path"], value) +def test_assign_datetime(): + op = AssignDatetime("path", datetime(2021, 1, 1)) + run_op = RunOperation("project", "run_id", op) - # when - data = assign_float.to_dict() + serialized = op.to_proto(run_op) - # then - assert data == {"type": "AssignFloat", "path": ["test", "path"], "value": value} + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + assign={"path": Value(timestamp=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())))}, + ), + ) - def test__assign_float_operation__from_dict(self): - # given - value = random.uniform(-1e8, 1e8) - data = {"type": "AssignFloat", "path": ["test", "path"], "value": value} - assign_float = AssignFloat.from_dict(data) - # then - assert assign_float == AssignFloat(["test", "path"], value) +def test_log_floats(): + op = LogFloats("path", [FloatValue(1, 1.0, 1)]) + run_op = RunOperation("project", "run_id", op) + serialized = op.to_proto(run_op) -class TestAssignDatetime: - def test__assign_datetime_operation__to_dict(self): - # given - value = datetime.datetime.utcnow() - assign_datetime = AssignDatetime(["test", "path"], value) + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + update=UpdateRunSnapshot( + step=Step(whole=1, micro=0), + timestamp=timestamp_pb2.Timestamp(seconds=1), + append={"path": Value(float64=1.0)}, + ), + ) - # when - data = assign_datetime.to_dict() - - # then - assert data == {"type": "AssignDatetime", "path": ["test", "path"], "value": int(value.timestamp() * 1000)} - - def test__assign_datetime_operation__from_dict(self): - # given - value = datetime.datetime.now().replace(microsecond=0) - value_as_int = int(value.timestamp() * 1000) - data = { - "type": "AssignDatetime", - "path": ["test", "path"], - "value": value_as_int, - } - assign_datetime = AssignDatetime.from_dict(data) - - # then - assert assign_datetime == AssignDatetime(["test", "path"], value) - - -class TestLogFloats: - def test__log_floats_operation__to_dict(self): - # given - values = [ - LogFloats.ValueType( - value=random.uniform(-1e8, 1e8), - step=random.uniform(-1e8, 1e8), - ts=random.uniform(-1e8, 1e8), - ) - for _ in range(5) - ] - - expected_values = 
[value.to_dict() for value in values] - log_floats = LogFloats(["test", "path"], values) - - # when - data = log_floats.to_dict() - - # then - assert data == {"type": "LogFloats", "path": ["test", "path"], "values": expected_values} - - def test__log_floats_operation__from_dict(self): - # given - values = [ - LogFloats.ValueType( - value=random.uniform(-1e8, 1e8), - step=random.uniform(-1e8, 1e8), - ts=random.uniform(-1e8, 1e8), - ) - for _ in range(5) - ] - - dict_values = [value.to_dict() for value in values] - data = {"type": "LogFloats", "path": ["test", "path"], "values": dict_values} - - # when - log_floats = LogFloats.from_dict(data) - - # then - assert log_floats == LogFloats(["test", "path"], values) - - -@pytest.mark.parametrize( - "operation", - [ - AssignInt(["test", "path"], 1), - AssignString(["test", "path"], "test"), - AssignBool(["test", "path"], True), - AssignDatetime(["test", "path"], datetime.datetime.now().replace(microsecond=0)), - AssignFloat(["test", "path"], 1.0), - LogFloats(["test", "path"], [LogFloats.ValueType(1.0, 1.0, 1.0)]), - ], -) -def test_is_serialisation_consistent(operation): - assert operation.from_dict(operation.to_dict()) == operation +def test_run_creation(): + op = Run(datetime(2021, 1, 1), "run_id") -def test__operation__from_dict(): - with pytest.raises(MalformedOperation): - FieldOperation.from_dict({"path": ["test", "path"], "value": 1}) + run_op = RunOperation("project", "run_id", op) - with pytest.raises(MalformedOperation): - FieldOperation.from_dict({"type": "IncorrectType", "path": ["test", "path"], "value": 1}) + serialized = op.to_proto(run_op) - assert FieldOperation.from_dict({"type": "AssignInt", "path": ["test", "path"], "value": 1}) == AssignInt( - ["test", "path"], 1 + assert serialized == ingest_pb2.RunOperation( + project="project", + run_id="run_id", + create=ProtoRun( + creation_time=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())), + run_id="run_id", + family="run_id", + experiment_id="run_id", + ), ) - - assert FieldOperation.from_dict( - {"type": "LogFloats", "path": ["test", "path"], "values": [{"value": 1.0, "step": 1.0, "ts": 1.0}]} - ) == LogFloats(["test", "path"], [LogFloats.ValueType(1.0, 1.0, 1.0)]) From 0bca294a574ca8bd864a5ee367a63ea34fc0a704 Mon Sep 17 00:00:00 2001 From: Rafal Jankowski Date: Wed, 17 Jul 2024 10:29:25 +0200 Subject: [PATCH 2/3] Test fixes --- tests/unit/neptune/new/api/test_operations.py | 53 +--- .../new/core/operations/test_operations.py | 285 ++++++++++++------ 2 files changed, 203 insertions(+), 135 deletions(-) diff --git a/tests/unit/neptune/new/api/test_operations.py b/tests/unit/neptune/new/api/test_operations.py index c082221e6..6aceab7b7 100644 --- a/tests/unit/neptune/new/api/test_operations.py +++ b/tests/unit/neptune/new/api/test_operations.py @@ -1,6 +1,13 @@ from datetime import datetime from google.protobuf import timestamp_pb2 +from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import Run as ProtoRun +from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import ( + Step, + UpdateRunSnapshot, + Value, +) +from neptune_api.proto.neptune_pb.ingest.v1.pub import ingest_pb2 from neptune.api.operations import ( AssignBool, @@ -13,13 +20,6 @@ Run, RunOperation, ) -from neptune.api.proto.neptune_pb.ingest.v1.common_pb2 import Run as ProtoRun -from neptune.api.proto.neptune_pb.ingest.v1.common_pb2 import ( - Step, - UpdateRunSnapshot, - Value, -) -from neptune.api.proto.neptune_pb.ingest.v1.pub import ingest_pb2 def test_assign_float(): @@ -32,12 +32,8 @@ def 
test_assign_float(): project="project", run_id="run_id", update=UpdateRunSnapshot( - assign={ - "path": Value(string="path"), - "value": Value(float64=1.0), - }, + assign={"path": Value(float64=1.0)}, ), - api_key=b"", ) @@ -51,12 +47,8 @@ def test_assign_int(): project="project", run_id="run_id", update=UpdateRunSnapshot( - assign={ - "path": Value(string="path"), - "value": Value(int64=1), - }, + assign={"path": Value(int64=1)}, ), - api_key=b"", ) @@ -70,12 +62,8 @@ def test_assign_bool(): project="project", run_id="run_id", update=UpdateRunSnapshot( - assign={ - "path": Value(string="path"), - "value": Value(bool=True), - }, + assign={"path": Value(bool=True)}, ), - api_key=b"", ) @@ -89,12 +77,8 @@ def test_assign_string(): project="project", run_id="run_id", update=UpdateRunSnapshot( - assign={ - "path": Value(string="path"), - "value": Value(string="value"), - }, + assign={"path": Value(string="value")}, ), - api_key=b"", ) @@ -108,12 +92,8 @@ def test_assign_datetime(): project="project", run_id="run_id", update=UpdateRunSnapshot( - assign={ - "path": Value(string="path"), - "value": Value(timestamp=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp()))), - }, + assign={"path": Value(timestamp=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())))}, ), - api_key=b"", ) @@ -129,12 +109,8 @@ def test_log_floats(): update=UpdateRunSnapshot( step=Step(whole=1, micro=0), timestamp=timestamp_pb2.Timestamp(seconds=1), - append={ - "path": Value(string="path"), - "value": Value(float64=1.0), - }, + append={"path": Value(float64=1.0)}, ), - api_key=b"", ) @@ -151,6 +127,7 @@ def test_run_creation(): create=ProtoRun( creation_time=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())), run_id="run_id", + family="run_id", + experiment_id="run_id", ), - api_key=b"", ) diff --git a/tests/unit/neptune/new/core/operations/test_operations.py b/tests/unit/neptune/new/core/operations/test_operations.py index 6aceab7b7..aa1dc3103 100644 --- a/tests/unit/neptune/new/core/operations/test_operations.py +++ b/tests/unit/neptune/new/core/operations/test_operations.py @@ -1,133 +1,224 @@ -from datetime import datetime - -from google.protobuf import timestamp_pb2 -from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import Run as ProtoRun -from neptune_api.proto.neptune_pb.ingest.v1.common_pb2 import ( - Step, - UpdateRunSnapshot, - Value, -) -from neptune_api.proto.neptune_pb.ingest.v1.pub import ingest_pb2 - -from neptune.api.operations import ( +# +# Copyright (c) 2024, Neptune Labs Sp. z o.o. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import datetime +import random +import string + +import pytest + +from neptune.core.operations import ( AssignBool, AssignDatetime, AssignFloat, - AssignInteger, + AssignInt, AssignString, - FloatValue, LogFloats, - Run, - RunOperation, ) +from neptune.core.operations.operation import FieldOperation +from neptune.exceptions import MalformedOperation -def test_assign_float(): - op = AssignFloat("path", 1.0) - run_op = RunOperation("project", "run_id", op) +class TestAssignInt: + def test__assign_int_operation__to_dict(self): + # given + value = random.randint(int(-1e8), int(1e8)) + assign_int = AssignInt(["test", "path"], value) - serialized = op.to_proto(run_op) + # when + data = assign_int.to_dict() - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - assign={"path": Value(float64=1.0)}, - ), - ) + # then + assert data == {"type": "AssignInt", "path": ["test", "path"], "value": value} + def test__assign_int_operation__from_dict(self): + # given + value = random.randint(int(-1e8), int(1e8)) + data = {"type": "AssignInt", "path": ["test", "path"], "value": value} -def test_assign_int(): - op = AssignInteger("path", 1) - run_op = RunOperation("project", "run_id", op) + # when + assign_int = AssignInt.from_dict(data) - serialized = op.to_proto(run_op) + # then + assert assign_int == AssignInt(["test", "path"], value) - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - assign={"path": Value(int64=1)}, - ), - ) +class TestAssignString: + def test__assign_string_operation__to_dict(self): + # given + value = "".join(random.choices(string.ascii_lowercase, k=20)) + assign_string = AssignString(["test", "path"], value) -def test_assign_bool(): - op = AssignBool("path", True) - run_op = RunOperation("project", "run_id", op) + # when + data = assign_string.to_dict() - serialized = op.to_proto(run_op) + # then + assert data == {"type": "AssignString", "path": ["test", "path"], "value": value} - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - assign={"path": Value(bool=True)}, - ), - ) + def test__assign_string_operation__from_dict(self): + # given + value = "".join(random.choices(string.ascii_lowercase, k=20)) + data = {"type": "AssignString", "path": ["test", "path"], "value": value} + assign_string = AssignString.from_dict(data) + # then + assert assign_string == AssignString(["test", "path"], value) -def test_assign_string(): - op = AssignString("path", "value") - run_op = RunOperation("project", "run_id", op) - serialized = op.to_proto(run_op) +class TestAssignBool: + def test__assign_bool_operation__to_dict(self): + # given + value = random.choice([True, False]) + assign_bool = AssignBool(["test", "path"], value) - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - assign={"path": Value(string="value")}, - ), - ) + # when + data = assign_bool.to_dict() + # then + assert data == {"type": "AssignBool", "path": ["test", "path"], "value": value} -def test_assign_datetime(): - op = AssignDatetime("path", datetime(2021, 1, 1)) - run_op = RunOperation("project", "run_id", op) + def test__assign_bool_operation__from_dict(self): + # given + value = random.choice([True, False]) + data = {"type": "AssignBool", "path": ["test", "path"], "value": value} - serialized = op.to_proto(run_op) + # when + assign_bool = AssignBool.from_dict(data) - 
assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - assign={"path": Value(timestamp=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())))}, - ), - ) + # then + assert assign_bool == AssignBool(["test", "path"], value) -def test_log_floats(): - op = LogFloats("path", [FloatValue(1, 1.0, 1)]) - run_op = RunOperation("project", "run_id", op) +class TestAssignFloat: + def test__assign_float_operation__to_dict(self): + # given + value = random.uniform(-1e8, 1e8) + assign_float = AssignFloat(["test", "path"], value) - serialized = op.to_proto(run_op) + # when + data = assign_float.to_dict() - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - update=UpdateRunSnapshot( - step=Step(whole=1, micro=0), - timestamp=timestamp_pb2.Timestamp(seconds=1), - append={"path": Value(float64=1.0)}, - ), - ) + # then + assert data == {"type": "AssignFloat", "path": ["test", "path"], "value": value} + + def test__assign_float_operation__from_dict(self): + # given + value = random.uniform(-1e8, 1e8) + data = {"type": "AssignFloat", "path": ["test", "path"], "value": value} + assign_float = AssignFloat.from_dict(data) + # then + assert assign_float == AssignFloat(["test", "path"], value) -def test_run_creation(): - op = Run(datetime(2021, 1, 1), "run_id") - run_op = RunOperation("project", "run_id", op) +class TestAssignDatetime: + def test__assign_datetime_operation__to_dict(self): + # given + value = datetime.datetime.utcnow() + assign_datetime = AssignDatetime(["test", "path"], value) + + # when + data = assign_datetime.to_dict() + + # then + assert data == {"type": "AssignDatetime", "path": ["test", "path"], "value": int(value.timestamp() * 1000)} + + def test__assign_datetime_operation__from_dict(self): + # given + value = datetime.datetime.now().replace(microsecond=0) + value_as_int = int(value.timestamp() * 1000) + data = { + "type": "AssignDatetime", + "path": ["test", "path"], + "value": value_as_int, + } + assign_datetime = AssignDatetime.from_dict(data) + + # then + assert assign_datetime == AssignDatetime(["test", "path"], value) + + +class TestLogFloats: + def test__log_floats_operation__to_dict(self): + # given + values = [ + LogFloats.ValueType( + value=random.uniform(-1e8, 1e8), + step=random.uniform(-1e8, 1e8), + ts=random.uniform(-1e8, 1e8), + ) + for _ in range(5) + ] + + expected_values = [value.to_dict() for value in values] + log_floats = LogFloats(["test", "path"], values) + + # when + data = log_floats.to_dict() + + # then + assert data == {"type": "LogFloats", "path": ["test", "path"], "values": expected_values} + + def test__log_floats_operation__from_dict(self): + # given + values = [ + LogFloats.ValueType( + value=random.uniform(-1e8, 1e8), + step=random.uniform(-1e8, 1e8), + ts=random.uniform(-1e8, 1e8), + ) + for _ in range(5) + ] + + dict_values = [value.to_dict() for value in values] + data = {"type": "LogFloats", "path": ["test", "path"], "values": dict_values} + + # when + log_floats = LogFloats.from_dict(data) + + # then + assert log_floats == LogFloats(["test", "path"], values) + + +@pytest.mark.parametrize( + "operation", + [ + AssignInt(["test", "path"], 1), + AssignString(["test", "path"], "test"), + AssignBool(["test", "path"], True), + AssignDatetime(["test", "path"], datetime.datetime.now().replace(microsecond=0)), + AssignFloat(["test", "path"], 1.0), + LogFloats(["test", "path"], [LogFloats.ValueType(1.0, 1.0, 1.0)]), + ], +) +def 
test_is_serialisation_consistent(operation): + assert operation.from_dict(operation.to_dict()) == operation - serialized = op.to_proto(run_op) - assert serialized == ingest_pb2.RunOperation( - project="project", - run_id="run_id", - create=ProtoRun( - creation_time=timestamp_pb2.Timestamp(seconds=int(datetime(2021, 1, 1).timestamp())), - run_id="run_id", - family="run_id", - experiment_id="run_id", - ), +def test__operation__from_dict(): + with pytest.raises(MalformedOperation): + FieldOperation.from_dict({"path": ["test", "path"], "value": 1}) + + with pytest.raises(MalformedOperation): + FieldOperation.from_dict({"type": "IncorrectType", "path": ["test", "path"], "value": 1}) + + assert FieldOperation.from_dict({"type": "AssignInt", "path": ["test", "path"], "value": 1}) == AssignInt( + ["test", "path"], 1 ) + + assert FieldOperation.from_dict( + {"type": "LogFloats", "path": ["test", "path"], "values": [{"value": 1.0, "step": 1.0, "ts": 1.0}]} + ) == LogFloats(["test", "path"], [LogFloats.ValueType(1.0, 1.0, 1.0)]) From 4d6145dfdc0b0fa6fb99b3ffcbfa9a10bbd4fc5f Mon Sep 17 00:00:00 2001 From: Rafal Jankowski Date: Tue, 2 Jul 2024 12:03:02 +0200 Subject: [PATCH 3/3] Basic sync flow --- CHANGELOG.md | 1 + src/neptune/attributes/atoms/copiable_atom.py | 2 +- src/neptune/attributes/series/float_series.py | 2 +- .../attributes/series/string_series.py | 2 +- .../core/operation_processors/factory.py | 7 +- .../sync_operation_processor.py | 29 +++++++- src/neptune/internal/operation.py | 6 +- src/neptune/objects/neptune_object.py | 10 ++- src/neptune/objects/run.py | 5 +- src/neptune/objects/with_backend.py | 20 ++++++ .../attributes/series/test_float_series.py | 7 +- .../new/attributes/sets/test_string_set.py | 2 +- .../new/client/abstract_tables_test.py | 5 ++ .../neptune/new/client/test_run_tables.py | 4 ++ .../test_sync_operation_processor.py | 22 ++++-- .../backends/test_neptune_backend_mock.py | 2 + .../backends/test_operations_preprocessor.py | 8 ++- .../unit/neptune/new/internal/test_streams.py | 3 + tests/unit/neptune/new/test_handler.py | 72 ++++++++++--------- tests/unit/neptune/new/test_logging.py | 1 + 20 files changed, 148 insertions(+), 62 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 559a22649..fbecd5575 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ ### Features - Added auto-generation of `custom_run_id` if it's not provided ([#1762](https://github.com/neptune-ai/neptune-client/pull/1762)) +- Added support for `sync` mode ([#1835](https://github.com/neptune-ai/neptune-client/pull/1835)) ### Changes - Stop sending `X-Neptune-LegacyClient` header ([#1715](https://github.com/neptune-ai/neptune-client/pull/1715)) diff --git a/src/neptune/attributes/atoms/copiable_atom.py b/src/neptune/attributes/atoms/copiable_atom.py index b1742e3d0..fd059aa83 100644 --- a/src/neptune/attributes/atoms/copiable_atom.py +++ b/src/neptune/attributes/atoms/copiable_atom.py @@ -38,7 +38,7 @@ def copy(self, value: ValueCopy, *, wait: bool = False): self._enqueue_operation( CopyAttribute( self._path, - container_id=source_attr._container_id, + container_id=source_attr._custom_id, container_type=source_attr._container_type, source_path=parse_path(source_path), source_attr_cls=source_attr.__class__, diff --git a/src/neptune/attributes/series/float_series.py b/src/neptune/attributes/series/float_series.py index 0a6ab5873..043f92fce 100644 --- a/src/neptune/attributes/series/float_series.py +++ b/src/neptune/attributes/series/float_series.py @@ -69,7 +69,7 @@ def 
_is_value_type(self, value) -> bool: def fetch_last(self) -> float: raise NeptuneUnsupportedFunctionalityException - val = self._backend.get_float_series_attribute(self._container_id, self._container_type, self._path) + val = self._backend.get_float_series_attribute(self._custom_id, self._container_type, self._path) return val.last def _fetch_values_from_backend(self, limit: int, from_step: Optional[float] = None) -> FloatSeriesValues: diff --git a/src/neptune/attributes/series/string_series.py b/src/neptune/attributes/series/string_series.py index f56b490be..7308e9617 100644 --- a/src/neptune/attributes/series/string_series.py +++ b/src/neptune/attributes/series/string_series.py @@ -94,7 +94,7 @@ def _handle_stringified_value(self, value) -> Union[List[str], str]: def fetch_last(self) -> str: raise NeptuneUnsupportedFunctionalityException - val = self._backend.get_string_series_attribute(self._container_id, self._container_type, self._path) + val = self._backend.get_string_series_attribute(self._custom_id, self._container_type, self._path) return val.last def _fetch_values_from_backend(self, limit: int, from_step: Optional[float] = None) -> StringSeriesValues: diff --git a/src/neptune/core/operation_processors/factory.py b/src/neptune/core/operation_processors/factory.py index 8e203e374..34cab8520 100644 --- a/src/neptune/core/operation_processors/factory.py +++ b/src/neptune/core/operation_processors/factory.py @@ -29,6 +29,7 @@ from neptune.core.typing.container_type import ContainerType from neptune.core.typing.id_formats import CustomId from neptune.envs import NEPTUNE_ASYNC_BATCH_SIZE +from neptune.internal.backends.hosted_neptune_backend_v2 import HostedNeptuneBackendV2 from neptune.objects.mode import Mode if TYPE_CHECKING: @@ -37,6 +38,8 @@ def get_operation_processor( mode: Mode, + project: str, + client: "HostedNeptuneBackendV2", custom_id: CustomId, container_type: ContainerType, lock: threading.RLock, @@ -53,7 +56,9 @@ def get_operation_processor( signal_queue=queue, ) elif mode in {Mode.SYNC, Mode.DEBUG}: - return SyncOperationProcessor(custom_id=custom_id, container_type=container_type) + return SyncOperationProcessor( + project=project, custom_id=custom_id, client=client, container_type=container_type + ) elif mode == Mode.OFFLINE: return OfflineOperationProcessor(custom_id=custom_id, container_type=container_type, lock=lock) elif mode == Mode.READ_ONLY: diff --git a/src/neptune/core/operation_processors/sync_operation_processor.py b/src/neptune/core/operation_processors/sync_operation_processor.py index 2b7f6f9ae..888176d4a 100644 --- a/src/neptune/core/operation_processors/sync_operation_processor.py +++ b/src/neptune/core/operation_processors/sync_operation_processor.py @@ -17,10 +17,15 @@ from pathlib import Path from typing import ( + TYPE_CHECKING, Optional, Tuple, ) +from neptune_api.api.data_ingestion import submit_operation + +from neptune.api.operation_to_api import OperationToApiVisitor +from neptune.api.operations import RunOperation from neptune.constants import SYNC_DIRECTORY from neptune.core.components.abstract import ( Resource, @@ -37,13 +42,25 @@ from neptune.core.typing.container_type import ContainerType from neptune.core.typing.id_formats import CustomId from neptune.internal.utils.disk_utilization import ensure_disk_not_overutilize +from neptune.internal.utils.logger import get_logger + +if TYPE_CHECKING: + from neptune.internal.backends.hosted_neptune_backend_v2 import HostedNeptuneBackendV2 + + +LOGGER = get_logger() class 
SyncOperationProcessor(WithResources, OperationProcessor): - def __init__(self, custom_id: "CustomId", container_type: "ContainerType"): - self._container_id: "CustomId" = custom_id + def __init__( + self, project: str, custom_id: "CustomId", client: "HostedNeptuneBackendV2", container_type: "ContainerType" + ): + self._project: str = project + self._custom_id: "CustomId" = custom_id self._container_type: "ContainerType" = container_type + self._client: "HostedNeptuneBackendV2" = client + self._api_operation_visitor = OperationToApiVisitor() self._data_path = get_container_full_path(SYNC_DIRECTORY, custom_id, container_type) # Initialize directory @@ -68,7 +85,13 @@ def resources(self) -> Tuple["Resource", ...]: return self._metadata_file, self._operation_storage @ensure_disk_not_overutilize - def enqueue_operation(self, op: "Operation", *, wait: bool) -> None: ... + def enqueue_operation(self, op: "Operation", *, wait: bool) -> None: + LOGGER.debug(f"Processing operation {op}") + + api_operation = op.accept(self._api_operation_visitor) + + batch = RunOperation(self._project, self._custom_id, operation=api_operation) + _ = submit_operation.sync_detailed(client=self._client.auth_client, body=batch.to_proto()) def stop(self, seconds: Optional[float] = None) -> None: self.flush() diff --git a/src/neptune/internal/operation.py b/src/neptune/internal/operation.py index 074655a2d..d4e841ace 100644 --- a/src/neptune/internal/operation.py +++ b/src/neptune/internal/operation.py @@ -27,7 +27,10 @@ ) from neptune.core.components.operation_storage import OperationStorage -from neptune.exceptions import MalformedOperation +from neptune.exceptions import ( + MalformedOperation, + NeptuneUnsupportedFunctionalityException, +) from neptune.internal.container_type import ContainerType if TYPE_CHECKING: @@ -208,6 +211,7 @@ class LogStrings(LogOperation): values: List[ValueType] def accept(self, visitor: "OperationVisitor[Ret]") -> Ret: + raise NeptuneUnsupportedFunctionalityException() return visitor.visit_log_strings(self) def to_dict(self) -> dict: diff --git a/src/neptune/objects/neptune_object.py b/src/neptune/objects/neptune_object.py index dd7692b27..031cf639c 100644 --- a/src/neptune/objects/neptune_object.py +++ b/src/neptune/objects/neptune_object.py @@ -88,10 +88,7 @@ from neptune.internal.value_to_attribute_visitor import ValueToAttributeVisitor from neptune.internal.warnings import warn_about_unsupported_type from neptune.objects.mode import Mode -from neptune.objects.utils import ( - ensure_not_stopped, - temporarily_disabled, -) +from neptune.objects.utils import ensure_not_stopped from neptune.objects.with_backend import WithBackend from neptune.types.type_casting import cast_value from neptune.utils import stop_synchronization_callback @@ -111,7 +108,7 @@ def __init__( custom_id: Optional[str] = None, project: Optional[str] = None, api_token: Optional[str] = None, - mode: Mode = Mode.ASYNC, + mode: Mode = Mode.SYNC, flush_period: float = DEFAULT_FLUSH_PERIOD, proxies: Optional[dict] = None, async_lag_callback: Optional[NeptuneObjectCallback] = None, @@ -157,6 +154,8 @@ def __init__( self._op_processor: OperationProcessor = get_operation_processor( mode=mode, + project=self._project_qualified_name, + client=self._client, custom_id=CustomId(self._custom_id), container_type=self.container_type, lock=self._lock, @@ -192,7 +191,6 @@ def __init__( On Linux it looks like it does not help much but does not break anything either. 
""" - @temporarily_disabled def _async_create_run(self): """placeholder for async run creation""" operation = RunCreation(created_at=datetime.datetime.now(), custom_id=self._custom_id) diff --git a/src/neptune/objects/run.py b/src/neptune/objects/run.py index aeaaf56d6..ebab663ac 100644 --- a/src/neptune/objects/run.py +++ b/src/neptune/objects/run.py @@ -77,8 +77,8 @@ from neptune.objects.neptune_object import ( NeptuneObject, NeptuneObjectCallback, - temporarily_disabled, ) +from neptune.objects.utils import temporarily_disabled from neptune.types import StringSeries if TYPE_CHECKING: @@ -330,7 +330,8 @@ def __init__( ) # for backward compatibility imports - mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.ASYNC.value) + # TODO: Get back to ASYNC + mode = Mode(mode or os.getenv(CONNECTION_MODE) or Mode.SYNC.value) self._stdout_path: str = "{}/stdout".format(self._monitoring_namespace) self._capture_stdout: bool = capture_stdout diff --git a/src/neptune/objects/with_backend.py b/src/neptune/objects/with_backend.py index 61739c4ad..19b200153 100644 --- a/src/neptune/objects/with_backend.py +++ b/src/neptune/objects/with_backend.py @@ -16,6 +16,7 @@ __all__ = ["WithBackend"] import abc +import os from contextlib import AbstractContextManager from types import TracebackType from typing import ( @@ -23,11 +24,18 @@ Type, ) +from neptune_api.credentials import Credentials + +from neptune import ANONYMOUS_API_TOKEN +from neptune.exceptions import NeptuneMissingApiTokenException from neptune.internal.backends.api_model import Project from neptune.internal.backends.factory import get_backend +from neptune.internal.backends.hosted_neptune_backend_v2 import HostedNeptuneBackendV2 from neptune.internal.backends.neptune_backend import NeptuneBackend from neptune.internal.backends.project_name_lookup import project_name_lookup +from neptune.internal.constants import ANONYMOUS_API_TOKEN_CONTENT from neptune.internal.container_type import ContainerType +from neptune.internal.envs import API_TOKEN_ENV_NAME from neptune.internal.id_formats import ( QualifiedName, UniqueId, @@ -54,6 +62,18 @@ def __init__( self._mode = mode self._backend: NeptuneBackend = get_backend(mode=mode, api_token=api_token, proxies=proxies) + + if api_token is None: + api_token = os.getenv(API_TOKEN_ENV_NAME) + + if api_token == ANONYMOUS_API_TOKEN: + api_token = ANONYMOUS_API_TOKEN_CONTENT + + if api_token is None: + raise NeptuneMissingApiTokenException() + + self._client = HostedNeptuneBackendV2(Credentials.from_api_key(api_token)) if mode == Mode.SYNC else None + self._project_qualified_name: Optional[str] = conform_optional(project, QualifiedName) self._project_api_object: Project = project_name_lookup( backend=self._backend, diff --git a/tests/unit/neptune/new/attributes/series/test_float_series.py b/tests/unit/neptune/new/attributes/series/test_float_series.py index a53105220..59a78cb39 100644 --- a/tests/unit/neptune/new/attributes/series/test_float_series.py +++ b/tests/unit/neptune/new/attributes/series/test_float_series.py @@ -20,7 +20,6 @@ ) from neptune.attributes.series.float_series import FloatSeries -from neptune.exceptions import NeptuneUnsupportedFunctionalityException from neptune.internal.warnings import NeptuneUnsupportedValue from tests.unit.neptune.new.attributes.test_attribute_base import TestAttributeBase @@ -39,7 +38,7 @@ def test_log_type_error(self): with self.assertRaises(Exception): FloatSeries(MagicMock(), MagicMock()).log(value) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, 
raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip("Backend not implemented") def test_get(self): with self._exp() as exp: var = FloatSeries(exp, self._random_path()) @@ -47,7 +46,7 @@ def test_get(self): var.log(34) self.assertEqual(34, var.fetch_last()) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip("Backend not implemented") def test_log(self): with self._exp() as exp: var = FloatSeries(exp, self._random_path()) @@ -57,7 +56,7 @@ def test_log(self): expected = list(range(0, 5000)) self.assertEqual(len(set(expected)), len(set(values))) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip("Backend not implemented") def test_float_warnings(self): with self._exp() as run: with pytest.warns(NeptuneUnsupportedValue): diff --git a/tests/unit/neptune/new/attributes/sets/test_string_set.py b/tests/unit/neptune/new/attributes/sets/test_string_set.py index 761fd1d40..bee88db8e 100644 --- a/tests/unit/neptune/new/attributes/sets/test_string_set.py +++ b/tests/unit/neptune/new/attributes/sets/test_string_set.py @@ -146,7 +146,7 @@ def test_clear(self, get_operation_processor): var.clear(wait=wait) processor.enqueue_operation.assert_called_with(ClearStringSet(path), wait=wait) - @pytest.mark.xfail(reason="StringSet remove disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip("Backend not implemented") def test_get(self): with self._exp() as exp: var = StringSet(exp, self._random_path()) diff --git a/tests/unit/neptune/new/client/abstract_tables_test.py b/tests/unit/neptune/new/client/abstract_tables_test.py index c22492f8a..78e491b73 100644 --- a/tests/unit/neptune/new/client/abstract_tables_test.py +++ b/tests/unit/neptune/new/client/abstract_tables_test.py @@ -83,6 +83,7 @@ def build_fields_leaderboard(now: datetime): StringSetField(path="string/set", values={"a", "b"}), ] + @pytest.mark.skip("Backend not implemented") @patch.object(NeptuneBackendMock, "search_leaderboard_entries") def test_get_table_with_columns_filter(self, search_leaderboard_entries): # when @@ -93,6 +94,7 @@ def test_get_table_with_columns_filter(self, search_leaderboard_entries): parameters = search_leaderboard_entries.call_args[1] self.assertEqual({"sys/id", "sys/creation_time", "datetime"}, parameters.get("columns")) + @pytest.mark.skip("Backend not implemented") @patch.object(NeptuneBackendMock, "search_leaderboard_entries") def test_get_table_as_pandas(self, search_leaderboard_entries): # given @@ -116,6 +118,7 @@ def test_get_table_as_pandas(self, search_leaderboard_entries): self.assertEqual("last text", df["string/series"][1]) self.assertEqual({"a", "b"}, set(df["string/set"][1].split(","))) + @pytest.mark.skip("Backend not implemented") @patch.object(NeptuneBackendMock, "search_leaderboard_entries") def test_get_table_as_rows(self, search_leaderboard_entries): # given @@ -142,6 +145,7 @@ def test_get_table_as_rows(self, search_leaderboard_entries): self.assertEqual("last text", row.get_attribute_value("string/series")) self.assertEqual({"a", "b"}, row.get_attribute_value("string/set")) + @pytest.mark.skip("Backend not implemented") @patch.object(NeptuneBackendMock, "search_leaderboard_entries") def test_get_table_as_table_entries( self, @@ -168,6 +172,7 @@ def test_get_table_as_table_entries( self.assertEqual("last text", table_entry["string/series"].get()) self.assertEqual({"a", "b"}, 
table_entry["string/set"].get()) + @pytest.mark.skip("Backend not implemented") def test_table_limit(self): with pytest.raises(ValueError): self.get_table(limit=-4) diff --git a/tests/unit/neptune/new/client/test_run_tables.py b/tests/unit/neptune/new/client/test_run_tables.py index 2ec525584..30b9af739 100644 --- a/tests/unit/neptune/new/client/test_run_tables.py +++ b/tests/unit/neptune/new/client/test_run_tables.py @@ -18,6 +18,7 @@ from datetime import datetime from typing import List +import pytest from mock import patch from neptune import init_project @@ -43,6 +44,7 @@ def get_table(self, **kwargs) -> Table: def get_table_entries(self, table) -> List[TableEntry]: return table.to_rows() + @pytest.mark.skip("Backend not implemented") @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) def test_fetch_runs_table_is_case_insensitive(self): states = ["active", "inactive", "Active", "Inactive", "aCTive", "INacTiVe"] @@ -53,6 +55,7 @@ def test_fetch_runs_table_is_case_insensitive(self): except ValueError as e: self.fail(e) + @pytest.mark.skip("Backend not implemented") @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) def test_fetch_runs_table_raises_correct_exception_for_incorrect_states(self): for incorrect_state in ["idle", "running", "some_arbitrary_state"]: @@ -60,6 +63,7 @@ def test_fetch_runs_table_raises_correct_exception_for_incorrect_states(self): with self.assertRaises(ValueError): self.get_table(state=incorrect_state) + @pytest.mark.skip("Backend not implemented") @patch("neptune.internal.backends.factory.HostedNeptuneBackend", NeptuneBackendMock) def test_fetch_runs_table_raises_if_query_used_with_params(self): query = "some_query" diff --git a/tests/unit/neptune/new/core/operation_processors/test_sync_operation_processor.py b/tests/unit/neptune/new/core/operation_processors/test_sync_operation_processor.py index 1d2c0e64b..4899288d1 100644 --- a/tests/unit/neptune/new/core/operation_processors/test_sync_operation_processor.py +++ b/tests/unit/neptune/new/core/operation_processors/test_sync_operation_processor.py @@ -43,7 +43,9 @@ def test_setup(_, __, operation_storage_mock, mkdir_mock, random_choice_mock): op_storage = operation_storage_mock.return_value # and - processor = SyncOperationProcessor(custom_id=custom_id, container_type=container_type) + processor = SyncOperationProcessor( + custom_id=custom_id, client=None, container_type=container_type, project="test/project" + ) # then mkdir_mock.assert_called_once_with(parents=True, exist_ok=True) @@ -70,7 +72,9 @@ def test_flush(metadata_file_mock, operation_storage_mock): operation_storage = operation_storage_mock.return_value # and - processor = SyncOperationProcessor(custom_id=custom_id, container_type=container_type) + processor = SyncOperationProcessor( + custom_id=custom_id, client=None, container_type=container_type, project="test/project" + ) # and processor.start() @@ -95,7 +99,9 @@ def test_close(metadata_file_mock, operation_storage_mock): operation_storage = operation_storage_mock.return_value # and - processor = SyncOperationProcessor(custom_id=custom_id, container_type=container_type) + processor = SyncOperationProcessor( + custom_id=custom_id, client=None, container_type=container_type, project="test/project" + ) # and processor.start() @@ -121,7 +127,9 @@ def test_stop(metadata_file_mock, operation_storage_mock, rmdir_mock): operation_storage = operation_storage_mock.return_value # and - processor = SyncOperationProcessor(custom_id=custom_id, 
container_type=container_type) + processor = SyncOperationProcessor( + custom_id=custom_id, client=None, container_type=container_type, project="test/project" + ) # and processor.start() @@ -154,7 +162,9 @@ def test_cleanup_oserror_not_raising_toplevel_exception(rmdir_mock): container_type = ContainerType.RUN # and - processor = SyncOperationProcessor(custom_id=custom_id, container_type=container_type) + processor = SyncOperationProcessor( + custom_id=custom_id, client=None, container_type=container_type, project="test/project" + ) # when processor.cleanup() # no exception raised @@ -171,7 +181,7 @@ def test_metadata(metadata_file_mock, _): container_type = ContainerType.RUN # when - SyncOperationProcessor(custom_id, container_type=container_type) + SyncOperationProcessor(custom_id=custom_id, client=None, container_type=container_type, project="test/project") # then metadata = metadata_file_mock.call_args_list[0][1]["metadata"] diff --git a/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py b/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py index bd516c356..6755995af 100644 --- a/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py +++ b/tests/unit/neptune/new/internal/backends/test_neptune_backend_mock.py @@ -21,6 +21,7 @@ from pathlib import Path from time import time +import pytest from freezegun import freeze_time from neptune.api.models import ( @@ -57,6 +58,7 @@ def a_string() -> str: return "".join(random.sample(char_set * 10, 10)) +@pytest.mark.skip(reason="Backend not implemented") class TestNeptuneBackendMock(unittest.TestCase): def setUp(self) -> None: self.backend = NeptuneBackendMock() diff --git a/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py b/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py index 6bcea4995..5c32a82f4 100644 --- a/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py +++ b/tests/unit/neptune/new/internal/backends/test_operations_preprocessor.py @@ -13,7 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from neptune.exceptions import MetadataInconsistency +import pytest + +from neptune.exceptions import ( + MetadataInconsistency, + NeptuneUnsupportedFunctionalityException, +) from neptune.internal.backends.operations_preprocessor import OperationsPreprocessor from neptune.internal.operation import ( AddStrings, @@ -116,6 +121,7 @@ def test_assign(self): ) self.assertEqual(processor.processed_ops_count, len(operations)) + @pytest.mark.xfail(reason="cli commands are disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) def test_series(self): # given processor = OperationsPreprocessor() diff --git a/tests/unit/neptune/new/internal/test_streams.py b/tests/unit/neptune/new/internal/test_streams.py index 883a299e8..aa688f1da 100644 --- a/tests/unit/neptune/new/internal/test_streams.py +++ b/tests/unit/neptune/new/internal/test_streams.py @@ -20,6 +20,8 @@ from io import StringIO from unittest.mock import MagicMock +import pytest + from neptune.internal.streams.std_stream_capture_logger import ( StdoutCaptureLogger, StdStreamCaptureLogger, @@ -61,6 +63,7 @@ def test_does_not_report_if_used_after_stop(self): stdout.seek(0) self.assertEqual(stdout.read(), "testing\n") + @pytest.mark.skip(reason="Backend not implemented") def test_logger_with_lock_does_not_cause_deadlock(self): stream = StringIO() mock_run = MagicMock() diff --git a/tests/unit/neptune/new/test_handler.py b/tests/unit/neptune/new/test_handler.py index 2fdaf1897..d031c2f77 100644 --- a/tests/unit/neptune/new/test_handler.py +++ b/tests/unit/neptune/new/test_handler.py @@ -40,10 +40,7 @@ API_TOKEN_ENV_NAME, PROJECT_ENV_NAME, ) -from neptune.exceptions import ( - NeptuneUnsupportedFunctionalityException, - NeptuneUserApiInputException, -) +from neptune.exceptions import NeptuneUserApiInputException from neptune.internal.warnings import ( NeptuneUnsupportedType, warned_once, @@ -79,7 +76,6 @@ def assert_logged_warning(capsys: pytest.CaptureFixture, msg: str = ""): assert msg in captured.out -@pytest.mark.skip(reason="Backend not implemented") @patch.object( NeptuneObject, "_async_create_run", @@ -91,6 +87,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN + @pytest.mark.skip(reason="Backend not implemented") def test_assign_operator(self): with init_run(mode="debug", flush_period=0.5) as exp: now = datetime.now() @@ -107,6 +104,7 @@ def test_assign_operator(self): assert isinstance(exp.get_structure()["some"]["str"]["val"], String) assert isinstance(exp.get_structure()["some"]["datetime"]["val"], Datetime) + @pytest.mark.skip(reason="Backend not implemented") def test_assign(self): with init_run(mode="debug", flush_period=0.5) as exp: now = datetime.now() @@ -137,6 +135,7 @@ def test_assign(self): assert isinstance(exp.get_structure()["some"]["str"]["val"], String) assert isinstance(exp.get_structure()["some"]["datetime"]["val"], Datetime) + @pytest.mark.skip(reason="Backend not implemented") def test_lookup(self): with init_run(mode="debug", flush_period=0.5) as exp: ns = exp["some/ns"] @@ -149,6 +148,7 @@ def test_lookup(self): exp.wait() assert ns["some/value"].fetch() == 3 + @pytest.mark.skip(reason="Backend not implemented") def test_stringify_path(self): with init_run(mode="debug", flush_period=0.5) as exp: exp[None] = 5 @@ -185,7 +185,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - @pytest.mark.xfail(reason="fetch_last 
disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_assign_series(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].assign(FloatSeriesVal([1, 2, 0, 10])) @@ -198,7 +198,7 @@ def test_assign_series(self): assert exp["some"]["num"]["val"].fetch_last() == 5 assert exp["some"]["str"]["val"].fetch_last() == "other 3" - @pytest.mark.xfail(reason="Fetch last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_log(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].log(5, step=1) @@ -206,14 +206,14 @@ def test_log(self): assert exp["some"]["num"]["val"].fetch_last() == 5 assert exp["some"]["str"]["val"].fetch_last() == "some text" - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_log_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: dict_value = str({"key-a": "value-a", "key-b": "value-b"}) exp["some/num/val"].log(dict_value, step=1) assert exp["some"]["num"]["val"].fetch_last() == str(dict_value) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_append(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].append(5, step=1) @@ -221,7 +221,7 @@ def test_append(self): assert exp["some"]["num"]["val"].fetch_last() == 5 assert exp["some"]["str"]["val"].fetch_last() == "some text" - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_append_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: dict_value = {"key-a": "value-a", "key-b": "value-b"} @@ -229,7 +229,7 @@ def test_append_dict(self): assert exp["some"]["num"]["val"]["key-a"].fetch_last() == "value-a" assert exp["some"]["num"]["val"]["key-b"].fetch_last() == "value-b" - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_append_complex_input(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["train/dictOfDicts"].append( @@ -244,7 +244,7 @@ def test_append_complex_input(self): assert exp["train"]["dictOfDicts"]["key-b"]["ba"].fetch_last() == 33 assert exp["train"]["dictOfDicts"]["key-b"]["bb"].fetch_last() == 44 - @pytest.mark.xfail(reason="Doesn't work with step enforcement", strict=True, raises=TypeError) + @pytest.mark.skip(reason="Backend not implemented") def test_log_many_values(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].log([5, 10, 15]) @@ -275,7 +275,7 @@ def test_append_many_values_cause_error(self): with assert_unsupported_warning(): exp["some/list-custom-obj/val"].append([Obj(), Obj()], step=1) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_extend(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].extend([5, 7], steps=[0, 1]) @@ -283,11 +283,7 @@ def test_extend(self): assert exp["some"]["num"]["val"].fetch_last() == 7 assert 
exp["some"]["str"]["val"].fetch_last() == "text" - @pytest.mark.xfail( - reason="steps must be passed, but it doesn't work with dict", - strict=True, - raises=TypeError, - ) + @pytest.mark.skip(reason="Backend not implemented") def test_extend_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: dict_value = {"key-a": ["value-a", "value-aa"], "key-b": ["value-b", "value-bb"], "key-c": ["ccc"]} @@ -296,7 +292,7 @@ def test_extend_dict(self): assert exp["some"]["num"]["val"]["key-b"].fetch_last() == "value-bb" assert exp["some"]["num"]["val"]["key-c"].fetch_last() == "ccc" - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_extend_nested(self): """We expect that we are able to log arbitrary tre structure""" with init_run(mode="debug", flush_period=0.5) as exp: @@ -340,7 +336,7 @@ def test_extend_nested_with_wrong_parameters(self): values={"list1": [1, 2, 3], "list2": [10, 20, 30]}, timestamps=[time.time()] * 2, steps=[0, 1] ) - @pytest.mark.xfail(reason="Fetch last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_log_value_errors(self): with init_run(mode="debug", flush_period=0.5) as exp: with pytest.raises(ValueError): @@ -362,7 +358,6 @@ def test_log_value_errors(self): assert exp["some"]["str"]["val"].fetch_last() == "str" -@pytest.mark.skip(reason="Backend not implemented") @patch.object( NeptuneObject, "_async_create_run", @@ -374,7 +369,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_append_errors(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].append(5, step=1) @@ -389,10 +384,11 @@ def test_append_errors(self): def test_extend_value_errors(self): with init_run(mode="debug", flush_period=0.5) as exp: with pytest.raises(NeptuneUserApiInputException): - exp["x"].extend(10, step=10) + exp["x"].extend(10, steps=10) with pytest.raises(ValueError): - exp["x"].extend([5, "str"]) + exp["x"].extend([5, "str"], steps=[15, 20]) + @pytest.mark.skip(reason="Backend not implemented") def test_assign_set(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/str/val"].assign(StringSetVal(["tag1", "tag2"]), wait=True) @@ -403,6 +399,7 @@ def test_assign_set(self): assert exp["some/str/val"].fetch() == {"other_1", "other_2", "other_3"} assert isinstance(exp.get_structure()["some"]["str"]["val"], StringSet) + @pytest.mark.skip(reason="Backend not implemented") def test_add(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/str/val"].add(["some text", "something else"], wait=True) @@ -414,7 +411,6 @@ def test_add(self): assert isinstance(exp.get_structure()["some"]["str"]["val"], StringSet) -@pytest.mark.skip(reason="Backend not implemented") @patch.object( NeptuneObject, "_async_create_run", @@ -426,7 +422,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_assign_dict(self): with 
init_run(mode="debug", flush_period=0.5) as exp: exp["params"] = { @@ -441,12 +437,14 @@ def test_assign_dict(self): assert exp["params/toys"].fetch_last() == "hat" assert exp["params/nested/nested/deep_secret"].fetch_last() == 15 + @pytest.mark.skip(reason="Backend not implemented") def test_assign_empty_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["params"] = {} exp["params"] = {"foo": 5} assert exp["params/foo"].fetch() == 5 + @pytest.mark.skip(reason="Backend not implemented") def test_argparse_namespace(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["params"] = argparse.Namespace( @@ -457,6 +455,7 @@ def test_argparse_namespace(self): assert exp["params/nested/nested_attr"].fetch() == "[1, 2, 3]" assert exp["params/nested/num"].fetch() == 55 + @pytest.mark.skip(reason="Backend not implemented") def test_assign_namespace(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/namespace"].assign( @@ -481,6 +480,7 @@ def test_assign_namespace(self): with pytest.raises(TypeError): exp["some"].assign(NamespaceVal({"namespace/sub-namespace/val1": {"tagA", "tagB"}})) + @pytest.mark.skip(reason="Backend not implemented") def test_fetch_dict(self): now = datetime.now() @@ -508,6 +508,7 @@ def test_fetch_dict(self): }, } + @pytest.mark.skip(reason="Backend not implemented") def test_fetch_dict_with_path(self): now = datetime.now() @@ -522,6 +523,7 @@ def test_fetch_dict_with_path(self): params_dict = exp["params/sub-namespace"].fetch() assert params_dict == {"int": 42, "string": "Some text"} + @pytest.mark.skip(reason="Backend not implemented") def test_assign_drops_dict_entry_with_empty_key(self, capsys): with init_run(mode="debug", flush_period=0.5) as exp: with assert_logged_warning(capsys, '"" can\'t be used in Namespaces and dicts stored in Neptune'): @@ -549,7 +551,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) + @pytest.mark.skip(reason="Backend not implemented") def test_pop(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].assign(3, wait=True) @@ -558,7 +560,7 @@ def test_pop(self): ns.pop("num/val", wait=True) assert "some" not in exp.get_structure() - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) + @pytest.mark.skip(reason="Backend not implemented") def test_pop_self(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["x"].assign(3, wait=True) @@ -566,7 +568,7 @@ def test_pop_self(self): exp["x"].pop(wait=True) assert "x" not in exp.get_structure() - @pytest.mark.xfail(reason="Field deletion disabled", raises=NeptuneUnsupportedFunctionalityException, strict=True) + @pytest.mark.skip(reason="Backend not implemented") def test_del(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["some/num/val"].assign(3) @@ -576,7 +578,6 @@ def test_del(self): assert "some" not in exp.get_structure() -@pytest.mark.skip(reason="Backend not implemented") @patch.object( NeptuneObject, "_async_create_run", @@ -588,6 +589,7 @@ def setUpClass(cls) -> None: os.environ[PROJECT_ENV_NAME] = "organization/project" os.environ[API_TOKEN_ENV_NAME] = ANONYMOUS_API_TOKEN + @pytest.mark.skip(reason="Backend not implemented") def test_assign_distinct_types(self): with init_run(mode="debug", flush_period=0.5) as exp: 
exp["some/str/val"].assign(FloatVal(1.0), wait=True) @@ -602,7 +604,7 @@ def test_attribute_error(self): with pytest.raises(AttributeError): exp["var"].something() - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_float_like_types(self): with init_run(mode="debug", flush_period=0.5) as exp: exp.define("attr1", self.FloatLike(5)) @@ -626,7 +628,7 @@ def test_float_like_types(self): with pytest.raises(ValueError): exp["attr3"].log([4, "234a"]) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_append_float_like_types(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["attr"].append(self.FloatLike(34)) @@ -640,7 +642,7 @@ def test_append_float_like_types(self): exp["attr"].append(4) exp["attr"].append("234a") - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_extend_float_like_types(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["attr"].extend([self.FloatLike(34)]) @@ -650,7 +652,7 @@ def test_extend_float_like_types(self): with pytest.raises(ValueError): exp["attr"].extend([4, "234a"]) - @pytest.mark.xfail(reason="fetch_last disabled", strict=True, raises=NeptuneUnsupportedFunctionalityException) + @pytest.mark.skip(reason="Backend not implemented") def test_assign_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["params"] = { @@ -669,6 +671,7 @@ def test_assign_dict(self): assert exp["params/0/some_data"].fetch() == 345 assert exp["params/None/some_data"].fetch() == 345 + @pytest.mark.skip(reason="Backend not implemented") def test_convertable_to_dict(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["params"] = argparse.Namespace( @@ -679,6 +682,7 @@ def test_convertable_to_dict(self): assert exp["params/nested/nested_attr"].fetch() == "[1, 2, 3]" assert exp["params/nested/num"].fetch() == 55 + @pytest.mark.skip(reason="Backend not implemented") def test_representation(self): with init_run(mode="debug", flush_period=0.5) as exp: exp["params/int"] = 1 diff --git a/tests/unit/neptune/new/test_logging.py b/tests/unit/neptune/new/test_logging.py index 2be265600..f74469063 100644 --- a/tests/unit/neptune/new/test_logging.py +++ b/tests/unit/neptune/new/test_logging.py @@ -55,6 +55,7 @@ def test_internal_logger_loglevels(self, capsys: pytest.CaptureFixture, log_leve with assert_out(capsys, _log("CRITICAL")): logger.critical("message") + @pytest.mark.skip(reason="Backend not implemented") def test_user_can_set_logging_levels(self, capsys, log_level_teardown): # given logger = logging.getLogger(NEPTUNE_LOGGER_NAME)