diff --git a/alephclient/api.py b/alephclient/api.py
index 5ed681d..d8ec446 100644
--- a/alephclient/api.py
+++ b/alephclient/api.py
@@ -337,10 +337,15 @@ def _bulk_chunk(
         entityset_id: Optional[str] = None,
         force: bool = False,
         unsafe: bool = False,
+        cleaned: bool = False
     ):
         for attempt in count(1):
             url = self._make_url(f"collections/{collection_id}/_bulk")
-            params = {"unsafe": unsafe, "entityset_id": entityset_id}
+            params = {"entityset_id": entityset_id}
+            if unsafe:
+                params['safe'] = not unsafe
+            if cleaned:
+                params['clean'] = not cleaned
             try:
                 response = self.session.post(url, json=chunk, params=params)
                 response.raise_for_status()
diff --git a/alephclient/cli.py b/alephclient/cli.py
index 1d373c8..6868a54 100644
--- a/alephclient/cli.py
+++ b/alephclient/cli.py
@@ -254,7 +254,10 @@ def read_json_stream(stream):
     "--force", is_flag=True, default=False, help="continue after server errors"
 )
 @click.option(
-    "--unsafe", is_flag=True, default=False, help="disable server-side validation"
+    "--unsafe", is_flag=True, default=False, help="allow references to archive hashes"
+)
+@click.option(
+    "--cleaned", is_flag=True, default=False, help="disable server-side validation for all types"
 )
 @click.pass_context
 def write_entities(
@@ -265,6 +268,7 @@ def write_entities(
     chunksize=1000,
     force=False,
     unsafe=False,
+    cleaned=False
 ):
     """Read entities from standard input and index them."""
     api = ctx.obj["api"]
@@ -288,6 +292,7 @@ def read_json_stream(stream):
             chunk_size=chunksize,
             unsafe=unsafe,
             force=force,
+            cleaned=cleaned,
             entityset_id=entityset_id,
         )
     except AlephException as exc:
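
A minimal usage sketch for the new flag. It assumes `AlephAPI.write_entities` forwards extra keyword arguments down to `_bulk_chunk` (as the existing `unsafe` flag suggests) and that the client is configured via the usual environment variables; the collection id and entity payload below are illustrative only.

    from alephclient.api import AlephAPI

    api = AlephAPI()  # host/API key taken from the environment
    entities = [
        {"id": "a1b2c3", "schema": "Person", "properties": {"name": ["Jane Doe"]}},
    ]
    # cleaned=True tells the server to skip validation for all types
    # (sent as clean=False in the request params); unsafe=True additionally
    # allows references to archive hashes (sent as safe=False).
    # Both default to False, so existing callers keep the current behaviour.
    api.write_entities("123", entities, unsafe=True, cleaned=True)

On the command line, the same toggle is exposed as the new --cleaned flag on `alephclient write-entities`, alongside the existing --unsafe flag.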