Change `raise e from e` calls to just `raise` and remove unneeded ones #2225

Open · wants to merge 1 commit into master
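Reviewer note: every hunk below either replaces a `raise e from e` (or the `raise ex from ex` / `raise err from err` variants) inside an `except` block with a bare `raise`, or deletes a `try`/`except` whose only job was to re-raise. The sketch below is illustrative only (hypothetical names, not pymilvus code): chaining an exception to itself via `from e` merely sets a redundant self-referential `__cause__`, while a bare `raise` re-raises the active exception with its original traceback intact.

```python
# Illustrative sketch (hypothetical code, not from this PR).

def _do_work() -> None:
    raise ValueError("boom")


def noisy_reraise() -> None:
    try:
        _do_work()
    except ValueError as e:
        # Chains the exception to itself: e.__cause__ becomes e, which adds
        # nothing to the traceback and obscures the handler's intent.
        raise e from e


def clean_reraise() -> None:
    try:
        _do_work()
    except ValueError:
        # Bare `raise` re-raises the active exception as-is, keeping the
        # original traceback.
        raise


if __name__ == "__main__":
    try:
        noisy_reraise()
    except ValueError as e:
        print(e.__cause__ is e)  # True: redundant self-chaining

    try:
        clean_reraise()
    except ValueError as e:
        print(e.__cause__ is None)  # True: nothing was chained
```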
2 changes: 1 addition & 1 deletion pymilvus/bulk_writer/local_bulk_writer.py
@@ -139,7 +139,7 @@ def _flush(self, call_back: Optional[Callable] = None):
call_back(file_list)
except Exception as e:
logger.error(f"Failed to fulsh, error: {e}")
- raise e from e
+ raise
finally:
del self._working_thread[threading.current_thread().name]
logger.info(f"Flush thread finished, name: {threading.current_thread().name}")
12 changes: 5 additions & 7 deletions pymilvus/client/grpc_handler.py
@@ -151,8 +151,6 @@ def _wait_for_channel_ready(self, timeout: Union[float] = 10):
code=Status.CONNECT_FAILED,
message=f"Fail connecting to server on {self._address}, illegal connection params or server unavailable",
) from e
- except Exception as e:
- raise e from e

def close(self):
self.deregister_state_change_callbacks()
@@ -579,7 +577,7 @@ def batch_insert(
except Exception as err:
if kwargs.get("_async", False):
return MutationFuture(None, None, err)
- raise err from err
+ raise
else:
return m

@@ -616,7 +614,7 @@ def delete(
except Exception as err:
if kwargs.get("_async", False):
return MutationFuture(None, None, err)
- raise err from err
+ raise
else:
return m

@@ -676,7 +674,7 @@ def upsert(
except Exception as err:
if kwargs.get("_async", False):
return MutationFuture(None, None, err)
- raise err from err
+ raise
else:
return m

@@ -738,7 +736,7 @@ def _execute_search(
except Exception as e:
if kwargs.get("_async", False):
return SearchFuture(None, None, e)
- raise e from e
+ raise

def _execute_hybrid_search(
self, request: milvus_types.HybridSearchRequest, timeout: Optional[float] = None, **kwargs
@@ -757,7 +755,7 @@
except Exception as e:
if kwargs.get("_async", False):
return SearchFuture(None, None, e)
- raise e from e
+ raise

@retry_on_rpc_failure()
def search(
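Reviewer note: the `_wait_for_channel_ready` hunk above, and several hunks in `decorators.py` and `milvus_client.py` below, remove the surrounding `try`/`except` entirely rather than rewording the re-raise, because the handler neither logged nor translated the error. A rough before/after sketch with hypothetical names (not the actual pymilvus code):

```python
# Rough before/after shape of the deleted no-op handlers (hypothetical names).

def get_state_before(conn, collection_name: str):
    try:
        return conn.get_load_state(collection_name)
    except Exception as e:
        # No logging, no translation: the handler only re-raises what was
        # already propagating.
        raise e from e


def get_state_after(conn, collection_name: str):
    # Behaviorally equivalent: an uncaught exception propagates to the caller
    # on its own, with the original traceback.
    return conn.get_load_state(collection_name)
```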
20 changes: 7 additions & 13 deletions pymilvus/decorators.py
@@ -87,7 +87,7 @@ def timeout(start_time: Optional[float] = None) -> bool:
except grpc.RpcError as e:
# Do not retry on these codes
if e.code() in IGNORE_RETRY_CODES:
- raise e from e
+ raise
if timeout(start_time):
raise MilvusException(e.code, f"{to_msg}, message={e.details()}") from e

@@ -113,9 +113,7 @@ def timeout(start_time: Optional[float] = None) -> bool:
time.sleep(back_off)
back_off = min(back_off * back_off_multiplier, max_back_off)
else:
- raise e from e
- except Exception as e:
- raise e from e
+ raise
finally:
counter += 1

@@ -138,21 +136,21 @@ def handler(*args, **kwargs):
except MilvusException as e:
record_dict["RPC error"] = str(datetime.datetime.now())
LOGGER.error(f"RPC error: [{inner_name}], {e}, <Time:{record_dict}>")
- raise e from e
+ raise
except grpc.FutureTimeoutError as e:
record_dict["gRPC timeout"] = str(datetime.datetime.now())
LOGGER.error(
f"grpc Timeout: [{inner_name}], <{e.__class__.__name__}: "
f"{e.code()}, {e.details()}>, <Time:{record_dict}>"
)
- raise e from e
+ raise
except grpc.RpcError as e:
record_dict["gRPC error"] = str(datetime.datetime.now())
LOGGER.error(
f"grpc RpcError: [{inner_name}], <{e.__class__.__name__}: "
f"{e.code()}, {e.details()}>, <Time:{record_dict}>"
)
- raise e from e
+ raise
except Exception as e:
record_dict["Exception"] = str(datetime.datetime.now())
LOGGER.error(f"Unexpected error: [{inner_name}], {e}, <Time: {record_dict}>")
@@ -190,9 +188,7 @@ def handler(*args, **kwargs):
if e.code() == grpc.StatusCode.UNIMPLEMENTED:
LOGGER.debug(f"{func.__name__} unimplemented, ignore it")
return default_return_value
- raise e from e
- except Exception as e:
- raise e from e
+ raise

return handler

@@ -211,8 +207,6 @@ def handler(*args, **kwargs):
"please downgrade your sdk or upgrade your server"
)
raise MilvusException(message=msg) from e
- raise e from e
- except Exception as e:
- raise e from e
+ raise

return handler
65 changes: 23 additions & 42 deletions pymilvus/milvus_client/milvus_client.py
@@ -136,7 +136,7 @@ def _fast_create_collection(
logger.debug("Successfully created collection: %s", collection_name)
except Exception as ex:
logger.error("Failed to create collection: %s", collection_name)
- raise ex from ex
+ raise

index_params = IndexParams()
index_params.add_index(vector_field_name, "", "", metric_type=metric_type)
@@ -173,7 +173,7 @@ def _create_index(
logger.debug("Successfully created an index on collection: %s", collection_name)
except Exception as ex:
logger.error("Failed to create an index on collection: %s", collection_name)
- raise ex from ex
+ raise

def insert(
self,
@@ -216,12 +216,9 @@ def insert(

conn = self._get_connection()
# Insert into the collection.
- try:
- res = conn.insert_rows(
- collection_name, data, partition_name=partition_name, timeout=timeout
- )
- except Exception as ex:
- raise ex from ex
+ res = conn.insert_rows(
+ collection_name, data, partition_name=partition_name, timeout=timeout
+ )
return OmitZeroDict(
{
"insert_count": res.insert_count,
@@ -268,13 +265,9 @@ def upsert(

conn = self._get_connection()
# Upsert into the collection.
- try:
- res = conn.upsert_rows(
- collection_name, data, partition_name=partition_name, timeout=timeout, **kwargs
- )
- except Exception as ex:
- raise ex from ex
-
+ res = conn.upsert_rows(
+ collection_name, data, partition_name=partition_name, timeout=timeout, **kwargs
+ )
return OmitZeroDict(
{
"upsert_count": res.upsert_count,
@@ -333,7 +326,7 @@ def search(
)
except Exception as ex:
logger.error("Failed to search collection: %s", collection_name)
- raise ex from ex
+ raise

ret = []
for hits in res:
@@ -384,7 +377,7 @@ def query(
schema_dict = conn.describe_collection(collection_name, timeout=timeout, **kwargs)
except Exception as ex:
logger.error("Failed to describe collection: %s", collection_name)
- raise ex from ex
+ raise

if ids:
filter = self._pack_pks_expr(schema_dict, ids)
@@ -406,7 +399,7 @@
)
except Exception as ex:
logger.error("Failed to query collection: %s", collection_name)
- raise ex from ex
+ raise

return res

@@ -446,7 +439,7 @@ def get(
schema_dict = conn.describe_collection(collection_name, timeout=timeout, **kwargs)
except Exception as ex:
logger.error("Failed to describe collection: %s", collection_name)
- raise ex from ex
+ raise

if not output_fields:
output_fields = ["*"]
@@ -466,7 +459,7 @@
)
except Exception as ex:
logger.error("Failed to get collection: %s", collection_name)
- raise ex from ex
+ raise

return res

@@ -528,7 +521,7 @@ def delete(
schema_dict = conn.describe_collection(collection_name, timeout=timeout, **kwargs)
except Exception as ex:
logger.error("Failed to describe collection: %s", collection_name)
- raise ex from ex
+ raise

expr = self._pack_pks_expr(schema_dict, pks)

@@ -555,7 +548,7 @@ def delete(
ret_pks.extend(res.primary_keys)
except Exception as ex:
logger.error("Failed to delete primary keys in collection: %s", collection_name)
- raise ex from ex
+ raise

if ret_pks:
return ret_pks
@@ -625,7 +618,7 @@ def _create_collection_with_schema(
logger.debug("Successfully created collection: %s", collection_name)
except Exception as ex:
logger.error("Failed to create collection: %s", collection_name)
- raise ex from ex
+ raise

if index_params:
self.create_index(collection_name, index_params, timeout=timeout)
@@ -653,7 +646,7 @@ def _create_connection(
connections.connect(using, user, password, db_name, token, uri=uri, **kwargs)
except Exception as ex:
logger.error("Failed to create new connection using: %s", using)
- raise ex from ex
+ raise
else:
logger.debug("Created new connection using: %s", using)
return using
@@ -700,7 +693,7 @@ def load_collection(self, collection_name: str, timeout: Optional[float] = None,
conn.load_collection(collection_name, timeout=timeout, **kwargs)
except MilvusException as ex:
logger.error("Failed to load collection: %s", collection_name)
- raise ex from ex
+ raise

def release_collection(self, collection_name: str, timeout: Optional[float] = None, **kwargs):
conn = self._get_connection()
try:
conn.release_collection(collection_name, timeout=timeout, **kwargs)
except MilvusException as ex:
logger.error("Failed to load collection: %s", collection_name)
- raise ex from ex
+ raise

def get_load_state(
self,
@@ -721,10 +714,7 @@ def get_load_state(
partition_names = None
if partition_name:
partition_names = [partition_name]
- try:
- state = conn.get_load_state(collection_name, partition_names, timeout=timeout, **kwargs)
- except Exception as ex:
- raise ex from ex
+ state = conn.get_load_state(collection_name, partition_names, timeout=timeout, **kwargs)

ret = {"state": state}
if state == LoadState.Loading:
@@ -865,10 +855,7 @@ def list_users(self, timeout: Optional[float] = None, **kwargs):

def describe_user(self, user_name: str, timeout: Optional[float] = None, **kwargs):
conn = self._get_connection()
- try:
- res = conn.select_one_user(user_name, True, timeout=timeout, **kwargs)
- except Exception as ex:
- raise ex from ex
+ res = conn.select_one_user(user_name, True, timeout=timeout, **kwargs)
if res.groups:
item = res.groups[0]
return {"user_name": user_name, "roles": item.roles}
@@ -897,21 +884,15 @@ def describe_role(
) -> List[Dict]:
conn = self._get_connection()
db_name = kwargs.pop("db_name", "")
- try:
- res = conn.select_grant_for_one_role(role_name, db_name, timeout=timeout, **kwargs)
- except Exception as ex:
- raise ex from ex
+ res = conn.select_grant_for_one_role(role_name, db_name, timeout=timeout, **kwargs)
ret = {}
ret["role"] = role_name
ret["privileges"] = [dict(i) for i in res.groups]
return ret

def list_roles(self, timeout: Optional[float] = None, **kwargs):
conn = self._get_connection()
- try:
- res = conn.select_all_role(False, timeout=timeout, **kwargs)
- except Exception as ex:
- raise ex from ex
+ res = conn.select_all_role(False, timeout=timeout, **kwargs)

groups = res.groups
return [g.role_name for g in groups]