Skip to content

Commit

Permalink
Merge pull request #802 from Aiven-Open/aiven-anton/demote-exception-logs
Browse files Browse the repository at this point in the history

Demote various exception logs to warning and info
  • Loading branch information
tvainika authored Jan 26, 2024
2 parents 87c8a81 + 0ad4536 commit 4023176
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 15 deletions.
14 changes: 7 additions & 7 deletions karapace/kafka_rest_apis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ async def get_user_proxy(self, request: HTTPRequest) -> "UserRestProxy":
if self.proxies.get(key) is None:
self.proxies[key] = UserRestProxy(self.config, self.kafka_timeout, self.serializer)
except (NoBrokersAvailable, AuthenticationFailedError):
log.exception("Failed to connect to Kafka with the credentials")
log.warning("Failed to connect to Kafka with the credentials")
self.r(body={"message": "Forbidden"}, content_type=JSON_CONTENT_TYPE, status=HTTPStatus.FORBIDDEN)
proxy = self.proxies[key]
proxy.mark_used()
Expand Down Expand Up @@ -497,9 +497,9 @@ async def _maybe_create_async_producer(self) -> AsyncKafkaProducer:
except (NoBrokersAvailable, AuthenticationFailedError):
await producer.stop()
if retry:
log.exception("Unable to connect to the bootstrap servers, retrying")
log.warning("Unable to connect to the bootstrap servers, retrying")
else:
log.exception("Giving up after trying to connect to the bootstrap servers")
log.warning("Giving up after trying to connect to the bootstrap servers")
raise
await asyncio.sleep(1)
except Exception:
Expand Down Expand Up @@ -626,7 +626,7 @@ async def cluster_metadata(self, topics: Optional[List[str]] = None) -> dict:
self._cluster_metadata = metadata
self._cluster_metadata_complete = topics is None
except KafkaException:
log.exception("Could not refresh cluster metadata")
log.warning("Could not refresh cluster metadata")
KafkaRest.r(
body={
"message": "Kafka node not ready",
Expand All @@ -653,9 +653,9 @@ def init_admin_client(self):
break
except: # pylint: disable=bare-except
if retry:
log.exception("Unable to start admin client, retrying")
log.warning("Unable to start admin client, retrying")
else:
log.exception("Giving up after failing to start admin client")
log.warning("Giving up after failing to start admin client")
raise
time.sleep(1)

Expand Down Expand Up @@ -857,7 +857,7 @@ async def validate_schema_info(
try:
data[f"{subject_type}_schema_id"] = await self.get_schema_id(data, topic, subject_type, schema_type)
except InvalidPayload:
log.exception("Unable to retrieve schema id")
log.warning("Unable to retrieve schema id")
KafkaRest.r(
body={
"error_code": RESTErrorCodes.HTTP_BAD_REQUEST.value,
Expand Down
4 changes: 2 additions & 2 deletions karapace/kafka_rest_apis/consumer_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,9 +228,9 @@ async def create_kafka_consumer(self, fetch_min_bytes, group_name, internal_name
return c
except: # pylint: disable=bare-except
if retry:
LOG.exception("Unable to create consumer, retrying")
LOG.warning("Unable to create consumer, retrying")
else:
LOG.exception("Giving up after failing to create consumer")
LOG.warning("Giving up after failing to create consumer")
raise
await asyncio.sleep(1)

Expand Down
4 changes: 2 additions & 2 deletions karapace/schema_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ def normalize_schema_str(
try:
schema_str = json_encode(json_decode(schema_str), compact=True, sort_keys=True)
except JSONDecodeError as e:
LOG.error("Schema is not valid JSON")
LOG.info("Schema is not valid JSON")
raise e
elif schema_type == SchemaType.PROTOBUF:
if schema:
Expand All @@ -138,7 +138,7 @@ def normalize_schema_str(
try:
schema_str = str(parse_protobuf_schema_definition(schema_str, None, None, False))
except InvalidSchema as e:
LOG.exception("Schema is not valid ProtoBuf definition")
LOG.info("Schema is not valid ProtoBuf definition")
raise e

else:
Expand Down
8 changes: 4 additions & 4 deletions karapace/schema_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ def run(self) -> None:
LOG.warning("[Admin Client] No Brokers available yet. Retrying")
self._stop_schema_reader.wait(timeout=KAFKA_CLIENT_CREATION_TIMEOUT_SECONDS)
except KafkaConfigurationError:
LOG.exception("[Admin Client] Invalid configuration. Bailing")
LOG.info("[Admin Client] Invalid configuration. Bailing")
raise
except Exception as e: # pylint: disable=broad-except
LOG.exception("[Admin Client] Unexpected exception. Retrying")
Expand All @@ -182,7 +182,7 @@ def run(self) -> None:
LOG.warning("[Consumer] No Brokers available yet. Retrying")
self._stop_schema_reader.wait(timeout=2.0)
except KafkaConfigurationError:
LOG.exception("[Consumer] Invalid configuration. Bailing")
LOG.info("[Consumer] Invalid configuration. Bailing")
raise
except Exception as e: # pylint: disable=broad-except
LOG.exception("[Consumer] Unexpected exception. Retrying")
Expand Down Expand Up @@ -239,7 +239,7 @@ def _get_beginning_offset(self) -> int:
# * See `OFFSET_EMPTY` and `OFFSET_UNINITIALIZED`
return beginning_offset - 1
except KafkaTimeoutError:
LOG.exception("Reading begin offsets timed out.")
LOG.warning("Reading begin offsets timed out.")
except Exception as e: # pylint: disable=broad-except
self.stats.unexpected_exception(ex=e, where="_get_beginning_offset")
LOG.exception("Unexpected exception when reading begin offsets.")
Expand All @@ -254,7 +254,7 @@ def _is_ready(self) -> bool:
try:
_, end_offset = self.consumer.get_watermark_offsets(TopicPartition(self.config["topic_name"], 0))
except KafkaTimeoutError:
LOG.exception("Reading end offsets timed out.")
LOG.warning("Reading end offsets timed out.")
return False
except Exception as e: # pylint: disable=broad-except
self.stats.unexpected_exception(ex=e, where="_is_ready")
Expand Down

0 comments on commit 4023176

Please sign in to comment.