Skip to content

Commit

Permalink
feat: enable opentelemetry grpc instrumentation
Browse files Browse the repository at this point in the history
  • Loading branch information
fdupont committed Feb 7, 2024
1 parent 1f8e804 commit 28debf2
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 0 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ The following models can be served by pythie-serving:
* `--port`: Port number to listen to.
#### Environment variables
* `OPENTELEMETRY_COLLECTOR_HOST`: OpenTelemetry Collector receiver endpoint. If not defined, OpenTelemetry will not be activated. See https://opentelemetry.io/docs/what-is-opentelemetry for more details.
For a treelite served model:
* `TREELITE_NTHREAD`: Number of threads to use to compute predictions
* `TREELITE_BIND_THREADS`: Set to `0` to deactivate thread pinning. See https://treelite.readthedocs.io/en/latest/treelite-runtime-api.html
Expand Down
4 changes: 4 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
"treelite_runtime~=2.2.2",
"scikit-learn~=1.2.0",
"cloudpickle~=2.1.0",
"opentelemetry-instrumentation-grpc~=0.38b0",
"opentelemetry-api>=1.17.0, <2.0",
"opentelemetry-sdk>=1.17.0, <2.0",
"opentelemetry-exporter-otlp>=1.17.0, <2.0",
]
extras_require_test = [
*extras_require_serving,
Expand Down
16 changes: 16 additions & 0 deletions src/pythie_serving/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,27 @@
from logging.config import dictConfig

from google.protobuf import text_format
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from pythie_serving import create_grpc_server
from pythie_serving.tensorflow_proto.tensorflow_serving.config import (
model_server_config_pb2,
)


def initialize_opentelemetry():
    """Enable OpenTelemetry gRPC server tracing when a collector is configured.

    Reads the ``OPENTELEMETRY_COLLECTOR_HOST`` environment variable. When it
    is unset (or empty), tracing stays disabled and this function is a no-op,
    matching the behavior documented in the README.
    """
    otel_collector_host = os.environ.get("OPENTELEMETRY_COLLECTOR_HOST")
    # NOTE: do not wrap the lookup in str() — str(None) == "None" is truthy,
    # which would always activate tracing with a bogus "None" endpoint.
    if not otel_collector_host:
        return
    trace.set_tracer_provider(TracerProvider())
    # Export spans in batches over OTLP/gRPC; insecure=True because the
    # collector endpoint is an in-cluster plaintext receiver.
    otlp_exporter = OTLPSpanExporter(endpoint=otel_collector_host, insecure=True)
    trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(otlp_exporter))
    # Auto-instrument all gRPC servers created after this point.
    GrpcInstrumentorServer().instrument()


def run():
model_choice_set = {"xgboost", "lightgbm", "treelite", "sklearn", "table"}
model_choice_str = ",".join(model_choice_set)
Expand Down Expand Up @@ -76,6 +90,8 @@ def run():
with open(ns.model_config_file_path) as opened_config_file:
text_format.Parse(opened_config_file.read(), model_server_config)

initialize_opentelemetry()

maximum_concurrent_rpcs = ns.maximum_concurrent_rpcs
if maximum_concurrent_rpcs < 0:
maximum_concurrent_rpcs = None # grpc.server takes None to accept unlimited amount of connections
Expand Down

0 comments on commit 28debf2

Please sign in to comment.