# This file contains configuration for the MLOps agent

# URL to the DataRobot MLOps service
mlopsUrl: "https://<MLOPS_HOST>"

# DataRobot API token
apiToken: "<MLOPS_API_TOKEN>"

# Execute the agent once, then exit
runOnce: false

# When dryrun mode is true, do not report the metrics to MLOps service
dryRun: false

# When verifySSL is true, SSL certification validation will be performed when
# connecting to MLOps DataRobot. When verifySSL is false, these checks are skipped.
# Note: It is highly recommended to keep this config variable as true.
verifySSL: true

# Path to write agent stats
statsPath: "/tmp/tracking-agent-stats.json"

# Prediction Environment served by this agent.
# Events and errors not specific to a single deployment are reported against
# this Prediction Environment.
predictionEnvironmentId: "<PE_ID_FROM_DATAROBOT_UI>"

# Number of times the agent will retry sending a request to the MLOps service on failure.
httpRetry: 3

# Http client timeout in milliseconds (30sec timeout)
httpTimeout: 30000

# Number of concurrent http request, default=1 -> synchronous mode; > 1 -> asynchronous
httpConcurrentRequest: 10

# Number of HTTP Connections to establish with the MLOps service, Default: 1
numMLOpsConnections: 1

# Comment out and configure the lines below for the spooler type(s) you are using.
# Note: the spooler configuration must match that used by the MLOps library.
# Note: Spoolers must be set up before using them.
#   - For the filesystem spooler, create the directory that will be used.
#   - For the SQS spooler, create the queue.
#   - For the PubSub spooler, create the project and topic.
#   - For the Kafka spooler, create the topic.
channelConfigs:
  - type: "FS_SPOOL"
    details: {name: "filesystem", directory: "/tmp/ta"}
  # - type: "SQS_SPOOL"
  #   details: {name: "sqs", queueUrl: "your SQS queue URL", queueName: "<your AWS SQS queue name>"}
  # - type: "RABBITMQ_SPOOL"
  #   details: {name: "rabbit", queueName: "<your rabbitmq queue name>", queueUrl: "amqp://<ip address>",
  #             caCertificatePath: "<path_to_ca_certificate>",
  #             certificatePath: "<path_to_client_certificate>",
  #             keyfilePath: "<path_to_key_file>"}
  # - type: "PUBSUB_SPOOL"
  #   details: {name: "pubsub", projectId: "<your project ID>", topicName: "<your topic name>", subscriptionName: "<your sub name>"}
  # - type: "KAFKA_SPOOL"
  #   details: {name: "kafka", topicName: "<your topic name>", bootstrapServers: "<ip address 1>,<ip address 2>,..."}

# The number of threads that the agent will launch to process data records.
agentThreadPoolSize: 4

# The maximum number of records each thread will process per fetchNewDataFreq interval.
agentMaxRecordsTask: 100

# Maximum number of records to aggregate before sending to DataRobot MLOps
agentMaxAggregatedRecords: 500

# A timeout for pending records before aggregating and submitting
agentPendingRecordsTimeoutMs: 5000