1. Packages
  2. Packages
  3. Elasticstack Provider
  4. API Docs
  5. getElasticsearchIngestProcessorInference
Viewing docs for elasticstack 0.15.0
published on Thursday, May 14, 2026 by elastic
Viewing docs for elasticstack 0.15.0
published on Thursday, May 14, 2026 by elastic

    Helper data source which can be used to create the configuration for an inference ingest processor. The inference processor uses a pre-trained data frame analytics model or a model deployed for natural language processing tasks to infer against the data that is being ingested in the pipeline. See: https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-ingest-put-pipeline

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as elasticstack from "@pulumi/elasticstack";
    
    const inference = elasticstack.getElasticsearchIngestProcessorInference({
        modelId: "example",
        inputOutput: {
            inputField: "body",
            outputField: "body_vector",
        },
    });
    const myIngestPipeline = new elasticstack.ElasticsearchIngestPipeline("my_ingest_pipeline", {
        name: "inference-ingest",
        processors: [inference.then(inference => inference.json)],
    });
    
    import pulumi
    import pulumi_elasticstack as elasticstack
    
    inference = elasticstack.get_elasticsearch_ingest_processor_inference(model_id="example",
        input_output={
            "input_field": "body",
            "output_field": "body_vector",
        })
    my_ingest_pipeline = elasticstack.ElasticsearchIngestPipeline("my_ingest_pipeline",
        name="inference-ingest",
        processors=[inference.json])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/elasticstack/elasticstack"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		inference, err := elasticstack.GetElasticsearchIngestProcessorInference(ctx, &elasticstack.GetElasticsearchIngestProcessorInferenceArgs{
    			ModelId: "example",
    			InputOutput: elasticstack.GetElasticsearchIngestProcessorInferenceInputOutput{
    				InputField:  "body",
    				OutputField: pulumi.StringRef("body_vector"),
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = elasticstack.NewElasticsearchIngestPipeline(ctx, "my_ingest_pipeline", &elasticstack.ElasticsearchIngestPipelineArgs{
    			Name: pulumi.String("inference-ingest"),
    			Processors: pulumi.StringArray{
    				pulumi.String(inference.Json),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Elasticstack = Pulumi.Elasticstack;
    
    return await Deployment.RunAsync(() => 
    {
        var inference = Elasticstack.GetElasticsearchIngestProcessorInference.Invoke(new()
        {
            ModelId = "example",
            InputOutput = new Elasticstack.Inputs.GetElasticsearchIngestProcessorInferenceInputOutputInputArgs
            {
                InputField = "body",
                OutputField = "body_vector",
            },
        });
    
        var myIngestPipeline = new Elasticstack.ElasticsearchIngestPipeline("my_ingest_pipeline", new()
        {
            Name = "inference-ingest",
            Processors = new[]
            {
                inference.Apply(getElasticsearchIngestProcessorInferenceResult => getElasticsearchIngestProcessorInferenceResult.Json),
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.elasticstack.ElasticstackFunctions;
    import com.pulumi.elasticstack.inputs.GetElasticsearchIngestProcessorInferenceArgs;
    import com.pulumi.elasticstack.inputs.GetElasticsearchIngestProcessorInferenceInputOutputArgs;
    import com.pulumi.elasticstack.ElasticsearchIngestPipeline;
    import com.pulumi.elasticstack.ElasticsearchIngestPipelineArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var inference = ElasticstackFunctions.getElasticsearchIngestProcessorInference(GetElasticsearchIngestProcessorInferenceArgs.builder()
                .modelId("example")
                .inputOutput(GetElasticsearchIngestProcessorInferenceInputOutputArgs.builder()
                    .inputField("body")
                    .outputField("body_vector")
                    .build())
                .build());
    
            var myIngestPipeline = new ElasticsearchIngestPipeline("myIngestPipeline", ElasticsearchIngestPipelineArgs.builder()
                .name("inference-ingest")
                .processors(inference.json())
                .build());
    
        }
    }
    
    resources:
      myIngestPipeline:
        type: elasticstack:ElasticsearchIngestPipeline
        name: my_ingest_pipeline
        properties:
          name: inference-ingest
          processors:
            - ${inference.json}
    variables:
      inference:
        fn::invoke:
          function: elasticstack:getElasticsearchIngestProcessorInference
          arguments:
            modelId: example
            inputOutput:
              inputField: body
              outputField: body_vector
    
    Example coming soon!
    

    Using getElasticsearchIngestProcessorInference

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getElasticsearchIngestProcessorInference(args: GetElasticsearchIngestProcessorInferenceArgs, opts?: InvokeOptions): Promise<GetElasticsearchIngestProcessorInferenceResult>
    function getElasticsearchIngestProcessorInferenceOutput(args: GetElasticsearchIngestProcessorInferenceOutputArgs, opts?: InvokeOptions): Output<GetElasticsearchIngestProcessorInferenceResult>
    def get_elasticsearch_ingest_processor_inference(description: Optional[str] = None,
                                                     field_map: Optional[Mapping[str, str]] = None,
                                                     if_: Optional[str] = None,
                                                     ignore_failure: Optional[bool] = None,
                                                     input_output: Optional[GetElasticsearchIngestProcessorInferenceInputOutput] = None,
                                                     model_id: Optional[str] = None,
                                                     on_failures: Optional[Sequence[str]] = None,
                                                     tag: Optional[str] = None,
                                                     target_field: Optional[str] = None,
                                                     opts: Optional[InvokeOptions] = None) -> GetElasticsearchIngestProcessorInferenceResult
    def get_elasticsearch_ingest_processor_inference_output(description: pulumi.Input[Optional[str]] = None,
                                                     field_map: pulumi.Input[Optional[Mapping[str, pulumi.Input[str]]]] = None,
                                                     if_: pulumi.Input[Optional[str]] = None,
                                                     ignore_failure: pulumi.Input[Optional[bool]] = None,
                                                     input_output: pulumi.Input[Optional[GetElasticsearchIngestProcessorInferenceInputOutputArgs]] = None,
                                                     model_id: pulumi.Input[Optional[str]] = None,
                                                     on_failures: pulumi.Input[Optional[Sequence[pulumi.Input[str]]]] = None,
                                                     tag: pulumi.Input[Optional[str]] = None,
                                                     target_field: pulumi.Input[Optional[str]] = None,
                                                     opts: Optional[InvokeOptions] = None) -> Output[GetElasticsearchIngestProcessorInferenceResult]
    func GetElasticsearchIngestProcessorInference(ctx *Context, args *GetElasticsearchIngestProcessorInferenceArgs, opts ...InvokeOption) (*GetElasticsearchIngestProcessorInferenceResult, error)
    func GetElasticsearchIngestProcessorInferenceOutput(ctx *Context, args *GetElasticsearchIngestProcessorInferenceOutputArgs, opts ...InvokeOption) GetElasticsearchIngestProcessorInferenceResultOutput

    > Note: This function is named GetElasticsearchIngestProcessorInference in the Go SDK.

    public static class GetElasticsearchIngestProcessorInference 
    {
        public static Task<GetElasticsearchIngestProcessorInferenceResult> InvokeAsync(GetElasticsearchIngestProcessorInferenceArgs args, InvokeOptions? opts = null)
        public static Output<GetElasticsearchIngestProcessorInferenceResult> Invoke(GetElasticsearchIngestProcessorInferenceInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetElasticsearchIngestProcessorInferenceResult> getElasticsearchIngestProcessorInference(GetElasticsearchIngestProcessorInferenceArgs args, InvokeOptions options)
    public static Output<GetElasticsearchIngestProcessorInferenceResult> getElasticsearchIngestProcessorInference(GetElasticsearchIngestProcessorInferenceArgs args, InvokeOptions options)
    
    fn::invoke:
      function: elasticstack:index/getElasticsearchIngestProcessorInference:getElasticsearchIngestProcessorInference
      arguments:
        # arguments dictionary
    data "elasticstack_elasticsearch_ingest_processor_inference" "name" {
        # arguments
    }

    The following arguments are supported:

    ModelId string
    The ID or alias for the trained model, or the ID of the deployment.
    Description string
    Description of the processor.
    FieldMap Dictionary<string, string>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    If string
    Conditionally execute the processor
    IgnoreFailure bool
    Ignore failures for the processor.
    InputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    OnFailures List<string>
    Handle failures for the processor.
    Tag string
    Identifier for the processor.
    TargetField string
    Field added to incoming documents to contain results objects.
    ModelId string
    The ID or alias for the trained model, or the ID of the deployment.
    Description string
    Description of the processor.
    FieldMap map[string]string
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    If string
    Conditionally execute the processor
    IgnoreFailure bool
    Ignore failures for the processor.
    InputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    OnFailures []string
    Handle failures for the processor.
    Tag string
    Identifier for the processor.
    TargetField string
    Field added to incoming documents to contain results objects.
    model_id string
    The ID or alias for the trained model, or the ID of the deployment.
    description string
    Description of the processor.
    field_map map(string)
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if string
    Conditionally execute the processor
    ignore_failure bool
    Ignore failures for the processor.
    input_output object
    Input and output field mappings for the inference processor.
    on_failures list(string)
    Handle failures for the processor.
    tag string
    Identifier for the processor.
    target_field string
    Field added to incoming documents to contain results objects.
    modelId String
    The ID or alias for the trained model, or the ID of the deployment.
    description String
    Description of the processor.
    fieldMap Map<String,String>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if_ String
    Conditionally execute the processor
    ignoreFailure Boolean
    Ignore failures for the processor.
    inputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    onFailures List<String>
    Handle failures for the processor.
    tag String
    Identifier for the processor.
    targetField String
    Field added to incoming documents to contain results objects.
    modelId string
    The ID or alias for the trained model, or the ID of the deployment.
    description string
    Description of the processor.
    fieldMap {[key: string]: string}
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if string
    Conditionally execute the processor
    ignoreFailure boolean
    Ignore failures for the processor.
    inputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    onFailures string[]
    Handle failures for the processor.
    tag string
    Identifier for the processor.
    targetField string
    Field added to incoming documents to contain results objects.
    model_id str
    The ID or alias for the trained model, or the ID of the deployment.
    description str
    Description of the processor.
    field_map Mapping[str, str]
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if_ str
    Conditionally execute the processor
    ignore_failure bool
    Ignore failures for the processor.
    input_output GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    on_failures Sequence[str]
    Handle failures for the processor.
    tag str
    Identifier for the processor.
    target_field str
    Field added to incoming documents to contain results objects.
    modelId String
    The ID or alias for the trained model, or the ID of the deployment.
    description String
    Description of the processor.
    fieldMap Map<String>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if String
    Conditionally execute the processor
    ignoreFailure Boolean
    Ignore failures for the processor.
    inputOutput Property Map
    Input and output field mappings for the inference processor.
    onFailures List<String>
    Handle failures for the processor.
    tag String
    Identifier for the processor.
    targetField String
    Field added to incoming documents to contain results objects.

    getElasticsearchIngestProcessorInference Result

    The following output properties are available:

    Id string
    Internal identifier of the resource
    IgnoreFailure bool
    Ignore failures for the processor.
    Json string
    JSON representation of this data source.
    ModelId string
    The ID or alias for the trained model, or the ID of the deployment.
    Description string
    Description of the processor.
    FieldMap Dictionary<string, string>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    If string
    Conditionally execute the processor
    InputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    OnFailures List<string>
    Handle failures for the processor.
    Tag string
    Identifier for the processor.
    TargetField string
    Field added to incoming documents to contain results objects.
    Id string
    Internal identifier of the resource
    IgnoreFailure bool
    Ignore failures for the processor.
    Json string
    JSON representation of this data source.
    ModelId string
    The ID or alias for the trained model, or the ID of the deployment.
    Description string
    Description of the processor.
    FieldMap map[string]string
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    If string
    Conditionally execute the processor
    InputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    OnFailures []string
    Handle failures for the processor.
    Tag string
    Identifier for the processor.
    TargetField string
    Field added to incoming documents to contain results objects.
    id string
    Internal identifier of the resource
    ignore_failure bool
    Ignore failures for the processor.
    json string
    JSON representation of this data source.
    model_id string
    The ID or alias for the trained model, or the ID of the deployment.
    description string
    Description of the processor.
    field_map map(string)
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if string
    Conditionally execute the processor
    input_output object
    Input and output field mappings for the inference processor.
    on_failures list(string)
    Handle failures for the processor.
    tag string
    Identifier for the processor.
    target_field string
    Field added to incoming documents to contain results objects.
    id String
    Internal identifier of the resource
    ignoreFailure Boolean
    Ignore failures for the processor.
    json String
    JSON representation of this data source.
    modelId String
    The ID or alias for the trained model, or the ID of the deployment.
    description String
    Description of the processor.
    fieldMap Map<String,String>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if_ String
    Conditionally execute the processor
    inputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    onFailures List<String>
    Handle failures for the processor.
    tag String
    Identifier for the processor.
    targetField String
    Field added to incoming documents to contain results objects.
    id string
    Internal identifier of the resource
    ignoreFailure boolean
    Ignore failures for the processor.
    json string
    JSON representation of this data source.
    modelId string
    The ID or alias for the trained model, or the ID of the deployment.
    description string
    Description of the processor.
    fieldMap {[key: string]: string}
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if string
    Conditionally execute the processor
    inputOutput GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    onFailures string[]
    Handle failures for the processor.
    tag string
    Identifier for the processor.
    targetField string
    Field added to incoming documents to contain results objects.
    id str
    Internal identifier of the resource
    ignore_failure bool
    Ignore failures for the processor.
    json str
    JSON representation of this data source.
    model_id str
    The ID or alias for the trained model, or the ID of the deployment.
    description str
    Description of the processor.
    field_map Mapping[str, str]
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if_ str
    Conditionally execute the processor
    input_output GetElasticsearchIngestProcessorInferenceInputOutput
    Input and output field mappings for the inference processor.
    on_failures Sequence[str]
    Handle failures for the processor.
    tag str
    Identifier for the processor.
    target_field str
    Field added to incoming documents to contain results objects.
    id String
    Internal identifier of the resource
    ignoreFailure Boolean
    Ignore failures for the processor.
    json String
    JSON representation of this data source.
    modelId String
    The ID or alias for the trained model, or the ID of the deployment.
    description String
    Description of the processor.
    fieldMap Map<String>
    Maps the document field names to the known field names of the model. Maps the document fields to the model's expected input fields.
    if String
    Conditionally execute the processor
    inputOutput Property Map
    Input and output field mappings for the inference processor.
    onFailures List<String>
    Handle failures for the processor.
    tag String
    Identifier for the processor.
    targetField String
    Field added to incoming documents to contain results objects.

    Supporting Types

    GetElasticsearchIngestProcessorInferenceInputOutput

    InputField string
    The field name from which the inference processor reads its input value.
    OutputField string
    The field name to which the inference processor writes its output.
    InputField string
    The field name from which the inference processor reads its input value.
    OutputField string
    The field name to which the inference processor writes its output.
    input_field string
    The field name from which the inference processor reads its input value.
    output_field string
    The field name to which the inference processor writes its output.
    inputField String
    The field name from which the inference processor reads its input value.
    outputField String
    The field name to which the inference processor writes its output.
    inputField string
    The field name from which the inference processor reads its input value.
    outputField string
    The field name to which the inference processor writes its output.
    input_field str
    The field name from which the inference processor reads its input value.
    output_field str
    The field name to which the inference processor writes its output.
    inputField String
    The field name from which the inference processor reads its input value.
    outputField String
    The field name to which the inference processor writes its output.

    Package Details

    Repository
    elasticstack elastic/terraform-provider-elasticstack
    License
    Notes
    This Pulumi package is based on the elasticstack Terraform Provider.
    Viewing docs for elasticstack 0.15.0
    published on Thursday, May 14, 2026 by elastic
      Try Pulumi Cloud free. Your team will thank you.