Results 41 - 50 of 65 for serving_default (0.27 sec)

  1. tensorflow/compiler/mlir/quantization/tensorflow/tests/insert_custom_aggregation_ops.mlir

      func.func @serving_default(%arg0: tensor<1x4xf32> {tf_saved_model.index_path = ["x"]}) -> (tensor<1x3xf32> {tf_saved_model.index_path = ["output"]}) attributes {tf.entry_function = {control_outputs = "", inputs = "serving_default_x:0", outputs = "PartitionedCall:0"}, tf_saved_model.exported_names = ["serving_default"]} {
    - Last Modified: Fri May 10 04:07:09 UTC 2024
    - 32.1K bytes
  2. tensorflow/compiler/mlir/quantization/tensorflow/tests/lift_quantizable_spots_as_functions_xla_selective_quantization.mlir

        return %2 : tensor<1x3x2x2xf32>
      }
    
      func.func @serving_default(%arg0: tensor<1x3x4x3xf32>) -> (tensor<1x3x2x2xf32>) {
        %0 = "tf.PartitionedCall"(%arg0) {config = "", config_proto = "", executor_type = "", f = @conv2d_with_inliner}
            : (tensor<1x3x4x3xf32>) -> tensor<1x3x2x2xf32>
        return %0 : tensor<1x3x2x2xf32>
      }
    
    // CHECK-LABEL: func @serving_default
    // CHECK: "tf.PartitionedCall"
    - Last Modified: Mon Oct 30 06:52:55 UTC 2023
    - 6.8K bytes
  3. tensorflow/compiler/mlir/tensorflow_to_stablehlo/python/pywrap_tensorflow_to_stablehlo.pyi

    # ==============================================================================
    
    # LINT.IfChange(savedmodel_to_stablehlo)
    def savedmodel_to_stablehlo(
        input_path: str,
        exported_model_signatures: list[str] = ["serving_default"],
        tag_names: list[str] = ["serve"],
        input_arg_shapes_str: str = "",
    ) -> bytes: ...
    # LINT.ThenChange()
    
    # LINT.IfChange(tensorflow_module_to_stablehlo)
    def tensorflow_module_to_stablehlo(
    - Last Modified: Tue May 21 22:58:42 UTC 2024
    - 1.1K bytes
  4. tensorflow/compiler/mlir/lite/tests/mlir2flatbuffer/signature_def.mlir

    // CHECK-NEXT:      tensor_index: 5
    // CHECK-NEXT:    }, {
    // CHECK-NEXT:      name: "start_logits",
    // CHECK-NEXT:      tensor_index: 6
    // CHECK-NEXT:    } ],
    // CHECK-NEXT:    signature_key: "serving_default"
    // CHECK-NEXT:  } ]
    // CHECK-NEXT:}
    module attributes {tf.versions = {bad_consumers = [], min_consumer = 12 : i32, producer = 554 : i32}, tf_saved_model.semantics} {
    - Last Modified: Wed Dec 06 18:55:51 UTC 2023
    - 4.9K bytes
  5. tensorflow/compiler/mlir/tensorflow_to_stablehlo/README.md

            exported_model_signatures=["serving_default"],
            tag_names=["serve"],
            input_arg_shapes_str="1,28,28,3::32"
    )
    
    ```
    
    #### Arguments:
    
    * `input_path` (required): Path to your SavedModel directory.
    * `exported_model_signatures` (optional): List of signature names to convert.
                                              Defaults to ["serving_default"].
    - Last Modified: Tue May 21 22:58:42 UTC 2024
    - 4.4K bytes
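
    Taken together with the .pyi stub in result 3, this README describes a single conversion
    entry point. A minimal sketch of a call, assuming the binding is importable as
    pywrap_tensorflow_to_stablehlo (the import path below is inferred from the file locations
    in these results) and using a hypothetical SavedModel path; the shape string is copied
    from the README example above:

        # Hypothetical SavedModel path; import path assumed from the repo layout.
        from tensorflow.compiler.mlir.tensorflow_to_stablehlo.python import (
            pywrap_tensorflow_to_stablehlo as tf_to_stablehlo,
        )

        # Convert only the "serving_default" signature of a SavedModel tagged "serve".
        stablehlo_bytes: bytes = tf_to_stablehlo.savedmodel_to_stablehlo(
            input_path="/tmp/my_saved_model",
            exported_model_signatures=["serving_default"],
            tag_names=["serve"],
            input_arg_shapes_str="1,28,28,3::32",
        )
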
  6. tensorflow/compiler/mlir/quantization/common/func.h

    #include "mlir/IR/BuiltinOps.h"  // from @llvm-project
    
    namespace mlir::quant {
    
    // Returns a public `func::FuncOp` in `module_op` whose name matches either
    // `main` or `serving_default`. If `func::FuncOps` with both names exist, the
    // function with name "main" takes precedence. Returns null if no such
    // function exists.
    func::FuncOp FindMainFuncOp(ModuleOp module_op);
    
    }  // namespace mlir::quant
    - Last Modified: Mon Feb 19 06:55:11 UTC 2024
    - 1.3K bytes
  7. tensorflow/compiler/mlir/quantization/tensorflow/python/representative_dataset.py

        tf.quantization.experimental.TfRecordRepresentativeDatasetSaver(
              path_map={'serving_default': '/tmp/representative_dataset_path'}
          ).save({'serving_default': representative_dataset})
      )
    
      # Using in QuantizationOptions.
      quantization_options = tf.quantization.experimental.QuantizationOptions(
          signature_keys=['serving_default'],
          representative_datasets=dataset_file_map,
      )
    - Last Modified: Fri Mar 22 22:55:22 UTC 2024
    - 14.2K bytes
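
    The docstring excerpt above pairs a TFRecord-backed representative dataset with
    QuantizationOptions. A hedged sketch of that flow, assuming the experimental
    tf.quantization API shown in the excerpt is available and that save() returns the
    signature-to-file map (the excerpt assigns its result to dataset_file_map); the input
    name, shape, and /tmp path are hypothetical:

        import tensorflow as tf

        def representative_dataset():
          # Yield calibration samples keyed by the signature's input names
          # (name and shape here are hypothetical).
          for _ in range(8):
            yield {'x': tf.random.uniform(shape=(1, 4))}

        # Persist the dataset as TFRecords, keyed by signature key.
        dataset_file_map = tf.quantization.experimental.TfRecordRepresentativeDatasetSaver(
            path_map={'serving_default': '/tmp/representative_dataset_path'}
        ).save({'serving_default': representative_dataset()})

        # Point QuantizationOptions at the saved dataset.
        quantization_options = tf.quantization.experimental.QuantizationOptions(
            signature_keys=['serving_default'],
            representative_datasets=dataset_file_map,
        )
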
  8. tensorflow/compiler/mlir/tfrt/tests/saved_model/saved_model_test.cc

      TF_ASSERT_OK(MapFunctionSignaturesFromTFSavedModelMLIR(
          module.get(), [&](const TFRTSavedModelSignatureInfo& sig_info) {
            // Only check the signature of "serving_default".
            if (sig_info.func_name != "serving_default") return;
    
            transform(sig_info.input_names, std::back_inserter(inputs),
                      [](llvm::StringRef x) { return x.str(); });
            in_specs.assign(sig_info.input_specs.begin(),
    - Last Modified: Fri Oct 13 01:17:29 UTC 2023
    - 9K bytes
  9. tensorflow/compiler/mlir/quantization/tensorflow/python/integration_test/concurrency_test.py

                preset_method=quant_opts_pb2.QuantizationMethod.PresetMethod.METHOD_STATIC_RANGE_INT8
            ),
            tags={tag_constants.SERVING},
            signature_keys=['serving_default'],
        )
    
        model = quantize_model.quantize(
            temp_path,
            quantization_options=quantization_options,
            representative_dataset=data_gen(),
        )
        return model
    
    - Last Modified: Mon Sep 11 00:47:05 UTC 2023
    - 3.6K bytes
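
    The test above shows the end-to-end call: build QuantizationOptions with a static-range
    int8 preset, then pass the SavedModel path and a calibration generator to
    quantize_model.quantize. A hedged reconstruction; the import paths, the
    quantization_method field name wrapping preset_method, the SavedModel path, and the
    data_gen inputs are all assumptions, not confirmed by the snippet:

        import tensorflow as tf
        # Import paths assumed from the file locations in these search results.
        from tensorflow.compiler.mlir.quantization.tensorflow import quantization_options_pb2 as quant_opts_pb2
        from tensorflow.compiler.mlir.quantization.tensorflow.python import quantize_model
        from tensorflow.python.saved_model import tag_constants

        def data_gen():
          # Hypothetical calibration inputs keyed by the signature's input names.
          for _ in range(8):
            yield {'x': tf.random.uniform(shape=(1, 4))}

        quantization_options = quant_opts_pb2.QuantizationOptions(
            # Enclosing field name assumed; the preset value is taken from the snippet.
            quantization_method=quant_opts_pb2.QuantizationMethod(
                preset_method=quant_opts_pb2.QuantizationMethod.PresetMethod.METHOD_STATIC_RANGE_INT8
            ),
            tags={tag_constants.SERVING},
            signature_keys=['serving_default'],
        )

        model = quantize_model.quantize(
            '/tmp/saved_model_to_quantize',  # hypothetical path, stands in for temp_path
            quantization_options=quantization_options,
            representative_dataset=data_gen(),
        )
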
  10. tensorflow/compiler/mlir/quantization/stablehlo/python/integration_test/quantize_model_test_base.py

          self, output_saved_model_path: str
      ) -> str:
        """Extracts the first XlaCallModule op from output saved model to string."""
        root = load.load(output_saved_model_path)
        tf_graph_def = root.signatures['serving_default'].graph.as_graph_def()
        for function in tf_graph_def.library.function:
          for node_def in function.node_def:
            if node_def.op == 'XlaCallModule':
              with ir.Context() as context:
    - Last Modified: Tue May 14 06:31:57 UTC 2024
    - 18.2K bytes
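
    The snippet above walks the serving_default signature's GraphDef to find an XlaCallModule
    op. A hedged, self-contained version of the same lookup using the public
    tf.saved_model.load API (the internal load module in the test is assumed to behave the
    same way); the SavedModel path is hypothetical:

        import tensorflow as tf

        def find_first_xla_call_module(saved_model_path: str):
          """Returns the first XlaCallModule NodeDef reachable from serving_default, or None."""
          root = tf.saved_model.load(saved_model_path)
          graph_def = root.signatures['serving_default'].graph.as_graph_def()
          # Like the test above, search the graph's function library rather than
          # only the top-level nodes.
          for function in graph_def.library.function:
            for node_def in function.node_def:
              if node_def.op == 'XlaCallModule':
                return node_def
          return None

        node = find_first_xla_call_module('/tmp/quantized_saved_model')
        if node is not None:
          print(node.op, node.name)
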