Results 1 - 2 of 2 for UnPackTo (0.15 sec)

  1. tensorflow/compiler/mlir/lite/sparsity/sparsify_model_test.cc

      // Load input model
      auto input_fbm = tflite::FlatBufferModel::BuildFromFile(
          "tensorflow/lite/testdata/sparse_tensor.bin");
      tflite::ModelT input_model;
      input_fbm->GetModel()->UnPackTo(&input_model);
    
      // Populate input metadata
      auto model_metadata_buffer = std::make_unique<tflite::BufferT>();
      model_metadata_buffer->data =
          std::vector<uint8_t>(expected_value.begin(), expected_value.end());
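
    The excerpt above unpacks a serialized FlatBuffer model into the mutable
    object-API type tflite::ModelT so its fields (here, a metadata buffer) can
    be edited in place. Below is a minimal sketch of the complementary step,
    not taken from the result above and assuming the flatc-generated object
    API declared in tensorflow/lite/schema/schema_generated.h: re-serializing
    a mutated ModelT back into a FlatBuffer.

      #include <string>

      #include "flatbuffers/flatbuffers.h"
      #include "tensorflow/lite/schema/schema_generated.h"

      // Serialize a (possibly mutated) object-API model back into a
      // FlatBuffer. Model::Pack and FinishModelBuffer are generated by
      // flatc for the TFLite schema.
      std::string PackModel(const tflite::ModelT& model) {
        flatbuffers::FlatBufferBuilder builder;
        auto model_offset = tflite::Model::Pack(builder, &model);
        tflite::FinishModelBuffer(builder, model_offset);
        return std::string(
            reinterpret_cast<const char*>(builder.GetBufferPointer()),
            builder.GetSize());
      }

    The object API trades the zero-copy access of the raw FlatBuffer for a
    mutable in-memory graph, which is why tests like this unpack, edit, and
    then repack.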
  2. tensorflow/compiler/mlir/lite/quantization/lite/quantize_weights.cc

      return op_name.lower();
    }
    
    std::unique_ptr<tflite::ModelT> CreateMutableModelFromFile(
        const tflite::Model* input_model) {
      auto copied_model = std::make_unique<tflite::ModelT>();
      input_model->UnPackTo(copied_model.get(), nullptr);
      return copied_model;
    }
    }  // namespace
    
    // TODO(b/214314076): Support MLIR model as an input for the C++ dynamic range
    // quantization API
    TfLiteStatus QuantizeWeights(
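
    In this excerpt UnPackTo's second argument is the optional flatbuffers
    resolver hook, left as nullptr. A hedged usage sketch of the same
    unpack-into-a-mutable-copy pattern, not taken from quantize_weights.cc
    (the helper name and header paths are assumptions to adapt to your
    build):

      #include <memory>

      #include "tensorflow/lite/model_builder.h"  // tflite::FlatBufferModel
      #include "tensorflow/lite/schema/schema_generated.h"

      // Load a serialized .tflite file and return a mutable object-API copy,
      // mirroring CreateMutableModelFromFile above but starting from a path.
      std::unique_ptr<tflite::ModelT> LoadMutableModel(const char* path) {
        auto fb_model = tflite::FlatBufferModel::BuildFromFile(path);
        if (!fb_model) return nullptr;
        auto mutable_model = std::make_unique<tflite::ModelT>();
        fb_model->GetModel()->UnPackTo(mutable_model.get(),
                                       /*resolver=*/nullptr);
        return mutable_model;
      }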