- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 7 of 7 for GetBufferPointer (0.32 sec)
-
tensorflow/compiler/mlir/lite/quantization/lite/quantize_weights_test.cc
LoadBasicModel(); flatbuffers::FlatBufferBuilder builder; auto status = QuantizeWeights(&builder, model_, 0); EXPECT_EQ(status, kTfLiteOk); const uint8_t* buffer = builder.GetBufferPointer(); const Model* output_model = GetModel(buffer); ASSERT_TRUE(output_model); } TEST_F(QuantizeWeightsTest, QuantizationFails) { LoadBasicModel(); flatbuffers::FlatBufferBuilder builder;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 23:15:24 UTC 2024 - 32.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/execution_metadata_exporter_test.cc
fb_builder, hardwares, fb_builder.CreateVector(subgraphs.begin(), subgraphs.size())); fb_builder.Finish(metadata); return std::string( reinterpret_cast<const char*>(fb_builder.GetBufferPointer()), fb_builder.GetSize()); } void Verify(const RuntimeMetadata* result, const RuntimeMetadata* expected) { EXPECT_EQ(result->subgraph_metadata()->size(), expected->subgraph_metadata()->size());
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 11 06:11:34 UTC 2024 - 6K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/sparsity/sparsify_model.cc
tflite::Model::Pack(input_builder, &input_model); tflite::FinishModelBuffer(input_builder, input_model_location); std::string serialized_model( reinterpret_cast<const char*>(input_builder.GetBufferPointer()), input_builder.GetSize()); OwningOpRef<mlir::ModuleOp> module = tflite::FlatBufferToMlir( serialized_model, &context, UnknownLoc::get(&context)); if (!module) {
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Mon Jun 10 20:16:40 UTC 2024 - 4.3K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/quantization/lite/quantize_weights.cc
input_builder, CreateMutableModelFromFile(input_model).get()); tflite::FinishModelBuffer(input_builder, input_model_location); std::string serialized_model( reinterpret_cast<const char*>(input_builder.GetBufferPointer()), input_builder.GetSize()); OwningOpRef<mlir::ModuleOp> module = tflite::FlatBufferToMlir( serialized_model, &context, UnknownLoc::get(&context)); // Apply quantization passes.
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 23:15:24 UTC 2024 - 9.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/experimental/tac/execution_metadata_exporter.cc
fb_builder.CreateVector(subgraphs_metadata)); fb_builder.Finish(runtime_metadata); return std::string( reinterpret_cast<const char*>(fb_builder.GetBufferPointer()), fb_builder.GetSize()); }
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 11 06:11:34 UTC 2024 - 7.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/flatbuffer_export.cc
return std::nullopt; } tflite::UpdateOpVersion(builder_.GetBufferPointer()); tflite::UpdateMinimumRuntimeVersionForModel(builder_.GetBufferPointer()); if (supported_backends_.find("GPU") != supported_backends_.end()) { if (!CheckGpuDelegateCompatibility(builder_.GetBufferPointer())) { return std::nullopt; } } absl::Cord result;
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 21:41:49 UTC 2024 - 164.5K bytes - Viewed (0) -
tensorflow/compiler/mlir/lite/quantization/lite/quantize_model_test.cc
tflite::FinishModelBuffer(input_builder, tflite::Model::Pack(input_builder, model)); const std::string input_buffer( reinterpret_cast<const char*>(input_builder.GetBufferPointer()), input_builder.GetSize()); auto status = mlir::lite::QuantizeModel( input_buffer, input_type, output_type, inference_tensor_type,
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Wed Jun 12 23:15:24 UTC 2024 - 73.9K bytes - Viewed (0)