- Sort: Score
- Results per page: 10
- Languages: All
Results 1 - 7 of 7 for read_dict (0.31 sec)
-
tests/test_response_by_alias.py
@app.get("/dict", response_model=Model, response_model_by_alias=False) def read_dict(): return {"alias": "Foo"} @app.get("/model", response_model=Model, response_model_by_alias=False) def read_model(): return Model(alias="Foo") @app.get("/list", response_model=List[Model], response_model_by_alias=False) def read_list(): return [{"alias": "Foo"}, {"alias": "Bar"}]
Registered: Mon Jun 17 08:32:26 UTC 2024 - Last Modified: Fri Jul 07 17:12:13 UTC 2023 - 11.1K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/representative_dataset_test.py
sample = {'input_tensor': input_tensor} feed_dict = repr_dataset.create_feed_dict_from_input_data( sample, signature_def ) input_tensor_data = input_tensor.eval() self.assertLen(feed_dict, 1) self.assertIn('input:0', feed_dict) self.assertIsInstance(feed_dict['input:0'], np.ndarray) self.assertAllEqual(feed_dict['input:0'], input_tensor_data)
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Thu Jan 04 07:35:19 UTC 2024 - 11.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/representative_dataset.py
""" feed_dict = {} for input_key, input_value in input_data.items(): input_tensor_name = signature_def.inputs[input_key].name value = input_value if isinstance(input_value, core.Tensor): # Take the data out of the tensor. value = input_value.eval() feed_dict[input_tensor_name] = value
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri Mar 22 22:55:22 UTC 2024 - 14.2K bytes - Viewed (0) -
src/go/internal/gcimporter/ureader.go
// within a package. type reader struct { pkgbits.Decoder p *pkgReader dict *readerDict } // A readerDict holds the state for type parameters that parameterize // the current unified IR element. type readerDict struct { // bounds is a slice of typeInfos corresponding to the underlying // bounds of the element's type parameters. bounds []typeInfo
Registered: Wed Jun 12 16:32:35 UTC 2024 - Last Modified: Thu May 23 01:00:11 UTC 2024 - 16.6K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/py_function_lib.py
): # Create a mapping from input tensor name to the input tensor value. # ex) "Placeholder:0" -> [0, 1, 2] feed_dict = rd.create_feed_dict_from_input_data(sample, signature_def) sess.run(output_tensor_names, feed_dict=feed_dict) def _run_graph_for_calibration_graph_mode( model_dir: str, tags: Collection[str],
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 31 05:32:11 UTC 2024 - 27.4K bytes - Viewed (0) -
tensorflow/compiler/mlir/quantization/tensorflow/python/integration_test/quantize_model_test.py
for sample_input in sample_inputs: feed_dict = {} for input_key, input_value in sample_input.items(): input_tensor_name = signature_def.inputs[input_key].name feed_dict[input_tensor_name] = input_value # Obtain the output of the model. output_values.append( sess.run(output_tensor_names, feed_dict=feed_dict)[0] ) return output_values
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Fri May 17 03:36:50 UTC 2024 - 235.6K bytes - Viewed (0) -
RELEASE.md
have resulted in an error. When this happens, a noop is returned and the input tensors are marked non-feedable. In other words, if they are used as keys in `feed_dict` argument to `session.run()`, an error will be raised. Also, because some assert ops don't make it into the graph, the graph structure changes. A different graph can result in different per-op random
Registered: Sun Jun 16 05:45:23 UTC 2024 - Last Modified: Tue Jun 11 23:24:08 UTC 2024 - 730.3K bytes - Viewed (0)