Export modules in ir with google protobuf#9746
Export modules in ir with google protobuf #9746 — li-roy wants to merge 15 commits into pytorch:master from
Conversation
test/test_jit.py
Outdated
| f.seek(0) | ||
| import zipfile | ||
| with zipfile.ZipFile(f, 'r', compression=zipfile.ZIP_STORED) as z: | ||
| self.assertExpected(str([file.filename for file in z.infolist()])) |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
| "\n\nDefined at:\n" + getNodeStackTraceString(node)) | ||
| IR_ELSE() | ||
| // Special error messages for certain types of operators | ||
| if (node->kind() == aten::expand) { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| operator_export_type_(operator_export_type) { | ||
| model_proto->set_producer_name("pytorch"); | ||
| model_proto->set_ir_version(3); | ||
| model_proto->set_producer_version("0.4"); |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| defer_weight_export_(defer_weight_export), | ||
| operator_export_type_(operator_export_type) { | ||
| model_proto->set_producer_name("pytorch"); | ||
| model_proto->set_ir_version(3); |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
| auto g = attr->add_graphs(); | ||
| EncodeGraph(g, v); | ||
| } | ||
| break; |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
| return std::string("\n") + idt(indent); | ||
| } | ||
|
|
||
| void dump(const onnx::TensorProto& tensor, std::ostream& stream) { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
|
|
||
| return Reader<Graph_>::read(&istream); | ||
| at::Tensor JitDecoder::buildTensor(const onnx_torch::TensorProto& tensor_proto) { | ||
| at::Tensor tensor = at::CPU(onnxTypeToATenType(tensor_proto.data_type())).tensor(); |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/onnx/utils.py
Outdated
| compression = zipfile.ZIP_DEFLATED \ | ||
| if export_type == ExportTypes.COMPRESSED_ZIP_ARCHIVE \ | ||
| else zipfile.ZIP_STORED | ||
| with zipfile.ZipFile(f, 'w', compression=compression) as z: |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/onnx/utils.py
Outdated
| return torch_out | ||
|
|
||
|
|
||
| def _export_module(module, f, operator_export_type=OperatorExportTypes.ONNX, export_type=ExportTypes.PROTOBUF_FILE): |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/onnx/utils.py
Outdated
| return torch_out | ||
|
|
||
|
|
||
| def _export_module(module, f, operator_export_type=OperatorExportTypes.ONNX, export_type=ExportTypes.PROTOBUF_FILE): |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
zdevito
left a comment
There was a problem hiding this comment.
This looks really good! I found a few places where there are bugs (around storage offsets and sharing), and it needs a lot more tests, the contents of which I described in a comment.
| import zipfile | ||
| with zipfile.ZipFile(f, 'r', compression=zipfile.ZIP_STORED) as z: | ||
| self.assertExpected(str([file.filename for file in z.infolist()])) | ||
|
|
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| void encodeBlock(onnx::GraphProto * p_g, Block *b, | ||
| const std::vector<at::Tensor> & initializers, | ||
| ExportContext *ctx, RawDataExportMap* raw_data_export_map); | ||
| class JitEncoder { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| } | ||
|
|
||
| void encodeTypeProtoTensorType(onnx::TypeProto_Tensor* tensor_type, Value* n) { | ||
| void EncodeTypeProtoTensorType(onnx::TypeProto_Tensor* tensor_type, const Value* n) { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| // Add a buffer to the raw_data_export_map for the caller to dump into an | ||
| // external data store. If external_ref is not specified, we instead dump | ||
| // the contiguous data into the protobuf itself | ||
| if (defer_weight_export_) { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/export.cpp
Outdated
| const std::unique_ptr<script::Method> &method, | ||
| const std::string prefix); | ||
|
|
||
| void EncodeTensor(onnx::TensorProto *tensor_proto, |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/import.cpp
Outdated
|
|
||
| std::shared_ptr<script::Module> curr = root_module; | ||
| for (size_t i = 0; i < vec.size() - 1; i++) { | ||
| if (curr->find_module(vec[i]) != nullptr) { |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/import.cpp
Outdated
| std::tie(parent_module, name) = parseFullName(root_module, tensor_proto.name()); | ||
|
|
||
| auto param = buildParameter(tensor_proto); | ||
| parent_module->register_parameter(name, param, tensor_proto.int64_data(1)); |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/import.h
Outdated
|
|
||
| TORCH_API std::shared_ptr<Graph> ImportIRGraph(const std::string& serialized_graph, std::vector<at::Tensor> & initializers); | ||
|
|
||
| std::shared_ptr<script::Module> ImportIRModule( |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/onnx/utils.py
Outdated
| return torch_out | ||
|
|
||
|
|
||
| def _export_module(module, f, operator_export_type=OperatorExportTypes.ONNX, export_type=ExportTypes.PROTOBUF_FILE): |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/onnx/utils.py
Outdated
| compression = zipfile.ZIP_DEFLATED \ | ||
| if export_type == ExportTypes.COMPRESSED_ZIP_ARCHIVE \ | ||
| else zipfile.ZIP_STORED | ||
| with zipfile.ZipFile(f, 'w', compression=compression) as z: |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
|
@pytorchbot retest this please |
ecd1164 to
c2bfe10
Compare
|
@pytorchbot retest this please |
zdevito
left a comment
There was a problem hiding this comment.
The changes to encode types look correct. I think this is good to go once we document the encoding of types, and the differences between parameter/tensor encodings.
torch/csrc/jit/export.cpp
Outdated
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
torch/csrc/jit/import.cpp
Outdated
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
facebook-github-bot
left a comment
There was a problem hiding this comment.
li-roy has imported this pull request. If you are a Facebook employee, you can view this diff on Phabricator.
facebook-github-bot
left a comment
There was a problem hiding this comment.
li-roy has imported this pull request. If you are a Facebook employee, you can view this diff on Phabricator.
facebook-github-bot
left a comment
There was a problem hiding this comment.
li-roy has imported this pull request. If you are a Facebook employee, you can view this diff on Phabricator.
facebook-github-bot
left a comment
There was a problem hiding this comment.
li-roy has imported this pull request. If you are a Facebook employee, you can view this diff on Phabricator.
facebook-github-bot
left a comment
There was a problem hiding this comment.
li-roy has imported this pull request. If you are a Facebook employee, you can view this diff on Phabricator.
Summary: Pull Request resolved: pytorch#9746 Differential Revision: D9110006 Pulled By: li-roy fbshipit-source-id: 8b9744c042f822fdfe959a7a7fef3d0baff4f639
| } | ||
| p->set_data_type(onnx_type); | ||
| // CPU's HalfTensor doesn't have contiguous(), so first calling contiguous() | ||
| auto t = tensor.contiguous().toBackend(at::kCPU).toType(cast_type); |
This comment was marked as off-topic.
This comment was marked as off-topic.
Sorry, something went wrong.
No description provided.