Revert "[Caffe2]Remove Caffe2 proto files (#126134)"

This reverts commit a40658481ada9ecfd5716513a8537818c79cb3ef.

Reverted https://github.com/pytorch/pytorch/pull/126134 on behalf of https://github.com/malfet due to Broke bazel builds, see https://github.com/pytorch/pytorch/actions/runs/9278148147/job/25528691981 ([comment](https://github.com/pytorch/pytorch/pull/126134#issuecomment-2136373096))
This commit is contained in:
PyTorch MergeBot
2024-05-29 01:53:45 +00:00
parent 669560d51a
commit 7a506dd005
12 changed files with 803 additions and 0 deletions

View File

@ -614,6 +614,7 @@ cc_library(
":caffe2_core_macros",
":caffe2_for_aten_headers",
"//caffe2/proto:caffe2_pb",
"//caffe2/proto:cc_proto",
],
)
@ -635,6 +636,7 @@ cc_library(
":caffe2_perfkernels_avx2",
":caffe2_perfkernels_avx512",
"//caffe2/proto:caffe2_pb",
"//caffe2/proto:cc_proto",
"//third_party/miniz-2.1.0:miniz",
"@com_google_protobuf//:protobuf",
"@eigen",
@ -842,6 +844,7 @@ cc_library(
deps = [
":caffe2",
":torch_headers",
"//caffe2/proto:torch_cc_proto",
"@kineto",
] + if_cuda([
"@cuda//:nvToolsExt",

View File

@ -35,3 +35,21 @@ proto_library(
srcs = ["torch.proto"],
deps = [":caffe2_proto"],
)
# C++ bindings for the legacy Caffe2 protos bundled in ":proto" below.
# Visibility is restricted to the repository root package, which is the
# only consumer (see the //:... cc_library deps in the top-level BUILD).
cc_proto_library(
name = "cc_proto",
visibility = ["//:__pkg__"],
deps = [":proto"],
)
# Bundle of the auxiliary/legacy Caffe2 .proto files. Depends on
# ":caffe2_proto" because metanet.proto imports caffe2/proto/caffe2.proto.
proto_library(
name = "proto",
srcs = [
"caffe2_legacy.proto",
"hsm.proto",
"metanet.proto",
"predictor_consts.proto",
"prof_dag.proto",
],
deps = [":caffe2_proto"],
)

View File

@ -0,0 +1,50 @@
syntax = "proto2";
package caffe2;
// Original Caffe1 Datum copy: this is used in image input op to allow us to
// load caffe1 serialized datum without having to regenerate the database.
// NOTE: field numbers are part of the serialized wire format and must never
// be changed or reused.
message CaffeDatum {
  // Image dimensions (channels x height x width).
  optional int32 channels = 1;
  optional int32 height = 2;
  optional int32 width = 3;
  // the actual image data, in bytes
  optional bytes data = 4;
  // Class label associated with this datum.
  optional int32 label = 5;
  // Optionally, the datum could also hold float data.
  repeated float float_data = 6;
  // If true data contains an encoded image that need to be decoded
  optional bool encoded = 7 [ default = false ];
}
// Legacy padding strategies kept for backward compatibility with Caffe1
// models; NOTSET means the modern explicit-padding path is used.
enum LegacyPadding {
  NOTSET = 0; // Do not use old-style padding strategies.
  // VALID and SAME are two strategies adopted in Google DistBelief: it forces
  // the input shape as follows. For SAME, the output is:
  //   R_out = ceil(float(R) / float(S))
  //   C_out = ceil(float(C) / float(S))
  // where R and C are row and column, S is the stride, and K is the kernel.
  // The number of padded pixels is then computed as
  //   Pr = ((R_out - 1) * S + K - R)
  //   Pc = ((C_out - 1) * S + K - C)
  // When Pr and Pc are even numbers, both sides (left and right, or top and
  // bottom) get half each. When Pr and Pc are odd numbers, the right and the
  // bottom gets the one additional padding pixel.
  // For VALID, padding values of 0 are always used.
  VALID = 1;
  SAME = 2;
  // CAFFE_LEGACY_POOLING is a flag that notifies the code to use the old Caffe
  // padding strategy.
  // Basically, in caffe2, after padding the convolution and pooling use the
  // same computation strategy: half-windows at the right and bottom are
  // discarded. In Caffe, convolution follows this strategy but if there are
  // some pixels in the half-windows, the pooling layer will actually put one
  // additional output. If you set LegacyPadding to this, we will compute the
  // equivalent padding strategy in caffe2 so that the output size is
  // backward compatible with Caffe.
  // THIS IS NOW DEPRECATED. ANY non-conventional use has to be manually
  // converted.
  CAFFE_LEGACY_POOLING = 3;
}

View File

@ -0,0 +1,58 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import typing
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...

# Enum wrapper pattern emitted by mypy-protobuf: the metaclass carries the
# descriptor and members so type checkers see LegacyPadding values as the
# NewType ``LegacyPadding.V`` rather than plain int.
class _LegacyPadding(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LegacyPadding], type):
    DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
    NOTSET = LegacyPadding.V(0)
    VALID = LegacyPadding.V(1)
    SAME = LegacyPadding.V(2)
    CAFFE_LEGACY_POOLING = LegacyPadding.V(3)

class LegacyPadding(metaclass=_LegacyPadding):
    V = typing.NewType('V', int)
    NOTSET = LegacyPadding.V(0)
    VALID = LegacyPadding.V(1)
    SAME = LegacyPadding.V(2)
    CAFFE_LEGACY_POOLING = LegacyPadding.V(3)

# FIX: this module-level alias was placed *before* the class definition,
# which would raise NameError if the stub were ever evaluated and deviates
# from mypy-protobuf's generated layout; it belongs after the class.
global___LegacyPadding = LegacyPadding
# Typed stub for the CaffeDatum message (see caffe2_legacy.proto).
# Generated by mypy-protobuf; regenerate rather than hand-editing.
class CaffeDatum(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    # Wire-format field numbers, mirroring the .proto declaration.
    CHANNELS_FIELD_NUMBER: int
    HEIGHT_FIELD_NUMBER: int
    WIDTH_FIELD_NUMBER: int
    DATA_FIELD_NUMBER: int
    LABEL_FIELD_NUMBER: int
    FLOAT_DATA_FIELD_NUMBER: int
    ENCODED_FIELD_NUMBER: int
    channels: int = ...
    height: int = ...
    width: int = ...
    data: bytes = ...
    label: int = ...
    float_data: google.protobuf.internal.containers.RepeatedScalarFieldContainer[float] = ...
    encoded: bool = ...
    def __init__(self,
        *,
        channels : typing.Optional[int] = ...,
        height : typing.Optional[int] = ...,
        width : typing.Optional[int] = ...,
        data : typing.Optional[bytes] = ...,
        label : typing.Optional[int] = ...,
        float_data : typing.Optional[typing.Iterable[float]] = ...,
        encoded : typing.Optional[bool] = ...,
        ) -> None: ...
    # float_data is a repeated field, so it is listed for ClearField but
    # (per protobuf semantics) not for HasField.
    def HasField(self, field_name: typing_extensions.Literal[u"channels",b"channels",u"data",b"data",u"encoded",b"encoded",u"height",b"height",u"label",b"label",u"width",b"width"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"channels",b"channels",u"data",b"data",u"encoded",b"encoded",u"float_data",b"float_data",u"height",b"height",u"label",b"label",u"width",b"width"]) -> None: ...
global___CaffeDatum = CaffeDatum

62
caffe2/proto/hsm.proto Normal file
View File

@ -0,0 +1,62 @@
syntax = "proto2";
package caffe2;
// Hierarchical Softmax protobuffer convention:
// The HSM operator requires a hierarchy of vocabulary words in the form of a
// tree from the user. This tree is expressed using the proto format.
// TreeProto points to the root NodeProto which can recursively contain children
// NodeProtos (internal nodes) or word_ids (leaf nodes).
// The aforementioned TreeProto is internally translated into a list of word_ids
// tagged with a list of NodeProtos that lie in the path from the root to that
// word_id using hsm_util.create_hierarchy(tree_proto).
// Specifically, HierarchyProto contains a list of PathProtos. Each PathProto
// belongs to a word_id and contains a list of PathNodeProtos. Each
// PathNodeProto contains information about the number of children the node has
// (length), the index of the child node that lies in the path from root to
// word_id (target) and a cumulative sum of children nodes (index; this acts as
// the weight parameter matrix offset).
// Each node in the hierarchy contains links to either leaf nodes or more
// non-terminal nodes
message NodeProto {
  // Links to non-terminal children nodes
  repeated NodeProto children = 1;
  // Links to terminal (leaf) nodes
  repeated int32 word_ids = 2;
  // Cumulative offset of this node into the weight parameter matrix.
  optional int32 offset = 3;
  // Optional human-readable name for the node.
  optional string name = 4;
  repeated float scores = 5;
}
// Protobuf format to accept hierarchy for hierarchical softmax operator.
// TreeProto points to the root node.
message TreeProto {
  optional NodeProto root_node = 1;
}
// Internal Protobuf format which represents the path in the tree hierarchy for
// each word in the vocabulary.
message HierarchyProto {
  // Vocabulary size (number of paths).
  optional int32 size = 1;
  repeated PathProto paths = 2;
}
// Each PathProto belongs to a word and is an array of nodes in the
// path from the root to the leaf (which is the word itself) in the tree.
message PathProto {
  optional int32 word_id = 1;
  repeated PathNodeProto path_nodes = 2;
}
// Represents a node in the path from the root node all the way down to the
// word (leaf).
message PathNodeProto {
  // Parameter matrix offset for this node
  optional int32 index = 1;
  // Number of children
  optional int32 length = 2;
  // Index of the next node in the path
  optional int32 target = 3;
}

109
caffe2/proto/hsm_pb2.pyi Normal file
View File

@ -0,0 +1,109 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import typing
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...

# Stub for NodeProto (see hsm.proto): one node of the HSM vocabulary tree.
class NodeProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    CHILDREN_FIELD_NUMBER: int
    WORD_IDS_FIELD_NUMBER: int
    OFFSET_FIELD_NUMBER: int
    NAME_FIELD_NUMBER: int
    SCORES_FIELD_NUMBER: int
    word_ids: google.protobuf.internal.containers.RepeatedScalarFieldContainer[int] = ...
    offset: int = ...
    name: typing.Text = ...
    scores: google.protobuf.internal.containers.RepeatedScalarFieldContainer[float] = ...
    # Recursive message field: children are themselves NodeProtos.
    @property
    def children(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NodeProto]: ...
    def __init__(self,
        *,
        children : typing.Optional[typing.Iterable[global___NodeProto]] = ...,
        word_ids : typing.Optional[typing.Iterable[int]] = ...,
        offset : typing.Optional[int] = ...,
        name : typing.Optional[typing.Text] = ...,
        scores : typing.Optional[typing.Iterable[float]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"offset",b"offset"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"children",b"children",u"name",b"name",u"offset",b"offset",u"scores",b"scores",u"word_ids",b"word_ids"]) -> None: ...
global___NodeProto = NodeProto

# Stub for TreeProto: wrapper holding the root of the hierarchy tree.
class TreeProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    ROOT_NODE_FIELD_NUMBER: int
    @property
    def root_node(self) -> global___NodeProto: ...
    def __init__(self,
        *,
        root_node : typing.Optional[global___NodeProto] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"root_node",b"root_node"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"root_node",b"root_node"]) -> None: ...
global___TreeProto = TreeProto

# Stub for HierarchyProto: per-word root-to-leaf paths for the vocabulary.
class HierarchyProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    SIZE_FIELD_NUMBER: int
    PATHS_FIELD_NUMBER: int
    size: int = ...
    @property
    def paths(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PathProto]: ...
    def __init__(self,
        *,
        size : typing.Optional[int] = ...,
        paths : typing.Optional[typing.Iterable[global___PathProto]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"size",b"size"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"paths",b"paths",u"size",b"size"]) -> None: ...
global___HierarchyProto = HierarchyProto

# Stub for PathProto: one word's path through the tree.
class PathProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    WORD_ID_FIELD_NUMBER: int
    PATH_NODES_FIELD_NUMBER: int
    word_id: int = ...
    @property
    def path_nodes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PathNodeProto]: ...
    def __init__(self,
        *,
        word_id : typing.Optional[int] = ...,
        path_nodes : typing.Optional[typing.Iterable[global___PathNodeProto]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"word_id",b"word_id"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"path_nodes",b"path_nodes",u"word_id",b"word_id"]) -> None: ...
global___PathProto = PathProto

# Stub for PathNodeProto: a single step (index/length/target) along a path.
class PathNodeProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    INDEX_FIELD_NUMBER: int
    LENGTH_FIELD_NUMBER: int
    TARGET_FIELD_NUMBER: int
    index: int = ...
    length: int = ...
    target: int = ...
    def __init__(self,
        *,
        index : typing.Optional[int] = ...,
        length : typing.Optional[int] = ...,
        target : typing.Optional[int] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"index",b"index",u"length",b"length",u"target",b"target"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"index",b"index",u"length",b"length",u"target",b"target"]) -> None: ...
global___PathNodeProto = PathNodeProto

View File

@ -0,0 +1,50 @@
syntax = "proto2";
import "caffe2/proto/caffe2.proto";
package caffe2;
// Model provenance / identification metadata.
message ModelInfo {
  optional string project = 1;
  optional string modelClass = 2;
  optional string version = 3;
  optional string predictorType = 4 [ default = "SINGLE_PREDICTOR" ];
  optional string modelId = 5;
}
// The *Map messages below emulate map<k, v> as repeated key/value entries,
// since this file predates (or avoids) proto3 map support.
message BlobsMap {
  required string key = 1;
  repeated string value = 2;
}
message NetsMap {
  required string key = 1;
  required NetDef value = 2; // NetDef comes from caffe2/proto/caffe2.proto
}
message PlansMap {
  required string key = 1;
  required PlanDef value = 2; // PlanDef comes from caffe2/proto/caffe2.proto
}
message StringMap {
  required string key = 1;
  required string value = 2;
}
// Top-level serialized predictor bundle: named blob groups, nets, plans,
// and auxiliary metadata.
message MetaNetDef {
  repeated BlobsMap blobs = 1;
  // Text-format serialized NetDefs.
  repeated NetsMap nets = 2;
  // Info about where the model comes from. Possible use cases:
  // 1) sanity check or diagnose
  // 2) provide info for evaluation.
  optional ModelInfo modelInfo = 3;
  repeated PlansMap plans = 4;
  repeated StringMap applicationSpecificInfo = 5;
  repeated string blobsOrder = 6;
  repeated string preLoadBlobs = 7;
  optional TensorBoundShapes tensorBoundShapes = 8;
  repeated string requestOnlyEmbeddings = 9;
  optional AOTConfig aotConfig = 10;
}

View File

@ -0,0 +1,160 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import caffe2.proto.caffe2_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import typing
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...

# Stub for ModelInfo (see metanet.proto): model provenance metadata.
class ModelInfo(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    PROJECT_FIELD_NUMBER: int
    MODELCLASS_FIELD_NUMBER: int
    VERSION_FIELD_NUMBER: int
    PREDICTORTYPE_FIELD_NUMBER: int
    MODELID_FIELD_NUMBER: int
    project: typing.Text = ...
    modelClass: typing.Text = ...
    version: typing.Text = ...
    predictorType: typing.Text = ...
    modelId: typing.Text = ...
    def __init__(self,
        *,
        project : typing.Optional[typing.Text] = ...,
        modelClass : typing.Optional[typing.Text] = ...,
        version : typing.Optional[typing.Text] = ...,
        predictorType : typing.Optional[typing.Text] = ...,
        modelId : typing.Optional[typing.Text] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"modelClass",b"modelClass",u"modelId",b"modelId",u"predictorType",b"predictorType",u"project",b"project",u"version",b"version"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"modelClass",b"modelClass",u"modelId",b"modelId",u"predictorType",b"predictorType",u"project",b"project",u"version",b"version"]) -> None: ...
global___ModelInfo = ModelInfo

# Stub for BlobsMap: key -> repeated string values (map emulation).
class BlobsMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    KEY_FIELD_NUMBER: int
    VALUE_FIELD_NUMBER: int
    key: typing.Text = ...
    value: google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text] = ...
    def __init__(self,
        *,
        key : typing.Optional[typing.Text] = ...,
        value : typing.Optional[typing.Iterable[typing.Text]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"key",b"key"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
global___BlobsMap = BlobsMap

# Stub for NetsMap: key -> caffe2 NetDef (map emulation).
class NetsMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    KEY_FIELD_NUMBER: int
    VALUE_FIELD_NUMBER: int
    key: typing.Text = ...
    @property
    def value(self) -> caffe2.proto.caffe2_pb2.NetDef: ...
    def __init__(self,
        *,
        key : typing.Optional[typing.Text] = ...,
        value : typing.Optional[caffe2.proto.caffe2_pb2.NetDef] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
global___NetsMap = NetsMap

# Stub for PlansMap: key -> caffe2 PlanDef (map emulation).
class PlansMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    KEY_FIELD_NUMBER: int
    VALUE_FIELD_NUMBER: int
    key: typing.Text = ...
    @property
    def value(self) -> caffe2.proto.caffe2_pb2.PlanDef: ...
    def __init__(self,
        *,
        key : typing.Optional[typing.Text] = ...,
        value : typing.Optional[caffe2.proto.caffe2_pb2.PlanDef] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
global___PlansMap = PlansMap

# Stub for StringMap: key -> single string value (map emulation).
class StringMap(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    KEY_FIELD_NUMBER: int
    VALUE_FIELD_NUMBER: int
    key: typing.Text = ...
    value: typing.Text = ...
    def __init__(self,
        *,
        key : typing.Optional[typing.Text] = ...,
        value : typing.Optional[typing.Text] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
global___StringMap = StringMap

# Stub for MetaNetDef: the top-level serialized predictor bundle.
class MetaNetDef(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    BLOBS_FIELD_NUMBER: int
    NETS_FIELD_NUMBER: int
    MODELINFO_FIELD_NUMBER: int
    PLANS_FIELD_NUMBER: int
    APPLICATIONSPECIFICINFO_FIELD_NUMBER: int
    BLOBSORDER_FIELD_NUMBER: int
    PRELOADBLOBS_FIELD_NUMBER: int
    TENSORBOUNDSHAPES_FIELD_NUMBER: int
    REQUESTONLYEMBEDDINGS_FIELD_NUMBER: int
    AOTCONFIG_FIELD_NUMBER: int
    blobsOrder: google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text] = ...
    preLoadBlobs: google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text] = ...
    requestOnlyEmbeddings: google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text] = ...
    @property
    def blobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BlobsMap]: ...
    @property
    def nets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NetsMap]: ...
    @property
    def modelInfo(self) -> global___ModelInfo: ...
    @property
    def plans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PlansMap]: ...
    @property
    def applicationSpecificInfo(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StringMap]: ...
    @property
    def tensorBoundShapes(self) -> caffe2.proto.caffe2_pb2.TensorBoundShapes: ...
    @property
    def aotConfig(self) -> caffe2.proto.caffe2_pb2.AOTConfig: ...
    def __init__(self,
        *,
        blobs : typing.Optional[typing.Iterable[global___BlobsMap]] = ...,
        nets : typing.Optional[typing.Iterable[global___NetsMap]] = ...,
        modelInfo : typing.Optional[global___ModelInfo] = ...,
        plans : typing.Optional[typing.Iterable[global___PlansMap]] = ...,
        applicationSpecificInfo : typing.Optional[typing.Iterable[global___StringMap]] = ...,
        blobsOrder : typing.Optional[typing.Iterable[typing.Text]] = ...,
        preLoadBlobs : typing.Optional[typing.Iterable[typing.Text]] = ...,
        tensorBoundShapes : typing.Optional[caffe2.proto.caffe2_pb2.TensorBoundShapes] = ...,
        requestOnlyEmbeddings : typing.Optional[typing.Iterable[typing.Text]] = ...,
        aotConfig : typing.Optional[caffe2.proto.caffe2_pb2.AOTConfig] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"aotConfig",b"aotConfig",u"modelInfo",b"modelInfo",u"tensorBoundShapes",b"tensorBoundShapes"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"aotConfig",b"aotConfig",u"applicationSpecificInfo",b"applicationSpecificInfo",u"blobs",b"blobs",u"blobsOrder",b"blobsOrder",u"modelInfo",b"modelInfo",u"nets",b"nets",u"plans",b"plans",u"preLoadBlobs",b"preLoadBlobs",u"requestOnlyEmbeddings",b"requestOnlyEmbeddings",u"tensorBoundShapes",b"tensorBoundShapes"]) -> None: ...
global___MetaNetDef = MetaNetDef

View File

@ -0,0 +1,36 @@
syntax = "proto2";
package caffe2;
// Well-known key/type-name constants used when storing a predictor model in
// a DB; the values live in the field defaults, so a default-constructed
// PredictorConsts serves as the constant table.
message PredictorConsts {
  // Important - to ensure ordered traversal of the DB, these must be
  // set in the given (lexicographic) order in the input DBReader.
  optional string META_NET_DEF = 1 [ default = "!!META_NET_DEF" ];
  // The key the Predictor sets in the global workspace for DBReader
  // consumed by the LoadOp in GLOBAL_INIT_NET.
  optional string PREDICTOR_DBREADER = 2 [ default = "!!PREDICTOR_DBREADER" ];
  // Blob types used in MetaNetDef blobs
  optional string PARAMETERS_BLOB_TYPE = 3 [ default = "PARAMETERS_BLOB_TYPE" ];
  optional string INPUTS_BLOB_TYPE = 4 [ default = "INPUTS_BLOB_TYPE" ];
  optional string OUTPUTS_BLOB_TYPE = 5 [ default = "OUTPUTS_BLOB_TYPE" ];
  // Net types used in MetaNetDef nets
  optional string GLOBAL_INIT_NET_TYPE = 6 [ default = "GLOBAL_INIT_NET_TYPE" ];
  optional string PREDICT_INIT_NET_TYPE = 7
      [ default = "PREDICT_INIT_NET_TYPE" ];
  optional string PREDICT_NET_TYPE = 8 [ default = "PREDICT_NET_TYPE" ];
  optional string SINGLE_PREDICTOR = 9 [ default = "SINGLE_PREDICTOR" ];
  optional string MULTI_PREDICTOR = 10 [ default = "MULTI_PREDICTOR" ];
  optional string TRAIN_INIT_PLAN_TYPE = 11
      [ default = "TRAIN_INIT_PLAN_TYPE" ];
  optional string TRAIN_PLAN_TYPE = 12 [ default = "TRAIN_PLAN_TYPE" ];
  // Shape info blob name
  optional string SHAPE_INFO_BLOB = 13 [ default = "SHAPE_INFO_BLOB" ];
  // Sequential blob reader name
  optional string DEFERRED_BLOB_READER = 14
      [ default = "__DEFERRED_BLOB_READER__" ];
}

View File

@ -0,0 +1,63 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import typing
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...

# Stub for PredictorConsts (see predictor_consts.proto): a constant table of
# well-known DB keys / type names, carried as field defaults.
class PredictorConsts(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    META_NET_DEF_FIELD_NUMBER: int
    PREDICTOR_DBREADER_FIELD_NUMBER: int
    PARAMETERS_BLOB_TYPE_FIELD_NUMBER: int
    INPUTS_BLOB_TYPE_FIELD_NUMBER: int
    OUTPUTS_BLOB_TYPE_FIELD_NUMBER: int
    GLOBAL_INIT_NET_TYPE_FIELD_NUMBER: int
    PREDICT_INIT_NET_TYPE_FIELD_NUMBER: int
    PREDICT_NET_TYPE_FIELD_NUMBER: int
    SINGLE_PREDICTOR_FIELD_NUMBER: int
    MULTI_PREDICTOR_FIELD_NUMBER: int
    TRAIN_INIT_PLAN_TYPE_FIELD_NUMBER: int
    TRAIN_PLAN_TYPE_FIELD_NUMBER: int
    SHAPE_INFO_BLOB_FIELD_NUMBER: int
    DEFERRED_BLOB_READER_FIELD_NUMBER: int
    # UPPER_CASE attribute names mirror the .proto field names verbatim.
    META_NET_DEF: typing.Text = ...
    PREDICTOR_DBREADER: typing.Text = ...
    PARAMETERS_BLOB_TYPE: typing.Text = ...
    INPUTS_BLOB_TYPE: typing.Text = ...
    OUTPUTS_BLOB_TYPE: typing.Text = ...
    GLOBAL_INIT_NET_TYPE: typing.Text = ...
    PREDICT_INIT_NET_TYPE: typing.Text = ...
    PREDICT_NET_TYPE: typing.Text = ...
    SINGLE_PREDICTOR: typing.Text = ...
    MULTI_PREDICTOR: typing.Text = ...
    TRAIN_INIT_PLAN_TYPE: typing.Text = ...
    TRAIN_PLAN_TYPE: typing.Text = ...
    SHAPE_INFO_BLOB: typing.Text = ...
    DEFERRED_BLOB_READER: typing.Text = ...
    def __init__(self,
        *,
        META_NET_DEF : typing.Optional[typing.Text] = ...,
        PREDICTOR_DBREADER : typing.Optional[typing.Text] = ...,
        PARAMETERS_BLOB_TYPE : typing.Optional[typing.Text] = ...,
        INPUTS_BLOB_TYPE : typing.Optional[typing.Text] = ...,
        OUTPUTS_BLOB_TYPE : typing.Optional[typing.Text] = ...,
        GLOBAL_INIT_NET_TYPE : typing.Optional[typing.Text] = ...,
        PREDICT_INIT_NET_TYPE : typing.Optional[typing.Text] = ...,
        PREDICT_NET_TYPE : typing.Optional[typing.Text] = ...,
        SINGLE_PREDICTOR : typing.Optional[typing.Text] = ...,
        MULTI_PREDICTOR : typing.Optional[typing.Text] = ...,
        TRAIN_INIT_PLAN_TYPE : typing.Optional[typing.Text] = ...,
        TRAIN_PLAN_TYPE : typing.Optional[typing.Text] = ...,
        SHAPE_INFO_BLOB : typing.Optional[typing.Text] = ...,
        DEFERRED_BLOB_READER : typing.Optional[typing.Text] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"DEFERRED_BLOB_READER",b"DEFERRED_BLOB_READER",u"GLOBAL_INIT_NET_TYPE",b"GLOBAL_INIT_NET_TYPE",u"INPUTS_BLOB_TYPE",b"INPUTS_BLOB_TYPE",u"META_NET_DEF",b"META_NET_DEF",u"MULTI_PREDICTOR",b"MULTI_PREDICTOR",u"OUTPUTS_BLOB_TYPE",b"OUTPUTS_BLOB_TYPE",u"PARAMETERS_BLOB_TYPE",b"PARAMETERS_BLOB_TYPE",u"PREDICTOR_DBREADER",b"PREDICTOR_DBREADER",u"PREDICT_INIT_NET_TYPE",b"PREDICT_INIT_NET_TYPE",u"PREDICT_NET_TYPE",b"PREDICT_NET_TYPE",u"SHAPE_INFO_BLOB",b"SHAPE_INFO_BLOB",u"SINGLE_PREDICTOR",b"SINGLE_PREDICTOR",u"TRAIN_INIT_PLAN_TYPE",b"TRAIN_INIT_PLAN_TYPE",u"TRAIN_PLAN_TYPE",b"TRAIN_PLAN_TYPE"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"DEFERRED_BLOB_READER",b"DEFERRED_BLOB_READER",u"GLOBAL_INIT_NET_TYPE",b"GLOBAL_INIT_NET_TYPE",u"INPUTS_BLOB_TYPE",b"INPUTS_BLOB_TYPE",u"META_NET_DEF",b"META_NET_DEF",u"MULTI_PREDICTOR",b"MULTI_PREDICTOR",u"OUTPUTS_BLOB_TYPE",b"OUTPUTS_BLOB_TYPE",u"PARAMETERS_BLOB_TYPE",b"PARAMETERS_BLOB_TYPE",u"PREDICTOR_DBREADER",b"PREDICTOR_DBREADER",u"PREDICT_INIT_NET_TYPE",b"PREDICT_INIT_NET_TYPE",u"PREDICT_NET_TYPE",b"PREDICT_NET_TYPE",u"SHAPE_INFO_BLOB",b"SHAPE_INFO_BLOB",u"SINGLE_PREDICTOR",b"SINGLE_PREDICTOR",u"TRAIN_INIT_PLAN_TYPE",b"TRAIN_INIT_PLAN_TYPE",u"TRAIN_PLAN_TYPE",b"TRAIN_PLAN_TYPE"]) -> None: ...
global___PredictorConsts = PredictorConsts

View File

@ -0,0 +1,68 @@
syntax = "proto2";
package caffe2;
// A few notes about the Caffe2's protobuffer convention:
// (1) Most objects are registered by their types, such as operators and nets.
// For these, we have a string-type field "type" for registration purposes.
// (2) We do not use extension because that used to create quite some conflicts
// in Caffe's protobuf design.
// (3) We have not used any proto3 specific features, such as Any or Map. This
// is mainly for backward compatibility purposes but we may consider using
// those in the future.
// A two number summary for a value. It also has count for restoring.
message TwoNumberStatsProto {
  optional float mean = 1;
  optional float stddev = 2;
  optional int64 count = 3;
}
// Blob profiling information. Profile for a blob is created every time
// a node outputs to the blob.
message BlobProfile {
  // Name of the blob (corresponds to OperatorDef.output).
  optional string name = 1; // required
  // Profiling statistics.
  // NOTE: field number 2 is skipped here — presumably a removed field;
  // do not reuse it.
  optional TwoNumberStatsProto bytes_used = 3;
}
// Protobuf format to serialize profiler data.
message ProfDAGProto {
  // The name for the operator
  required string name = 1;
  // The mean execution time
  required float mean = 2;
  // The standard deviation
  required float stddev = 3;
  // New field to represent the numbers above, and with count.
  optional TwoNumberStatsProto execution_time = 4;
  // Blob profiles that this node outputs.
  repeated BlobProfile output_profile = 5;
  // The extra_info from the operator device option.
  // NOTE: field number 6 is skipped — presumably a removed field; do not
  // reuse it.
  repeated string extra_info = 7;
}
// Operator profiling information.
//
// Note: The indices for elements of 'stats' and the indices of
// 'output_profile' inside each 'stats' are assumed to match the
// indices of 'op' elements of a corresponding NetDef and the 'output'
// indices within each 'op'.
message ProfDAGProtos {
  repeated ProfDAGProto stats = 1;
  optional string net_name = 2;
  repeated OpProfile ops_stats = 3;
}
// Represents specification of an operation cost.
message OpProfile {
  optional string idx = 1;
  optional string net_name = 2;
  optional string type = 3;
  optional float exec_time_secs = 4;
}

View File

@ -0,0 +1,126 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import typing
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...

# Stub for TwoNumberStatsProto (see prof_dag.proto): mean/stddev/count summary.
class TwoNumberStatsProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    MEAN_FIELD_NUMBER: int
    STDDEV_FIELD_NUMBER: int
    COUNT_FIELD_NUMBER: int
    mean: float = ...
    stddev: float = ...
    count: int = ...
    def __init__(self,
        *,
        mean : typing.Optional[float] = ...,
        stddev : typing.Optional[float] = ...,
        count : typing.Optional[int] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"count",b"count",u"mean",b"mean",u"stddev",b"stddev"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"count",b"count",u"mean",b"mean",u"stddev",b"stddev"]) -> None: ...
global___TwoNumberStatsProto = TwoNumberStatsProto

# Stub for BlobProfile: per-blob profiling statistics.
class BlobProfile(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    NAME_FIELD_NUMBER: int
    BYTES_USED_FIELD_NUMBER: int
    name: typing.Text = ...
    @property
    def bytes_used(self) -> global___TwoNumberStatsProto: ...
    def __init__(self,
        *,
        name : typing.Optional[typing.Text] = ...,
        bytes_used : typing.Optional[global___TwoNumberStatsProto] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"bytes_used",b"bytes_used",u"name",b"name"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"bytes_used",b"bytes_used",u"name",b"name"]) -> None: ...
global___BlobProfile = BlobProfile

# Stub for ProfDAGProto: per-operator execution-time profile.
class ProfDAGProto(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    NAME_FIELD_NUMBER: int
    MEAN_FIELD_NUMBER: int
    STDDEV_FIELD_NUMBER: int
    EXECUTION_TIME_FIELD_NUMBER: int
    OUTPUT_PROFILE_FIELD_NUMBER: int
    EXTRA_INFO_FIELD_NUMBER: int
    name: typing.Text = ...
    mean: float = ...
    stddev: float = ...
    extra_info: google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text] = ...
    @property
    def execution_time(self) -> global___TwoNumberStatsProto: ...
    @property
    def output_profile(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BlobProfile]: ...
    def __init__(self,
        *,
        name : typing.Optional[typing.Text] = ...,
        mean : typing.Optional[float] = ...,
        stddev : typing.Optional[float] = ...,
        execution_time : typing.Optional[global___TwoNumberStatsProto] = ...,
        output_profile : typing.Optional[typing.Iterable[global___BlobProfile]] = ...,
        extra_info : typing.Optional[typing.Iterable[typing.Text]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"execution_time",b"execution_time",u"mean",b"mean",u"name",b"name",u"stddev",b"stddev"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"execution_time",b"execution_time",u"extra_info",b"extra_info",u"mean",b"mean",u"name",b"name",u"output_profile",b"output_profile",u"stddev",b"stddev"]) -> None: ...
global___ProfDAGProto = ProfDAGProto

# Stub for ProfDAGProtos: the full serialized profiler report for a net.
class ProfDAGProtos(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    STATS_FIELD_NUMBER: int
    NET_NAME_FIELD_NUMBER: int
    OPS_STATS_FIELD_NUMBER: int
    net_name: typing.Text = ...
    @property
    def stats(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ProfDAGProto]: ...
    @property
    def ops_stats(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpProfile]: ...
    def __init__(self,
        *,
        stats : typing.Optional[typing.Iterable[global___ProfDAGProto]] = ...,
        net_name : typing.Optional[typing.Text] = ...,
        ops_stats : typing.Optional[typing.Iterable[global___OpProfile]] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"net_name",b"net_name"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"net_name",b"net_name",u"ops_stats",b"ops_stats",u"stats",b"stats"]) -> None: ...
global___ProfDAGProtos = ProfDAGProtos

# Stub for OpProfile: per-operation cost specification.
class OpProfile(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
    IDX_FIELD_NUMBER: int
    NET_NAME_FIELD_NUMBER: int
    TYPE_FIELD_NUMBER: int
    EXEC_TIME_SECS_FIELD_NUMBER: int
    idx: typing.Text = ...
    net_name: typing.Text = ...
    type: typing.Text = ...
    exec_time_secs: float = ...
    def __init__(self,
        *,
        idx : typing.Optional[typing.Text] = ...,
        net_name : typing.Optional[typing.Text] = ...,
        type : typing.Optional[typing.Text] = ...,
        exec_time_secs : typing.Optional[float] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal[u"exec_time_secs",b"exec_time_secs",u"idx",b"idx",u"net_name",b"net_name",u"type",b"type"]) -> bool: ...
    def ClearField(self, field_name: typing_extensions.Literal[u"exec_time_secs",b"exec_time_secs",u"idx",b"idx",u"net_name",b"net_name",u"type",b"type"]) -> None: ...
global___OpProfile = OpProfile