feat: switch the backend to PaddleOCR-NCNN; switch the project to CMake

1. The project backend has been migrated wholesale to the PaddleOCR-NCNN algorithm and has passed basic compatibility testing.
2. The project is now organized with CMake; to better accommodate third-party libraries going forward, a QMake project is no longer provided.
3. The rights/license declaration files and the code tree have been reorganized to minimize the risk of infringement.

Log: switch the backend to PaddleOCR-NCNN; switch the project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
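The commit message states that the build moved to CMake but the new top-level project file is not shown in this diff. Below is a minimal sketch of what a CMake entry point for a PaddleOCR-NCNN plus vendored-OpenCV setup could look like; the project name, target names, source paths, and the choice to locate the libraries via find_package are assumptions for illustration, not the actual files added by this commit.

# Hypothetical top-level CMakeLists.txt sketch; names and paths are assumptions.
cmake_minimum_required(VERSION 3.16)
project(PaddleOCR_NCNN_App LANGUAGES CXX)

set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Locate an OpenCV build; OpenCV_DIR could point at a build of the
# vendored 3rdparty/opencv-4.5.4 tree added by this commit.
find_package(OpenCV REQUIRED COMPONENTS core imgproc dnn)

# ncnn installs a CMake package that exports the `ncnn` target.
find_package(ncnn REQUIRED)

add_executable(ocr_demo src/main.cpp)  # hypothetical source layout
target_link_libraries(ocr_demo PRIVATE ${OpenCV_LIBS} ncnn)

Usage would follow the normal CMake flow (configure, then build), replacing the former QMake project described in the commit message.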
41126  3rdparty/opencv-4.5.4/modules/dnn/misc/caffe/opencv-caffe.pb.cc  vendored  Normal file
File diff suppressed because it is too large
32721  3rdparty/opencv-4.5.4/modules/dnn/misc/caffe/opencv-caffe.pb.h  vendored  Normal file
File diff suppressed because it is too large
196  3rdparty/opencv-4.5.4/modules/dnn/misc/face_detector_accuracy.py  vendored  Normal file
@@ -0,0 +1,196 @@
# This script is used to estimate an accuracy of different face detection models.
# COCO evaluation tool is used to compute an accuracy metrics (Average Precision).
# Script works with different face detection datasets.
import os
import json
from fnmatch import fnmatch
from math import pi
import cv2 as cv
import argparse
import os
import sys
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

parser = argparse.ArgumentParser(
        description='Evaluate OpenCV face detection algorithms '
                    'using COCO evaluation tool, http://cocodataset.org/#detections-eval')
parser.add_argument('--proto', help='Path to .prototxt of Caffe model or .pbtxt of TensorFlow graph')
parser.add_argument('--model', help='Path to .caffemodel trained in Caffe or .pb from TensorFlow')
parser.add_argument('--cascade', help='Optional path to trained Haar cascade as '
                                      'an additional model for evaluation')
parser.add_argument('--ann', help='Path to text file with ground truth annotations')
parser.add_argument('--pics', help='Path to images root directory')
parser.add_argument('--fddb', help='Evaluate FDDB dataset, http://vis-www.cs.umass.edu/fddb/', action='store_true')
parser.add_argument('--wider', help='Evaluate WIDER FACE dataset, http://mmlab.ie.cuhk.edu.hk/projects/WIDERFace/', action='store_true')
args = parser.parse_args()

dataset = {}
dataset['images'] = []
dataset['categories'] = [{ 'id': 0, 'name': 'face' }]
dataset['annotations'] = []

def ellipse2Rect(params):
    rad_x = params[0]
    rad_y = params[1]
    angle = params[2] * 180.0 / pi
    center_x = params[3]
    center_y = params[4]
    pts = cv.ellipse2Poly((int(center_x), int(center_y)), (int(rad_x), int(rad_y)),
                          int(angle), 0, 360, 10)
    rect = cv.boundingRect(pts)
    left = rect[0]
    top = rect[1]
    right = rect[0] + rect[2]
    bottom = rect[1] + rect[3]
    return left, top, right, bottom

def addImage(imagePath):
    assert('images' in dataset)
    imageId = len(dataset['images'])
    dataset['images'].append({
        'id': int(imageId),
        'file_name': imagePath
    })
    return imageId

def addBBox(imageId, left, top, width, height):
    assert('annotations' in dataset)
    dataset['annotations'].append({
        'id': len(dataset['annotations']),
        'image_id': int(imageId),
        'category_id': 0,  # Face
        'bbox': [int(left), int(top), int(width), int(height)],
        'iscrowd': 0,
        'area': float(width * height)
    })

def addDetection(detections, imageId, left, top, width, height, score):
    detections.append({
        'image_id': int(imageId),
        'category_id': 0,  # Face
        'bbox': [int(left), int(top), int(width), int(height)],
        'score': float(score)
    })


def fddb_dataset(annotations, images):
    for d in os.listdir(annotations):
        if fnmatch(d, 'FDDB-fold-*-ellipseList.txt'):
            with open(os.path.join(annotations, d), 'rt') as f:
                lines = [line.rstrip('\n') for line in f]
                lineId = 0
                while lineId < len(lines):
                    # Image
                    imgPath = lines[lineId]
                    lineId += 1
                    imageId = addImage(os.path.join(images, imgPath) + '.jpg')

                    img = cv.imread(os.path.join(images, imgPath) + '.jpg')

                    # Faces
                    numFaces = int(lines[lineId])
                    lineId += 1
                    for i in range(numFaces):
                        params = [float(v) for v in lines[lineId].split()]
                        lineId += 1
                        left, top, right, bottom = ellipse2Rect(params)
                        addBBox(imageId, left, top, width=right - left + 1,
                                height=bottom - top + 1)


def wider_dataset(annotations, images):
    with open(annotations, 'rt') as f:
        lines = [line.rstrip('\n') for line in f]
        lineId = 0
        while lineId < len(lines):
            # Image
            imgPath = lines[lineId]
            lineId += 1
            imageId = addImage(os.path.join(images, imgPath))

            # Faces
            numFaces = int(lines[lineId])
            lineId += 1
            for i in range(numFaces):
                params = [int(v) for v in lines[lineId].split()]
                lineId += 1
                left, top, width, height = params[0], params[1], params[2], params[3]
                addBBox(imageId, left, top, width, height)

def evaluate():
    cocoGt = COCO('annotations.json')
    cocoDt = cocoGt.loadRes('detections.json')
    cocoEval = COCOeval(cocoGt, cocoDt, 'bbox')
    cocoEval.evaluate()
    cocoEval.accumulate()
    cocoEval.summarize()


### Convert to COCO annotations format #########################################
assert(args.fddb or args.wider)
if args.fddb:
    fddb_dataset(args.ann, args.pics)
elif args.wider:
    wider_dataset(args.ann, args.pics)

with open('annotations.json', 'wt') as f:
    json.dump(dataset, f)

### Obtain detections ##########################################################
detections = []
if args.proto and args.model:
    net = cv.dnn.readNet(args.proto, args.model)

    def detect(img, imageId):
        imgWidth = img.shape[1]
        imgHeight = img.shape[0]
        net.setInput(cv.dnn.blobFromImage(img, 1.0, (300, 300), (104., 177., 123.), False, False))
        out = net.forward()

        for i in range(out.shape[2]):
            confidence = out[0, 0, i, 2]
            left = int(out[0, 0, i, 3] * img.shape[1])
            top = int(out[0, 0, i, 4] * img.shape[0])
            right = int(out[0, 0, i, 5] * img.shape[1])
            bottom = int(out[0, 0, i, 6] * img.shape[0])

            x = max(0, min(left, img.shape[1] - 1))
            y = max(0, min(top, img.shape[0] - 1))
            w = max(0, min(right - x + 1, img.shape[1] - x))
            h = max(0, min(bottom - y + 1, img.shape[0] - y))

            addDetection(detections, imageId, x, y, w, h, score=confidence)

elif args.cascade:
    cascade = cv.CascadeClassifier(args.cascade)

    def detect(img, imageId):
        srcImgGray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
        faces = cascade.detectMultiScale(srcImgGray)

        for rect in faces:
            left, top, width, height = rect[0], rect[1], rect[2], rect[3]
            addDetection(detections, imageId, left, top, width, height, score=1.0)

for i in range(len(dataset['images'])):
    sys.stdout.write('\r%d / %d' % (i + 1, len(dataset['images'])))
    sys.stdout.flush()

    img = cv.imread(dataset['images'][i]['file_name'])
    imageId = int(dataset['images'][i]['id'])

    detect(img, imageId)

with open('detections.json', 'wt') as f:
    json.dump(detections, f)

evaluate()


def rm(f):
    if os.path.exists(f):
        os.remove(f)

rm('annotations.json')
rm('detections.json')
1  3rdparty/opencv-4.5.4/modules/dnn/misc/java/filelist_common  vendored  Normal file
@@ -0,0 +1 @@
misc/java/src/cpp/dnn_converters.hpp
63  3rdparty/opencv-4.5.4/modules/dnn/misc/java/gen_dict.json  vendored  Normal file
@@ -0,0 +1,63 @@
{
    "type_dict": {
        "MatShape": {
            "j_type": "MatOfInt",
            "jn_type": "long",
            "jni_type": "jlong",
            "jni_var": "MatShape %(n)s",
            "suffix": "J",
            "v_type": "Mat",
            "j_import": "org.opencv.core.MatOfInt"
        },
        "vector_MatShape": {
            "j_type": "List<MatOfInt>",
            "jn_type": "List<MatOfInt>",
            "jni_type": "jobject",
            "jni_var": "std::vector< MatShape > %(n)s",
            "suffix": "Ljava_util_List",
            "v_type": "vector_MatShape",
            "j_import": "org.opencv.core.MatOfInt"
        },
        "vector_size_t": {
            "j_type": "MatOfDouble",
            "jn_type": "long",
            "jni_type": "jlong",
            "jni_var": "std::vector<size_t> %(n)s",
            "suffix": "J",
            "v_type": "Mat",
            "j_import": "org.opencv.core.MatOfDouble"
        },
        "vector_Ptr_Layer": {
            "j_type": "List<Layer>",
            "jn_type": "List<Layer>",
            "jni_type": "jobject",
            "jni_var": "std::vector< Ptr<cv::dnn::Layer> > %(n)s",
            "suffix": "Ljava_util_List",
            "v_type": "vector_Layer",
            "j_import": "org.opencv.dnn.Layer"
        },
        "vector_Target": {
            "j_type": "List<Integer>",
            "jn_type": "List<Integer>",
            "jni_type": "jobject",
            "jni_var": "std::vector< cv::dnn::Target > %(n)s",
            "suffix": "Ljava_util_List",
            "v_type": "vector_Target"
        },
        "LayerId": {
            "j_type": "DictValue",
            "jn_type": "long",
            "jn_args": [
                [
                    "__int64",
                    ".getNativeObjAddr()"
                ]
            ],
            "jni_name": "(*(*(Ptr<cv::dnn::DictValue>*)%(n)s_nativeObj))",
            "jni_type": "jlong",
            "suffix": "J",
            "j_import": "org.opencv.dnn.DictValue"
        }
    }
}
102  3rdparty/opencv-4.5.4/modules/dnn/misc/java/src/cpp/dnn_converters.cpp  vendored  Normal file
@@ -0,0 +1,102 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html

// Author: abratchik

#include "dnn_converters.hpp"

#define LOG_TAG "org.opencv.dnn"

void Mat_to_MatShape(cv::Mat& mat, MatShape& matshape)
{
    matshape.clear();
    CHECK_MAT(mat.type()==CV_32SC1 && mat.cols==1);
    matshape = (MatShape) mat;
}

void MatShape_to_Mat(MatShape& matshape, cv::Mat& mat)
{
    mat = cv::Mat(matshape, true);
}

std::vector<MatShape> List_to_vector_MatShape(JNIEnv* env, jobject list)
{
    static jclass juArrayList = ARRAYLIST(env);
    jmethodID m_size = LIST_SIZE(env, juArrayList);
    jmethodID m_get = LIST_GET(env, juArrayList);

    static jclass jMatOfInt = MATOFINT(env);

    jint len = env->CallIntMethod(list, m_size);
    std::vector<MatShape> result;
    result.reserve(len);
    for (jint i=0; i<len; i++)
    {
        jobject element = static_cast<jobject>(env->CallObjectMethod(list, m_get, i));
        cv::Mat& mat = *((cv::Mat*) GETNATIVEOBJ(env, jMatOfInt, element) );
        MatShape matshape = (MatShape) mat;
        result.push_back(matshape);
        env->DeleteLocalRef(element);
    }
    return result;
}

jobject vector_Ptr_Layer_to_List(JNIEnv* env, std::vector<cv::Ptr<cv::dnn::Layer> >& vs)
{
    static jclass juArrayList = ARRAYLIST(env);
    static jmethodID m_create = CONSTRUCTOR(env, juArrayList);
    jmethodID m_add = LIST_ADD(env, juArrayList);

    static jclass jLayerClass = LAYER(env);
    static jmethodID m_create_layer = LAYER_CONSTRUCTOR(env, jLayerClass);

    jobject result = env->NewObject(juArrayList, m_create, vs.size());
    for (std::vector< cv::Ptr<cv::dnn::Layer> >::iterator it = vs.begin(); it != vs.end(); ++it) {
        jobject element = env->NewObject(jLayerClass, m_create_layer, (*it).get());
        env->CallBooleanMethod(result, m_add, element);
        env->DeleteLocalRef(element);
    }
    return result;
}

jobject vector_Target_to_List(JNIEnv* env, std::vector<cv::dnn::Target>& vs)
{
    static jclass juArrayList = ARRAYLIST(env);
    static jmethodID m_create = CONSTRUCTOR(env, juArrayList);
    jmethodID m_add = LIST_ADD(env, juArrayList);

    static jclass jInteger = env->FindClass("java/lang/Integer");
    static jmethodID m_create_Integer = env->GetMethodID(jInteger, "<init>", "(I)V");

    jobject result = env->NewObject(juArrayList, m_create, vs.size());
    for (size_t i = 0; i < vs.size(); ++i)
    {
        jobject element = env->NewObject(jInteger, m_create_Integer, vs[i]);
        env->CallBooleanMethod(result, m_add, element);
        env->DeleteLocalRef(element);
    }
    return result;
}

std::vector<cv::Ptr<cv::dnn::Layer> > List_to_vector_Ptr_Layer(JNIEnv* env, jobject list)
{
    static jclass juArrayList = ARRAYLIST(env);
    jmethodID m_size = LIST_SIZE(env, juArrayList);
    jmethodID m_get = LIST_GET(env, juArrayList);

    static jclass jLayerClass = LAYER(env);

    jint len = env->CallIntMethod(list, m_size);
    std::vector< cv::Ptr<cv::dnn::Layer> > result;
    result.reserve(len);
    for (jint i=0; i<len; i++)
    {
        jobject element = static_cast<jobject>(env->CallObjectMethod(list, m_get, i));
        cv::Ptr<cv::dnn::Layer>* layer_ptr = (cv::Ptr<cv::dnn::Layer>*) GETNATIVEOBJ(env, jLayerClass, element) ;
        cv::Ptr<cv::dnn::Layer> layer = *(layer_ptr);
        result.push_back(layer);
        env->DeleteLocalRef(element);
    }
    return result;
}
33  3rdparty/opencv-4.5.4/modules/dnn/misc/java/src/cpp/dnn_converters.hpp  vendored  Normal file
@@ -0,0 +1,33 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html

// Author: abratchik

#ifndef DNN_CONVERTERS_HPP
#define DNN_CONVERTERS_HPP

#include <jni.h>
#include "opencv_java.hpp"
#include "opencv2/core.hpp"
#include "opencv2/dnn/dnn.hpp"

#define LAYER(ENV) static_cast<jclass>(ENV->NewGlobalRef(ENV->FindClass("org/opencv/dnn/Layer")))
#define LAYER_CONSTRUCTOR(ENV, CLS) ENV->GetMethodID(CLS, "<init>", "(J)V")


using namespace cv::dnn;

void Mat_to_MatShape(cv::Mat& mat, MatShape& matshape);

void MatShape_to_Mat(MatShape& matshape, cv::Mat& mat);

std::vector<MatShape> List_to_vector_MatShape(JNIEnv* env, jobject list);

jobject vector_Ptr_Layer_to_List(JNIEnv* env, std::vector<cv::Ptr<cv::dnn::Layer> >& vs);

std::vector<cv::Ptr<cv::dnn::Layer> > List_to_vector_Ptr_Layer(JNIEnv* env, jobject list);

jobject vector_Target_to_List(JNIEnv* env, std::vector<cv::dnn::Target>& vs);

#endif /* DNN_CONVERTERS_HPP */
119  3rdparty/opencv-4.5.4/modules/dnn/misc/java/test/DnnListRegressionTest.java  vendored  Normal file
@@ -0,0 +1,119 @@
package org.opencv.test.dnn;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfByte;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.dnn.DictValue;
import org.opencv.dnn.Dnn;
import org.opencv.dnn.Layer;
import org.opencv.dnn.Net;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.test.OpenCVTestCase;

/*
 * regression test for #12324,
 * testing various java.util.List invocations,
 * which use the LIST_GET macro
 */

public class DnnListRegressionTest extends OpenCVTestCase {

    private final static String ENV_OPENCV_DNN_TEST_DATA_PATH = "OPENCV_DNN_TEST_DATA_PATH";

    private final static String ENV_OPENCV_TEST_DATA_PATH = "OPENCV_TEST_DATA_PATH";

    String modelFileName = "";
    String sourceImageFile = "";

    Net net;

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        String envDnnTestDataPath = System.getenv(ENV_OPENCV_DNN_TEST_DATA_PATH);

        if(envDnnTestDataPath == null){
            isTestCaseEnabled = false;
            return;
        }

        File dnnTestDataPath = new File(envDnnTestDataPath);
        modelFileName = new File(dnnTestDataPath, "dnn/tensorflow_inception_graph.pb").toString();

        String envTestDataPath = System.getenv(ENV_OPENCV_TEST_DATA_PATH);

        if(envTestDataPath == null) throw new Exception(ENV_OPENCV_TEST_DATA_PATH + " has to be defined!");

        File testDataPath = new File(envTestDataPath);

        File f = new File(testDataPath, "dnn/grace_hopper_227.png");
        sourceImageFile = f.toString();
        if(!f.exists()) throw new Exception("Test image is missing: " + sourceImageFile);

        net = Dnn.readNetFromTensorflow(modelFileName);

        Mat image = Imgcodecs.imread(sourceImageFile);
        assertNotNull("Loading image from file failed!", image);

        Mat inputBlob = Dnn.blobFromImage(image, 1.0, new Size(224, 224), new Scalar(0), true, true);
        assertNotNull("Converting image to blob failed!", inputBlob);

        net.setInput(inputBlob, "input");
    }

    public void testSetInputsNames() {
        List<String> inputs = new ArrayList();
        inputs.add("input");
        try {
            net.setInputsNames(inputs);
        } catch(Exception e) {
            fail("Net setInputsNames failed: " + e.getMessage());
        }
    }

    public void testForward() {
        List<Mat> outs = new ArrayList();
        List<String> outNames = new ArrayList();
        outNames.add("softmax2");
        try {
            net.forward(outs,outNames);
        } catch(Exception e) {
            fail("Net forward failed: " + e.getMessage());
        }
    }

    public void testGetMemoryConsumption() {
        int layerId = 1;
        List<MatOfInt> netInputShapes = new ArrayList();
        netInputShapes.add(new MatOfInt(1, 3, 224, 224));
        long[] weights=null;
        long[] blobs=null;
        try {
            net.getMemoryConsumption(layerId, netInputShapes, weights, blobs);
        } catch(Exception e) {
            fail("Net getMemoryConsumption failed: " + e.getMessage());
        }
    }

    public void testGetFLOPS() {
        int layerId = 1;
        List<MatOfInt> netInputShapes = new ArrayList();
        netInputShapes.add(new MatOfInt(1, 3, 224, 224));
        try {
            net.getFLOPS(layerId, netInputShapes);
        } catch(Exception e) {
            fail("Net getFLOPS failed: " + e.getMessage());
        }
    }
}
149  3rdparty/opencv-4.5.4/modules/dnn/misc/java/test/DnnTensorFlowTest.java  vendored  Normal file
@@ -0,0 +1,149 @@
package org.opencv.test.dnn;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfByte;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.dnn.DictValue;
import org.opencv.dnn.Dnn;
import org.opencv.dnn.Layer;
import org.opencv.dnn.Net;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.test.OpenCVTestCase;

public class DnnTensorFlowTest extends OpenCVTestCase {

    private final static String ENV_OPENCV_DNN_TEST_DATA_PATH = "OPENCV_DNN_TEST_DATA_PATH";

    private final static String ENV_OPENCV_TEST_DATA_PATH = "OPENCV_TEST_DATA_PATH";

    String modelFileName = "";
    String sourceImageFile = "";

    Net net;

    private static void normAssert(Mat ref, Mat test) {
        final double l1 = 1e-5;
        final double lInf = 1e-4;
        double normL1 = Core.norm(ref, test, Core.NORM_L1) / ref.total();
        double normLInf = Core.norm(ref, test, Core.NORM_INF) / ref.total();
        assertTrue(normL1 < l1);
        assertTrue(normLInf < lInf);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        String envDnnTestDataPath = System.getenv(ENV_OPENCV_DNN_TEST_DATA_PATH);

        if(envDnnTestDataPath == null){
            isTestCaseEnabled = false;
            return;
        }

        File dnnTestDataPath = new File(envDnnTestDataPath);
        modelFileName = new File(dnnTestDataPath, "dnn/tensorflow_inception_graph.pb").toString();

        String envTestDataPath = System.getenv(ENV_OPENCV_TEST_DATA_PATH);

        if(envTestDataPath == null) throw new Exception(ENV_OPENCV_TEST_DATA_PATH + " has to be defined!");

        File testDataPath = new File(envTestDataPath);

        File f = new File(testDataPath, "dnn/grace_hopper_227.png");
        sourceImageFile = f.toString();
        if(!f.exists()) throw new Exception("Test image is missing: " + sourceImageFile);

        net = Dnn.readNetFromTensorflow(modelFileName);
    }

    public void testGetLayerTypes() {
        List<String> layertypes = new ArrayList();
        net.getLayerTypes(layertypes);

        assertFalse("No layer types returned!", layertypes.isEmpty());
    }

    public void testGetLayer() {
        List<String> layernames = net.getLayerNames();

        assertFalse("Test net returned no layers!", layernames.isEmpty());

        String testLayerName = layernames.get(0);

        DictValue layerId = new DictValue(testLayerName);

        assertEquals("DictValue did not return the string, which was used in constructor!", testLayerName, layerId.getStringValue());

        Layer layer = net.getLayer(layerId);

        assertEquals("Layer name does not match the expected value!", testLayerName, layer.get_name());

    }

    public void checkInceptionNet(Net net)
    {
        Mat image = Imgcodecs.imread(sourceImageFile);
        assertNotNull("Loading image from file failed!", image);

        Mat inputBlob = Dnn.blobFromImage(image, 1.0, new Size(224, 224), new Scalar(0), true, true);
        assertNotNull("Converting image to blob failed!", inputBlob);

        net.setInput(inputBlob, "input");

        Mat result = new Mat();
        try {
            net.setPreferableBackend(Dnn.DNN_BACKEND_OPENCV);
            result = net.forward("softmax2");
        }
        catch (Exception e) {
            fail("DNN forward failed: " + e.getMessage());
        }
        assertNotNull("Net returned no result!", result);

        result = result.reshape(1, 1);
        Core.MinMaxLocResult minmax = Core.minMaxLoc(result);
        assertEquals("Wrong prediction", (int)minmax.maxLoc.x, 866);

        Mat top5RefScores = new MatOfFloat(new float[] {
            0.63032645f, 0.2561979f, 0.032181446f, 0.015721032f, 0.014785315f
        }).reshape(1, 1);

        Core.sort(result, result, Core.SORT_DESCENDING);

        normAssert(result.colRange(0, 5), top5RefScores);
    }

    public void testTestNetForward() {
        checkInceptionNet(net);
    }

    public void testReadFromBuffer() {
        File modelFile = new File(modelFileName);
        byte[] modelBuffer = new byte[ (int)modelFile.length() ];

        try {
            FileInputStream fis = new FileInputStream(modelFile);
            fis.read(modelBuffer);
            fis.close();
        } catch (IOException e) {
            fail("Failed to read a model: " + e.getMessage());
        }
        net = Dnn.readNetFromTensorflow(new MatOfByte(modelBuffer));
        checkInceptionNet(net);
    }

    public void testGetAvailableTargets() {
        List<Integer> targets = Dnn.getAvailableTargets(Dnn.DNN_BACKEND_OPENCV);
        assertTrue(targets.contains(Dnn.DNN_TARGET_CPU));
    }
}
46  3rdparty/opencv-4.5.4/modules/dnn/misc/objc/gen_dict.json  vendored  Normal file
@@ -0,0 +1,46 @@
{
    "func_arg_fix" : {
        "Dnn": {
            "(Net*)readNetFromCaffe:(NSString*)prototxt caffeModel:(NSString*)caffeModel" : { "readNetFromCaffe" : {"name" : "readNetFromCaffeFile"} },
            "(Net*)readNetFromCaffe:(ByteVector*)bufferProto bufferModel:(ByteVector*)bufferModel" : { "readNetFromCaffe" : {"name" : "readNetFromCaffeBuffer"} },
            "(Net*)readNetFromDarknet:(NSString*)cfgFile darknetModel:(NSString*)darknetModel" : { "readNetFromDarknet" : {"name" : "readNetFromDarknetFile"} },
            "(Net*)readNetFromDarknet:(ByteVector*)bufferCfg bufferModel:(ByteVector*)bufferModel" : { "readNetFromDarknet" : {"name" : "readNetFromDarknetBuffer"} },
            "(Net*)readNetFromONNX:(NSString*)onnxFile" : { "readNetFromONNX" : {"name" : "readNetFromONNXFile"} },
            "(Net*)readNetFromONNX:(ByteVector*)buffer" : { "readNetFromONNX" : {"name" : "readNetFromONNXBuffer"} },
            "(Net*)readNetFromTensorflow:(NSString*)model config:(NSString*)config" : { "readNetFromTensorflow" : {"name" : "readNetFromTensorflowFile"} },
            "(Net*)readNetFromTensorflow:(ByteVector*)bufferModel bufferConfig:(ByteVector*)bufferConfig" : { "readNetFromTensorflow" : {"name" : "readNetFromTensorflowBuffer"} }
        },
        "Net": {
            "(void)forward:(NSMutableArray<Mat*>*)outputBlobs outputName:(NSString*)outputName" : { "forward" : {"name" : "forwardOutputBlobs"} },
            "(void)forward:(NSMutableArray<Mat*>*)outputBlobs outBlobNames:(NSArray<NSString*>*)outBlobNames" : { "forward" : {"name" : "forwardOutputBlobs"} },
            "(void)forwardAndRetrieve:(NSMutableArray<NSMutableArray<Mat*>*>*)outputBlobs outBlobNames:(NSArray<NSString*>*)outBlobNames" : { "forward" : {"swift_name" : "forwardAndRetrieve"} },
            "(long)getFLOPS:(IntVector*)netInputShape" : { "getFLOPS" : {"name" : "getFLOPSWithNetInputShape"} },
            "(long)getFLOPS:(NSArray<IntVector*>*)netInputShapes" : { "getFLOPS" : {"name" : "getFLOPSWithNetInputShapes"} },
            "(long)getFLOPS:(int)layerId netInputShape:(IntVector*)netInputShape" : { "getFLOPS" : {"name" : "getFLOPSWithLayerId"} },
            "(long)getFLOPS:(int)layerId netInputShapes:(NSArray<IntVector*>*)netInputShapes" : { "getFLOPS" : {"name" : "getFLOPSWithLayerId"} },
            "(void)getLayersShapes:(IntVector*)netInputShape layersIds:(IntVector*)layersIds inLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)inLayersShapes outLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)outLayersShapes" : { "getLayersShapes" : {"name" : "getLayersShapesWithNetInputShape"} },
            "(void)getLayersShapes:(NSArray<IntVector*>*)netInputShapes layersIds:(IntVector*)layersIds inLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)inLayersShapes outLayersShapes:(NSMutableArray<NSMutableArray<IntVector*>*>*)outLayersShapes" : { "getLayersShapes" : {"name" : "getLayersShapesWithNetInputShapes"} }
        }
    },
    "type_dict": {
        "MatShape": {
            "objc_type": "IntVector*",
            "to_cpp": "%(n)s.nativeRef",
            "from_cpp": "[IntVector fromNative:%(n)s]",
            "cast_to": "std::vector<int>"
        },
        "vector_MatShape": {
            "objc_type": "IntVector*",
            "v_type": "IntVector"
        },
        "vector_vector_MatShape": {
            "objc_type": "IntVector*",
            "v_v_type": "IntVector"
        },
        "LayerId": {
            "objc_type": "DictValue*",
            "to_cpp": "*(cv::dnn::DictValue*)(%(n)s.nativePtr)",
            "from_cpp": "[DictValue fromNative:%(n)s]"
        }
    }
}
6977  3rdparty/opencv-4.5.4/modules/dnn/misc/onnx/opencv-onnx.pb.cc  vendored  Normal file
File diff suppressed because it is too large
5849  3rdparty/opencv-4.5.4/modules/dnn/misc/onnx/opencv-onnx.pb.h  vendored  Normal file
File diff suppressed because it is too large
219  3rdparty/opencv-4.5.4/modules/dnn/misc/python/pyopencv_dnn.hpp  vendored  Normal file
@@ -0,0 +1,219 @@
#ifdef HAVE_OPENCV_DNN
typedef dnn::DictValue LayerId;
typedef std::vector<dnn::MatShape> vector_MatShape;
typedef std::vector<std::vector<dnn::MatShape> > vector_vector_MatShape;

template<>
bool pyopencv_to(PyObject *o, dnn::DictValue &dv, const ArgInfo& info)
{
    CV_UNUSED(info);
    if (!o || o == Py_None)
        return true; //Current state will be used
    else if (PyLong_Check(o))
    {
        dv = dnn::DictValue((int64)PyLong_AsLongLong(o));
        return true;
    }
    else if (PyInt_Check(o))
    {
        dv = dnn::DictValue((int64)PyInt_AS_LONG(o));
        return true;
    }
    else if (PyFloat_Check(o))
    {
        dv = dnn::DictValue(PyFloat_AsDouble(o));
        return true;
    }
    else
    {
        std::string str;
        if (getUnicodeString(o, str))
        {
            dv = dnn::DictValue(str);
            return true;
        }
    }
    return false;
}

template<typename T>
PyObject* pyopencv_from(const dnn::DictValue &dv)
{
    if (dv.size() > 1)
    {
        std::vector<T> vec(dv.size());
        for (int i = 0; i < dv.size(); ++i)
            vec[i] = dv.get<T>(i);
        return pyopencv_from_generic_vec(vec);
    }
    else
        return pyopencv_from(dv.get<T>());
}

template<>
PyObject* pyopencv_from(const dnn::DictValue &dv)
{
    if (dv.isInt()) return pyopencv_from<int>(dv);
    if (dv.isReal()) return pyopencv_from<float>(dv);
    if (dv.isString()) return pyopencv_from<String>(dv);
    CV_Error(Error::StsNotImplemented, "Unknown value type");
    return NULL;
}

template<>
PyObject* pyopencv_from(const dnn::LayerParams& lp)
{
    PyObject* dict = PyDict_New();
    for (std::map<String, dnn::DictValue>::const_iterator it = lp.begin(); it != lp.end(); ++it)
    {
        CV_Assert(!PyDict_SetItemString(dict, it->first.c_str(), pyopencv_from(it->second)));
    }
    return dict;
}

template<>
PyObject* pyopencv_from(const std::vector<dnn::Target> &t)
{
    return pyopencv_from(std::vector<int>(t.begin(), t.end()));
}

class pycvLayer CV_FINAL : public dnn::Layer
{
public:
    pycvLayer(const dnn::LayerParams &params, PyObject* pyLayer) : Layer(params)
    {
        PyGILState_STATE gstate;
        gstate = PyGILState_Ensure();

        PyObject* args = PyTuple_New(2);
        CV_Assert(!PyTuple_SetItem(args, 0, pyopencv_from(params)));
        CV_Assert(!PyTuple_SetItem(args, 1, pyopencv_from(params.blobs)));
        o = PyObject_CallObject(pyLayer, args);

        Py_DECREF(args);
        PyGILState_Release(gstate);
        if (!o)
            CV_Error(Error::StsError, "Failed to create an instance of custom layer");
    }

    static void registerLayer(const std::string& type, PyObject* o)
    {
        std::map<std::string, std::vector<PyObject*> >::iterator it = pyLayers.find(type);
        if (it != pyLayers.end())
            it->second.push_back(o);
        else
            pyLayers[type] = std::vector<PyObject*>(1, o);
    }

    static void unregisterLayer(const std::string& type)
    {
        std::map<std::string, std::vector<PyObject*> >::iterator it = pyLayers.find(type);
        if (it != pyLayers.end())
        {
            if (it->second.size() > 1)
                it->second.pop_back();
            else
                pyLayers.erase(it);
        }
    }

    static Ptr<dnn::Layer> create(dnn::LayerParams &params)
    {
        std::map<std::string, std::vector<PyObject*> >::iterator it = pyLayers.find(params.type);
        if (it == pyLayers.end())
            CV_Error(Error::StsNotImplemented, "Layer with a type \"" + params.type +
                                               "\" is not implemented");
        CV_Assert(!it->second.empty());
        return Ptr<dnn::Layer>(new pycvLayer(params, it->second.back()));
    }

    virtual bool getMemoryShapes(const std::vector<std::vector<int> > &inputs,
                                 const int,
                                 std::vector<std::vector<int> > &outputs,
                                 std::vector<std::vector<int> > &) const CV_OVERRIDE
    {
        PyGILState_STATE gstate;
        gstate = PyGILState_Ensure();

        PyObject* args = PyList_New(inputs.size());
        for(size_t i = 0; i < inputs.size(); ++i)
            PyList_SetItem(args, i, pyopencv_from_generic_vec(inputs[i]));

        PyObject* res = PyObject_CallMethodObjArgs(o, PyString_FromString("getMemoryShapes"), args, NULL);
        Py_DECREF(args);
        PyGILState_Release(gstate);
        if (!res)
            CV_Error(Error::StsNotImplemented, "Failed to call \"getMemoryShapes\" method");
        CV_Assert(pyopencv_to_generic_vec(res, outputs, ArgInfo("", 0)));
        return false;
    }

    virtual void forward(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
    {
        PyGILState_STATE gstate;
        gstate = PyGILState_Ensure();

        std::vector<Mat> inputs, outputs;
        inputs_arr.getMatVector(inputs);
        outputs_arr.getMatVector(outputs);

        PyObject* args = pyopencv_from(inputs);
        PyObject* res = PyObject_CallMethodObjArgs(o, PyString_FromString("forward"), args, NULL);
        Py_DECREF(args);
        if (!res)
            CV_Error(Error::StsNotImplemented, "Failed to call \"forward\" method");

        std::vector<Mat> pyOutputs;
        CV_Assert(pyopencv_to(res, pyOutputs, ArgInfo("", 0)));
        Py_DECREF(res);
        PyGILState_Release(gstate);

        CV_Assert(pyOutputs.size() == outputs.size());
        for (size_t i = 0; i < outputs.size(); ++i)
        {
            CV_Assert(pyOutputs[i].size == outputs[i].size);
            CV_Assert(pyOutputs[i].type() == outputs[i].type());
            pyOutputs[i].copyTo(outputs[i]);
        }
    }

private:
    // Map layers types to python classes.
    static std::map<std::string, std::vector<PyObject*> > pyLayers;
    PyObject* o;  // Instance of implemented python layer.
};

std::map<std::string, std::vector<PyObject*> > pycvLayer::pyLayers;

static PyObject *pyopencv_cv_dnn_registerLayer(PyObject*, PyObject *args, PyObject *kw)
{
    const char *keywords[] = { "type", "class", NULL };
    char* layerType;
    PyObject *classInstance;

    if (!PyArg_ParseTupleAndKeywords(args, kw, "sO", (char**)keywords, &layerType, &classInstance))
        return NULL;
    if (!PyCallable_Check(classInstance)) {
        PyErr_SetString(PyExc_TypeError, "class must be callable");
        return NULL;
    }

    pycvLayer::registerLayer(layerType, classInstance);
    dnn::LayerFactory::registerLayer(layerType, pycvLayer::create);
    Py_RETURN_NONE;
}

static PyObject *pyopencv_cv_dnn_unregisterLayer(PyObject*, PyObject *args, PyObject *kw)
{
    const char *keywords[] = { "type", NULL };
    char* layerType;

    if (!PyArg_ParseTupleAndKeywords(args, kw, "s", (char**)keywords, &layerType))
        return NULL;

    pycvLayer::unregisterLayer(layerType);
    dnn::LayerFactory::unregisterLayer(layerType);
    Py_RETURN_NONE;
}

#endif  // HAVE_OPENCV_DNN
415  3rdparty/opencv-4.5.4/modules/dnn/misc/python/test/test_dnn.py  vendored  Normal file
@@ -0,0 +1,415 @@
#!/usr/bin/env python
import os
import cv2 as cv
import numpy as np

from tests_common import NewOpenCVTests, unittest

def normAssert(test, a, b, msg=None, lInf=1e-5):
    test.assertLess(np.max(np.abs(a - b)), lInf, msg)

def inter_area(box1, box2):
    x_min, x_max = max(box1[0], box2[0]), min(box1[2], box2[2])
    y_min, y_max = max(box1[1], box2[1]), min(box1[3], box2[3])
    return (x_max - x_min) * (y_max - y_min)

def area(box):
    return (box[2] - box[0]) * (box[3] - box[1])

def box2str(box):
    left, top = box[0], box[1]
    width, height = box[2] - left, box[3] - top
    return '[%f x %f from (%f, %f)]' % (width, height, left, top)

def normAssertDetections(test, refClassIds, refScores, refBoxes, testClassIds, testScores, testBoxes,
                         confThreshold=0.0, scores_diff=1e-5, boxes_iou_diff=1e-4):
    matchedRefBoxes = [False] * len(refBoxes)
    errMsg = ''
    for i in range(len(testBoxes)):
        testScore = testScores[i]
        if testScore < confThreshold:
            continue

        testClassId, testBox = testClassIds[i], testBoxes[i]
        matched = False
        for j in range(len(refBoxes)):
            if (not matchedRefBoxes[j]) and testClassId == refClassIds[j] and \
               abs(testScore - refScores[j]) < scores_diff:
                interArea = inter_area(testBox, refBoxes[j])
                iou = interArea / (area(testBox) + area(refBoxes[j]) - interArea)
                if abs(iou - 1.0) < boxes_iou_diff:
                    matched = True
                    matchedRefBoxes[j] = True
        if not matched:
            errMsg += '\nUnmatched prediction: class %d score %f box %s' % (testClassId, testScore, box2str(testBox))

    for i in range(len(refBoxes)):
        if (not matchedRefBoxes[i]) and refScores[i] > confThreshold:
            errMsg += '\nUnmatched reference: class %d score %f box %s' % (refClassIds[i], refScores[i], box2str(refBoxes[i]))
    if errMsg:
        test.fail(errMsg)

def printParams(backend, target):
    backendNames = {
        cv.dnn.DNN_BACKEND_OPENCV: 'OCV',
        cv.dnn.DNN_BACKEND_INFERENCE_ENGINE: 'DLIE'
    }
    targetNames = {
        cv.dnn.DNN_TARGET_CPU: 'CPU',
        cv.dnn.DNN_TARGET_OPENCL: 'OCL',
        cv.dnn.DNN_TARGET_OPENCL_FP16: 'OCL_FP16',
        cv.dnn.DNN_TARGET_MYRIAD: 'MYRIAD'
    }
    print('%s/%s' % (backendNames[backend], targetNames[target]))

def getDefaultThreshold(target):
    if target == cv.dnn.DNN_TARGET_OPENCL_FP16 or target == cv.dnn.DNN_TARGET_MYRIAD:
        return 4e-3
    else:
        return 1e-5

testdata_required = bool(os.environ.get('OPENCV_DNN_TEST_REQUIRE_TESTDATA', False))

g_dnnBackendsAndTargets = None

class dnn_test(NewOpenCVTests):

    def setUp(self):
        super(dnn_test, self).setUp()

        global g_dnnBackendsAndTargets
        if g_dnnBackendsAndTargets is None:
            g_dnnBackendsAndTargets = self.initBackendsAndTargets()
        self.dnnBackendsAndTargets = g_dnnBackendsAndTargets

    def initBackendsAndTargets(self):
        self.dnnBackendsAndTargets = [
            [cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_CPU],
        ]

        if self.checkIETarget(cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_CPU):
            self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_CPU])
        if self.checkIETarget(cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_MYRIAD):
            self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_MYRIAD])

        if cv.ocl.haveOpenCL() and cv.ocl.useOpenCL():
            self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_OPENCL])
            self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_OPENCV, cv.dnn.DNN_TARGET_OPENCL_FP16])
            if cv.ocl_Device.getDefault().isIntel():
                if self.checkIETarget(cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_OPENCL):
                    self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_OPENCL])
                if self.checkIETarget(cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_OPENCL_FP16):
                    self.dnnBackendsAndTargets.append([cv.dnn.DNN_BACKEND_INFERENCE_ENGINE, cv.dnn.DNN_TARGET_OPENCL_FP16])
        return self.dnnBackendsAndTargets

    def find_dnn_file(self, filename, required=True):
        if not required:
            required = testdata_required
        return self.find_file(filename, [os.environ.get('OPENCV_DNN_TEST_DATA_PATH', os.getcwd()),
                                         os.environ['OPENCV_TEST_DATA_PATH']],
                              required=required)

    def checkIETarget(self, backend, target):
        proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt')
        model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel')
        net = cv.dnn.readNet(proto, model)
        net.setPreferableBackend(backend)
        net.setPreferableTarget(target)
        inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32)
        try:
            net.setInput(inp)
            net.forward()
        except BaseException as e:
            return False
        return True

    def test_getAvailableTargets(self):
        targets = cv.dnn.getAvailableTargets(cv.dnn.DNN_BACKEND_OPENCV)
        self.assertTrue(cv.dnn.DNN_TARGET_CPU in targets)

    def test_blobFromImage(self):
        np.random.seed(324)

        width = 6
        height = 7
        scale = 1.0/127.5
        mean = (10, 20, 30)

        # Test arguments names.
        img = np.random.randint(0, 255, [4, 5, 3]).astype(np.uint8)
        blob = cv.dnn.blobFromImage(img, scale, (width, height), mean, True, False)
        blob_args = cv.dnn.blobFromImage(img, scalefactor=scale, size=(width, height),
                                         mean=mean, swapRB=True, crop=False)
        normAssert(self, blob, blob_args)

        # Test values.
        target = cv.resize(img, (width, height), interpolation=cv.INTER_LINEAR)
        target = target.astype(np.float32)
        target = target[:,:,[2, 1, 0]]  # BGR2RGB
        target[:,:,0] -= mean[0]
        target[:,:,1] -= mean[1]
        target[:,:,2] -= mean[2]
        target *= scale
        target = target.transpose(2, 0, 1).reshape(1, 3, height, width)  # to NCHW
        normAssert(self, blob, target)


    def test_model(self):
        img_path = self.find_dnn_file("dnn/street.png")
        weights = self.find_dnn_file("dnn/MobileNetSSD_deploy.caffemodel", required=False)
        config = self.find_dnn_file("dnn/MobileNetSSD_deploy.prototxt", required=False)
        if weights is None or config is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/MobileNetSSD_deploy.{prototxt/caffemodel}). Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        frame = cv.imread(img_path)
        model = cv.dnn_DetectionModel(weights, config)
        model.setInputParams(size=(300, 300), mean=(127.5, 127.5, 127.5), scale=1.0/127.5)

        iouDiff = 0.05
        confThreshold = 0.0001
        nmsThreshold = 0
        scoreDiff = 1e-3

        classIds, confidences, boxes = model.detect(frame, confThreshold, nmsThreshold)

        refClassIds = (7, 15)
        refConfidences = (0.9998, 0.8793)
        refBoxes = ((328, 238, 85, 102), (101, 188, 34, 138))

        normAssertDetections(self, refClassIds, refConfidences, refBoxes,
                             classIds, confidences, boxes,confThreshold, scoreDiff, iouDiff)

        for box in boxes:
            cv.rectangle(frame, box, (0, 255, 0))
            cv.rectangle(frame, np.array(box), (0, 255, 0))
            cv.rectangle(frame, tuple(box), (0, 255, 0))
            cv.rectangle(frame, list(box), (0, 255, 0))


    def test_classification_model(self):
        img_path = self.find_dnn_file("dnn/googlenet_0.png")
        weights = self.find_dnn_file("dnn/squeezenet_v1.1.caffemodel", required=False)
        config = self.find_dnn_file("dnn/squeezenet_v1.1.prototxt")
        ref = np.load(self.find_dnn_file("dnn/squeezenet_v1.1_prob.npy"))
        if weights is None or config is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/squeezenet_v1.1.{prototxt/caffemodel}). Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        frame = cv.imread(img_path)
        model = cv.dnn_ClassificationModel(config, weights)
        model.setInputSize(227, 227)
        model.setInputCrop(True)

        out = model.predict(frame)
        normAssert(self, out, ref)


    def test_textdetection_model(self):
        img_path = self.find_dnn_file("dnn/text_det_test1.png")
        weights = self.find_dnn_file("dnn/onnx/models/DB_TD500_resnet50.onnx", required=False)
        if weights is None:
            raise unittest.SkipTest("Missing DNN test files (onnx/models/DB_TD500_resnet50.onnx). Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        frame = cv.imread(img_path)
        scale = 1.0 / 255.0
        size = (736, 736)
        mean = (122.67891434, 116.66876762, 104.00698793)

        model = cv.dnn_TextDetectionModel_DB(weights)
        model.setInputParams(scale, size, mean)
        out, _ = model.detect(frame)

        self.assertTrue(type(out) == tuple, msg='actual type {}'.format(str(type(out))))
        self.assertTrue(np.array(out).shape == (2, 4, 2))


    def test_face_detection(self):
        proto = self.find_dnn_file('dnn/opencv_face_detector.prototxt')
        model = self.find_dnn_file('dnn/opencv_face_detector.caffemodel', required=False)
        if proto is None or model is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/opencv_face_detector.{prototxt/caffemodel}). Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        img = self.get_sample('gpu/lbpcascade/er.png')
        blob = cv.dnn.blobFromImage(img, mean=(104, 177, 123), swapRB=False, crop=False)

        ref = [[0, 1, 0.99520785, 0.80997437, 0.16379407, 0.87996572, 0.26685631],
               [0, 1, 0.9934696, 0.2831718, 0.50738752, 0.345781, 0.5985168],
               [0, 1, 0.99096733, 0.13629119, 0.24892329, 0.19756334, 0.3310290],
               [0, 1, 0.98977017, 0.23901358, 0.09084064, 0.29902688, 0.1769477],
               [0, 1, 0.97203469, 0.67965847, 0.06876482, 0.73999709, 0.1513494],
               [0, 1, 0.95097077, 0.51901293, 0.45863652, 0.5777427, 0.5347801]]

        print('\n')
        for backend, target in self.dnnBackendsAndTargets:
            printParams(backend, target)

            net = cv.dnn.readNet(proto, model)
            net.setPreferableBackend(backend)
            net.setPreferableTarget(target)
            net.setInput(blob)
            out = net.forward().reshape(-1, 7)

            scoresDiff = 4e-3 if target in [cv.dnn.DNN_TARGET_OPENCL_FP16, cv.dnn.DNN_TARGET_MYRIAD] else 1e-5
            iouDiff = 2e-2 if target in [cv.dnn.DNN_TARGET_OPENCL_FP16, cv.dnn.DNN_TARGET_MYRIAD] else 1e-4

            ref = np.array(ref, np.float32)
            refClassIds, testClassIds = ref[:, 1], out[:, 1]
            refScores, testScores = ref[:, 2], out[:, 2]
            refBoxes, testBoxes = ref[:, 3:], out[:, 3:]

            normAssertDetections(self, refClassIds, refScores, refBoxes, testClassIds,
                                 testScores, testBoxes, 0.5, scoresDiff, iouDiff)

    def test_async(self):
        timeout = 10*1000*10**6  # in nanoseconds (10 sec)
        proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt')
        model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel')
        if proto is None or model is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/layers/layer_convolution.{prototxt/caffemodel}). Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        print('\n')
        for backend, target in self.dnnBackendsAndTargets:
            if backend != cv.dnn.DNN_BACKEND_INFERENCE_ENGINE:
                continue

            printParams(backend, target)

            netSync = cv.dnn.readNet(proto, model)
            netSync.setPreferableBackend(backend)
            netSync.setPreferableTarget(target)

            netAsync = cv.dnn.readNet(proto, model)
            netAsync.setPreferableBackend(backend)
            netAsync.setPreferableTarget(target)

            # Generate inputs
            numInputs = 10
            inputs = []
            for _ in range(numInputs):
                inputs.append(np.random.standard_normal([2, 6, 75, 113]).astype(np.float32))

            # Run synchronously
            refs = []
            for i in range(numInputs):
                netSync.setInput(inputs[i])
                refs.append(netSync.forward())

            # Run asynchronously. To make test more robust, process inputs in the reversed order.
            outs = []
            for i in reversed(range(numInputs)):
                netAsync.setInput(inputs[i])
                outs.insert(0, netAsync.forwardAsync())

            for i in reversed(range(numInputs)):
                ret, result = outs[i].get(timeoutNs=float(timeout))
                self.assertTrue(ret)
                normAssert(self, refs[i], result, 'Index: %d' % i, 1e-10)

    def test_nms(self):
        confs = (1, 1)
        rects = ((0, 0, 0.4, 0.4), (0, 0, 0.2, 0.4))  # 0.5 overlap

        self.assertTrue(all(cv.dnn.NMSBoxes(rects, confs, 0, 0.6).ravel() == (0, 1)))

    def test_custom_layer(self):
        class CropLayer(object):
            def __init__(self, params, blobs):
                self.xstart = 0
                self.xend = 0
                self.ystart = 0
                self.yend = 0
            # Our layer receives two inputs. We need to crop the first input blob
            # to match a shape of the second one (keeping batch size and number of channels)
            def getMemoryShapes(self, inputs):
                inputShape, targetShape = inputs[0], inputs[1]
                batchSize, numChannels = inputShape[0], inputShape[1]
                height, width = targetShape[2], targetShape[3]
                self.ystart = (inputShape[2] - targetShape[2]) // 2
                self.xstart = (inputShape[3] - targetShape[3]) // 2
                self.yend = self.ystart + height
                self.xend = self.xstart + width
                return [[batchSize, numChannels, height, width]]
            def forward(self, inputs):
                return [inputs[0][:,:,self.ystart:self.yend,self.xstart:self.xend]]

        cv.dnn_registerLayer('CropCaffe', CropLayer)
        proto = '''
        name: "TestCrop"
        input: "input"
        input_shape
        {
            dim: 1
            dim: 2
            dim: 5
            dim: 5
        }
        input: "roi"
        input_shape
        {
            dim: 1
            dim: 2
            dim: 3
            dim: 3
        }
        layer {
          name: "Crop"
          type: "CropCaffe"
          bottom: "input"
          bottom: "roi"
          top: "Crop"
        }'''

        net = cv.dnn.readNetFromCaffe(bytearray(proto.encode()))
        for backend, target in self.dnnBackendsAndTargets:
            if backend != cv.dnn.DNN_BACKEND_OPENCV:
                continue

            printParams(backend, target)

            net.setPreferableBackend(backend)
            net.setPreferableTarget(target)
            src_shape = [1, 2, 5, 5]
            dst_shape = [1, 2, 3, 3]
            inp = np.arange(0, np.prod(src_shape), dtype=np.float32).reshape(src_shape)
            roi = np.empty(dst_shape, dtype=np.float32)
            net.setInput(inp, "input")
            net.setInput(roi, "roi")
            out = net.forward()
            ref = inp[:, :, 1:4, 1:4]
            normAssert(self, out, ref)

        cv.dnn_unregisterLayer('CropCaffe')

    # check that dnn module can work with 3D tensor as input for network
    def test_input_3d(self):
        model = self.find_dnn_file('dnn/onnx/models/hidden_lstm.onnx')
        input_file = self.find_dnn_file('dnn/onnx/data/input_hidden_lstm.npy')
        output_file = self.find_dnn_file('dnn/onnx/data/output_hidden_lstm.npy')
        if model is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/onnx/models/hidden_lstm.onnx). "
                                    "Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")
        if input_file is None or output_file is None:
            raise unittest.SkipTest("Missing DNN test files (dnn/onnx/data/{input/output}_hidden_lstm.npy). "
                                    "Verify OPENCV_DNN_TEST_DATA_PATH configuration parameter.")

        input = np.load(input_file)
        # we have to expand the shape of input tensor because Python bindings cut 3D tensors to 2D
        # it should be fixed in future. see : https://github.com/opencv/opencv/issues/19091
        # please remove `expand_dims` after that
        input = np.expand_dims(input, axis=3)
        gold_output = np.load(output_file)

        for backend, target in self.dnnBackendsAndTargets:
            printParams(backend, target)

            net = cv.dnn.readNet(model)

            net.setPreferableBackend(backend)
            net.setPreferableTarget(target)

            net.setInput(input)
            real_output = net.forward()

            normAssert(self, real_output, gold_output, "", getDefaultThreshold(target))

if __name__ == '__main__':
    NewOpenCVTests.bootstrap()
365
3rdparty/opencv-4.5.4/modules/dnn/misc/quantize_face_detector.py
vendored
Normal file
365
3rdparty/opencv-4.5.4/modules/dnn/misc/quantize_face_detector.py
vendored
Normal file
@ -0,0 +1,365 @@
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import argparse
|
||||
import cv2 as cv
|
||||
import tensorflow as tf
|
||||
import numpy as np
|
||||
import struct
|
||||
|
||||
if sys.version_info > (3,):
|
||||
long = int
|
||||
|
||||
from tensorflow.python.tools import optimize_for_inference_lib
|
||||
from tensorflow.tools.graph_transforms import TransformGraph
|
||||
from tensorflow.core.framework.node_def_pb2 import NodeDef
|
||||
from google.protobuf import text_format
|
||||
|
||||
parser = argparse.ArgumentParser(description="Use this script to create TensorFlow graph "
|
||||
"with weights from OpenCV's face detection network. "
|
||||
"Only backbone part of SSD model is converted this way. "
|
||||
"Look for .pbtxt configuration file at "
|
||||
"https://github.com/opencv/opencv_extra/tree/master/testdata/dnn/opencv_face_detector.pbtxt")
|
||||
parser.add_argument('--model', help='Path to .caffemodel weights', required=True)
|
||||
parser.add_argument('--proto', help='Path to .prototxt Caffe model definition', required=True)
|
||||
parser.add_argument('--pb', help='Path to output .pb TensorFlow model', required=True)
|
||||
parser.add_argument('--pbtxt', help='Path to output .pbxt TensorFlow graph', required=True)
|
||||
parser.add_argument('--quantize', help='Quantize weights to uint8', action='store_true')
|
||||
parser.add_argument('--fp16', help='Convert weights to half precision floats', action='store_true')
|
||||
args = parser.parse_args()
|
||||
|
||||
assert(not args.quantize or not args.fp16)
|
||||
|
||||
dtype = tf.float16 if args.fp16 else tf.float32
|
||||
|
||||
################################################################################
cvNet = cv.dnn.readNetFromCaffe(args.proto, args.model)

def dnnLayer(name):
    return cvNet.getLayer(long(cvNet.getLayerId(name)))

def scale(x, name):
    with tf.variable_scope(name):
        layer = dnnLayer(name)
        w = tf.Variable(layer.blobs[0].flatten(), dtype=dtype, name='mul')
        if len(layer.blobs) > 1:
            b = tf.Variable(layer.blobs[1].flatten(), dtype=dtype, name='add')
            return tf.nn.bias_add(tf.multiply(x, w), b)
        else:
            return tf.multiply(x, w, name)

def conv(x, name, stride=1, pad='SAME', dilation=1, activ=None):
    with tf.variable_scope(name):
        layer = dnnLayer(name)
        w = tf.Variable(layer.blobs[0].transpose(2, 3, 1, 0), dtype=dtype, name='weights')
        if dilation == 1:
            conv = tf.nn.conv2d(x, filter=w, strides=(1, stride, stride, 1), padding=pad)
        else:
            assert(stride == 1)
            conv = tf.nn.atrous_conv2d(x, w, rate=dilation, padding=pad)

        if len(layer.blobs) > 1:
            b = tf.Variable(layer.blobs[1].flatten(), dtype=dtype, name='bias')
            conv = tf.nn.bias_add(conv, b)
        return activ(conv) if activ else conv

def batch_norm(x, name):
    with tf.variable_scope(name):
        # Unfortunately, TensorFlow's batch normalization layer doesn't work with fp16 input.
        # Here we do a cast to fp32 but remove it in the frozen graph.
        if x.dtype != tf.float32:
            x = tf.cast(x, tf.float32)

        layer = dnnLayer(name)
        assert(len(layer.blobs) >= 3)

        mean = layer.blobs[0].flatten()
        std = layer.blobs[1].flatten()
        scale = layer.blobs[2].flatten()

        eps = 1e-5
        hasBias = len(layer.blobs) > 3
        hasWeights = scale.shape != (1,)

        if not hasWeights and not hasBias:
            mean /= scale[0]
            std /= scale[0]

        mean = tf.Variable(mean, dtype=tf.float32, name='mean')
        std = tf.Variable(std, dtype=tf.float32, name='std')
        gamma = tf.Variable(scale if hasWeights else np.ones(mean.shape), dtype=tf.float32, name='gamma')
        beta = tf.Variable(layer.blobs[3].flatten() if hasBias else np.zeros(mean.shape), dtype=tf.float32, name='beta')
        bn = tf.nn.fused_batch_norm(x, gamma, beta, mean, std, eps,
                                    is_training=False)[0]
        if bn.dtype != dtype:
            bn = tf.cast(bn, dtype)
        return bn

def l2norm(x, name):
    with tf.variable_scope(name):
        layer = dnnLayer(name)
        w = tf.Variable(layer.blobs[0].flatten(), dtype=dtype, name='mul')
        return tf.nn.l2_normalize(x, 3, epsilon=1e-10) * w

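# Editor's aside (illustrative, self-contained; not part of the upstream script):
# conv() above transposes Caffe weight blobs from OIHW (out, in, height, width)
# to TensorFlow's HWIO layout (height, width, in, out) via transpose(2, 3, 1, 0).
# A NumPy-only check of that axis mapping on a made-up OIHW shape:
import numpy as np

_caffe_w = np.zeros((64, 3, 7, 7), dtype=np.float32)   # assumed OIHW weights
_tf_w = _caffe_w.transpose(2, 3, 1, 0)                  # HWIO as expected by tf.nn.conv2d
assert _tf_w.shape == (7, 7, 3, 64)
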
### Graph definition ###########################################################
inp = tf.placeholder(dtype, [1, 300, 300, 3], 'data')
data_bn = batch_norm(inp, 'data_bn')
data_scale = scale(data_bn, 'data_scale')

# Instead of tf.pad we use tf.space_to_batch_nd layers which override convolution's padding strategy to explicit numbers
# data_scale = tf.pad(data_scale, [[0, 0], [3, 3], [3, 3], [0, 0]])
data_scale = tf.space_to_batch_nd(data_scale, [1, 1], [[3, 3], [3, 3]], name='Pad')
conv1_h = conv(data_scale, stride=2, pad='VALID', name='conv1_h')

conv1_bn_h = batch_norm(conv1_h, 'conv1_bn_h')
conv1_scale_h = scale(conv1_bn_h, 'conv1_scale_h')
conv1_relu = tf.nn.relu(conv1_scale_h)
conv1_pool = tf.layers.max_pooling2d(conv1_relu, pool_size=(3, 3), strides=(2, 2),
                                     padding='SAME', name='conv1_pool')

layer_64_1_conv1_h = conv(conv1_pool, 'layer_64_1_conv1_h')
layer_64_1_bn2_h = batch_norm(layer_64_1_conv1_h, 'layer_64_1_bn2_h')
layer_64_1_scale2_h = scale(layer_64_1_bn2_h, 'layer_64_1_scale2_h')
layer_64_1_relu2 = tf.nn.relu(layer_64_1_scale2_h)
layer_64_1_conv2_h = conv(layer_64_1_relu2, 'layer_64_1_conv2_h')
layer_64_1_sum = layer_64_1_conv2_h + conv1_pool

layer_128_1_bn1_h = batch_norm(layer_64_1_sum, 'layer_128_1_bn1_h')
layer_128_1_scale1_h = scale(layer_128_1_bn1_h, 'layer_128_1_scale1_h')
layer_128_1_relu1 = tf.nn.relu(layer_128_1_scale1_h)
layer_128_1_conv1_h = conv(layer_128_1_relu1, stride=2, name='layer_128_1_conv1_h')
layer_128_1_bn2 = batch_norm(layer_128_1_conv1_h, 'layer_128_1_bn2')
layer_128_1_scale2 = scale(layer_128_1_bn2, 'layer_128_1_scale2')
layer_128_1_relu2 = tf.nn.relu(layer_128_1_scale2)
layer_128_1_conv2 = conv(layer_128_1_relu2, 'layer_128_1_conv2')
layer_128_1_conv_expand_h = conv(layer_128_1_relu1, stride=2, name='layer_128_1_conv_expand_h')
layer_128_1_sum = layer_128_1_conv2 + layer_128_1_conv_expand_h

layer_256_1_bn1 = batch_norm(layer_128_1_sum, 'layer_256_1_bn1')
layer_256_1_scale1 = scale(layer_256_1_bn1, 'layer_256_1_scale1')
layer_256_1_relu1 = tf.nn.relu(layer_256_1_scale1)

# layer_256_1_conv1 = tf.pad(layer_256_1_relu1, [[0, 0], [1, 1], [1, 1], [0, 0]])
layer_256_1_conv1 = tf.space_to_batch_nd(layer_256_1_relu1, [1, 1], [[1, 1], [1, 1]], name='Pad_1')
layer_256_1_conv1 = conv(layer_256_1_conv1, stride=2, pad='VALID', name='layer_256_1_conv1')

layer_256_1_bn2 = batch_norm(layer_256_1_conv1, 'layer_256_1_bn2')
layer_256_1_scale2 = scale(layer_256_1_bn2, 'layer_256_1_scale2')
layer_256_1_relu2 = tf.nn.relu(layer_256_1_scale2)
layer_256_1_conv2 = conv(layer_256_1_relu2, 'layer_256_1_conv2')
layer_256_1_conv_expand = conv(layer_256_1_relu1, stride=2, name='layer_256_1_conv_expand')
layer_256_1_sum = layer_256_1_conv2 + layer_256_1_conv_expand

layer_512_1_bn1 = batch_norm(layer_256_1_sum, 'layer_512_1_bn1')
layer_512_1_scale1 = scale(layer_512_1_bn1, 'layer_512_1_scale1')
layer_512_1_relu1 = tf.nn.relu(layer_512_1_scale1)
layer_512_1_conv1_h = conv(layer_512_1_relu1, 'layer_512_1_conv1_h')
layer_512_1_bn2_h = batch_norm(layer_512_1_conv1_h, 'layer_512_1_bn2_h')
layer_512_1_scale2_h = scale(layer_512_1_bn2_h, 'layer_512_1_scale2_h')
layer_512_1_relu2 = tf.nn.relu(layer_512_1_scale2_h)
layer_512_1_conv2_h = conv(layer_512_1_relu2, dilation=2, name='layer_512_1_conv2_h')
layer_512_1_conv_expand_h = conv(layer_512_1_relu1, 'layer_512_1_conv_expand_h')
layer_512_1_sum = layer_512_1_conv2_h + layer_512_1_conv_expand_h

last_bn_h = batch_norm(layer_512_1_sum, 'last_bn_h')
last_scale_h = scale(last_bn_h, 'last_scale_h')
fc7 = tf.nn.relu(last_scale_h, name='last_relu')

conv6_1_h = conv(fc7, 'conv6_1_h', activ=tf.nn.relu)
conv6_2_h = conv(conv6_1_h, stride=2, name='conv6_2_h', activ=tf.nn.relu)
conv7_1_h = conv(conv6_2_h, 'conv7_1_h', activ=tf.nn.relu)

# conv7_2_h = tf.pad(conv7_1_h, [[0, 0], [1, 1], [1, 1], [0, 0]])
conv7_2_h = tf.space_to_batch_nd(conv7_1_h, [1, 1], [[1, 1], [1, 1]], name='Pad_2')
conv7_2_h = conv(conv7_2_h, stride=2, pad='VALID', name='conv7_2_h', activ=tf.nn.relu)

conv8_1_h = conv(conv7_2_h, pad='SAME', name='conv8_1_h', activ=tf.nn.relu)
conv8_2_h = conv(conv8_1_h, pad='VALID', name='conv8_2_h', activ=tf.nn.relu)
conv9_1_h = conv(conv8_2_h, 'conv9_1_h', activ=tf.nn.relu)
conv9_2_h = conv(conv9_1_h, pad='VALID', name='conv9_2_h', activ=tf.nn.relu)

conv4_3_norm = l2norm(layer_256_1_relu1, 'conv4_3_norm')

### Locations and confidences ##################################################
locations = []
confidences = []
flattenLayersNames = []  # Collect all reshape layers names that should be replaced to flattens.
for top, suffix in zip([locations, confidences], ['_mbox_loc', '_mbox_conf']):
    for bottom, name in zip([conv4_3_norm, fc7, conv6_2_h, conv7_2_h, conv8_2_h, conv9_2_h],
                            ['conv4_3_norm', 'fc7', 'conv6_2', 'conv7_2', 'conv8_2', 'conv9_2']):
        name += suffix
        flat = tf.layers.flatten(conv(bottom, name))
        flattenLayersNames.append(flat.name[:flat.name.find(':')])
        top.append(flat)

mbox_loc = tf.concat(locations, axis=-1, name='mbox_loc')
mbox_conf = tf.concat(confidences, axis=-1, name='mbox_conf')

total = int(np.prod(mbox_conf.shape[1:]))
mbox_conf_reshape = tf.reshape(mbox_conf, [-1, 2], name='mbox_conf_reshape')
mbox_conf_softmax = tf.nn.softmax(mbox_conf_reshape, name='mbox_conf_softmax')
mbox_conf_flatten = tf.reshape(mbox_conf_softmax, [-1, total], name='mbox_conf_flatten')
flattenLayersNames.append('mbox_conf_flatten')

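# Editor's aside (illustrative, self-contained; not part of the upstream script):
# the reshape/softmax/reshape above applies a per-prior-box, two-class softmax --
# confidences are viewed as rows of length 2 (background vs. face), normalised,
# then flattened back to one row per image. The same arithmetic in plain NumPy
# on made-up numbers:
import numpy as np

_num_priors = 4
_logits = np.random.rand(1, _num_priors * 2).astype(np.float32)   # mbox_conf analogue

_pairs = _logits.reshape(-1, 2)                          # one (background, face) row per prior
_e = np.exp(_pairs - _pairs.max(axis=1, keepdims=True))
_probs = _e / _e.sum(axis=1, keepdims=True)              # softmax over the 2 classes
_flattened = _probs.reshape(1, -1)                       # mbox_conf_flatten layout

assert np.allclose(_flattened.reshape(-1, 2).sum(axis=1), 1.0)
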
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    ### Check correctness ######################################################
    out_nodes = ['mbox_loc', 'mbox_conf_flatten']
    inp_nodes = [inp.name[:inp.name.find(':')]]

    np.random.seed(2701)
    inputData = np.random.standard_normal([1, 3, 300, 300]).astype(np.float32)

    cvNet.setInput(inputData)
    cvNet.setPreferableBackend(cv.dnn.DNN_BACKEND_OPENCV)
    outDNN = cvNet.forward(out_nodes)

    outTF = sess.run([mbox_loc, mbox_conf_flatten], feed_dict={inp: inputData.transpose(0, 2, 3, 1)})
    print('Max diff @ locations: %e' % np.max(np.abs(outDNN[0] - outTF[0])))
    print('Max diff @ confidence: %e' % np.max(np.abs(outDNN[1] - outTF[1])))

    # Save a graph
    graph_def = sess.graph.as_graph_def()

    # Freeze graph. Replaces variables to constants.
    graph_def = tf.graph_util.convert_variables_to_constants(sess, graph_def, out_nodes)
    # Optimize graph. Removes training-only ops, unused nodes.
    graph_def = optimize_for_inference_lib.optimize_for_inference(graph_def, inp_nodes, out_nodes, dtype.as_datatype_enum)
    # Fuse constant operations.
    transforms = ["fold_constants(ignore_errors=True)"]
    if args.quantize:
        transforms += ["quantize_weights(minimum_size=0)"]
    transforms += ["sort_by_execution_order"]
    graph_def = TransformGraph(graph_def, inp_nodes, out_nodes, transforms)

    # By default, float16 weights are stored in the tensor's repeated field called
    # `half_val`. It has type int32 with leading zeros for unused bytes.
    # The field is varint-encoded, so only 7 bits per byte carry the value and the
    # high bit marks whether another byte follows. As a result a float16 may take
    # 1, 2 or 3 bytes depending on its value. To improve compression, we replace
    # all `half_val` values with `tensor_content`, using exactly 2 bytes for each.
    for node in graph_def.node:
        if 'value' in node.attr:
            halfs = node.attr["value"].tensor.half_val
            if not node.attr["value"].tensor.tensor_content and halfs:
                node.attr["value"].tensor.tensor_content = struct.pack('H' * len(halfs), *halfs)
                node.attr["value"].tensor.ClearField('half_val')

    # Serialize
    with tf.gfile.FastGFile(args.pb, 'wb') as f:
        f.write(graph_def.SerializeToString())


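# Editor's aside (illustrative, self-contained; not part of the upstream script):
# the half_val repacking above stores each float16 as exactly two native-order
# bytes inside tensor_content. A standalone check of that byte-level equivalence
# on made-up values, using NumPy's float16 bit view instead of a real GraphDef:
import struct
import numpy as np

_values = np.array([0.5, -1.25, 3.0], dtype=np.float16)
_halfs = _values.view(np.uint16).tolist()                # the uint16 bit patterns protobuf keeps in half_val

_packed = struct.pack('H' * len(_halfs), *_halfs)        # what the loop writes into tensor_content
assert _packed == _values.tobytes()                      # same two bytes per value
assert np.array_equal(np.frombuffer(_packed, dtype=np.float16), _values)
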
################################################################################
# Write a text graph representation
################################################################################
def tensorMsg(values):
    msg = 'tensor { dtype: DT_FLOAT tensor_shape { dim { size: %d } }' % len(values)
    for value in values:
        msg += 'float_val: %f ' % value
    return msg + '}'

# Remove Const nodes and unused attributes.
for i in reversed(range(len(graph_def.node))):
    if graph_def.node[i].op in ['Const', 'Dequantize']:
        del graph_def.node[i]
    for attr in ['T', 'data_format', 'Tshape', 'N', 'Tidx', 'Tdim',
                 'use_cudnn_on_gpu', 'Index', 'Tperm', 'is_training',
                 'Tpaddings', 'Tblock_shape', 'Tcrops']:
        if attr in graph_def.node[i].attr:
            del graph_def.node[i].attr[attr]

# Append prior box generators
min_sizes = [30, 60, 111, 162, 213, 264]
max_sizes = [60, 111, 162, 213, 264, 315]
steps = [8, 16, 32, 64, 100, 300]
aspect_ratios = [[2], [2, 3], [2, 3], [2, 3], [2], [2]]
layers = [conv4_3_norm, fc7, conv6_2_h, conv7_2_h, conv8_2_h, conv9_2_h]
for i in range(6):
    priorBox = NodeDef()
    priorBox.name = 'PriorBox_%d' % i
    priorBox.op = 'PriorBox'
    priorBox.input.append(layers[i].name[:layers[i].name.find(':')])
    priorBox.input.append(inp_nodes[0])  # data

    text_format.Merge('i: %d' % min_sizes[i], priorBox.attr["min_size"])
    text_format.Merge('i: %d' % max_sizes[i], priorBox.attr["max_size"])
    text_format.Merge('b: true', priorBox.attr["flip"])
    text_format.Merge('b: false', priorBox.attr["clip"])
    text_format.Merge(tensorMsg(aspect_ratios[i]), priorBox.attr["aspect_ratio"])
    text_format.Merge(tensorMsg([0.1, 0.1, 0.2, 0.2]), priorBox.attr["variance"])
    text_format.Merge('f: %f' % steps[i], priorBox.attr["step"])
    text_format.Merge('f: 0.5', priorBox.attr["offset"])
    graph_def.node.extend([priorBox])

# Concatenate prior boxes
concat = NodeDef()
concat.name = 'mbox_priorbox'
concat.op = 'ConcatV2'
for i in range(6):
    concat.input.append('PriorBox_%d' % i)
concat.input.append('mbox_loc/axis')
graph_def.node.extend([concat])

# DetectionOutput layer
detectionOut = NodeDef()
detectionOut.name = 'detection_out'
detectionOut.op = 'DetectionOutput'

detectionOut.input.append('mbox_loc')
detectionOut.input.append('mbox_conf_flatten')
detectionOut.input.append('mbox_priorbox')

text_format.Merge('i: 2', detectionOut.attr['num_classes'])
text_format.Merge('b: true', detectionOut.attr['share_location'])
text_format.Merge('i: 0', detectionOut.attr['background_label_id'])
text_format.Merge('f: 0.45', detectionOut.attr['nms_threshold'])
text_format.Merge('i: 400', detectionOut.attr['top_k'])
text_format.Merge('s: "CENTER_SIZE"', detectionOut.attr['code_type'])
text_format.Merge('i: 200', detectionOut.attr['keep_top_k'])
text_format.Merge('f: 0.01', detectionOut.attr['confidence_threshold'])

graph_def.node.extend([detectionOut])

# Replace L2Normalization subgraph onto a single node.
for i in reversed(range(len(graph_def.node))):
    if graph_def.node[i].name in ['conv4_3_norm/l2_normalize/Square',
                                  'conv4_3_norm/l2_normalize/Sum',
                                  'conv4_3_norm/l2_normalize/Maximum',
                                  'conv4_3_norm/l2_normalize/Rsqrt']:
        del graph_def.node[i]
for node in graph_def.node:
    if node.name == 'conv4_3_norm/l2_normalize':
        node.op = 'L2Normalize'
        node.input.pop()
        node.input.pop()
        node.input.append(layer_256_1_relu1.name)
        node.input.append('conv4_3_norm/l2_normalize/Sum/reduction_indices')
        break

softmaxShape = NodeDef()
softmaxShape.name = 'reshape_before_softmax'
softmaxShape.op = 'Const'
text_format.Merge(
    'tensor {'
    '  dtype: DT_INT32'
    '  tensor_shape { dim { size: 3 } }'
    '  int_val: 0'
    '  int_val: -1'
    '  int_val: 2'
    '}', softmaxShape.attr["value"])
graph_def.node.extend([softmaxShape])

for node in graph_def.node:
    if node.name == 'mbox_conf_reshape':
        node.input[1] = softmaxShape.name
    elif node.name == 'mbox_conf_softmax':
        text_format.Merge('i: 2', node.attr['axis'])
    elif node.name in flattenLayersNames:
        node.op = 'Flatten'
        inpName = node.input[0]
        node.input.pop()
        node.input.pop()
        node.input.append(inpName)

tf.train.write_graph(graph_def, "", args.pbtxt, as_text=True)
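For context, the .pb/.pbtxt pair produced by this script is the format consumed by OpenCV's TensorFlow importer. A hedged usage sketch with placeholder file names (the actual test data lives in opencv_extra, not in this repository):

import cv2 as cv
import numpy as np

# Placeholder paths standing in for the files written by the script above.
net = cv.dnn.readNetFromTensorflow('opencv_face_detector_uint8.pb',
                                   'opencv_face_detector.pbtxt')

image = np.zeros((300, 300, 3), dtype=np.uint8)  # stand-in for a real frame
blob = cv.dnn.blobFromImage(image, size=(300, 300), mean=(104, 177, 123))
net.setInput(blob)
detections = net.forward()  # 1x1xNx7: [image_id, class_id, confidence, x1, y1, x2, y2]
print(detections.shape)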
2153
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/attr_value.pb.cc
vendored
Normal file
File diff suppressed because it is too large
1749
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/attr_value.pb.h
vendored
Normal file
File diff suppressed because it is too large
1892
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/function.pb.cc
vendored
Normal file
File diff suppressed because it is too large
1385
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/function.pb.h
vendored
Normal file
File diff suppressed because it is too large
1295
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/graph.pb.cc
vendored
Normal file
File diff suppressed because it is too large
968
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/graph.pb.h
vendored
Normal file
@ -0,0 +1,968 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: graph.proto
|
||||
|
||||
#ifndef PROTOBUF_graph_2eproto__INCLUDED
|
||||
#define PROTOBUF_graph_2eproto__INCLUDED
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
|
||||
#if GOOGLE_PROTOBUF_VERSION < 3005000
|
||||
#error This file was generated by a newer version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please update
|
||||
#error your headers.
|
||||
#endif
|
||||
#if 3005001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
|
||||
#error This file was generated by an older version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please
|
||||
#error regenerate this file with a newer version of protoc.
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/arena.h>
|
||||
#include <google/protobuf/arenastring.h>
|
||||
#include <google/protobuf/generated_message_table_driven.h>
|
||||
#include <google/protobuf/generated_message_util.h>
|
||||
#include <google/protobuf/metadata.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
|
||||
#include <google/protobuf/extension_set.h> // IWYU pragma: export
|
||||
#include <google/protobuf/map.h> // IWYU pragma: export
|
||||
#include <google/protobuf/map_entry.h>
|
||||
#include <google/protobuf/map_field_inl.h>
|
||||
#include <google/protobuf/unknown_field_set.h>
|
||||
#include "attr_value.pb.h"
|
||||
#include "function.pb.h"
|
||||
#include "versions.pb.h"
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace protobuf_graph_2eproto {
|
||||
// Internal implementation detail -- do not use these members.
|
||||
struct TableStruct {
|
||||
static const ::google::protobuf::internal::ParseTableField entries[];
|
||||
static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
|
||||
static const ::google::protobuf::internal::ParseTable schema[3];
|
||||
static const ::google::protobuf::internal::FieldMetadata field_metadata[];
|
||||
static const ::google::protobuf::internal::SerializationTable serialization_table[];
|
||||
static const ::google::protobuf::uint32 offsets[];
|
||||
};
|
||||
void AddDescriptors();
|
||||
void InitDefaultsGraphDefImpl();
|
||||
void InitDefaultsGraphDef();
|
||||
void InitDefaultsNodeDef_AttrEntry_DoNotUseImpl();
|
||||
void InitDefaultsNodeDef_AttrEntry_DoNotUse();
|
||||
void InitDefaultsNodeDefImpl();
|
||||
void InitDefaultsNodeDef();
|
||||
inline void InitDefaults() {
|
||||
InitDefaultsGraphDef();
|
||||
InitDefaultsNodeDef_AttrEntry_DoNotUse();
|
||||
InitDefaultsNodeDef();
|
||||
}
|
||||
} // namespace protobuf_graph_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
class GraphDef;
|
||||
class GraphDefDefaultTypeInternal;
|
||||
extern GraphDefDefaultTypeInternal _GraphDef_default_instance_;
|
||||
class NodeDef;
|
||||
class NodeDefDefaultTypeInternal;
|
||||
extern NodeDefDefaultTypeInternal _NodeDef_default_instance_;
|
||||
class NodeDef_AttrEntry_DoNotUse;
|
||||
class NodeDef_AttrEntry_DoNotUseDefaultTypeInternal;
|
||||
extern NodeDef_AttrEntry_DoNotUseDefaultTypeInternal _NodeDef_AttrEntry_DoNotUse_default_instance_;
|
||||
} // namespace opencv_tensorflow
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class GraphDef : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.GraphDef) */ {
|
||||
public:
|
||||
GraphDef();
|
||||
virtual ~GraphDef();
|
||||
|
||||
GraphDef(const GraphDef& from);
|
||||
|
||||
inline GraphDef& operator=(const GraphDef& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
#if LANG_CXX11
|
||||
GraphDef(GraphDef&& from) noexcept
|
||||
: GraphDef() {
|
||||
*this = ::std::move(from);
|
||||
}
|
||||
|
||||
inline GraphDef& operator=(GraphDef&& from) noexcept {
|
||||
if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
|
||||
if (this != &from) InternalSwap(&from);
|
||||
} else {
|
||||
CopyFrom(from);
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
|
||||
return GetArenaNoVirtual();
|
||||
}
|
||||
inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
|
||||
return MaybeArenaPtr();
|
||||
}
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const GraphDef& default_instance();
|
||||
|
||||
static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
|
||||
static inline const GraphDef* internal_default_instance() {
|
||||
return reinterpret_cast<const GraphDef*>(
|
||||
&_GraphDef_default_instance_);
|
||||
}
|
||||
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
|
||||
0;
|
||||
|
||||
void UnsafeArenaSwap(GraphDef* other);
|
||||
void Swap(GraphDef* other);
|
||||
friend void swap(GraphDef& a, GraphDef& b) {
|
||||
a.Swap(&b);
|
||||
}
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
inline GraphDef* New() const PROTOBUF_FINAL { return New(NULL); }
|
||||
|
||||
GraphDef* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
|
||||
void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void CopyFrom(const GraphDef& from);
|
||||
void MergeFrom(const GraphDef& from);
|
||||
void Clear() PROTOBUF_FINAL;
|
||||
bool IsInitialized() const PROTOBUF_FINAL;
|
||||
|
||||
size_t ByteSizeLong() const PROTOBUF_FINAL;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
|
||||
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
|
||||
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const PROTOBUF_FINAL;
|
||||
void InternalSwap(GraphDef* other);
|
||||
protected:
|
||||
explicit GraphDef(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
static void ArenaDtor(void* object);
|
||||
inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
|
||||
return _internal_metadata_.arena();
|
||||
}
|
||||
inline void* MaybeArenaPtr() const {
|
||||
return _internal_metadata_.raw_arena_ptr();
|
||||
}
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// repeated .opencv_tensorflow.NodeDef node = 1;
|
||||
int node_size() const;
|
||||
void clear_node();
|
||||
static const int kNodeFieldNumber = 1;
|
||||
const ::opencv_tensorflow::NodeDef& node(int index) const;
|
||||
::opencv_tensorflow::NodeDef* mutable_node(int index);
|
||||
::opencv_tensorflow::NodeDef* add_node();
|
||||
::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::NodeDef >*
|
||||
mutable_node();
|
||||
const ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::NodeDef >&
|
||||
node() const;
|
||||
|
||||
// .opencv_tensorflow.FunctionDefLibrary library = 2;
|
||||
bool has_library() const;
|
||||
void clear_library();
|
||||
static const int kLibraryFieldNumber = 2;
|
||||
private:
|
||||
void _slow_mutable_library();
|
||||
public:
|
||||
const ::opencv_tensorflow::FunctionDefLibrary& library() const;
|
||||
::opencv_tensorflow::FunctionDefLibrary* release_library();
|
||||
::opencv_tensorflow::FunctionDefLibrary* mutable_library();
|
||||
void set_allocated_library(::opencv_tensorflow::FunctionDefLibrary* library);
|
||||
void unsafe_arena_set_allocated_library(
|
||||
::opencv_tensorflow::FunctionDefLibrary* library);
|
||||
::opencv_tensorflow::FunctionDefLibrary* unsafe_arena_release_library();
|
||||
|
||||
// .opencv_tensorflow.VersionDef versions = 4;
|
||||
bool has_versions() const;
|
||||
void clear_versions();
|
||||
static const int kVersionsFieldNumber = 4;
|
||||
private:
|
||||
void _slow_mutable_versions();
|
||||
public:
|
||||
const ::opencv_tensorflow::VersionDef& versions() const;
|
||||
::opencv_tensorflow::VersionDef* release_versions();
|
||||
::opencv_tensorflow::VersionDef* mutable_versions();
|
||||
void set_allocated_versions(::opencv_tensorflow::VersionDef* versions);
|
||||
void unsafe_arena_set_allocated_versions(
|
||||
::opencv_tensorflow::VersionDef* versions);
|
||||
::opencv_tensorflow::VersionDef* unsafe_arena_release_versions();
|
||||
|
||||
// int32 version = 3 [deprecated = true];
|
||||
GOOGLE_PROTOBUF_DEPRECATED_ATTR void clear_version();
|
||||
GOOGLE_PROTOBUF_DEPRECATED_ATTR static const int kVersionFieldNumber = 3;
|
||||
GOOGLE_PROTOBUF_DEPRECATED_ATTR ::google::protobuf::int32 version() const;
|
||||
GOOGLE_PROTOBUF_DEPRECATED_ATTR void set_version(::google::protobuf::int32 value);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:opencv_tensorflow.GraphDef)
|
||||
private:
|
||||
|
||||
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
|
||||
template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
|
||||
typedef void InternalArenaConstructable_;
|
||||
typedef void DestructorSkippable_;
|
||||
::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::NodeDef > node_;
|
||||
::opencv_tensorflow::FunctionDefLibrary* library_;
|
||||
::opencv_tensorflow::VersionDef* versions_;
|
||||
::google::protobuf::int32 version_;
|
||||
mutable int _cached_size_;
|
||||
friend struct ::protobuf_graph_2eproto::TableStruct;
|
||||
friend void ::protobuf_graph_2eproto::InitDefaultsGraphDefImpl();
|
||||
};
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class NodeDef_AttrEntry_DoNotUse : public ::google::protobuf::internal::MapEntry<NodeDef_AttrEntry_DoNotUse,
|
||||
::std::string, ::opencv_tensorflow::AttrValue,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
|
||||
0 > {
|
||||
public:
|
||||
typedef ::google::protobuf::internal::MapEntry<NodeDef_AttrEntry_DoNotUse,
|
||||
::std::string, ::opencv_tensorflow::AttrValue,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
|
||||
0 > SuperType;
|
||||
NodeDef_AttrEntry_DoNotUse();
|
||||
NodeDef_AttrEntry_DoNotUse(::google::protobuf::Arena* arena);
|
||||
void MergeFrom(const NodeDef_AttrEntry_DoNotUse& other);
|
||||
static const NodeDef_AttrEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const NodeDef_AttrEntry_DoNotUse*>(&_NodeDef_AttrEntry_DoNotUse_default_instance_); }
|
||||
void MergeFrom(const ::google::protobuf::Message& other) PROTOBUF_FINAL;
|
||||
::google::protobuf::Metadata GetMetadata() const;
|
||||
};
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class NodeDef : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.NodeDef) */ {
|
||||
public:
|
||||
NodeDef();
|
||||
virtual ~NodeDef();
|
||||
|
||||
NodeDef(const NodeDef& from);
|
||||
|
||||
inline NodeDef& operator=(const NodeDef& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
#if LANG_CXX11
|
||||
NodeDef(NodeDef&& from) noexcept
|
||||
: NodeDef() {
|
||||
*this = ::std::move(from);
|
||||
}
|
||||
|
||||
inline NodeDef& operator=(NodeDef&& from) noexcept {
|
||||
if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
|
||||
if (this != &from) InternalSwap(&from);
|
||||
} else {
|
||||
CopyFrom(from);
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
|
||||
return GetArenaNoVirtual();
|
||||
}
|
||||
inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
|
||||
return MaybeArenaPtr();
|
||||
}
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const NodeDef& default_instance();
|
||||
|
||||
static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
|
||||
static inline const NodeDef* internal_default_instance() {
|
||||
return reinterpret_cast<const NodeDef*>(
|
||||
&_NodeDef_default_instance_);
|
||||
}
|
||||
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
|
||||
2;
|
||||
|
||||
void UnsafeArenaSwap(NodeDef* other);
|
||||
void Swap(NodeDef* other);
|
||||
friend void swap(NodeDef& a, NodeDef& b) {
|
||||
a.Swap(&b);
|
||||
}
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
inline NodeDef* New() const PROTOBUF_FINAL { return New(NULL); }
|
||||
|
||||
NodeDef* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
|
||||
void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void CopyFrom(const NodeDef& from);
|
||||
void MergeFrom(const NodeDef& from);
|
||||
void Clear() PROTOBUF_FINAL;
|
||||
bool IsInitialized() const PROTOBUF_FINAL;
|
||||
|
||||
size_t ByteSizeLong() const PROTOBUF_FINAL;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
|
||||
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
|
||||
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const PROTOBUF_FINAL;
|
||||
void InternalSwap(NodeDef* other);
|
||||
protected:
|
||||
explicit NodeDef(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
static void ArenaDtor(void* object);
|
||||
inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
|
||||
return _internal_metadata_.arena();
|
||||
}
|
||||
inline void* MaybeArenaPtr() const {
|
||||
return _internal_metadata_.raw_arena_ptr();
|
||||
}
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// repeated string input = 3;
|
||||
int input_size() const;
|
||||
void clear_input();
|
||||
static const int kInputFieldNumber = 3;
|
||||
const ::std::string& input(int index) const;
|
||||
::std::string* mutable_input(int index);
|
||||
void set_input(int index, const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_input(int index, ::std::string&& value);
|
||||
#endif
|
||||
void set_input(int index, const char* value);
|
||||
void set_input(int index, const char* value, size_t size);
|
||||
::std::string* add_input();
|
||||
void add_input(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void add_input(::std::string&& value);
|
||||
#endif
|
||||
void add_input(const char* value);
|
||||
void add_input(const char* value, size_t size);
|
||||
const ::google::protobuf::RepeatedPtrField< ::std::string>& input() const;
|
||||
::google::protobuf::RepeatedPtrField< ::std::string>* mutable_input();
|
||||
|
||||
// map<string, .opencv_tensorflow.AttrValue> attr = 5;
|
||||
int attr_size() const;
|
||||
void clear_attr();
|
||||
static const int kAttrFieldNumber = 5;
|
||||
const ::google::protobuf::Map< ::std::string, ::opencv_tensorflow::AttrValue >&
|
||||
attr() const;
|
||||
::google::protobuf::Map< ::std::string, ::opencv_tensorflow::AttrValue >*
|
||||
mutable_attr();
|
||||
|
||||
// string name = 1;
|
||||
void clear_name();
|
||||
static const int kNameFieldNumber = 1;
|
||||
const ::std::string& name() const;
|
||||
void set_name(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_name(::std::string&& value);
|
||||
#endif
|
||||
void set_name(const char* value);
|
||||
void set_name(const char* value, size_t size);
|
||||
::std::string* mutable_name();
|
||||
::std::string* release_name();
|
||||
void set_allocated_name(::std::string* name);
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
::std::string* unsafe_arena_release_name();
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
void unsafe_arena_set_allocated_name(
|
||||
::std::string* name);
|
||||
|
||||
// string op = 2;
|
||||
void clear_op();
|
||||
static const int kOpFieldNumber = 2;
|
||||
const ::std::string& op() const;
|
||||
void set_op(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_op(::std::string&& value);
|
||||
#endif
|
||||
void set_op(const char* value);
|
||||
void set_op(const char* value, size_t size);
|
||||
::std::string* mutable_op();
|
||||
::std::string* release_op();
|
||||
void set_allocated_op(::std::string* op);
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
::std::string* unsafe_arena_release_op();
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
void unsafe_arena_set_allocated_op(
|
||||
::std::string* op);
|
||||
|
||||
// string device = 4;
|
||||
void clear_device();
|
||||
static const int kDeviceFieldNumber = 4;
|
||||
const ::std::string& device() const;
|
||||
void set_device(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_device(::std::string&& value);
|
||||
#endif
|
||||
void set_device(const char* value);
|
||||
void set_device(const char* value, size_t size);
|
||||
::std::string* mutable_device();
|
||||
::std::string* release_device();
|
||||
void set_allocated_device(::std::string* device);
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
::std::string* unsafe_arena_release_device();
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
void unsafe_arena_set_allocated_device(
|
||||
::std::string* device);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:opencv_tensorflow.NodeDef)
|
||||
private:
|
||||
|
||||
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
|
||||
template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
|
||||
typedef void InternalArenaConstructable_;
|
||||
typedef void DestructorSkippable_;
|
||||
::google::protobuf::RepeatedPtrField< ::std::string> input_;
|
||||
::google::protobuf::internal::MapField<
|
||||
NodeDef_AttrEntry_DoNotUse,
|
||||
::std::string, ::opencv_tensorflow::AttrValue,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
|
||||
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
|
||||
0 > attr_;
|
||||
::google::protobuf::internal::ArenaStringPtr name_;
|
||||
::google::protobuf::internal::ArenaStringPtr op_;
|
||||
::google::protobuf::internal::ArenaStringPtr device_;
|
||||
mutable int _cached_size_;
|
||||
friend struct ::protobuf_graph_2eproto::TableStruct;
|
||||
friend void ::protobuf_graph_2eproto::InitDefaultsNodeDefImpl();
|
||||
};
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
|
||||
#endif // __GNUC__
|
||||
// GraphDef
|
||||
|
||||
// repeated .opencv_tensorflow.NodeDef node = 1;
|
||||
inline int GraphDef::node_size() const {
|
||||
return node_.size();
|
||||
}
|
||||
inline void GraphDef::clear_node() {
|
||||
node_.Clear();
|
||||
}
|
||||
inline const ::opencv_tensorflow::NodeDef& GraphDef::node(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.GraphDef.node)
|
||||
return node_.Get(index);
|
||||
}
|
||||
inline ::opencv_tensorflow::NodeDef* GraphDef::mutable_node(int index) {
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.GraphDef.node)
|
||||
return node_.Mutable(index);
|
||||
}
|
||||
inline ::opencv_tensorflow::NodeDef* GraphDef::add_node() {
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.GraphDef.node)
|
||||
return node_.Add();
|
||||
}
|
||||
inline ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::NodeDef >*
|
||||
GraphDef::mutable_node() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.GraphDef.node)
|
||||
return &node_;
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::NodeDef >&
|
||||
GraphDef::node() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.GraphDef.node)
|
||||
return node_;
|
||||
}
|
||||
|
||||
// .opencv_tensorflow.VersionDef versions = 4;
|
||||
inline bool GraphDef::has_versions() const {
|
||||
return this != internal_default_instance() && versions_ != NULL;
|
||||
}
|
||||
inline const ::opencv_tensorflow::VersionDef& GraphDef::versions() const {
|
||||
const ::opencv_tensorflow::VersionDef* p = versions_;
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.GraphDef.versions)
|
||||
return p != NULL ? *p : *reinterpret_cast<const ::opencv_tensorflow::VersionDef*>(
|
||||
&::opencv_tensorflow::_VersionDef_default_instance_);
|
||||
}
|
||||
inline ::opencv_tensorflow::VersionDef* GraphDef::release_versions() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.GraphDef.versions)
|
||||
|
||||
::opencv_tensorflow::VersionDef* temp = versions_;
|
||||
if (GetArenaNoVirtual() != NULL) {
|
||||
temp = ::google::protobuf::internal::DuplicateIfNonNull(temp, NULL);
|
||||
}
|
||||
versions_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::VersionDef* GraphDef::unsafe_arena_release_versions() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.GraphDef.versions)
|
||||
|
||||
::opencv_tensorflow::VersionDef* temp = versions_;
|
||||
versions_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::VersionDef* GraphDef::mutable_versions() {
|
||||
|
||||
if (versions_ == NULL) {
|
||||
_slow_mutable_versions();
|
||||
}
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.GraphDef.versions)
|
||||
return versions_;
|
||||
}
|
||||
inline void GraphDef::set_allocated_versions(::opencv_tensorflow::VersionDef* versions) {
|
||||
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
|
||||
if (message_arena == NULL) {
|
||||
delete reinterpret_cast< ::google::protobuf::MessageLite*>(versions_);
|
||||
}
|
||||
if (versions) {
|
||||
::google::protobuf::Arena* submessage_arena =
|
||||
reinterpret_cast< ::google::protobuf::MessageLite*>(versions)->GetArena();
|
||||
if (message_arena != submessage_arena) {
|
||||
versions = ::google::protobuf::internal::GetOwnedMessage(
|
||||
message_arena, versions, submessage_arena);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
versions_ = versions;
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.GraphDef.versions)
|
||||
}
|
||||
|
||||
// int32 version = 3 [deprecated = true];
|
||||
inline void GraphDef::clear_version() {
|
||||
version_ = 0;
|
||||
}
|
||||
inline ::google::protobuf::int32 GraphDef::version() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.GraphDef.version)
|
||||
return version_;
|
||||
}
|
||||
inline void GraphDef::set_version(::google::protobuf::int32 value) {
|
||||
|
||||
version_ = value;
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.GraphDef.version)
|
||||
}
|
||||
|
||||
// .opencv_tensorflow.FunctionDefLibrary library = 2;
|
||||
inline bool GraphDef::has_library() const {
|
||||
return this != internal_default_instance() && library_ != NULL;
|
||||
}
|
||||
inline const ::opencv_tensorflow::FunctionDefLibrary& GraphDef::library() const {
|
||||
const ::opencv_tensorflow::FunctionDefLibrary* p = library_;
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.GraphDef.library)
|
||||
return p != NULL ? *p : *reinterpret_cast<const ::opencv_tensorflow::FunctionDefLibrary*>(
|
||||
&::opencv_tensorflow::_FunctionDefLibrary_default_instance_);
|
||||
}
|
||||
inline ::opencv_tensorflow::FunctionDefLibrary* GraphDef::release_library() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.GraphDef.library)
|
||||
|
||||
::opencv_tensorflow::FunctionDefLibrary* temp = library_;
|
||||
if (GetArenaNoVirtual() != NULL) {
|
||||
temp = ::google::protobuf::internal::DuplicateIfNonNull(temp, NULL);
|
||||
}
|
||||
library_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::FunctionDefLibrary* GraphDef::unsafe_arena_release_library() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.GraphDef.library)
|
||||
|
||||
::opencv_tensorflow::FunctionDefLibrary* temp = library_;
|
||||
library_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::FunctionDefLibrary* GraphDef::mutable_library() {
|
||||
|
||||
if (library_ == NULL) {
|
||||
_slow_mutable_library();
|
||||
}
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.GraphDef.library)
|
||||
return library_;
|
||||
}
|
||||
inline void GraphDef::set_allocated_library(::opencv_tensorflow::FunctionDefLibrary* library) {
|
||||
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
|
||||
if (message_arena == NULL) {
|
||||
delete reinterpret_cast< ::google::protobuf::MessageLite*>(library_);
|
||||
}
|
||||
if (library) {
|
||||
::google::protobuf::Arena* submessage_arena =
|
||||
reinterpret_cast< ::google::protobuf::MessageLite*>(library)->GetArena();
|
||||
if (message_arena != submessage_arena) {
|
||||
library = ::google::protobuf::internal::GetOwnedMessage(
|
||||
message_arena, library, submessage_arena);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
library_ = library;
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.GraphDef.library)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// NodeDef
|
||||
|
||||
// string name = 1;
|
||||
inline void NodeDef::clear_name() {
|
||||
name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline const ::std::string& NodeDef::name() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.NodeDef.name)
|
||||
return name_.Get();
|
||||
}
|
||||
inline void NodeDef::set_name(const ::std::string& value) {
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void NodeDef::set_name(::std::string&& value) {
|
||||
|
||||
name_.Set(
|
||||
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_rvalue:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
#endif
|
||||
inline void NodeDef::set_name(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
inline void NodeDef::set_name(const char* value,
|
||||
size_t size) {
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
|
||||
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
inline ::std::string* NodeDef::mutable_name() {
|
||||
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.NodeDef.name)
|
||||
return name_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline ::std::string* NodeDef::release_name() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.NodeDef.name)
|
||||
|
||||
return name_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline void NodeDef::set_allocated_name(::std::string* name) {
|
||||
if (name != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
name_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name,
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
inline ::std::string* NodeDef::unsafe_arena_release_name() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.NodeDef.name)
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
|
||||
return name_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
GetArenaNoVirtual());
|
||||
}
|
||||
inline void NodeDef::unsafe_arena_set_allocated_name(
|
||||
::std::string* name) {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
if (name != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
name_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
name, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:opencv_tensorflow.NodeDef.name)
|
||||
}
|
||||
|
||||
// string op = 2;
|
||||
inline void NodeDef::clear_op() {
|
||||
op_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline const ::std::string& NodeDef::op() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.NodeDef.op)
|
||||
return op_.Get();
|
||||
}
|
||||
inline void NodeDef::set_op(const ::std::string& value) {
|
||||
|
||||
op_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void NodeDef::set_op(::std::string&& value) {
|
||||
|
||||
op_.Set(
|
||||
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_rvalue:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
#endif
|
||||
inline void NodeDef::set_op(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
|
||||
op_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
inline void NodeDef::set_op(const char* value,
|
||||
size_t size) {
|
||||
|
||||
op_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
|
||||
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
inline ::std::string* NodeDef::mutable_op() {
|
||||
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.NodeDef.op)
|
||||
return op_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline ::std::string* NodeDef::release_op() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.NodeDef.op)
|
||||
|
||||
return op_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline void NodeDef::set_allocated_op(::std::string* op) {
|
||||
if (op != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
op_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), op,
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
inline ::std::string* NodeDef::unsafe_arena_release_op() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.NodeDef.op)
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
|
||||
return op_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
GetArenaNoVirtual());
|
||||
}
|
||||
inline void NodeDef::unsafe_arena_set_allocated_op(
|
||||
::std::string* op) {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
if (op != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
op_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
op, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:opencv_tensorflow.NodeDef.op)
|
||||
}
|
||||
|
||||
// repeated string input = 3;
|
||||
inline int NodeDef::input_size() const {
|
||||
return input_.size();
|
||||
}
|
||||
inline void NodeDef::clear_input() {
|
||||
input_.Clear();
|
||||
}
|
||||
inline const ::std::string& NodeDef::input(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.NodeDef.input)
|
||||
return input_.Get(index);
|
||||
}
|
||||
inline ::std::string* NodeDef::mutable_input(int index) {
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.NodeDef.input)
|
||||
return input_.Mutable(index);
|
||||
}
|
||||
inline void NodeDef::set_input(int index, const ::std::string& value) {
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.NodeDef.input)
|
||||
input_.Mutable(index)->assign(value);
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void NodeDef::set_input(int index, ::std::string&& value) {
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.NodeDef.input)
|
||||
input_.Mutable(index)->assign(std::move(value));
|
||||
}
|
||||
#endif
|
||||
inline void NodeDef::set_input(int index, const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
input_.Mutable(index)->assign(value);
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.NodeDef.input)
|
||||
}
|
||||
inline void NodeDef::set_input(int index, const char* value, size_t size) {
|
||||
input_.Mutable(index)->assign(
|
||||
reinterpret_cast<const char*>(value), size);
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.NodeDef.input)
|
||||
}
|
||||
inline ::std::string* NodeDef::add_input() {
|
||||
// @@protoc_insertion_point(field_add_mutable:opencv_tensorflow.NodeDef.input)
|
||||
return input_.Add();
|
||||
}
|
||||
inline void NodeDef::add_input(const ::std::string& value) {
|
||||
input_.Add()->assign(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.NodeDef.input)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void NodeDef::add_input(::std::string&& value) {
|
||||
input_.Add(std::move(value));
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.NodeDef.input)
|
||||
}
|
||||
#endif
|
||||
inline void NodeDef::add_input(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
input_.Add()->assign(value);
|
||||
// @@protoc_insertion_point(field_add_char:opencv_tensorflow.NodeDef.input)
|
||||
}
|
||||
inline void NodeDef::add_input(const char* value, size_t size) {
  input_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:opencv_tensorflow.NodeDef.input)
}
inline const ::google::protobuf::RepeatedPtrField< ::std::string>&
NodeDef::input() const {
  // @@protoc_insertion_point(field_list:opencv_tensorflow.NodeDef.input)
  return input_;
}
inline ::google::protobuf::RepeatedPtrField< ::std::string>*
NodeDef::mutable_input() {
  // @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.NodeDef.input)
  return &input_;
}

// string device = 4;
inline void NodeDef::clear_device() {
  device_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline const ::std::string& NodeDef::device() const {
  // @@protoc_insertion_point(field_get:opencv_tensorflow.NodeDef.device)
  return device_.Get();
}
inline void NodeDef::set_device(const ::std::string& value) {

  device_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set:opencv_tensorflow.NodeDef.device)
}
#if LANG_CXX11
inline void NodeDef::set_device(::std::string&& value) {

  device_.Set(
    &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_rvalue:opencv_tensorflow.NodeDef.device)
}
#endif
inline void NodeDef::set_device(const char* value) {
  GOOGLE_DCHECK(value != NULL);

  device_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
              GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_char:opencv_tensorflow.NodeDef.device)
}
inline void NodeDef::set_device(const char* value,
                                size_t size) {

  device_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
      reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.NodeDef.device)
}
inline ::std::string* NodeDef::mutable_device() {

  // @@protoc_insertion_point(field_mutable:opencv_tensorflow.NodeDef.device)
  return device_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline ::std::string* NodeDef::release_device() {
  // @@protoc_insertion_point(field_release:opencv_tensorflow.NodeDef.device)

  return device_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
inline void NodeDef::set_allocated_device(::std::string* device) {
  if (device != NULL) {

  } else {

  }
  device_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), device,
      GetArenaNoVirtual());
  // @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.NodeDef.device)
}
inline ::std::string* NodeDef::unsafe_arena_release_device() {
  // @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.NodeDef.device)
  GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);

  return device_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
      GetArenaNoVirtual());
}
inline void NodeDef::unsafe_arena_set_allocated_device(
    ::std::string* device) {
  GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
  if (device != NULL) {

  } else {

  }
  device_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
      device, GetArenaNoVirtual());
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:opencv_tensorflow.NodeDef.device)
}

// map<string, .opencv_tensorflow.AttrValue> attr = 5;
inline int NodeDef::attr_size() const {
  return attr_.size();
}
inline const ::google::protobuf::Map< ::std::string, ::opencv_tensorflow::AttrValue >&
NodeDef::attr() const {
  // @@protoc_insertion_point(field_map:opencv_tensorflow.NodeDef.attr)
  return attr_.GetMap();
}
inline ::google::protobuf::Map< ::std::string, ::opencv_tensorflow::AttrValue >*
NodeDef::mutable_attr() {
  // @@protoc_insertion_point(field_mutable_map:opencv_tensorflow.NodeDef.attr)
  return attr_.MutableMap();
}

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif // __GNUC__
// -------------------------------------------------------------------

// -------------------------------------------------------------------


// @@protoc_insertion_point(namespace_scope)

} // namespace opencv_tensorflow

// @@protoc_insertion_point(global_scope)

#endif // PROTOBUF_graph_2eproto__INCLUDED
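// --- Editor's note (not part of the vendored file) ---------------------------
// A minimal, hypothetical sketch of how the NodeDef accessors declared above
// might be exercised. The include path, input name, device string, and
// attribute key are illustrative assumptions, not values taken from this
// commit; only accessors visible in the header above are used.
#include <cstring>
#include <iostream>

#include "graph.pb.h"

int example_node_usage() {
  opencv_tensorflow::NodeDef node;

  // repeated string input = 3: add_input(const char*, size_t) as declared above.
  const char* producer = "conv1/weights";  // hypothetical upstream node name
  node.add_input(producer, std::strlen(producer));

  // string device = 4: plain string setter.
  node.set_device("/cpu:0");

  // map<string, AttrValue> attr = 5: mutable_attr() returns a protobuf Map;
  // operator[] default-constructs the AttrValue entry for a new key.
  (*node.mutable_attr())["T"];

  std::cout << node.input().Get(0) << " on " << node.device()
            << ", attrs: " << node.attr_size() << std::endl;
  return 0;
}
// -----------------------------------------------------------------------------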
2840   3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/op_def.pb.cc (vendored, new file; diff suppressed because it is too large)
2435   3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/op_def.pb.h (vendored, new file; diff suppressed because it is too large)
1115   3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/tensor.pb.cc (vendored, new file; diff suppressed because it is too large)
844    3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/tensor.pb.h (vendored, new file)
@@ -0,0 +1,844 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensor.proto

#ifndef PROTOBUF_tensor_2eproto__INCLUDED
#define PROTOBUF_tensor_2eproto__INCLUDED

#include <string>

#include <google/protobuf/stubs/common.h>

#if GOOGLE_PROTOBUF_VERSION < 3005000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3005001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include <google/protobuf/unknown_field_set.h>
#include "tensor_shape.pb.h"
#include "types.pb.h"
// @@protoc_insertion_point(includes)

namespace protobuf_tensor_2eproto {
|
||||
// Internal implementation detail -- do not use these members.
|
||||
struct TableStruct {
|
||||
static const ::google::protobuf::internal::ParseTableField entries[];
|
||||
static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
|
||||
static const ::google::protobuf::internal::ParseTable schema[1];
|
||||
static const ::google::protobuf::internal::FieldMetadata field_metadata[];
|
||||
static const ::google::protobuf::internal::SerializationTable serialization_table[];
|
||||
static const ::google::protobuf::uint32 offsets[];
|
||||
};
|
||||
void AddDescriptors();
|
||||
void InitDefaultsTensorProtoImpl();
|
||||
void InitDefaultsTensorProto();
|
||||
inline void InitDefaults() {
|
||||
InitDefaultsTensorProto();
|
||||
}
|
||||
} // namespace protobuf_tensor_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
class TensorProto;
|
||||
class TensorProtoDefaultTypeInternal;
|
||||
extern TensorProtoDefaultTypeInternal _TensorProto_default_instance_;
|
||||
} // namespace opencv_tensorflow
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class TensorProto : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.TensorProto) */ {
|
||||
public:
|
||||
TensorProto();
|
||||
virtual ~TensorProto();
|
||||
|
||||
TensorProto(const TensorProto& from);
|
||||
|
||||
inline TensorProto& operator=(const TensorProto& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
#if LANG_CXX11
|
||||
TensorProto(TensorProto&& from) noexcept
|
||||
: TensorProto() {
|
||||
*this = ::std::move(from);
|
||||
}
|
||||
|
||||
inline TensorProto& operator=(TensorProto&& from) noexcept {
|
||||
if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
|
||||
if (this != &from) InternalSwap(&from);
|
||||
} else {
|
||||
CopyFrom(from);
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
|
||||
return GetArenaNoVirtual();
|
||||
}
|
||||
inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
|
||||
return MaybeArenaPtr();
|
||||
}
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const TensorProto& default_instance();
|
||||
|
||||
static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
|
||||
static inline const TensorProto* internal_default_instance() {
|
||||
return reinterpret_cast<const TensorProto*>(
|
||||
&_TensorProto_default_instance_);
|
||||
}
|
||||
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
|
||||
0;
|
||||
|
||||
void UnsafeArenaSwap(TensorProto* other);
|
||||
void Swap(TensorProto* other);
|
||||
friend void swap(TensorProto& a, TensorProto& b) {
|
||||
a.Swap(&b);
|
||||
}
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
inline TensorProto* New() const PROTOBUF_FINAL { return New(NULL); }
|
||||
|
||||
TensorProto* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
|
||||
void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void CopyFrom(const TensorProto& from);
|
||||
void MergeFrom(const TensorProto& from);
|
||||
void Clear() PROTOBUF_FINAL;
|
||||
bool IsInitialized() const PROTOBUF_FINAL;
|
||||
|
||||
size_t ByteSizeLong() const PROTOBUF_FINAL;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
|
||||
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
|
||||
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const PROTOBUF_FINAL;
|
||||
void InternalSwap(TensorProto* other);
|
||||
protected:
|
||||
explicit TensorProto(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
static void ArenaDtor(void* object);
|
||||
inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
|
||||
return _internal_metadata_.arena();
|
||||
}
|
||||
inline void* MaybeArenaPtr() const {
|
||||
return _internal_metadata_.raw_arena_ptr();
|
||||
}
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// repeated float float_val = 5 [packed = true];
|
||||
int float_val_size() const;
|
||||
void clear_float_val();
|
||||
static const int kFloatValFieldNumber = 5;
|
||||
float float_val(int index) const;
|
||||
void set_float_val(int index, float value);
|
||||
void add_float_val(float value);
|
||||
const ::google::protobuf::RepeatedField< float >&
|
||||
float_val() const;
|
||||
::google::protobuf::RepeatedField< float >*
|
||||
mutable_float_val();
|
||||
|
||||
// repeated double double_val = 6 [packed = true];
|
||||
int double_val_size() const;
|
||||
void clear_double_val();
|
||||
static const int kDoubleValFieldNumber = 6;
|
||||
double double_val(int index) const;
|
||||
void set_double_val(int index, double value);
|
||||
void add_double_val(double value);
|
||||
const ::google::protobuf::RepeatedField< double >&
|
||||
double_val() const;
|
||||
::google::protobuf::RepeatedField< double >*
|
||||
mutable_double_val();
|
||||
|
||||
// repeated int32 int_val = 7 [packed = true];
|
||||
int int_val_size() const;
|
||||
void clear_int_val();
|
||||
static const int kIntValFieldNumber = 7;
|
||||
::google::protobuf::int32 int_val(int index) const;
|
||||
void set_int_val(int index, ::google::protobuf::int32 value);
|
||||
void add_int_val(::google::protobuf::int32 value);
|
||||
const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
|
||||
int_val() const;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
|
||||
mutable_int_val();
|
||||
|
||||
// repeated bytes string_val = 8;
|
||||
int string_val_size() const;
|
||||
void clear_string_val();
|
||||
static const int kStringValFieldNumber = 8;
|
||||
const ::std::string& string_val(int index) const;
|
||||
::std::string* mutable_string_val(int index);
|
||||
void set_string_val(int index, const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_string_val(int index, ::std::string&& value);
|
||||
#endif
|
||||
void set_string_val(int index, const char* value);
|
||||
void set_string_val(int index, const void* value, size_t size);
|
||||
::std::string* add_string_val();
|
||||
void add_string_val(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void add_string_val(::std::string&& value);
|
||||
#endif
|
||||
void add_string_val(const char* value);
|
||||
void add_string_val(const void* value, size_t size);
|
||||
const ::google::protobuf::RepeatedPtrField< ::std::string>& string_val() const;
|
||||
::google::protobuf::RepeatedPtrField< ::std::string>* mutable_string_val();
|
||||
|
||||
// repeated float scomplex_val = 9 [packed = true];
|
||||
int scomplex_val_size() const;
|
||||
void clear_scomplex_val();
|
||||
static const int kScomplexValFieldNumber = 9;
|
||||
float scomplex_val(int index) const;
|
||||
void set_scomplex_val(int index, float value);
|
||||
void add_scomplex_val(float value);
|
||||
const ::google::protobuf::RepeatedField< float >&
|
||||
scomplex_val() const;
|
||||
::google::protobuf::RepeatedField< float >*
|
||||
mutable_scomplex_val();
|
||||
|
||||
// repeated int64 int64_val = 10 [packed = true];
|
||||
int int64_val_size() const;
|
||||
void clear_int64_val();
|
||||
static const int kInt64ValFieldNumber = 10;
|
||||
::google::protobuf::int64 int64_val(int index) const;
|
||||
void set_int64_val(int index, ::google::protobuf::int64 value);
|
||||
void add_int64_val(::google::protobuf::int64 value);
|
||||
const ::google::protobuf::RepeatedField< ::google::protobuf::int64 >&
|
||||
int64_val() const;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int64 >*
|
||||
mutable_int64_val();
|
||||
|
||||
// repeated bool bool_val = 11 [packed = true];
|
||||
int bool_val_size() const;
|
||||
void clear_bool_val();
|
||||
static const int kBoolValFieldNumber = 11;
|
||||
bool bool_val(int index) const;
|
||||
void set_bool_val(int index, bool value);
|
||||
void add_bool_val(bool value);
|
||||
const ::google::protobuf::RepeatedField< bool >&
|
||||
bool_val() const;
|
||||
::google::protobuf::RepeatedField< bool >*
|
||||
mutable_bool_val();
|
||||
|
||||
// repeated double dcomplex_val = 12 [packed = true];
|
||||
int dcomplex_val_size() const;
|
||||
void clear_dcomplex_val();
|
||||
static const int kDcomplexValFieldNumber = 12;
|
||||
double dcomplex_val(int index) const;
|
||||
void set_dcomplex_val(int index, double value);
|
||||
void add_dcomplex_val(double value);
|
||||
const ::google::protobuf::RepeatedField< double >&
|
||||
dcomplex_val() const;
|
||||
::google::protobuf::RepeatedField< double >*
|
||||
mutable_dcomplex_val();
|
||||
|
||||
// repeated int32 half_val = 13 [packed = true];
|
||||
int half_val_size() const;
|
||||
void clear_half_val();
|
||||
static const int kHalfValFieldNumber = 13;
|
||||
::google::protobuf::int32 half_val(int index) const;
|
||||
void set_half_val(int index, ::google::protobuf::int32 value);
|
||||
void add_half_val(::google::protobuf::int32 value);
|
||||
const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
|
||||
half_val() const;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
|
||||
mutable_half_val();
|
||||
|
||||
// bytes tensor_content = 4;
|
||||
void clear_tensor_content();
|
||||
static const int kTensorContentFieldNumber = 4;
|
||||
const ::std::string& tensor_content() const;
|
||||
void set_tensor_content(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_tensor_content(::std::string&& value);
|
||||
#endif
|
||||
void set_tensor_content(const char* value);
|
||||
void set_tensor_content(const void* value, size_t size);
|
||||
::std::string* mutable_tensor_content();
|
||||
::std::string* release_tensor_content();
|
||||
void set_allocated_tensor_content(::std::string* tensor_content);
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
::std::string* unsafe_arena_release_tensor_content();
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
void unsafe_arena_set_allocated_tensor_content(
|
||||
::std::string* tensor_content);
|
||||
|
||||
// .opencv_tensorflow.TensorShapeProto tensor_shape = 2;
|
||||
bool has_tensor_shape() const;
|
||||
void clear_tensor_shape();
|
||||
static const int kTensorShapeFieldNumber = 2;
|
||||
private:
|
||||
void _slow_mutable_tensor_shape();
|
||||
public:
|
||||
const ::opencv_tensorflow::TensorShapeProto& tensor_shape() const;
|
||||
::opencv_tensorflow::TensorShapeProto* release_tensor_shape();
|
||||
::opencv_tensorflow::TensorShapeProto* mutable_tensor_shape();
|
||||
void set_allocated_tensor_shape(::opencv_tensorflow::TensorShapeProto* tensor_shape);
|
||||
void unsafe_arena_set_allocated_tensor_shape(
|
||||
::opencv_tensorflow::TensorShapeProto* tensor_shape);
|
||||
::opencv_tensorflow::TensorShapeProto* unsafe_arena_release_tensor_shape();
|
||||
|
||||
// .opencv_tensorflow.DataType dtype = 1;
|
||||
void clear_dtype();
|
||||
static const int kDtypeFieldNumber = 1;
|
||||
::opencv_tensorflow::DataType dtype() const;
|
||||
void set_dtype(::opencv_tensorflow::DataType value);
|
||||
|
||||
// int32 version_number = 3;
|
||||
void clear_version_number();
|
||||
static const int kVersionNumberFieldNumber = 3;
|
||||
::google::protobuf::int32 version_number() const;
|
||||
void set_version_number(::google::protobuf::int32 value);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:opencv_tensorflow.TensorProto)
|
||||
private:
|
||||
|
||||
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
|
||||
template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
|
||||
typedef void InternalArenaConstructable_;
|
||||
typedef void DestructorSkippable_;
|
||||
::google::protobuf::RepeatedField< float > float_val_;
|
||||
mutable int _float_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< double > double_val_;
|
||||
mutable int _double_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int32 > int_val_;
|
||||
mutable int _int_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedPtrField< ::std::string> string_val_;
|
||||
::google::protobuf::RepeatedField< float > scomplex_val_;
|
||||
mutable int _scomplex_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int64 > int64_val_;
|
||||
mutable int _int64_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< bool > bool_val_;
|
||||
mutable int _bool_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< double > dcomplex_val_;
|
||||
mutable int _dcomplex_val_cached_byte_size_;
|
||||
::google::protobuf::RepeatedField< ::google::protobuf::int32 > half_val_;
|
||||
mutable int _half_val_cached_byte_size_;
|
||||
::google::protobuf::internal::ArenaStringPtr tensor_content_;
|
||||
::opencv_tensorflow::TensorShapeProto* tensor_shape_;
|
||||
int dtype_;
|
||||
::google::protobuf::int32 version_number_;
|
||||
mutable int _cached_size_;
|
||||
friend struct ::protobuf_tensor_2eproto::TableStruct;
|
||||
friend void ::protobuf_tensor_2eproto::InitDefaultsTensorProtoImpl();
|
||||
};
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
|
||||
#endif // __GNUC__
|
||||
// TensorProto
|
||||
|
||||
// .opencv_tensorflow.DataType dtype = 1;
|
||||
inline void TensorProto::clear_dtype() {
|
||||
dtype_ = 0;
|
||||
}
|
||||
inline ::opencv_tensorflow::DataType TensorProto::dtype() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.dtype)
|
||||
return static_cast< ::opencv_tensorflow::DataType >(dtype_);
|
||||
}
|
||||
inline void TensorProto::set_dtype(::opencv_tensorflow::DataType value) {
|
||||
|
||||
dtype_ = value;
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.dtype)
|
||||
}
|
||||
|
||||
// .opencv_tensorflow.TensorShapeProto tensor_shape = 2;
|
||||
inline bool TensorProto::has_tensor_shape() const {
|
||||
return this != internal_default_instance() && tensor_shape_ != NULL;
|
||||
}
|
||||
inline const ::opencv_tensorflow::TensorShapeProto& TensorProto::tensor_shape() const {
|
||||
const ::opencv_tensorflow::TensorShapeProto* p = tensor_shape_;
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.tensor_shape)
|
||||
return p != NULL ? *p : *reinterpret_cast<const ::opencv_tensorflow::TensorShapeProto*>(
|
||||
&::opencv_tensorflow::_TensorShapeProto_default_instance_);
|
||||
}
|
||||
inline ::opencv_tensorflow::TensorShapeProto* TensorProto::release_tensor_shape() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.TensorProto.tensor_shape)
|
||||
|
||||
::opencv_tensorflow::TensorShapeProto* temp = tensor_shape_;
|
||||
if (GetArenaNoVirtual() != NULL) {
|
||||
temp = ::google::protobuf::internal::DuplicateIfNonNull(temp, NULL);
|
||||
}
|
||||
tensor_shape_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::TensorShapeProto* TensorProto::unsafe_arena_release_tensor_shape() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.TensorProto.tensor_shape)
|
||||
|
||||
::opencv_tensorflow::TensorShapeProto* temp = tensor_shape_;
|
||||
tensor_shape_ = NULL;
|
||||
return temp;
|
||||
}
|
||||
inline ::opencv_tensorflow::TensorShapeProto* TensorProto::mutable_tensor_shape() {
|
||||
|
||||
if (tensor_shape_ == NULL) {
|
||||
_slow_mutable_tensor_shape();
|
||||
}
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.TensorProto.tensor_shape)
|
||||
return tensor_shape_;
|
||||
}
|
||||
inline void TensorProto::set_allocated_tensor_shape(::opencv_tensorflow::TensorShapeProto* tensor_shape) {
|
||||
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
|
||||
if (message_arena == NULL) {
|
||||
delete reinterpret_cast< ::google::protobuf::MessageLite*>(tensor_shape_);
|
||||
}
|
||||
if (tensor_shape) {
|
||||
::google::protobuf::Arena* submessage_arena =
|
||||
reinterpret_cast< ::google::protobuf::MessageLite*>(tensor_shape)->GetArena();
|
||||
if (message_arena != submessage_arena) {
|
||||
tensor_shape = ::google::protobuf::internal::GetOwnedMessage(
|
||||
message_arena, tensor_shape, submessage_arena);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
tensor_shape_ = tensor_shape;
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.TensorProto.tensor_shape)
|
||||
}
|
||||
|
||||
// int32 version_number = 3;
|
||||
inline void TensorProto::clear_version_number() {
|
||||
version_number_ = 0;
|
||||
}
|
||||
inline ::google::protobuf::int32 TensorProto::version_number() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.version_number)
|
||||
return version_number_;
|
||||
}
|
||||
inline void TensorProto::set_version_number(::google::protobuf::int32 value) {
|
||||
|
||||
version_number_ = value;
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.version_number)
|
||||
}
|
||||
|
||||
// bytes tensor_content = 4;
|
||||
inline void TensorProto::clear_tensor_content() {
|
||||
tensor_content_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline const ::std::string& TensorProto::tensor_content() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.tensor_content)
|
||||
return tensor_content_.Get();
|
||||
}
|
||||
inline void TensorProto::set_tensor_content(const ::std::string& value) {
|
||||
|
||||
tensor_content_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void TensorProto::set_tensor_content(::std::string&& value) {
|
||||
|
||||
tensor_content_.Set(
|
||||
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_rvalue:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
#endif
|
||||
inline void TensorProto::set_tensor_content(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
|
||||
tensor_content_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
inline void TensorProto::set_tensor_content(const void* value,
|
||||
size_t size) {
|
||||
|
||||
tensor_content_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
|
||||
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
inline ::std::string* TensorProto::mutable_tensor_content() {
|
||||
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.TensorProto.tensor_content)
|
||||
return tensor_content_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline ::std::string* TensorProto::release_tensor_content() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.TensorProto.tensor_content)
|
||||
|
||||
return tensor_content_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline void TensorProto::set_allocated_tensor_content(::std::string* tensor_content) {
|
||||
if (tensor_content != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
tensor_content_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), tensor_content,
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
inline ::std::string* TensorProto::unsafe_arena_release_tensor_content() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.TensorProto.tensor_content)
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
|
||||
return tensor_content_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
GetArenaNoVirtual());
|
||||
}
|
||||
inline void TensorProto::unsafe_arena_set_allocated_tensor_content(
|
||||
::std::string* tensor_content) {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
if (tensor_content != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
tensor_content_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
tensor_content, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:opencv_tensorflow.TensorProto.tensor_content)
|
||||
}
|
||||
|
||||
// repeated int32 half_val = 13 [packed = true];
|
||||
inline int TensorProto::half_val_size() const {
|
||||
return half_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_half_val() {
|
||||
half_val_.Clear();
|
||||
}
|
||||
inline ::google::protobuf::int32 TensorProto::half_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.half_val)
|
||||
return half_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_half_val(int index, ::google::protobuf::int32 value) {
|
||||
half_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.half_val)
|
||||
}
|
||||
inline void TensorProto::add_half_val(::google::protobuf::int32 value) {
|
||||
half_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.half_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
|
||||
TensorProto::half_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.half_val)
|
||||
return half_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
|
||||
TensorProto::mutable_half_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.half_val)
|
||||
return &half_val_;
|
||||
}
|
||||
|
||||
// repeated float float_val = 5 [packed = true];
|
||||
inline int TensorProto::float_val_size() const {
|
||||
return float_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_float_val() {
|
||||
float_val_.Clear();
|
||||
}
|
||||
inline float TensorProto::float_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.float_val)
|
||||
return float_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_float_val(int index, float value) {
|
||||
float_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.float_val)
|
||||
}
|
||||
inline void TensorProto::add_float_val(float value) {
|
||||
float_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.float_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< float >&
|
||||
TensorProto::float_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.float_val)
|
||||
return float_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< float >*
|
||||
TensorProto::mutable_float_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.float_val)
|
||||
return &float_val_;
|
||||
}
|
||||
|
||||
// repeated double double_val = 6 [packed = true];
|
||||
inline int TensorProto::double_val_size() const {
|
||||
return double_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_double_val() {
|
||||
double_val_.Clear();
|
||||
}
|
||||
inline double TensorProto::double_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.double_val)
|
||||
return double_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_double_val(int index, double value) {
|
||||
double_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.double_val)
|
||||
}
|
||||
inline void TensorProto::add_double_val(double value) {
|
||||
double_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.double_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< double >&
|
||||
TensorProto::double_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.double_val)
|
||||
return double_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< double >*
|
||||
TensorProto::mutable_double_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.double_val)
|
||||
return &double_val_;
|
||||
}
|
||||
|
||||
// repeated int32 int_val = 7 [packed = true];
|
||||
inline int TensorProto::int_val_size() const {
|
||||
return int_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_int_val() {
|
||||
int_val_.Clear();
|
||||
}
|
||||
inline ::google::protobuf::int32 TensorProto::int_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.int_val)
|
||||
return int_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_int_val(int index, ::google::protobuf::int32 value) {
|
||||
int_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.int_val)
|
||||
}
|
||||
inline void TensorProto::add_int_val(::google::protobuf::int32 value) {
|
||||
int_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.int_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
|
||||
TensorProto::int_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.int_val)
|
||||
return int_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
|
||||
TensorProto::mutable_int_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.int_val)
|
||||
return &int_val_;
|
||||
}
|
||||
|
||||
// repeated bytes string_val = 8;
|
||||
inline int TensorProto::string_val_size() const {
|
||||
return string_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_string_val() {
|
||||
string_val_.Clear();
|
||||
}
|
||||
inline const ::std::string& TensorProto::string_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.string_val)
|
||||
return string_val_.Get(index);
|
||||
}
|
||||
inline ::std::string* TensorProto::mutable_string_val(int index) {
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.TensorProto.string_val)
|
||||
return string_val_.Mutable(index);
|
||||
}
|
||||
inline void TensorProto::set_string_val(int index, const ::std::string& value) {
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.string_val)
|
||||
string_val_.Mutable(index)->assign(value);
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void TensorProto::set_string_val(int index, ::std::string&& value) {
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.string_val)
|
||||
string_val_.Mutable(index)->assign(std::move(value));
|
||||
}
|
||||
#endif
|
||||
inline void TensorProto::set_string_val(int index, const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
string_val_.Mutable(index)->assign(value);
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
inline void TensorProto::set_string_val(int index, const void* value, size_t size) {
|
||||
string_val_.Mutable(index)->assign(
|
||||
reinterpret_cast<const char*>(value), size);
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
inline ::std::string* TensorProto::add_string_val() {
|
||||
// @@protoc_insertion_point(field_add_mutable:opencv_tensorflow.TensorProto.string_val)
|
||||
return string_val_.Add();
|
||||
}
|
||||
inline void TensorProto::add_string_val(const ::std::string& value) {
|
||||
string_val_.Add()->assign(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void TensorProto::add_string_val(::std::string&& value) {
|
||||
string_val_.Add(std::move(value));
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
#endif
|
||||
inline void TensorProto::add_string_val(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
string_val_.Add()->assign(value);
|
||||
// @@protoc_insertion_point(field_add_char:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
inline void TensorProto::add_string_val(const void* value, size_t size) {
|
||||
string_val_.Add()->assign(reinterpret_cast<const char*>(value), size);
|
||||
// @@protoc_insertion_point(field_add_pointer:opencv_tensorflow.TensorProto.string_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedPtrField< ::std::string>&
|
||||
TensorProto::string_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.string_val)
|
||||
return string_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedPtrField< ::std::string>*
|
||||
TensorProto::mutable_string_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.string_val)
|
||||
return &string_val_;
|
||||
}
|
||||
|
||||
// repeated float scomplex_val = 9 [packed = true];
|
||||
inline int TensorProto::scomplex_val_size() const {
|
||||
return scomplex_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_scomplex_val() {
|
||||
scomplex_val_.Clear();
|
||||
}
|
||||
inline float TensorProto::scomplex_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.scomplex_val)
|
||||
return scomplex_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_scomplex_val(int index, float value) {
|
||||
scomplex_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.scomplex_val)
|
||||
}
|
||||
inline void TensorProto::add_scomplex_val(float value) {
|
||||
scomplex_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.scomplex_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< float >&
|
||||
TensorProto::scomplex_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.scomplex_val)
|
||||
return scomplex_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< float >*
|
||||
TensorProto::mutable_scomplex_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.scomplex_val)
|
||||
return &scomplex_val_;
|
||||
}
|
||||
|
||||
// repeated int64 int64_val = 10 [packed = true];
|
||||
inline int TensorProto::int64_val_size() const {
|
||||
return int64_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_int64_val() {
|
||||
int64_val_.Clear();
|
||||
}
|
||||
inline ::google::protobuf::int64 TensorProto::int64_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.int64_val)
|
||||
return int64_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_int64_val(int index, ::google::protobuf::int64 value) {
|
||||
int64_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.int64_val)
|
||||
}
|
||||
inline void TensorProto::add_int64_val(::google::protobuf::int64 value) {
|
||||
int64_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.int64_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< ::google::protobuf::int64 >&
|
||||
TensorProto::int64_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.int64_val)
|
||||
return int64_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< ::google::protobuf::int64 >*
|
||||
TensorProto::mutable_int64_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.int64_val)
|
||||
return &int64_val_;
|
||||
}
|
||||
|
||||
// repeated bool bool_val = 11 [packed = true];
|
||||
inline int TensorProto::bool_val_size() const {
|
||||
return bool_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_bool_val() {
|
||||
bool_val_.Clear();
|
||||
}
|
||||
inline bool TensorProto::bool_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.bool_val)
|
||||
return bool_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_bool_val(int index, bool value) {
|
||||
bool_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.bool_val)
|
||||
}
|
||||
inline void TensorProto::add_bool_val(bool value) {
|
||||
bool_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.bool_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< bool >&
|
||||
TensorProto::bool_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.bool_val)
|
||||
return bool_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< bool >*
|
||||
TensorProto::mutable_bool_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.bool_val)
|
||||
return &bool_val_;
|
||||
}
|
||||
|
||||
// repeated double dcomplex_val = 12 [packed = true];
|
||||
inline int TensorProto::dcomplex_val_size() const {
|
||||
return dcomplex_val_.size();
|
||||
}
|
||||
inline void TensorProto::clear_dcomplex_val() {
|
||||
dcomplex_val_.Clear();
|
||||
}
|
||||
inline double TensorProto::dcomplex_val(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorProto.dcomplex_val)
|
||||
return dcomplex_val_.Get(index);
|
||||
}
|
||||
inline void TensorProto::set_dcomplex_val(int index, double value) {
|
||||
dcomplex_val_.Set(index, value);
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorProto.dcomplex_val)
|
||||
}
|
||||
inline void TensorProto::add_dcomplex_val(double value) {
|
||||
dcomplex_val_.Add(value);
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorProto.dcomplex_val)
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedField< double >&
|
||||
TensorProto::dcomplex_val() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorProto.dcomplex_val)
|
||||
return dcomplex_val_;
|
||||
}
|
||||
inline ::google::protobuf::RepeatedField< double >*
|
||||
TensorProto::mutable_dcomplex_val() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorProto.dcomplex_val)
|
||||
return &dcomplex_val_;
|
||||
}
|
||||
|
||||
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif // __GNUC__

// @@protoc_insertion_point(namespace_scope)

} // namespace opencv_tensorflow

// @@protoc_insertion_point(global_scope)

#endif // PROTOBUF_tensor_2eproto__INCLUDED
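// --- Editor's note (not part of the vendored file) ---------------------------
// A minimal, hypothetical sketch of filling in a TensorProto through the
// accessors declared above. DT_FLOAT and TensorShapeProto::add_dim() come from
// the companion types.pb.h / tensor_shape.pb.h headers and are assumed to
// exist in their usual protoc-generated form; the dimension name and the
// values below are illustrative only.
#include <iostream>

#include "tensor.pb.h"

int example_tensor_usage() {
  opencv_tensorflow::TensorProto tensor;

  tensor.set_dtype(opencv_tensorflow::DT_FLOAT);  // .opencv_tensorflow.DataType dtype = 1;
  tensor.set_version_number(0);                   // int32 version_number = 3;

  // .opencv_tensorflow.TensorShapeProto tensor_shape = 2;
  opencv_tensorflow::TensorShapeProto_Dim* dim =
      tensor.mutable_tensor_shape()->add_dim();   // add_dim() assumed from tensor_shape.pb.h
  dim->set_size(4);
  dim->set_name("channels");

  // repeated float float_val = 5 [packed = true];
  for (int i = 0; i < 4; ++i) {
    tensor.add_float_val(0.25f * i);
  }

  std::cout << "has_tensor_shape=" << tensor.has_tensor_shape()
            << " float_val_size=" << tensor.float_val_size() << std::endl;
  return 0;
}
// -----------------------------------------------------------------------------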
783    3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/tensor_shape.pb.cc (vendored, new file)
@@ -0,0 +1,783 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: tensor_shape.proto
|
||||
|
||||
#include "tensor_shape.pb.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/port.h>
|
||||
#include <google/protobuf/stubs/once.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/wire_format_lite_inl.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/generated_message_reflection.h>
|
||||
#include <google/protobuf/reflection_ops.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
// This is a temporary google only hack
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
#include "third_party/protobuf/version.h"
|
||||
#endif
|
||||
// @@protoc_insertion_point(includes)
|
||||
namespace opencv_tensorflow {
|
||||
class TensorShapeProto_DimDefaultTypeInternal {
|
||||
public:
|
||||
::google::protobuf::internal::ExplicitlyConstructed<TensorShapeProto_Dim>
|
||||
_instance;
|
||||
} _TensorShapeProto_Dim_default_instance_;
|
||||
class TensorShapeProtoDefaultTypeInternal {
|
||||
public:
|
||||
::google::protobuf::internal::ExplicitlyConstructed<TensorShapeProto>
|
||||
_instance;
|
||||
} _TensorShapeProto_default_instance_;
|
||||
} // namespace opencv_tensorflow
|
||||
namespace protobuf_tensor_5fshape_2eproto {
|
||||
void InitDefaultsTensorShapeProto_DimImpl() {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
::google::protobuf::internal::InitProtobufDefaultsForceUnique();
|
||||
#else
|
||||
::google::protobuf::internal::InitProtobufDefaults();
|
||||
#endif // GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
{
|
||||
void* ptr = &::opencv_tensorflow::_TensorShapeProto_Dim_default_instance_;
|
||||
new (ptr) ::opencv_tensorflow::TensorShapeProto_Dim();
|
||||
::google::protobuf::internal::OnShutdownDestroyMessage(ptr);
|
||||
}
|
||||
::opencv_tensorflow::TensorShapeProto_Dim::InitAsDefaultInstance();
|
||||
}
|
||||
|
||||
void InitDefaultsTensorShapeProto_Dim() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &InitDefaultsTensorShapeProto_DimImpl);
|
||||
}
|
||||
|
||||
void InitDefaultsTensorShapeProtoImpl() {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
::google::protobuf::internal::InitProtobufDefaultsForceUnique();
|
||||
#else
|
||||
::google::protobuf::internal::InitProtobufDefaults();
|
||||
#endif // GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto_Dim();
|
||||
{
|
||||
void* ptr = &::opencv_tensorflow::_TensorShapeProto_default_instance_;
|
||||
new (ptr) ::opencv_tensorflow::TensorShapeProto();
|
||||
::google::protobuf::internal::OnShutdownDestroyMessage(ptr);
|
||||
}
|
||||
::opencv_tensorflow::TensorShapeProto::InitAsDefaultInstance();
|
||||
}
|
||||
|
||||
void InitDefaultsTensorShapeProto() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &InitDefaultsTensorShapeProtoImpl);
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata file_level_metadata[2];
|
||||
|
||||
const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
~0u, // no _has_bits_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto_Dim, _internal_metadata_),
|
||||
~0u, // no _extensions_
|
||||
~0u, // no _oneof_case_
|
||||
~0u, // no _weak_field_map_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto_Dim, size_),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto_Dim, name_),
|
||||
~0u, // no _has_bits_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto, _internal_metadata_),
|
||||
~0u, // no _extensions_
|
||||
~0u, // no _oneof_case_
|
||||
~0u, // no _weak_field_map_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto, dim_),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::TensorShapeProto, unknown_rank_),
|
||||
};
|
||||
static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
{ 0, -1, sizeof(::opencv_tensorflow::TensorShapeProto_Dim)},
|
||||
{ 7, -1, sizeof(::opencv_tensorflow::TensorShapeProto)},
|
||||
};
|
||||
|
||||
static ::google::protobuf::Message const * const file_default_instances[] = {
|
||||
reinterpret_cast<const ::google::protobuf::Message*>(&::opencv_tensorflow::_TensorShapeProto_Dim_default_instance_),
|
||||
reinterpret_cast<const ::google::protobuf::Message*>(&::opencv_tensorflow::_TensorShapeProto_default_instance_),
|
||||
};
|
||||
|
||||
void protobuf_AssignDescriptors() {
|
||||
AddDescriptors();
|
||||
::google::protobuf::MessageFactory* factory = NULL;
|
||||
AssignDescriptors(
|
||||
"tensor_shape.proto", schemas, file_default_instances, TableStruct::offsets, factory,
|
||||
file_level_metadata, NULL, NULL);
|
||||
}
|
||||
|
||||
void protobuf_AssignDescriptorsOnce() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors);
|
||||
}
|
||||
|
||||
void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD;
|
||||
void protobuf_RegisterTypes(const ::std::string&) {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 2);
|
||||
}
|
||||
|
||||
void AddDescriptorsImpl() {
|
||||
InitDefaults();
|
||||
static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
"\n\022tensor_shape.proto\022\021opencv_tensorflow\""
|
||||
"\201\001\n\020TensorShapeProto\0224\n\003dim\030\002 \003(\0132\'.open"
|
||||
"cv_tensorflow.TensorShapeProto.Dim\022\024\n\014un"
|
||||
"known_rank\030\003 \001(\010\032!\n\003Dim\022\014\n\004size\030\001 \001(\003\022\014\n"
|
||||
"\004name\030\002 \001(\tB2\n\030org.tensorflow.frameworkB"
|
||||
"\021TensorShapeProtosP\001\370\001\001b\006proto3"
|
||||
};
|
||||
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
|
||||
descriptor, 231);
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
|
||||
"tensor_shape.proto", &protobuf_RegisterTypes);
|
||||
}
|
||||
|
||||
void AddDescriptors() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
|
||||
}
|
||||
// Force AddDescriptors() to be called at dynamic initialization time.
|
||||
struct StaticDescriptorInitializer {
|
||||
StaticDescriptorInitializer() {
|
||||
AddDescriptors();
|
||||
}
|
||||
} static_descriptor_initializer;
|
||||
} // namespace protobuf_tensor_5fshape_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
void TensorShapeProto_Dim::InitAsDefaultInstance() {
|
||||
}
|
||||
#if !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
const int TensorShapeProto_Dim::kSizeFieldNumber;
|
||||
const int TensorShapeProto_Dim::kNameFieldNumber;
|
||||
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
|
||||
TensorShapeProto_Dim::TensorShapeProto_Dim()
|
||||
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
|
||||
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto_Dim();
|
||||
}
|
||||
SharedCtor();
|
||||
// @@protoc_insertion_point(constructor:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
}
|
||||
TensorShapeProto_Dim::TensorShapeProto_Dim(::google::protobuf::Arena* arena)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(arena) {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto_Dim();
|
||||
SharedCtor();
|
||||
RegisterArenaDtor(arena);
|
||||
// @@protoc_insertion_point(arena_constructor:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
}
|
||||
TensorShapeProto_Dim::TensorShapeProto_Dim(const TensorShapeProto_Dim& from)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(NULL),
|
||||
_cached_size_(0) {
|
||||
_internal_metadata_.MergeFrom(from._internal_metadata_);
|
||||
name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
|
||||
if (from.name().size() > 0) {
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name(),
|
||||
GetArenaNoVirtual());
|
||||
}
|
||||
size_ = from.size_;
|
||||
// @@protoc_insertion_point(copy_constructor:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::SharedCtor() {
|
||||
name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
|
||||
size_ = GOOGLE_LONGLONG(0);
|
||||
_cached_size_ = 0;
|
||||
}
|
||||
|
||||
TensorShapeProto_Dim::~TensorShapeProto_Dim() {
|
||||
// @@protoc_insertion_point(destructor:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
SharedDtor();
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::SharedDtor() {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() == NULL);
|
||||
name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::ArenaDtor(void* object) {
|
||||
TensorShapeProto_Dim* _this = reinterpret_cast< TensorShapeProto_Dim* >(object);
|
||||
(void)_this;
|
||||
}
|
||||
void TensorShapeProto_Dim::RegisterArenaDtor(::google::protobuf::Arena* arena) {
|
||||
}
|
||||
void TensorShapeProto_Dim::SetCachedSize(int size) const {
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
}
|
||||
const ::google::protobuf::Descriptor* TensorShapeProto_Dim::descriptor() {
|
||||
::protobuf_tensor_5fshape_2eproto::protobuf_AssignDescriptorsOnce();
|
||||
return ::protobuf_tensor_5fshape_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
|
||||
}
|
||||
|
||||
const TensorShapeProto_Dim& TensorShapeProto_Dim::default_instance() {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto_Dim();
|
||||
return *internal_default_instance();
|
||||
}
|
||||
|
||||
TensorShapeProto_Dim* TensorShapeProto_Dim::New(::google::protobuf::Arena* arena) const {
|
||||
return ::google::protobuf::Arena::CreateMessage<TensorShapeProto_Dim>(arena);
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::Clear() {
|
||||
// @@protoc_insertion_point(message_clear_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
// Prevent compiler warnings about cached_has_bits being unused
|
||||
(void) cached_has_bits;
|
||||
|
||||
name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
size_ = GOOGLE_LONGLONG(0);
|
||||
_internal_metadata_.Clear();
|
||||
}
|
||||
|
||||
bool TensorShapeProto_Dim::MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) {
|
||||
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
|
||||
::google::protobuf::uint32 tag;
|
||||
// @@protoc_insertion_point(parse_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
for (;;) {
|
||||
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
|
||||
tag = p.first;
|
||||
if (!p.second) goto handle_unusual;
|
||||
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
|
||||
// int64 size = 1;
|
||||
case 1: {
|
||||
if (static_cast< ::google::protobuf::uint8>(tag) ==
|
||||
static_cast< ::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {
|
||||
|
||||
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
|
||||
::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
|
||||
input, &size_)));
|
||||
} else {
|
||||
goto handle_unusual;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// string name = 2;
|
||||
case 2: {
|
||||
if (static_cast< ::google::protobuf::uint8>(tag) ==
|
||||
static_cast< ::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
|
||||
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
|
||||
input, this->mutable_name()));
|
||||
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
|
||||
this->name().data(), static_cast<int>(this->name().length()),
|
||||
::google::protobuf::internal::WireFormatLite::PARSE,
|
||||
"opencv_tensorflow.TensorShapeProto.Dim.name"));
|
||||
} else {
|
||||
goto handle_unusual;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
handle_unusual:
|
||||
if (tag == 0) {
|
||||
goto success;
|
||||
}
|
||||
DO_(::google::protobuf::internal::WireFormat::SkipField(
|
||||
input, tag, _internal_metadata_.mutable_unknown_fields()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
success:
|
||||
// @@protoc_insertion_point(parse_success:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
return true;
|
||||
failure:
|
||||
// @@protoc_insertion_point(parse_failure:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
return false;
|
||||
#undef DO_
|
||||
}
|
||||
|
||||
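// --- Editor's note (not part of the vendored file) ---------------------------
// The parser above and the serializers below are normally reached through the
// generic google::protobuf::Message entry points. A small, hypothetical round
// trip; the size and name values are illustrative only.
#include <cassert>
#include <string>

#include "tensor_shape.pb.h"

void example_dim_round_trip() {
  opencv_tensorflow::TensorShapeProto_Dim dim;
  dim.set_size(32);       // int64 size = 1;
  dim.set_name("batch");  // string name = 2;

  std::string wire;
  dim.SerializeToString(&wire);  // ends up in SerializeWithCachedSizes()

  opencv_tensorflow::TensorShapeProto_Dim parsed;
  const bool ok = parsed.ParseFromString(wire);  // drives MergePartialFromCodedStream()
  assert(ok);
  assert(parsed.size() == 32 && parsed.name() == "batch");
  (void)ok;
}
// -----------------------------------------------------------------------------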
void TensorShapeProto_Dim::SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const {
|
||||
// @@protoc_insertion_point(serialize_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
// int64 size = 1;
|
||||
if (this->size() != 0) {
|
||||
::google::protobuf::internal::WireFormatLite::WriteInt64(1, this->size(), output);
|
||||
}
|
||||
|
||||
// string name = 2;
|
||||
if (this->name().size() > 0) {
|
||||
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
|
||||
this->name().data(), static_cast<int>(this->name().length()),
|
||||
::google::protobuf::internal::WireFormatLite::SERIALIZE,
|
||||
"opencv_tensorflow.TensorShapeProto.Dim.name");
|
||||
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
|
||||
2, this->name(), output);
|
||||
}
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output);
|
||||
}
|
||||
// @@protoc_insertion_point(serialize_end:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
}
|
||||
|
||||
::google::protobuf::uint8* TensorShapeProto_Dim::InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const {
|
||||
(void)deterministic; // Unused
|
||||
// @@protoc_insertion_point(serialize_to_array_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
// int64 size = 1;
|
||||
if (this->size() != 0) {
|
||||
target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(1, this->size(), target);
|
||||
}
|
||||
|
||||
// string name = 2;
|
||||
if (this->name().size() > 0) {
|
||||
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
|
||||
this->name().data(), static_cast<int>(this->name().length()),
|
||||
::google::protobuf::internal::WireFormatLite::SERIALIZE,
|
||||
"opencv_tensorflow.TensorShapeProto.Dim.name");
|
||||
target =
|
||||
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
|
||||
2, this->name(), target);
|
||||
}
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target);
|
||||
}
|
||||
// @@protoc_insertion_point(serialize_to_array_end:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
return target;
|
||||
}
|
||||
|
||||
size_t TensorShapeProto_Dim::ByteSizeLong() const {
|
||||
// @@protoc_insertion_point(message_byte_size_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
size_t total_size = 0;
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
total_size +=
|
||||
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()));
|
||||
}
|
||||
// string name = 2;
|
||||
if (this->name().size() > 0) {
|
||||
total_size += 1 +
|
||||
::google::protobuf::internal::WireFormatLite::StringSize(
|
||||
this->name());
|
||||
}
|
||||
|
||||
// int64 size = 1;
|
||||
if (this->size() != 0) {
|
||||
total_size += 1 +
|
||||
::google::protobuf::internal::WireFormatLite::Int64Size(
|
||||
this->size());
|
||||
}
|
||||
|
||||
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = cached_size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
return total_size;
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::MergeFrom(const ::google::protobuf::Message& from) {
|
||||
// @@protoc_insertion_point(generalized_merge_from_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
GOOGLE_DCHECK_NE(&from, this);
|
||||
const TensorShapeProto_Dim* source =
|
||||
::google::protobuf::internal::DynamicCastToGenerated<const TensorShapeProto_Dim>(
|
||||
&from);
|
||||
if (source == NULL) {
|
||||
// @@protoc_insertion_point(generalized_merge_from_cast_fail:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
::google::protobuf::internal::ReflectionOps::Merge(from, this);
|
||||
} else {
|
||||
// @@protoc_insertion_point(generalized_merge_from_cast_success:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
MergeFrom(*source);
|
||||
}
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::MergeFrom(const TensorShapeProto_Dim& from) {
|
||||
// @@protoc_insertion_point(class_specific_merge_from_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
GOOGLE_DCHECK_NE(&from, this);
|
||||
_internal_metadata_.MergeFrom(from._internal_metadata_);
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
if (from.name().size() > 0) {
|
||||
set_name(from.name());
|
||||
}
|
||||
if (from.size() != 0) {
|
||||
set_size(from.size());
|
||||
}
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::CopyFrom(const ::google::protobuf::Message& from) {
|
||||
// @@protoc_insertion_point(generalized_copy_from_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::CopyFrom(const TensorShapeProto_Dim& from) {
|
||||
// @@protoc_insertion_point(class_specific_copy_from_start:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
bool TensorShapeProto_Dim::IsInitialized() const {
|
||||
return true;
|
||||
}
|
||||
|
||||
void TensorShapeProto_Dim::Swap(TensorShapeProto_Dim* other) {
|
||||
if (other == this) return;
|
||||
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
|
||||
InternalSwap(other);
|
||||
} else {
|
||||
TensorShapeProto_Dim* temp = New(GetArenaNoVirtual());
|
||||
temp->MergeFrom(*other);
|
||||
other->CopyFrom(*this);
|
||||
InternalSwap(temp);
|
||||
if (GetArenaNoVirtual() == NULL) {
|
||||
delete temp;
|
||||
}
|
||||
}
|
||||
}
|
||||
void TensorShapeProto_Dim::UnsafeArenaSwap(TensorShapeProto_Dim* other) {
|
||||
if (other == this) return;
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
|
||||
InternalSwap(other);
|
||||
}
|
||||
void TensorShapeProto_Dim::InternalSwap(TensorShapeProto_Dim* other) {
|
||||
using std::swap;
|
||||
name_.Swap(&other->name_);
|
||||
swap(size_, other->size_);
|
||||
_internal_metadata_.Swap(&other->_internal_metadata_);
|
||||
swap(_cached_size_, other->_cached_size_);
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata TensorShapeProto_Dim::GetMetadata() const {
|
||||
protobuf_tensor_5fshape_2eproto::protobuf_AssignDescriptorsOnce();
|
||||
return ::protobuf_tensor_5fshape_2eproto::file_level_metadata[kIndexInFileMessages];
|
||||
}
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
void TensorShapeProto::InitAsDefaultInstance() {
|
||||
}
|
||||
#if !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
const int TensorShapeProto::kDimFieldNumber;
|
||||
const int TensorShapeProto::kUnknownRankFieldNumber;
|
||||
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
|
||||
TensorShapeProto::TensorShapeProto()
|
||||
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
|
||||
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto();
|
||||
}
|
||||
SharedCtor();
|
||||
// @@protoc_insertion_point(constructor:opencv_tensorflow.TensorShapeProto)
|
||||
}
|
||||
TensorShapeProto::TensorShapeProto(::google::protobuf::Arena* arena)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(arena),
|
||||
dim_(arena) {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto();
|
||||
SharedCtor();
|
||||
RegisterArenaDtor(arena);
|
||||
// @@protoc_insertion_point(arena_constructor:opencv_tensorflow.TensorShapeProto)
|
||||
}
|
||||
TensorShapeProto::TensorShapeProto(const TensorShapeProto& from)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(NULL),
|
||||
dim_(from.dim_),
|
||||
_cached_size_(0) {
|
||||
_internal_metadata_.MergeFrom(from._internal_metadata_);
|
||||
unknown_rank_ = from.unknown_rank_;
|
||||
// @@protoc_insertion_point(copy_constructor:opencv_tensorflow.TensorShapeProto)
|
||||
}
|
||||
|
||||
void TensorShapeProto::SharedCtor() {
|
||||
unknown_rank_ = false;
|
||||
_cached_size_ = 0;
|
||||
}
|
||||
|
||||
TensorShapeProto::~TensorShapeProto() {
|
||||
// @@protoc_insertion_point(destructor:opencv_tensorflow.TensorShapeProto)
|
||||
SharedDtor();
|
||||
}
|
||||
|
||||
void TensorShapeProto::SharedDtor() {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() == NULL);
|
||||
}
|
||||
|
||||
void TensorShapeProto::ArenaDtor(void* object) {
|
||||
TensorShapeProto* _this = reinterpret_cast< TensorShapeProto* >(object);
|
||||
(void)_this;
|
||||
}
|
||||
void TensorShapeProto::RegisterArenaDtor(::google::protobuf::Arena* arena) {
|
||||
}
|
||||
void TensorShapeProto::SetCachedSize(int size) const {
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
}
|
||||
const ::google::protobuf::Descriptor* TensorShapeProto::descriptor() {
|
||||
::protobuf_tensor_5fshape_2eproto::protobuf_AssignDescriptorsOnce();
|
||||
return ::protobuf_tensor_5fshape_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
|
||||
}
|
||||
|
||||
const TensorShapeProto& TensorShapeProto::default_instance() {
|
||||
::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto();
|
||||
return *internal_default_instance();
|
||||
}
|
||||
|
||||
TensorShapeProto* TensorShapeProto::New(::google::protobuf::Arena* arena) const {
|
||||
return ::google::protobuf::Arena::CreateMessage<TensorShapeProto>(arena);
|
||||
}
|
||||
|
||||
void TensorShapeProto::Clear() {
|
||||
// @@protoc_insertion_point(message_clear_start:opencv_tensorflow.TensorShapeProto)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
// Prevent compiler warnings about cached_has_bits being unused
|
||||
(void) cached_has_bits;
|
||||
|
||||
dim_.Clear();
|
||||
unknown_rank_ = false;
|
||||
_internal_metadata_.Clear();
|
||||
}
|
||||
|
||||
bool TensorShapeProto::MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) {
|
||||
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
|
||||
::google::protobuf::uint32 tag;
|
||||
// @@protoc_insertion_point(parse_start:opencv_tensorflow.TensorShapeProto)
|
||||
for (;;) {
|
||||
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
|
||||
tag = p.first;
|
||||
if (!p.second) goto handle_unusual;
|
||||
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
case 2: {
|
||||
if (static_cast< ::google::protobuf::uint8>(tag) ==
|
||||
static_cast< ::google::protobuf::uint8>(18u /* 18 & 0xFF */)) {
|
||||
DO_(::google::protobuf::internal::WireFormatLite::ReadMessage(input, add_dim()));
|
||||
} else {
|
||||
goto handle_unusual;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
case 3: {
|
||||
if (static_cast< ::google::protobuf::uint8>(tag) ==
|
||||
static_cast< ::google::protobuf::uint8>(24u /* 24 & 0xFF */)) {
|
||||
|
||||
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
|
||||
bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
|
||||
input, &unknown_rank_)));
|
||||
} else {
|
||||
goto handle_unusual;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
handle_unusual:
|
||||
if (tag == 0) {
|
||||
goto success;
|
||||
}
|
||||
DO_(::google::protobuf::internal::WireFormat::SkipField(
|
||||
input, tag, _internal_metadata_.mutable_unknown_fields()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
success:
|
||||
// @@protoc_insertion_point(parse_success:opencv_tensorflow.TensorShapeProto)
|
||||
return true;
|
||||
failure:
|
||||
// @@protoc_insertion_point(parse_failure:opencv_tensorflow.TensorShapeProto)
|
||||
return false;
|
||||
#undef DO_
|
||||
}
|
||||
|
||||
void TensorShapeProto::SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const {
|
||||
// @@protoc_insertion_point(serialize_start:opencv_tensorflow.TensorShapeProto)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
for (unsigned int i = 0,
|
||||
n = static_cast<unsigned int>(this->dim_size()); i < n; i++) {
|
||||
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
|
||||
2, this->dim(static_cast<int>(i)), output);
|
||||
}
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
if (this->unknown_rank() != 0) {
|
||||
::google::protobuf::internal::WireFormatLite::WriteBool(3, this->unknown_rank(), output);
|
||||
}
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output);
|
||||
}
|
||||
// @@protoc_insertion_point(serialize_end:opencv_tensorflow.TensorShapeProto)
|
||||
}
|
||||
|
||||
::google::protobuf::uint8* TensorShapeProto::InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const {
|
||||
(void)deterministic; // Unused
|
||||
// @@protoc_insertion_point(serialize_to_array_start:opencv_tensorflow.TensorShapeProto)
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
for (unsigned int i = 0,
|
||||
n = static_cast<unsigned int>(this->dim_size()); i < n; i++) {
|
||||
target = ::google::protobuf::internal::WireFormatLite::
|
||||
InternalWriteMessageToArray(
|
||||
2, this->dim(static_cast<int>(i)), deterministic, target);
|
||||
}
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
if (this->unknown_rank() != 0) {
|
||||
target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(3, this->unknown_rank(), target);
|
||||
}
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target);
|
||||
}
|
||||
// @@protoc_insertion_point(serialize_to_array_end:opencv_tensorflow.TensorShapeProto)
|
||||
return target;
|
||||
}
|
||||
|
||||
size_t TensorShapeProto::ByteSizeLong() const {
|
||||
// @@protoc_insertion_point(message_byte_size_start:opencv_tensorflow.TensorShapeProto)
|
||||
size_t total_size = 0;
|
||||
|
||||
if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
|
||||
total_size +=
|
||||
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
|
||||
(::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()));
|
||||
}
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
{
|
||||
unsigned int count = static_cast<unsigned int>(this->dim_size());
|
||||
total_size += 1UL * count;
|
||||
for (unsigned int i = 0; i < count; i++) {
|
||||
total_size +=
|
||||
::google::protobuf::internal::WireFormatLite::MessageSize(
|
||||
this->dim(static_cast<int>(i)));
|
||||
}
|
||||
}
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
if (this->unknown_rank() != 0) {
|
||||
total_size += 1 + 1;
|
||||
}
|
||||
|
||||
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = cached_size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
return total_size;
|
||||
}
|
||||
|
||||
void TensorShapeProto::MergeFrom(const ::google::protobuf::Message& from) {
|
||||
// @@protoc_insertion_point(generalized_merge_from_start:opencv_tensorflow.TensorShapeProto)
|
||||
GOOGLE_DCHECK_NE(&from, this);
|
||||
const TensorShapeProto* source =
|
||||
::google::protobuf::internal::DynamicCastToGenerated<const TensorShapeProto>(
|
||||
&from);
|
||||
if (source == NULL) {
|
||||
// @@protoc_insertion_point(generalized_merge_from_cast_fail:opencv_tensorflow.TensorShapeProto)
|
||||
::google::protobuf::internal::ReflectionOps::Merge(from, this);
|
||||
} else {
|
||||
// @@protoc_insertion_point(generalized_merge_from_cast_success:opencv_tensorflow.TensorShapeProto)
|
||||
MergeFrom(*source);
|
||||
}
|
||||
}
|
||||
|
||||
void TensorShapeProto::MergeFrom(const TensorShapeProto& from) {
|
||||
// @@protoc_insertion_point(class_specific_merge_from_start:opencv_tensorflow.TensorShapeProto)
|
||||
GOOGLE_DCHECK_NE(&from, this);
|
||||
_internal_metadata_.MergeFrom(from._internal_metadata_);
|
||||
::google::protobuf::uint32 cached_has_bits = 0;
|
||||
(void) cached_has_bits;
|
||||
|
||||
dim_.MergeFrom(from.dim_);
|
||||
if (from.unknown_rank() != 0) {
|
||||
set_unknown_rank(from.unknown_rank());
|
||||
}
|
||||
}
|
||||
|
||||
void TensorShapeProto::CopyFrom(const ::google::protobuf::Message& from) {
|
||||
// @@protoc_insertion_point(generalized_copy_from_start:opencv_tensorflow.TensorShapeProto)
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void TensorShapeProto::CopyFrom(const TensorShapeProto& from) {
|
||||
// @@protoc_insertion_point(class_specific_copy_from_start:opencv_tensorflow.TensorShapeProto)
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
bool TensorShapeProto::IsInitialized() const {
|
||||
return true;
|
||||
}
|
||||
|
||||
void TensorShapeProto::Swap(TensorShapeProto* other) {
|
||||
if (other == this) return;
|
||||
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
|
||||
InternalSwap(other);
|
||||
} else {
|
||||
TensorShapeProto* temp = New(GetArenaNoVirtual());
|
||||
temp->MergeFrom(*other);
|
||||
other->CopyFrom(*this);
|
||||
InternalSwap(temp);
|
||||
if (GetArenaNoVirtual() == NULL) {
|
||||
delete temp;
|
||||
}
|
||||
}
|
||||
}
|
||||
void TensorShapeProto::UnsafeArenaSwap(TensorShapeProto* other) {
|
||||
if (other == this) return;
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
|
||||
InternalSwap(other);
|
||||
}
|
||||
void TensorShapeProto::InternalSwap(TensorShapeProto* other) {
|
||||
using std::swap;
|
||||
dim_.InternalSwap(&other->dim_);
|
||||
swap(unknown_rank_, other->unknown_rank_);
|
||||
_internal_metadata_.Swap(&other->_internal_metadata_);
|
||||
swap(_cached_size_, other->_cached_size_);
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata TensorShapeProto::GetMetadata() const {
|
||||
protobuf_tensor_5fshape_2eproto::protobuf_AssignDescriptorsOnce();
|
||||
return ::protobuf_tensor_5fshape_2eproto::file_level_metadata[kIndexInFileMessages];
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
} // namespace opencv_tensorflow
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
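The generated accessors exercised above (add_dim, set_size, set_name, set_unknown_rank, dim_size, ByteSizeLong) can be called directly once these vendored sources are compiled. The snippet below is an illustrative sketch only, not part of the vendored diff: the dimension size and name are made-up values, and it assumes the generated header is on the include path and libprotobuf is linked.

// Illustrative usage sketch (not part of the vendored file).
#include <iostream>
#include "tensor_shape.pb.h"

int main() {
  opencv_tensorflow::TensorShapeProto shape;
  // add_dim() appends a new Dim message to the repeated "dim" field (field 2 above).
  opencv_tensorflow::TensorShapeProto_Dim* dim = shape.add_dim();
  dim->set_size(224);       // illustrative value
  dim->set_name("height");  // illustrative name
  shape.set_unknown_rank(false);
  std::cout << shape.dim_size() << " dim(s), "
            << shape.ByteSizeLong() << " serialized bytes\n";
  return 0;
}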
|
491
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/tensor_shape.pb.h
vendored
Normal file
@ -0,0 +1,491 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: tensor_shape.proto
|
||||
|
||||
#ifndef PROTOBUF_tensor_5fshape_2eproto__INCLUDED
|
||||
#define PROTOBUF_tensor_5fshape_2eproto__INCLUDED
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
|
||||
#if GOOGLE_PROTOBUF_VERSION < 3005000
|
||||
#error This file was generated by a newer version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please update
|
||||
#error your headers.
|
||||
#endif
|
||||
#if 3005001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
|
||||
#error This file was generated by an older version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please
|
||||
#error regenerate this file with a newer version of protoc.
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/arena.h>
|
||||
#include <google/protobuf/arenastring.h>
|
||||
#include <google/protobuf/generated_message_table_driven.h>
|
||||
#include <google/protobuf/generated_message_util.h>
|
||||
#include <google/protobuf/metadata.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
|
||||
#include <google/protobuf/extension_set.h> // IWYU pragma: export
|
||||
#include <google/protobuf/unknown_field_set.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace protobuf_tensor_5fshape_2eproto {
|
||||
// Internal implementation detail -- do not use these members.
|
||||
struct TableStruct {
|
||||
static const ::google::protobuf::internal::ParseTableField entries[];
|
||||
static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
|
||||
static const ::google::protobuf::internal::ParseTable schema[2];
|
||||
static const ::google::protobuf::internal::FieldMetadata field_metadata[];
|
||||
static const ::google::protobuf::internal::SerializationTable serialization_table[];
|
||||
static const ::google::protobuf::uint32 offsets[];
|
||||
};
|
||||
void AddDescriptors();
|
||||
void InitDefaultsTensorShapeProto_DimImpl();
|
||||
void InitDefaultsTensorShapeProto_Dim();
|
||||
void InitDefaultsTensorShapeProtoImpl();
|
||||
void InitDefaultsTensorShapeProto();
|
||||
inline void InitDefaults() {
|
||||
InitDefaultsTensorShapeProto_Dim();
|
||||
InitDefaultsTensorShapeProto();
|
||||
}
|
||||
} // namespace protobuf_tensor_5fshape_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
class TensorShapeProto;
|
||||
class TensorShapeProtoDefaultTypeInternal;
|
||||
extern TensorShapeProtoDefaultTypeInternal _TensorShapeProto_default_instance_;
|
||||
class TensorShapeProto_Dim;
|
||||
class TensorShapeProto_DimDefaultTypeInternal;
|
||||
extern TensorShapeProto_DimDefaultTypeInternal _TensorShapeProto_Dim_default_instance_;
|
||||
} // namespace opencv_tensorflow
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class TensorShapeProto_Dim : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.TensorShapeProto.Dim) */ {
|
||||
public:
|
||||
TensorShapeProto_Dim();
|
||||
virtual ~TensorShapeProto_Dim();
|
||||
|
||||
TensorShapeProto_Dim(const TensorShapeProto_Dim& from);
|
||||
|
||||
inline TensorShapeProto_Dim& operator=(const TensorShapeProto_Dim& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
#if LANG_CXX11
|
||||
TensorShapeProto_Dim(TensorShapeProto_Dim&& from) noexcept
|
||||
: TensorShapeProto_Dim() {
|
||||
*this = ::std::move(from);
|
||||
}
|
||||
|
||||
inline TensorShapeProto_Dim& operator=(TensorShapeProto_Dim&& from) noexcept {
|
||||
if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
|
||||
if (this != &from) InternalSwap(&from);
|
||||
} else {
|
||||
CopyFrom(from);
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
|
||||
return GetArenaNoVirtual();
|
||||
}
|
||||
inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
|
||||
return MaybeArenaPtr();
|
||||
}
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const TensorShapeProto_Dim& default_instance();
|
||||
|
||||
static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
|
||||
static inline const TensorShapeProto_Dim* internal_default_instance() {
|
||||
return reinterpret_cast<const TensorShapeProto_Dim*>(
|
||||
&_TensorShapeProto_Dim_default_instance_);
|
||||
}
|
||||
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
|
||||
0;
|
||||
|
||||
void UnsafeArenaSwap(TensorShapeProto_Dim* other);
|
||||
void Swap(TensorShapeProto_Dim* other);
|
||||
friend void swap(TensorShapeProto_Dim& a, TensorShapeProto_Dim& b) {
|
||||
a.Swap(&b);
|
||||
}
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
inline TensorShapeProto_Dim* New() const PROTOBUF_FINAL { return New(NULL); }
|
||||
|
||||
TensorShapeProto_Dim* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
|
||||
void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void CopyFrom(const TensorShapeProto_Dim& from);
|
||||
void MergeFrom(const TensorShapeProto_Dim& from);
|
||||
void Clear() PROTOBUF_FINAL;
|
||||
bool IsInitialized() const PROTOBUF_FINAL;
|
||||
|
||||
size_t ByteSizeLong() const PROTOBUF_FINAL;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
|
||||
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
|
||||
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const PROTOBUF_FINAL;
|
||||
void InternalSwap(TensorShapeProto_Dim* other);
|
||||
protected:
|
||||
explicit TensorShapeProto_Dim(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
static void ArenaDtor(void* object);
|
||||
inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
|
||||
return _internal_metadata_.arena();
|
||||
}
|
||||
inline void* MaybeArenaPtr() const {
|
||||
return _internal_metadata_.raw_arena_ptr();
|
||||
}
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// string name = 2;
|
||||
void clear_name();
|
||||
static const int kNameFieldNumber = 2;
|
||||
const ::std::string& name() const;
|
||||
void set_name(const ::std::string& value);
|
||||
#if LANG_CXX11
|
||||
void set_name(::std::string&& value);
|
||||
#endif
|
||||
void set_name(const char* value);
|
||||
void set_name(const char* value, size_t size);
|
||||
::std::string* mutable_name();
|
||||
::std::string* release_name();
|
||||
void set_allocated_name(::std::string* name);
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
::std::string* unsafe_arena_release_name();
|
||||
PROTOBUF_RUNTIME_DEPRECATED("The unsafe_arena_ accessors for"
|
||||
" string fields are deprecated and will be removed in a"
|
||||
" future release.")
|
||||
void unsafe_arena_set_allocated_name(
|
||||
::std::string* name);
|
||||
|
||||
// int64 size = 1;
|
||||
void clear_size();
|
||||
static const int kSizeFieldNumber = 1;
|
||||
::google::protobuf::int64 size() const;
|
||||
void set_size(::google::protobuf::int64 value);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:opencv_tensorflow.TensorShapeProto.Dim)
|
||||
private:
|
||||
|
||||
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
|
||||
template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
|
||||
typedef void InternalArenaConstructable_;
|
||||
typedef void DestructorSkippable_;
|
||||
::google::protobuf::internal::ArenaStringPtr name_;
|
||||
::google::protobuf::int64 size_;
|
||||
mutable int _cached_size_;
|
||||
friend struct ::protobuf_tensor_5fshape_2eproto::TableStruct;
|
||||
friend void ::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProto_DimImpl();
|
||||
};
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class TensorShapeProto : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.TensorShapeProto) */ {
|
||||
public:
|
||||
TensorShapeProto();
|
||||
virtual ~TensorShapeProto();
|
||||
|
||||
TensorShapeProto(const TensorShapeProto& from);
|
||||
|
||||
inline TensorShapeProto& operator=(const TensorShapeProto& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
#if LANG_CXX11
|
||||
TensorShapeProto(TensorShapeProto&& from) noexcept
|
||||
: TensorShapeProto() {
|
||||
*this = ::std::move(from);
|
||||
}
|
||||
|
||||
inline TensorShapeProto& operator=(TensorShapeProto&& from) noexcept {
|
||||
if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
|
||||
if (this != &from) InternalSwap(&from);
|
||||
} else {
|
||||
CopyFrom(from);
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
|
||||
return GetArenaNoVirtual();
|
||||
}
|
||||
inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
|
||||
return MaybeArenaPtr();
|
||||
}
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const TensorShapeProto& default_instance();
|
||||
|
||||
static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
|
||||
static inline const TensorShapeProto* internal_default_instance() {
|
||||
return reinterpret_cast<const TensorShapeProto*>(
|
||||
&_TensorShapeProto_default_instance_);
|
||||
}
|
||||
static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
|
||||
1;
|
||||
|
||||
void UnsafeArenaSwap(TensorShapeProto* other);
|
||||
void Swap(TensorShapeProto* other);
|
||||
friend void swap(TensorShapeProto& a, TensorShapeProto& b) {
|
||||
a.Swap(&b);
|
||||
}
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
inline TensorShapeProto* New() const PROTOBUF_FINAL { return New(NULL); }
|
||||
|
||||
TensorShapeProto* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
|
||||
void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
|
||||
void CopyFrom(const TensorShapeProto& from);
|
||||
void MergeFrom(const TensorShapeProto& from);
|
||||
void Clear() PROTOBUF_FINAL;
|
||||
bool IsInitialized() const PROTOBUF_FINAL;
|
||||
|
||||
size_t ByteSizeLong() const PROTOBUF_FINAL;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
|
||||
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
|
||||
bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
|
||||
int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const PROTOBUF_FINAL;
|
||||
void InternalSwap(TensorShapeProto* other);
|
||||
protected:
|
||||
explicit TensorShapeProto(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
static void ArenaDtor(void* object);
|
||||
inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
|
||||
private:
|
||||
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
|
||||
return _internal_metadata_.arena();
|
||||
}
|
||||
inline void* MaybeArenaPtr() const {
|
||||
return _internal_metadata_.raw_arena_ptr();
|
||||
}
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
typedef TensorShapeProto_Dim Dim;
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
int dim_size() const;
|
||||
void clear_dim();
|
||||
static const int kDimFieldNumber = 2;
|
||||
const ::opencv_tensorflow::TensorShapeProto_Dim& dim(int index) const;
|
||||
::opencv_tensorflow::TensorShapeProto_Dim* mutable_dim(int index);
|
||||
::opencv_tensorflow::TensorShapeProto_Dim* add_dim();
|
||||
::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::TensorShapeProto_Dim >*
|
||||
mutable_dim();
|
||||
const ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::TensorShapeProto_Dim >&
|
||||
dim() const;
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
void clear_unknown_rank();
|
||||
static const int kUnknownRankFieldNumber = 3;
|
||||
bool unknown_rank() const;
|
||||
void set_unknown_rank(bool value);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:opencv_tensorflow.TensorShapeProto)
|
||||
private:
|
||||
|
||||
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
|
||||
template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
|
||||
typedef void InternalArenaConstructable_;
|
||||
typedef void DestructorSkippable_;
|
||||
::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::TensorShapeProto_Dim > dim_;
|
||||
bool unknown_rank_;
|
||||
mutable int _cached_size_;
|
||||
friend struct ::protobuf_tensor_5fshape_2eproto::TableStruct;
|
||||
friend void ::protobuf_tensor_5fshape_2eproto::InitDefaultsTensorShapeProtoImpl();
|
||||
};
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
|
||||
#endif // __GNUC__
|
||||
// TensorShapeProto_Dim
|
||||
|
||||
// int64 size = 1;
|
||||
inline void TensorShapeProto_Dim::clear_size() {
|
||||
size_ = GOOGLE_LONGLONG(0);
|
||||
}
|
||||
inline ::google::protobuf::int64 TensorShapeProto_Dim::size() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorShapeProto.Dim.size)
|
||||
return size_;
|
||||
}
|
||||
inline void TensorShapeProto_Dim::set_size(::google::protobuf::int64 value) {
|
||||
|
||||
size_ = value;
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorShapeProto.Dim.size)
|
||||
}
|
||||
|
||||
// string name = 2;
|
||||
inline void TensorShapeProto_Dim::clear_name() {
|
||||
name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline const ::std::string& TensorShapeProto_Dim::name() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
return name_.Get();
|
||||
}
|
||||
inline void TensorShapeProto_Dim::set_name(const ::std::string& value) {
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
#if LANG_CXX11
|
||||
inline void TensorShapeProto_Dim::set_name(::std::string&& value) {
|
||||
|
||||
name_.Set(
|
||||
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_rvalue:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
#endif
|
||||
inline void TensorShapeProto_Dim::set_name(const char* value) {
|
||||
GOOGLE_DCHECK(value != NULL);
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_char:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
inline void TensorShapeProto_Dim::set_name(const char* value,
|
||||
size_t size) {
|
||||
|
||||
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
|
||||
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_pointer:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
inline ::std::string* TensorShapeProto_Dim::mutable_name() {
|
||||
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
return name_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline ::std::string* TensorShapeProto_Dim::release_name() {
|
||||
// @@protoc_insertion_point(field_release:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
|
||||
return name_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
|
||||
}
|
||||
inline void TensorShapeProto_Dim::set_allocated_name(::std::string* name) {
|
||||
if (name != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
name_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name,
|
||||
GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_set_allocated:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
inline ::std::string* TensorShapeProto_Dim::unsafe_arena_release_name() {
|
||||
// @@protoc_insertion_point(field_unsafe_arena_release:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
|
||||
return name_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
GetArenaNoVirtual());
|
||||
}
|
||||
inline void TensorShapeProto_Dim::unsafe_arena_set_allocated_name(
|
||||
::std::string* name) {
|
||||
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
|
||||
if (name != NULL) {
|
||||
|
||||
} else {
|
||||
|
||||
}
|
||||
name_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
|
||||
name, GetArenaNoVirtual());
|
||||
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:opencv_tensorflow.TensorShapeProto.Dim.name)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TensorShapeProto
|
||||
|
||||
// repeated .opencv_tensorflow.TensorShapeProto.Dim dim = 2;
|
||||
inline int TensorShapeProto::dim_size() const {
|
||||
return dim_.size();
|
||||
}
|
||||
inline void TensorShapeProto::clear_dim() {
|
||||
dim_.Clear();
|
||||
}
|
||||
inline const ::opencv_tensorflow::TensorShapeProto_Dim& TensorShapeProto::dim(int index) const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorShapeProto.dim)
|
||||
return dim_.Get(index);
|
||||
}
|
||||
inline ::opencv_tensorflow::TensorShapeProto_Dim* TensorShapeProto::mutable_dim(int index) {
|
||||
// @@protoc_insertion_point(field_mutable:opencv_tensorflow.TensorShapeProto.dim)
|
||||
return dim_.Mutable(index);
|
||||
}
|
||||
inline ::opencv_tensorflow::TensorShapeProto_Dim* TensorShapeProto::add_dim() {
|
||||
// @@protoc_insertion_point(field_add:opencv_tensorflow.TensorShapeProto.dim)
|
||||
return dim_.Add();
|
||||
}
|
||||
inline ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::TensorShapeProto_Dim >*
|
||||
TensorShapeProto::mutable_dim() {
|
||||
// @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.TensorShapeProto.dim)
|
||||
return &dim_;
|
||||
}
|
||||
inline const ::google::protobuf::RepeatedPtrField< ::opencv_tensorflow::TensorShapeProto_Dim >&
|
||||
TensorShapeProto::dim() const {
|
||||
// @@protoc_insertion_point(field_list:opencv_tensorflow.TensorShapeProto.dim)
|
||||
return dim_;
|
||||
}
|
||||
|
||||
// bool unknown_rank = 3;
|
||||
inline void TensorShapeProto::clear_unknown_rank() {
|
||||
unknown_rank_ = false;
|
||||
}
|
||||
inline bool TensorShapeProto::unknown_rank() const {
|
||||
// @@protoc_insertion_point(field_get:opencv_tensorflow.TensorShapeProto.unknown_rank)
|
||||
return unknown_rank_;
|
||||
}
|
||||
inline void TensorShapeProto::set_unknown_rank(bool value) {
|
||||
|
||||
unknown_rank_ = value;
|
||||
// @@protoc_insertion_point(field_set:opencv_tensorflow.TensorShapeProto.unknown_rank)
|
||||
}
|
||||
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic pop
|
||||
#endif // __GNUC__
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace opencv_tensorflow
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
|
||||
#endif // PROTOBUF_tensor_5fshape_2eproto__INCLUDED
|
144
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/types.pb.cc
vendored
Normal file
@ -0,0 +1,144 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: types.proto
|
||||
|
||||
#include "types.pb.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/port.h>
|
||||
#include <google/protobuf/stubs/once.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/wire_format_lite_inl.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/generated_message_reflection.h>
|
||||
#include <google/protobuf/reflection_ops.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
// This is a temporary google only hack
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
#include "third_party/protobuf/version.h"
|
||||
#endif
|
||||
// @@protoc_insertion_point(includes)
|
||||
namespace opencv_tensorflow {
|
||||
} // namespace opencv_tensorflow
|
||||
namespace protobuf_types_2eproto {
|
||||
const ::google::protobuf::EnumDescriptor* file_level_enum_descriptors[1];
|
||||
const ::google::protobuf::uint32 TableStruct::offsets[1] = {};
|
||||
static const ::google::protobuf::internal::MigrationSchema* schemas = NULL;
|
||||
static const ::google::protobuf::Message* const* file_default_instances = NULL;
|
||||
|
||||
void protobuf_AssignDescriptors() {
|
||||
AddDescriptors();
|
||||
::google::protobuf::MessageFactory* factory = NULL;
|
||||
AssignDescriptors(
|
||||
"types.proto", schemas, file_default_instances, TableStruct::offsets, factory,
|
||||
NULL, file_level_enum_descriptors, NULL);
|
||||
}
|
||||
|
||||
void protobuf_AssignDescriptorsOnce() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors);
|
||||
}
|
||||
|
||||
void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD;
|
||||
void protobuf_RegisterTypes(const ::std::string&) {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
}
|
||||
|
||||
void AddDescriptorsImpl() {
|
||||
InitDefaults();
|
||||
static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
"\n\013types.proto\022\021opencv_tensorflow*\234\005\n\010Dat"
|
||||
"aType\022\016\n\nDT_INVALID\020\000\022\014\n\010DT_FLOAT\020\001\022\r\n\tD"
|
||||
"T_DOUBLE\020\002\022\014\n\010DT_INT32\020\003\022\014\n\010DT_UINT8\020\004\022\014"
|
||||
"\n\010DT_INT16\020\005\022\013\n\007DT_INT8\020\006\022\r\n\tDT_STRING\020\007"
|
||||
"\022\020\n\014DT_COMPLEX64\020\010\022\014\n\010DT_INT64\020\t\022\013\n\007DT_B"
|
||||
"OOL\020\n\022\014\n\010DT_QINT8\020\013\022\r\n\tDT_QUINT8\020\014\022\r\n\tDT"
|
||||
"_QINT32\020\r\022\017\n\013DT_BFLOAT16\020\016\022\r\n\tDT_QINT16\020"
|
||||
"\017\022\016\n\nDT_QUINT16\020\020\022\r\n\tDT_UINT16\020\021\022\021\n\rDT_C"
|
||||
"OMPLEX128\020\022\022\013\n\007DT_HALF\020\023\022\020\n\014DT_FLOAT_REF"
|
||||
"\020e\022\021\n\rDT_DOUBLE_REF\020f\022\020\n\014DT_INT32_REF\020g\022"
|
||||
"\020\n\014DT_UINT8_REF\020h\022\020\n\014DT_INT16_REF\020i\022\017\n\013D"
|
||||
"T_INT8_REF\020j\022\021\n\rDT_STRING_REF\020k\022\024\n\020DT_CO"
|
||||
"MPLEX64_REF\020l\022\020\n\014DT_INT64_REF\020m\022\017\n\013DT_BO"
|
||||
"OL_REF\020n\022\020\n\014DT_QINT8_REF\020o\022\021\n\rDT_QUINT8_"
|
||||
"REF\020p\022\021\n\rDT_QINT32_REF\020q\022\023\n\017DT_BFLOAT16_"
|
||||
"REF\020r\022\021\n\rDT_QINT16_REF\020s\022\022\n\016DT_QUINT16_R"
|
||||
"EF\020t\022\021\n\rDT_UINT16_REF\020u\022\025\n\021DT_COMPLEX128"
|
||||
"_REF\020v\022\017\n\013DT_HALF_REF\020wB,\n\030org.tensorflo"
|
||||
"w.frameworkB\013TypesProtosP\001\370\001\001b\006proto3"
|
||||
};
|
||||
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
|
||||
descriptor, 757);
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
|
||||
"types.proto", &protobuf_RegisterTypes);
|
||||
}
|
||||
|
||||
void AddDescriptors() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
|
||||
}
|
||||
// Force AddDescriptors() to be called at dynamic initialization time.
|
||||
struct StaticDescriptorInitializer {
|
||||
StaticDescriptorInitializer() {
|
||||
AddDescriptors();
|
||||
}
|
||||
} static_descriptor_initializer;
|
||||
} // namespace protobuf_types_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
const ::google::protobuf::EnumDescriptor* DataType_descriptor() {
|
||||
protobuf_types_2eproto::protobuf_AssignDescriptorsOnce();
|
||||
return protobuf_types_2eproto::file_level_enum_descriptors[0];
|
||||
}
|
||||
bool DataType_IsValid(int value) {
|
||||
switch (value) {
|
||||
case 0:
|
||||
case 1:
|
||||
case 2:
|
||||
case 3:
|
||||
case 4:
|
||||
case 5:
|
||||
case 6:
|
||||
case 7:
|
||||
case 8:
|
||||
case 9:
|
||||
case 10:
|
||||
case 11:
|
||||
case 12:
|
||||
case 13:
|
||||
case 14:
|
||||
case 15:
|
||||
case 16:
|
||||
case 17:
|
||||
case 18:
|
||||
case 19:
|
||||
case 101:
|
||||
case 102:
|
||||
case 103:
|
||||
case 104:
|
||||
case 105:
|
||||
case 106:
|
||||
case 107:
|
||||
case 108:
|
||||
case 109:
|
||||
case 110:
|
||||
case 111:
|
||||
case 112:
|
||||
case 113:
|
||||
case 114:
|
||||
case 115:
|
||||
case 116:
|
||||
case 117:
|
||||
case 118:
|
||||
case 119:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
} // namespace opencv_tensorflow
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
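A minimal usage sketch for the enum helpers follows; it is illustrative only (the raw value 1 is a made-up input) and assumes the generated header is on the include path and libprotobuf is linked. DataType_IsValid is defined above in types.pb.cc; DataType_Name is declared in the companion types.pb.h, the next file in this diff.

// Illustrative usage sketch (not part of the vendored file).
#include <iostream>
#include "types.pb.h"

int main() {
  const int raw = 1;  // hypothetical wire value; 1 corresponds to DT_FLOAT
  if (opencv_tensorflow::DataType_IsValid(raw)) {
    std::cout << opencv_tensorflow::DataType_Name(
                     static_cast<opencv_tensorflow::DataType>(raw))
              << "\n";  // prints "DT_FLOAT"
  }
  return 0;
}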
|
143
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/types.pb.h
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: types.proto
|
||||
|
||||
#ifndef PROTOBUF_types_2eproto__INCLUDED
|
||||
#define PROTOBUF_types_2eproto__INCLUDED
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
|
||||
#if GOOGLE_PROTOBUF_VERSION < 3005000
|
||||
#error This file was generated by a newer version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please update
|
||||
#error your headers.
|
||||
#endif
|
||||
#if 3005001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
|
||||
#error This file was generated by an older version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please
|
||||
#error regenerate this file with a newer version of protoc.
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/arena.h>
|
||||
#include <google/protobuf/arenastring.h>
|
||||
#include <google/protobuf/generated_message_table_driven.h>
|
||||
#include <google/protobuf/generated_message_util.h>
|
||||
#include <google/protobuf/metadata.h>
|
||||
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
|
||||
#include <google/protobuf/extension_set.h> // IWYU pragma: export
|
||||
#include <google/protobuf/generated_enum_reflection.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace protobuf_types_2eproto {
|
||||
// Internal implementation detail -- do not use these members.
|
||||
struct TableStruct {
|
||||
static const ::google::protobuf::internal::ParseTableField entries[];
|
||||
static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
|
||||
static const ::google::protobuf::internal::ParseTable schema[1];
|
||||
static const ::google::protobuf::internal::FieldMetadata field_metadata[];
|
||||
static const ::google::protobuf::internal::SerializationTable serialization_table[];
|
||||
static const ::google::protobuf::uint32 offsets[];
|
||||
};
|
||||
void AddDescriptors();
|
||||
inline void InitDefaults() {
|
||||
}
|
||||
} // namespace protobuf_types_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
} // namespace opencv_tensorflow
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
enum DataType {
|
||||
DT_INVALID = 0,
|
||||
DT_FLOAT = 1,
|
||||
DT_DOUBLE = 2,
|
||||
DT_INT32 = 3,
|
||||
DT_UINT8 = 4,
|
||||
DT_INT16 = 5,
|
||||
DT_INT8 = 6,
|
||||
DT_STRING = 7,
|
||||
DT_COMPLEX64 = 8,
|
||||
DT_INT64 = 9,
|
||||
DT_BOOL = 10,
|
||||
DT_QINT8 = 11,
|
||||
DT_QUINT8 = 12,
|
||||
DT_QINT32 = 13,
|
||||
DT_BFLOAT16 = 14,
|
||||
DT_QINT16 = 15,
|
||||
DT_QUINT16 = 16,
|
||||
DT_UINT16 = 17,
|
||||
DT_COMPLEX128 = 18,
|
||||
DT_HALF = 19,
|
||||
DT_FLOAT_REF = 101,
|
||||
DT_DOUBLE_REF = 102,
|
||||
DT_INT32_REF = 103,
|
||||
DT_UINT8_REF = 104,
|
||||
DT_INT16_REF = 105,
|
||||
DT_INT8_REF = 106,
|
||||
DT_STRING_REF = 107,
|
||||
DT_COMPLEX64_REF = 108,
|
||||
DT_INT64_REF = 109,
|
||||
DT_BOOL_REF = 110,
|
||||
DT_QINT8_REF = 111,
|
||||
DT_QUINT8_REF = 112,
|
||||
DT_QINT32_REF = 113,
|
||||
DT_BFLOAT16_REF = 114,
|
||||
DT_QINT16_REF = 115,
|
||||
DT_QUINT16_REF = 116,
|
||||
DT_UINT16_REF = 117,
|
||||
DT_COMPLEX128_REF = 118,
|
||||
DT_HALF_REF = 119,
|
||||
DataType_INT_MIN_SENTINEL_DO_NOT_USE_ = ::google::protobuf::kint32min,
|
||||
DataType_INT_MAX_SENTINEL_DO_NOT_USE_ = ::google::protobuf::kint32max
|
||||
};
|
||||
bool DataType_IsValid(int value);
|
||||
const DataType DataType_MIN = DT_INVALID;
|
||||
const DataType DataType_MAX = DT_HALF_REF;
|
||||
const int DataType_ARRAYSIZE = DataType_MAX + 1;
|
||||
|
||||
const ::google::protobuf::EnumDescriptor* DataType_descriptor();
|
||||
inline const ::std::string& DataType_Name(DataType value) {
|
||||
return ::google::protobuf::internal::NameOfEnum(
|
||||
DataType_descriptor(), value);
|
||||
}
|
||||
inline bool DataType_Parse(
|
||||
const ::std::string& name, DataType* value) {
|
||||
return ::google::protobuf::internal::ParseNamedEnum<DataType>(
|
||||
DataType_descriptor(), name, value);
|
||||
}
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
|
||||
#endif // __GNUC__
|
||||
#ifdef __GNUC__
|
||||
#pragma GCC diagnostic pop
|
||||
#endif // __GNUC__
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace opencv_tensorflow
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
template <> struct is_proto_enum< ::opencv_tensorflow::DataType> : ::google::protobuf::internal::true_type {};
|
||||
template <>
|
||||
inline const EnumDescriptor* GetEnumDescriptor< ::opencv_tensorflow::DataType>() {
|
||||
return ::opencv_tensorflow::DataType_descriptor();
|
||||
}
|
||||
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
|
||||
#endif // PROTOBUF_types_2eproto__INCLUDED
|
492
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/versions.pb.cc
vendored
Normal file
@ -0,0 +1,492 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: versions.proto
|
||||
|
||||
#include "versions.pb.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/port.h>
|
||||
#include <google/protobuf/stubs/once.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/wire_format_lite_inl.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/generated_message_reflection.h>
|
||||
#include <google/protobuf/reflection_ops.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
// This is a temporary google only hack
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
#include "third_party/protobuf/version.h"
|
||||
#endif
|
||||
// @@protoc_insertion_point(includes)
|
||||
namespace opencv_tensorflow {
|
||||
class VersionDefDefaultTypeInternal {
|
||||
public:
|
||||
::google::protobuf::internal::ExplicitlyConstructed<VersionDef>
|
||||
_instance;
|
||||
} _VersionDef_default_instance_;
|
||||
} // namespace opencv_tensorflow
|
||||
namespace protobuf_versions_2eproto {
|
||||
void InitDefaultsVersionDefImpl() {
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
#ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
::google::protobuf::internal::InitProtobufDefaultsForceUnique();
|
||||
#else
|
||||
::google::protobuf::internal::InitProtobufDefaults();
|
||||
#endif // GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS
|
||||
{
|
||||
void* ptr = &::opencv_tensorflow::_VersionDef_default_instance_;
|
||||
new (ptr) ::opencv_tensorflow::VersionDef();
|
||||
::google::protobuf::internal::OnShutdownDestroyMessage(ptr);
|
||||
}
|
||||
::opencv_tensorflow::VersionDef::InitAsDefaultInstance();
|
||||
}
|
||||
|
||||
void InitDefaultsVersionDef() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &InitDefaultsVersionDefImpl);
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata file_level_metadata[1];
|
||||
|
||||
const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
~0u, // no _has_bits_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::VersionDef, _internal_metadata_),
|
||||
~0u, // no _extensions_
|
||||
~0u, // no _oneof_case_
|
||||
~0u, // no _weak_field_map_
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::VersionDef, producer_),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::VersionDef, min_consumer_),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::opencv_tensorflow::VersionDef, bad_consumers_),
|
||||
};
|
||||
static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
{ 0, -1, sizeof(::opencv_tensorflow::VersionDef)},
|
||||
};
|
||||
|
||||
static ::google::protobuf::Message const * const file_default_instances[] = {
|
||||
reinterpret_cast<const ::google::protobuf::Message*>(&::opencv_tensorflow::_VersionDef_default_instance_),
|
||||
};
|
||||
|
||||
void protobuf_AssignDescriptors() {
|
||||
AddDescriptors();
|
||||
::google::protobuf::MessageFactory* factory = NULL;
|
||||
AssignDescriptors(
|
||||
"versions.proto", schemas, file_default_instances, TableStruct::offsets, factory,
|
||||
file_level_metadata, NULL, NULL);
|
||||
}
|
||||
|
||||
void protobuf_AssignDescriptorsOnce() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors);
|
||||
}
|
||||
|
||||
void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD;
|
||||
void protobuf_RegisterTypes(const ::std::string&) {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 1);
|
||||
}
|
||||
|
||||
void AddDescriptorsImpl() {
|
||||
InitDefaults();
|
||||
static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = {
|
||||
"\n\016versions.proto\022\021opencv_tensorflow\"K\n\nV"
|
||||
"ersionDef\022\020\n\010producer\030\001 \001(\005\022\024\n\014min_consu"
|
||||
"mer\030\002 \001(\005\022\025\n\rbad_consumers\030\003 \003(\005B/\n\030org."
|
||||
"tensorflow.frameworkB\016VersionsProtosP\001\370\001"
|
||||
"\001b\006proto3"
|
||||
};
|
||||
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
|
||||
descriptor, 169);
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
|
||||
"versions.proto", &protobuf_RegisterTypes);
|
||||
}
|
||||
|
||||
void AddDescriptors() {
|
||||
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
|
||||
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
|
||||
}
|
||||
// Force AddDescriptors() to be called at dynamic initialization time.
|
||||
struct StaticDescriptorInitializer {
|
||||
StaticDescriptorInitializer() {
|
||||
AddDescriptors();
|
||||
}
|
||||
} static_descriptor_initializer;
|
||||
} // namespace protobuf_versions_2eproto
|
||||
namespace opencv_tensorflow {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
void VersionDef::InitAsDefaultInstance() {
|
||||
}
|
||||
#if !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
const int VersionDef::kProducerFieldNumber;
|
||||
const int VersionDef::kMinConsumerFieldNumber;
|
||||
const int VersionDef::kBadConsumersFieldNumber;
|
||||
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
|
||||
|
||||
VersionDef::VersionDef()
|
||||
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
|
||||
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
|
||||
::protobuf_versions_2eproto::InitDefaultsVersionDef();
|
||||
}
|
||||
SharedCtor();
|
||||
// @@protoc_insertion_point(constructor:opencv_tensorflow.VersionDef)
|
||||
}
|
||||
VersionDef::VersionDef(::google::protobuf::Arena* arena)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(arena),
|
||||
bad_consumers_(arena) {
|
||||
::protobuf_versions_2eproto::InitDefaultsVersionDef();
|
||||
SharedCtor();
|
||||
RegisterArenaDtor(arena);
|
||||
// @@protoc_insertion_point(arena_constructor:opencv_tensorflow.VersionDef)
|
||||
}
|
||||
VersionDef::VersionDef(const VersionDef& from)
|
||||
: ::google::protobuf::Message(),
|
||||
_internal_metadata_(NULL),
|
||||
bad_consumers_(from.bad_consumers_),
|
||||
_cached_size_(0) {
|
||||
_internal_metadata_.MergeFrom(from._internal_metadata_);
|
||||
::memcpy(&producer_, &from.producer_,
|
||||
static_cast<size_t>(reinterpret_cast<char*>(&min_consumer_) -
|
||||
reinterpret_cast<char*>(&producer_)) + sizeof(min_consumer_));
|
||||
// @@protoc_insertion_point(copy_constructor:opencv_tensorflow.VersionDef)
|
||||
}
|
||||
|
||||
void VersionDef::SharedCtor() {
  ::memset(&producer_, 0, static_cast<size_t>(
      reinterpret_cast<char*>(&min_consumer_) -
      reinterpret_cast<char*>(&producer_)) + sizeof(min_consumer_));
  _cached_size_ = 0;
}

VersionDef::~VersionDef() {
  // @@protoc_insertion_point(destructor:opencv_tensorflow.VersionDef)
  SharedDtor();
}

void VersionDef::SharedDtor() {
  GOOGLE_DCHECK(GetArenaNoVirtual() == NULL);
}

void VersionDef::ArenaDtor(void* object) {
  VersionDef* _this = reinterpret_cast< VersionDef* >(object);
  (void)_this;
}
void VersionDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void VersionDef::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* VersionDef::descriptor() {
  ::protobuf_versions_2eproto::protobuf_AssignDescriptorsOnce();
  return ::protobuf_versions_2eproto::file_level_metadata[kIndexInFileMessages].descriptor;
}

const VersionDef& VersionDef::default_instance() {
  ::protobuf_versions_2eproto::InitDefaultsVersionDef();
  return *internal_default_instance();
}

VersionDef* VersionDef::New(::google::protobuf::Arena* arena) const {
  return ::google::protobuf::Arena::CreateMessage<VersionDef>(arena);
}

void VersionDef::Clear() {
// @@protoc_insertion_point(message_clear_start:opencv_tensorflow.VersionDef)
  ::google::protobuf::uint32 cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  bad_consumers_.Clear();
  ::memset(&producer_, 0, static_cast<size_t>(
      reinterpret_cast<char*>(&min_consumer_) -
      reinterpret_cast<char*>(&producer_)) + sizeof(min_consumer_));
  _internal_metadata_.Clear();
}

bool VersionDef::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:opencv_tensorflow.VersionDef)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // int32 producer = 1;
      case 1: {
        if (static_cast< ::google::protobuf::uint8>(tag) ==
            static_cast< ::google::protobuf::uint8>(8u /* 8 & 0xFF */)) {

          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                 input, &producer_)));
        } else {
          goto handle_unusual;
        }
        break;
      }

      // int32 min_consumer = 2;
      case 2: {
        if (static_cast< ::google::protobuf::uint8>(tag) ==
            static_cast< ::google::protobuf::uint8>(16u /* 16 & 0xFF */)) {

          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                 input, &min_consumer_)));
        } else {
          goto handle_unusual;
        }
        break;
      }

      // repeated int32 bad_consumers = 3;
      case 3: {
        if (static_cast< ::google::protobuf::uint8>(tag) ==
            static_cast< ::google::protobuf::uint8>(26u /* 26 & 0xFF */)) {
          DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive<
                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                 input, this->mutable_bad_consumers())));
        } else if (
            static_cast< ::google::protobuf::uint8>(tag) ==
            static_cast< ::google::protobuf::uint8>(24u /* 24 & 0xFF */)) {
          DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline<
                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                 1, 26u, input, this->mutable_bad_consumers())));
        } else {
          goto handle_unusual;
        }
        break;
      }

      default: {
      handle_unusual:
        if (tag == 0) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, _internal_metadata_.mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:opencv_tensorflow.VersionDef)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:opencv_tensorflow.VersionDef)
  return false;
#undef DO_
}

void VersionDef::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:opencv_tensorflow.VersionDef)
  ::google::protobuf::uint32 cached_has_bits = 0;
  (void) cached_has_bits;

  // int32 producer = 1;
  if (this->producer() != 0) {
    ::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->producer(), output);
  }

  // int32 min_consumer = 2;
  if (this->min_consumer() != 0) {
    ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->min_consumer(), output);
  }

  // repeated int32 bad_consumers = 3;
  if (this->bad_consumers_size() > 0) {
    ::google::protobuf::internal::WireFormatLite::WriteTag(3, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output);
    output->WriteVarint32(static_cast< ::google::protobuf::uint32>(
        _bad_consumers_cached_byte_size_));
  }
  for (int i = 0, n = this->bad_consumers_size(); i < n; i++) {
    ::google::protobuf::internal::WireFormatLite::WriteInt32NoTag(
      this->bad_consumers(i), output);
  }

  if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output);
  }
  // @@protoc_insertion_point(serialize_end:opencv_tensorflow.VersionDef)
}

::google::protobuf::uint8* VersionDef::InternalSerializeWithCachedSizesToArray(
    bool deterministic, ::google::protobuf::uint8* target) const {
  (void)deterministic; // Unused
  // @@protoc_insertion_point(serialize_to_array_start:opencv_tensorflow.VersionDef)
  ::google::protobuf::uint32 cached_has_bits = 0;
  (void) cached_has_bits;

  // int32 producer = 1;
  if (this->producer() != 0) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->producer(), target);
  }

  // int32 min_consumer = 2;
  if (this->min_consumer() != 0) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->min_consumer(), target);
  }

  // repeated int32 bad_consumers = 3;
  if (this->bad_consumers_size() > 0) {
    target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(
      3,
      ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
      target);
    target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray(
        static_cast< ::google::protobuf::int32>(
            _bad_consumers_cached_byte_size_), target);
    target = ::google::protobuf::internal::WireFormatLite::
      WriteInt32NoTagToArray(this->bad_consumers_, target);
  }

  if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:opencv_tensorflow.VersionDef)
  return target;
}

size_t VersionDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:opencv_tensorflow.VersionDef)
  size_t total_size = 0;

  if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()));
  }
  // repeated int32 bad_consumers = 3;
  {
    size_t data_size = ::google::protobuf::internal::WireFormatLite::
      Int32Size(this->bad_consumers_);
    if (data_size > 0) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int32Size(
            static_cast< ::google::protobuf::int32>(data_size));
    }
    int cached_size = ::google::protobuf::internal::ToCachedSize(data_size);
    GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
    _bad_consumers_cached_byte_size_ = cached_size;
    GOOGLE_SAFE_CONCURRENT_WRITES_END();
    total_size += data_size;
  }

  // int32 producer = 1;
  if (this->producer() != 0) {
    total_size += 1 +
      ::google::protobuf::internal::WireFormatLite::Int32Size(
        this->producer());
  }

  // int32 min_consumer = 2;
  if (this->min_consumer() != 0) {
    total_size += 1 +
      ::google::protobuf::internal::WireFormatLite::Int32Size(
        this->min_consumer());
  }

  int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = cached_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}

void VersionDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:opencv_tensorflow.VersionDef)
  GOOGLE_DCHECK_NE(&from, this);
  const VersionDef* source =
      ::google::protobuf::internal::DynamicCastToGenerated<const VersionDef>(
          &from);
  if (source == NULL) {
  // @@protoc_insertion_point(generalized_merge_from_cast_fail:opencv_tensorflow.VersionDef)
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
  // @@protoc_insertion_point(generalized_merge_from_cast_success:opencv_tensorflow.VersionDef)
    MergeFrom(*source);
  }
}

void VersionDef::MergeFrom(const VersionDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:opencv_tensorflow.VersionDef)
  GOOGLE_DCHECK_NE(&from, this);
  _internal_metadata_.MergeFrom(from._internal_metadata_);
  ::google::protobuf::uint32 cached_has_bits = 0;
  (void) cached_has_bits;

  bad_consumers_.MergeFrom(from.bad_consumers_);
  if (from.producer() != 0) {
    set_producer(from.producer());
  }
  if (from.min_consumer() != 0) {
    set_min_consumer(from.min_consumer());
  }
}

void VersionDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:opencv_tensorflow.VersionDef)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

void VersionDef::CopyFrom(const VersionDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:opencv_tensorflow.VersionDef)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

bool VersionDef::IsInitialized() const {
  return true;
}

void VersionDef::Swap(VersionDef* other) {
  if (other == this) return;
  if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
    InternalSwap(other);
  } else {
    VersionDef* temp = New(GetArenaNoVirtual());
    temp->MergeFrom(*other);
    other->CopyFrom(*this);
    InternalSwap(temp);
    if (GetArenaNoVirtual() == NULL) {
      delete temp;
    }
  }
}
void VersionDef::UnsafeArenaSwap(VersionDef* other) {
  if (other == this) return;
  GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
  InternalSwap(other);
}
void VersionDef::InternalSwap(VersionDef* other) {
  using std::swap;
  bad_consumers_.InternalSwap(&other->bad_consumers_);
  swap(producer_, other->producer_);
  swap(min_consumer_, other->min_consumer_);
  _internal_metadata_.Swap(&other->_internal_metadata_);
  swap(_cached_size_, other->_cached_size_);
}

::google::protobuf::Metadata VersionDef::GetMetadata() const {
  protobuf_versions_2eproto::protobuf_AssignDescriptorsOnce();
  return ::protobuf_versions_2eproto::file_level_metadata[kIndexInFileMessages];
}


// @@protoc_insertion_point(namespace_scope)
} // namespace opencv_tensorflow

// @@protoc_insertion_point(global_scope)
272
3rdparty/opencv-4.5.4/modules/dnn/misc/tensorflow/versions.pb.h
vendored
Normal file
@ -0,0 +1,272 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: versions.proto

#ifndef PROTOBUF_versions_2eproto__INCLUDED
#define PROTOBUF_versions_2eproto__INCLUDED

#include <string>

#include <google/protobuf/stubs/common.h>

#if GOOGLE_PROTOBUF_VERSION < 3005000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3005001 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
#include <google/protobuf/extension_set.h> // IWYU pragma: export
#include <google/protobuf/unknown_field_set.h>
// @@protoc_insertion_point(includes)

namespace protobuf_versions_2eproto {
// Internal implementation detail -- do not use these members.
struct TableStruct {
  static const ::google::protobuf::internal::ParseTableField entries[];
  static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
  static const ::google::protobuf::internal::ParseTable schema[1];
  static const ::google::protobuf::internal::FieldMetadata field_metadata[];
  static const ::google::protobuf::internal::SerializationTable serialization_table[];
  static const ::google::protobuf::uint32 offsets[];
};
void AddDescriptors();
void InitDefaultsVersionDefImpl();
void InitDefaultsVersionDef();
inline void InitDefaults() {
  InitDefaultsVersionDef();
}
} // namespace protobuf_versions_2eproto
namespace opencv_tensorflow {
class VersionDef;
class VersionDefDefaultTypeInternal;
extern VersionDefDefaultTypeInternal _VersionDef_default_instance_;
} // namespace opencv_tensorflow
namespace opencv_tensorflow {

// ===================================================================

class VersionDef : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:opencv_tensorflow.VersionDef) */ {
 public:
  VersionDef();
  virtual ~VersionDef();

  VersionDef(const VersionDef& from);

  inline VersionDef& operator=(const VersionDef& from) {
    CopyFrom(from);
    return *this;
  }
  #if LANG_CXX11
  VersionDef(VersionDef&& from) noexcept
    : VersionDef() {
    *this = ::std::move(from);
  }

  inline VersionDef& operator=(VersionDef&& from) noexcept {
    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
      if (this != &from) InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }
  #endif
  inline ::google::protobuf::Arena* GetArena() const PROTOBUF_FINAL {
    return GetArenaNoVirtual();
  }
  inline void* GetMaybeArenaPointer() const PROTOBUF_FINAL {
    return MaybeArenaPtr();
  }
  static const ::google::protobuf::Descriptor* descriptor();
  static const VersionDef& default_instance();

  static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY
  static inline const VersionDef* internal_default_instance() {
    return reinterpret_cast<const VersionDef*>(
               &_VersionDef_default_instance_);
  }
  static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
    0;

  void UnsafeArenaSwap(VersionDef* other);
  void Swap(VersionDef* other);
  friend void swap(VersionDef& a, VersionDef& b) {
    a.Swap(&b);
  }

  // implements Message ----------------------------------------------

  inline VersionDef* New() const PROTOBUF_FINAL { return New(NULL); }

  VersionDef* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
  void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
  void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
  void CopyFrom(const VersionDef& from);
  void MergeFrom(const VersionDef& from);
  void Clear() PROTOBUF_FINAL;
  bool IsInitialized() const PROTOBUF_FINAL;

  size_t ByteSizeLong() const PROTOBUF_FINAL;
  bool MergePartialFromCodedStream(
      ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL;
  void SerializeWithCachedSizes(
      ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL;
  ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
      bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL;
  int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; }
  private:
  void SharedCtor();
  void SharedDtor();
  void SetCachedSize(int size) const PROTOBUF_FINAL;
  void InternalSwap(VersionDef* other);
  protected:
  explicit VersionDef(::google::protobuf::Arena* arena);
  private:
  static void ArenaDtor(void* object);
  inline void RegisterArenaDtor(::google::protobuf::Arena* arena);
  private:
  inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
    return _internal_metadata_.arena();
  }
  inline void* MaybeArenaPtr() const {
    return _internal_metadata_.raw_arena_ptr();
  }
  public:

  ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // repeated int32 bad_consumers = 3;
  int bad_consumers_size() const;
  void clear_bad_consumers();
  static const int kBadConsumersFieldNumber = 3;
  ::google::protobuf::int32 bad_consumers(int index) const;
  void set_bad_consumers(int index, ::google::protobuf::int32 value);
  void add_bad_consumers(::google::protobuf::int32 value);
  const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
      bad_consumers() const;
  ::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
      mutable_bad_consumers();

  // int32 producer = 1;
  void clear_producer();
  static const int kProducerFieldNumber = 1;
  ::google::protobuf::int32 producer() const;
  void set_producer(::google::protobuf::int32 value);

  // int32 min_consumer = 2;
  void clear_min_consumer();
  static const int kMinConsumerFieldNumber = 2;
  ::google::protobuf::int32 min_consumer() const;
  void set_min_consumer(::google::protobuf::int32 value);

  // @@protoc_insertion_point(class_scope:opencv_tensorflow.VersionDef)
 private:

  ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
  template <typename T> friend class ::google::protobuf::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  ::google::protobuf::RepeatedField< ::google::protobuf::int32 > bad_consumers_;
  mutable int _bad_consumers_cached_byte_size_;
  ::google::protobuf::int32 producer_;
  ::google::protobuf::int32 min_consumer_;
  mutable int _cached_size_;
  friend struct ::protobuf_versions_2eproto::TableStruct;
  friend void ::protobuf_versions_2eproto::InitDefaultsVersionDefImpl();
};
// ===================================================================


// ===================================================================

#ifdef __GNUC__
  #pragma GCC diagnostic push
  #pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif // __GNUC__
// VersionDef

// int32 producer = 1;
inline void VersionDef::clear_producer() {
  producer_ = 0;
}
inline ::google::protobuf::int32 VersionDef::producer() const {
  // @@protoc_insertion_point(field_get:opencv_tensorflow.VersionDef.producer)
  return producer_;
}
inline void VersionDef::set_producer(::google::protobuf::int32 value) {

  producer_ = value;
  // @@protoc_insertion_point(field_set:opencv_tensorflow.VersionDef.producer)
}

// int32 min_consumer = 2;
inline void VersionDef::clear_min_consumer() {
  min_consumer_ = 0;
}
inline ::google::protobuf::int32 VersionDef::min_consumer() const {
  // @@protoc_insertion_point(field_get:opencv_tensorflow.VersionDef.min_consumer)
  return min_consumer_;
}
inline void VersionDef::set_min_consumer(::google::protobuf::int32 value) {

  min_consumer_ = value;
  // @@protoc_insertion_point(field_set:opencv_tensorflow.VersionDef.min_consumer)
}

// repeated int32 bad_consumers = 3;
inline int VersionDef::bad_consumers_size() const {
  return bad_consumers_.size();
}
inline void VersionDef::clear_bad_consumers() {
  bad_consumers_.Clear();
}
inline ::google::protobuf::int32 VersionDef::bad_consumers(int index) const {
  // @@protoc_insertion_point(field_get:opencv_tensorflow.VersionDef.bad_consumers)
  return bad_consumers_.Get(index);
}
inline void VersionDef::set_bad_consumers(int index, ::google::protobuf::int32 value) {
  bad_consumers_.Set(index, value);
  // @@protoc_insertion_point(field_set:opencv_tensorflow.VersionDef.bad_consumers)
}
inline void VersionDef::add_bad_consumers(::google::protobuf::int32 value) {
  bad_consumers_.Add(value);
  // @@protoc_insertion_point(field_add:opencv_tensorflow.VersionDef.bad_consumers)
}
inline const ::google::protobuf::RepeatedField< ::google::protobuf::int32 >&
VersionDef::bad_consumers() const {
  // @@protoc_insertion_point(field_list:opencv_tensorflow.VersionDef.bad_consumers)
  return bad_consumers_;
}
inline ::google::protobuf::RepeatedField< ::google::protobuf::int32 >*
VersionDef::mutable_bad_consumers() {
  // @@protoc_insertion_point(field_mutable_list:opencv_tensorflow.VersionDef.bad_consumers)
  return &bad_consumers_;
}

#ifdef __GNUC__
  #pragma GCC diagnostic pop
#endif // __GNUC__

// @@protoc_insertion_point(namespace_scope)

} // namespace opencv_tensorflow

// @@protoc_insertion_point(global_scope)

#endif // PROTOBUF_versions_2eproto__INCLUDED