// Source provenance (non-code header captured from the code-hosting page;
// kept as a comment so the file remains valid Rust):
//   the-algorithm/navi/thrift_bpr_adapter/thrift/src/data.rs
//   twitter-team ef4c5eb65e Twitter Recommendation Algorithm
//   Note: a new initial commit was force-pushed to remove publicly-available
//   Twitter user information; this process may be required again in future.
//   2023-03-31 17:36:31 -05:00
//   1214 lines, 47 KiB, Rust
// Autogenerated by Thrift Compiler (0.17.0)
// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#![allow(unused_imports)]
#![allow(unused_extern_crates)]
#![allow(clippy::too_many_arguments, clippy::type_complexity, clippy::vec_box)]
#![cfg_attr(rustfmt, rustfmt_skip)]
use std::cell::RefCell;
use std::collections::{BTreeMap, BTreeSet};
use std::convert::{From, TryFrom};
use std::default::Default;
use std::error::Error;
use std::fmt;
use std::fmt::{Display, Formatter};
use std::rc::Rc;
use thrift::OrderedFloat;
use thrift::{ApplicationError, ApplicationErrorKind, ProtocolError, ProtocolErrorKind, TThriftClient};
use thrift::protocol::{TFieldIdentifier, TListIdentifier, TMapIdentifier, TMessageIdentifier, TMessageType, TInputProtocol, TOutputProtocol, TSerializable, TSetIdentifier, TStructIdentifier, TType};
use thrift::protocol::field_id;
use thrift::protocol::verify_expected_message_type;
use thrift::protocol::verify_expected_sequence_number;
use thrift::protocol::verify_expected_service_call;
use thrift::protocol::verify_required_field_exists;
use thrift::server::TProcessor;
use crate::tensor;
/// Thrift "open enum" `FeatureType`: the kind of payload a feature id maps
/// to inside a `DataRecord`. Modeled as an `i32` newtype rather than a Rust
/// enum so that wire values unknown to this build still round-trip intact.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct FeatureType(pub i32);
impl FeatureType {
    pub const BINARY: FeatureType = FeatureType(1);
    pub const CONTINUOUS: FeatureType = FeatureType(2);
    pub const DISCRETE: FeatureType = FeatureType(3);
    pub const STRING: FeatureType = FeatureType(4);
    pub const SPARSE_BINARY: FeatureType = FeatureType(5);
    pub const SPARSE_CONTINUOUS: FeatureType = FeatureType(6);
    pub const UNKNOWN: FeatureType = FeatureType(7);
    pub const BLOB: FeatureType = FeatureType(8);
    pub const TENSOR: FeatureType = FeatureType(9);
    pub const SPARSE_TENSOR: FeatureType = FeatureType(10);
    pub const FEATURE_TYPE11: FeatureType = FeatureType(11);
    pub const FEATURE_TYPE12: FeatureType = FeatureType(12);
    /// Every named value, listed in ascending wire-value order (1..=12).
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::BINARY, Self::CONTINUOUS, Self::DISCRETE, Self::STRING,
        Self::SPARSE_BINARY, Self::SPARSE_CONTINUOUS, Self::UNKNOWN, Self::BLOB,
        Self::TENSOR, Self::SPARSE_TENSOR, Self::FEATURE_TYPE11, Self::FEATURE_TYPE12,
    ];
}
impl TSerializable for FeatureType {
    /// Serialize as a bare i32 (Thrift enum wire representation).
    #[allow(clippy::trivially_copy_pass_by_ref)]
    fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
        o_prot.write_i32(self.0)
    }
    /// Deserialize from a bare i32; values without a named constant are
    /// preserved as-is by `FeatureType::from` rather than rejected.
    fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<FeatureType> {
        i_prot.read_i32().map(FeatureType::from)
    }
}
impl From<i32> for FeatureType {
    fn from(i: i32) -> Self {
        // Each named constant wraps exactly the integer it is matched
        // against (BINARY == FeatureType(1), ... FEATURE_TYPE12 ==
        // FeatureType(12)), and the fallback arm wraps any other value
        // unchanged — so the per-value match collapses to a plain wrap.
        FeatureType(i)
    }
}
impl From<&i32> for FeatureType {
fn from(i: &i32) -> Self {
FeatureType::from(*i)
}
}
impl From<FeatureType> for i32 {
    fn from(value: FeatureType) -> i32 {
        // Unwrap the newtype to its raw wire value.
        value.0
    }
}
impl From<&FeatureType> for i32 {
    fn from(value: &FeatureType) -> i32 {
        // Borrowed variant of the unwrap above; FeatureType is Copy.
        value.0
    }
}
//
// DataRecord
//
/// Generated Thrift struct: a bag of features keyed by 64-bit feature id.
/// Every field is optional (`None` when absent on the wire). BTree-based
/// collections are used throughout so the struct can derive Eq/Hash/Ord.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct DataRecord {
/// Feature ids whose binary value is "present".
pub binary_features: Option<BTreeSet<i64>>,
/// Feature id -> float value; `OrderedFloat` makes f64 Eq/Ord/Hash-able.
pub continuous_features: Option<BTreeMap<i64, OrderedFloat<f64>>>,
/// Feature id -> integer value.
pub discrete_features: Option<BTreeMap<i64, i64>>,
/// Feature id -> string value.
pub string_features: Option<BTreeMap<i64, String>>,
/// Feature id -> set of string keys that are "present".
pub sparse_binary_features: Option<BTreeMap<i64, BTreeSet<String>>>,
/// Feature id -> (string key -> float value).
pub sparse_continuous_features: Option<BTreeMap<i64, BTreeMap<String, OrderedFloat<f64>>>>,
/// Feature id -> opaque byte blob.
pub blob_features: Option<BTreeMap<i64, Vec<u8>>>,
/// Feature id -> dense tensor (see `crate::tensor::GeneralTensor`).
pub tensors: Option<BTreeMap<i64, tensor::GeneralTensor>>,
/// Feature id -> sparse tensor (see `crate::tensor::SparseTensor`).
pub sparse_tensors: Option<BTreeMap<i64, tensor::SparseTensor>>,
}
impl DataRecord {
    /// Constructor in the generated-Thrift style: each argument accepts
    /// anything convertible into an `Option` of the matching field type,
    /// so callers may pass a bare collection, `Some(..)`, or `None`.
    pub fn new<F1, F2, F3, F4, F5, F6, F7, F8, F9>(
        binary_features: F1,
        continuous_features: F2,
        discrete_features: F3,
        string_features: F4,
        sparse_binary_features: F5,
        sparse_continuous_features: F6,
        blob_features: F7,
        tensors: F8,
        sparse_tensors: F9,
    ) -> DataRecord
    where
        F1: Into<Option<BTreeSet<i64>>>,
        F2: Into<Option<BTreeMap<i64, OrderedFloat<f64>>>>,
        F3: Into<Option<BTreeMap<i64, i64>>>,
        F4: Into<Option<BTreeMap<i64, String>>>,
        F5: Into<Option<BTreeMap<i64, BTreeSet<String>>>>,
        F6: Into<Option<BTreeMap<i64, BTreeMap<String, OrderedFloat<f64>>>>>,
        F7: Into<Option<BTreeMap<i64, Vec<u8>>>>,
        F8: Into<Option<BTreeMap<i64, tensor::GeneralTensor>>>,
        F9: Into<Option<BTreeMap<i64, tensor::SparseTensor>>>,
    {
        DataRecord {
            binary_features: binary_features.into(),
            continuous_features: continuous_features.into(),
            discrete_features: discrete_features.into(),
            string_features: string_features.into(),
            sparse_binary_features: sparse_binary_features.into(),
            sparse_continuous_features: sparse_continuous_features.into(),
            blob_features: blob_features.into(),
            tensors: tensors.into(),
            sparse_tensors: sparse_tensors.into(),
        }
    }
}
impl TSerializable for DataRecord {
// Decode a DataRecord from a Thrift input protocol. Fields are dispatched
// on the numeric Thrift field id (1..=9); ids unknown to this build are
// skipped, so records written with a newer schema stay readable. Every
// field is optional and stays `None` when absent from the wire.
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<DataRecord> {
i_prot.read_struct_begin()?;
// One accumulator per Thrift field id.
let mut f_1: Option<BTreeSet<i64>> = None;
let mut f_2: Option<BTreeMap<i64, OrderedFloat<f64>>> = None;
let mut f_3: Option<BTreeMap<i64, i64>> = None;
let mut f_4: Option<BTreeMap<i64, String>> = None;
let mut f_5: Option<BTreeMap<i64, BTreeSet<String>>> = None;
let mut f_6: Option<BTreeMap<i64, BTreeMap<String, OrderedFloat<f64>>>> = None;
let mut f_7: Option<BTreeMap<i64, Vec<u8>>> = None;
let mut f_8: Option<BTreeMap<i64, tensor::GeneralTensor>> = None;
let mut f_9: Option<BTreeMap<i64, tensor::SparseTensor>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
// TType::Stop terminates the struct's field list.
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
// 1: binaryFeatures — set<i64>
1 => {
let set_ident = i_prot.read_set_begin()?;
let mut val: BTreeSet<i64> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_0 = i_prot.read_i64()?;
val.insert(set_elem_0);
}
i_prot.read_set_end()?;
f_1 = Some(val);
},
// 2: continuousFeatures — map<i64, double>
2 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, OrderedFloat<f64>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_1 = i_prot.read_i64()?;
let map_val_2 = OrderedFloat::from(i_prot.read_double()?);
val.insert(map_key_1, map_val_2);
}
i_prot.read_map_end()?;
f_2 = Some(val);
},
// 3: discreteFeatures — map<i64, i64>
3 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, i64> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_3 = i_prot.read_i64()?;
let map_val_4 = i_prot.read_i64()?;
val.insert(map_key_3, map_val_4);
}
i_prot.read_map_end()?;
f_3 = Some(val);
},
// 4: stringFeatures — map<i64, string>
4 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, String> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_5 = i_prot.read_i64()?;
let map_val_6 = i_prot.read_string()?;
val.insert(map_key_5, map_val_6);
}
i_prot.read_map_end()?;
f_4 = Some(val);
},
// 5: sparseBinaryFeatures — map<i64, set<string>>
5 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeSet<String>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_7 = i_prot.read_i64()?;
let set_ident = i_prot.read_set_begin()?;
let mut map_val_8: BTreeSet<String> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_9 = i_prot.read_string()?;
map_val_8.insert(set_elem_9);
}
i_prot.read_set_end()?;
val.insert(map_key_7, map_val_8);
}
i_prot.read_map_end()?;
f_5 = Some(val);
},
// 6: sparseContinuousFeatures — map<i64, map<string, double>>.
// The inner `map_ident` intentionally shadows the outer one.
6 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeMap<String, OrderedFloat<f64>>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_10 = i_prot.read_i64()?;
let map_ident = i_prot.read_map_begin()?;
let mut map_val_11: BTreeMap<String, OrderedFloat<f64>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_12 = i_prot.read_string()?;
let map_val_13 = OrderedFloat::from(i_prot.read_double()?);
map_val_11.insert(map_key_12, map_val_13);
}
i_prot.read_map_end()?;
val.insert(map_key_10, map_val_11);
}
i_prot.read_map_end()?;
f_6 = Some(val);
},
// 7: blobFeatures — map<i64, binary>
7 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, Vec<u8>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_14 = i_prot.read_i64()?;
let map_val_15 = i_prot.read_bytes()?;
val.insert(map_key_14, map_val_15);
}
i_prot.read_map_end()?;
f_7 = Some(val);
},
// 8: tensors — map<i64, GeneralTensor> (nested struct decode)
8 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, tensor::GeneralTensor> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_16 = i_prot.read_i64()?;
let map_val_17 = tensor::GeneralTensor::read_from_in_protocol(i_prot)?;
val.insert(map_key_16, map_val_17);
}
i_prot.read_map_end()?;
f_8 = Some(val);
},
// 9: sparseTensors — map<i64, SparseTensor> (nested struct decode)
9 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, tensor::SparseTensor> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_18 = i_prot.read_i64()?;
let map_val_19 = tensor::SparseTensor::read_from_in_protocol(i_prot)?;
val.insert(map_key_18, map_val_19);
}
i_prot.read_map_end()?;
f_9 = Some(val);
},
// Unknown field id: consume and discard for forward compatibility.
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
let ret = DataRecord {
binary_features: f_1,
continuous_features: f_2,
discrete_features: f_3,
string_features: f_4,
sparse_binary_features: f_5,
sparse_continuous_features: f_6,
blob_features: f_7,
tensors: f_8,
sparse_tensors: f_9,
};
Ok(ret)
}
// Encode this DataRecord. Only fields that are `Some` are emitted; `None`
// fields are omitted entirely, per Thrift optional-field convention.
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("DataRecord");
o_prot.write_struct_begin(&struct_ident)?;
if let Some(ref fld_var) = self.binary_features {
o_prot.write_field_begin(&TFieldIdentifier::new("binaryFeatures", TType::Set, 1))?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::I64, fld_var.len() as i32))?;
for e in fld_var {
o_prot.write_i64(*e)?;
}
o_prot.write_set_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.continuous_features {
o_prot.write_field_begin(&TFieldIdentifier::new("continuousFeatures", TType::Map, 2))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Double, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
// OrderedFloat<f64> converts back to the raw f64 for the wire.
o_prot.write_double((*v).into())?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.discrete_features {
o_prot.write_field_begin(&TFieldIdentifier::new("discreteFeatures", TType::Map, 3))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::I64, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_i64(*v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.string_features {
o_prot.write_field_begin(&TFieldIdentifier::new("stringFeatures", TType::Map, 4))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::String, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_string(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_binary_features {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseBinaryFeatures", TType::Map, 5))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Set, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::String, v.len() as i32))?;
for e in v {
o_prot.write_string(e)?;
}
o_prot.write_set_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_continuous_features {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseContinuousFeatures", TType::Map, 6))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Map, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::String, TType::Double, v.len() as i32))?;
// Inner (k, v) shadow the outer pair for the nested map.
for (k, v) in v {
o_prot.write_string(k)?;
o_prot.write_double((*v).into())?;
}
o_prot.write_map_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.blob_features {
o_prot.write_field_begin(&TFieldIdentifier::new("blobFeatures", TType::Map, 7))?;
// Thrift encodes `binary` values with the String ttype.
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::String, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_bytes(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.tensors {
o_prot.write_field_begin(&TFieldIdentifier::new("tensors", TType::Map, 8))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
v.write_to_out_protocol(o_prot)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_tensors {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseTensors", TType::Map, 9))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
v.write_to_out_protocol(o_prot)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
// Field-stop marker ends the struct on the wire.
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
impl Default for DataRecord {
    /// Every optional field defaults to `Some(empty collection)` rather
    /// than `None`, matching the generated-Thrift convention here.
    fn default() -> Self {
        DataRecord {
            binary_features: Some(BTreeSet::default()),
            continuous_features: Some(BTreeMap::default()),
            discrete_features: Some(BTreeMap::default()),
            string_features: Some(BTreeMap::default()),
            sparse_binary_features: Some(BTreeMap::default()),
            sparse_continuous_features: Some(BTreeMap::default()),
            blob_features: Some(BTreeMap::default()),
            tensors: Some(BTreeMap::default()),
            sparse_tensors: Some(BTreeMap::default()),
        }
    }
}
//
// CompactDataRecord
//
/// Generated Thrift struct: a compact variant of `DataRecord`. Continuous
/// values are stored as `i32` instead of `f64` (presumably a quantized
/// encoding — TODO confirm against the .thrift source), and sparse features
/// additionally come in 16/32/64-bit integer-keyed flavors alongside the
/// string-keyed ones. All fields are optional.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct CompactDataRecord {
/// Feature ids whose binary value is "present".
pub binary_features: Option<BTreeSet<i64>>,
/// Feature id -> compact (i32) continuous value.
pub continuous_features: Option<BTreeMap<i64, i32>>,
/// Feature id -> integer value.
pub discrete_features: Option<BTreeMap<i64, i64>>,
/// Feature id -> string value.
pub string_features: Option<BTreeMap<i64, String>>,
/// Feature id -> set of string keys.
pub sparse_binary_features: Option<BTreeMap<i64, BTreeSet<String>>>,
/// Feature id -> set of 16-bit sparse keys.
pub sparse_binary_features_with16b_sparse_key: Option<BTreeMap<i64, BTreeSet<i16>>>,
/// Feature id -> set of 32-bit sparse keys.
pub sparse_binary_features_with32b_sparse_key: Option<BTreeMap<i64, BTreeSet<i32>>>,
/// Feature id -> set of 64-bit sparse keys.
pub sparse_binary_features_with64b_sparse_key: Option<BTreeMap<i64, BTreeSet<i64>>>,
/// Feature id -> (string key -> compact value).
pub sparse_continuous_features: Option<BTreeMap<i64, BTreeMap<String, i32>>>,
/// Feature id -> (16-bit key -> compact value).
pub sparse_continuous_features_with16b_sparse_key: Option<BTreeMap<i64, BTreeMap<i16, i32>>>,
/// Feature id -> (32-bit key -> compact value).
pub sparse_continuous_features_with32b_sparse_key: Option<BTreeMap<i64, BTreeMap<i32, i32>>>,
/// Feature id -> (64-bit key -> compact value).
pub sparse_continuous_features_with64b_sparse_key: Option<BTreeMap<i64, BTreeMap<i64, i32>>>,
/// Feature id -> opaque byte blob.
pub blob_features: Option<BTreeMap<i64, Vec<u8>>>,
/// Feature id -> dense tensor.
pub tensors: Option<BTreeMap<i64, tensor::GeneralTensor>>,
/// Feature id -> sparse tensor.
pub sparse_tensors: Option<BTreeMap<i64, tensor::SparseTensor>>,
}
impl CompactDataRecord {
pub fn new<F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, F13, F14, F15>(binary_features: F1, continuous_features: F2, discrete_features: F3, string_features: F4, sparse_binary_features: F5, sparse_binary_features_with16b_sparse_key: F6, sparse_binary_features_with32b_sparse_key: F7, sparse_binary_features_with64b_sparse_key: F8, sparse_continuous_features: F9, sparse_continuous_features_with16b_sparse_key: F10, sparse_continuous_features_with32b_sparse_key: F11, sparse_continuous_features_with64b_sparse_key: F12, blob_features: F13, tensors: F14, sparse_tensors: F15) -> CompactDataRecord where F1: Into<Option<BTreeSet<i64>>>, F2: Into<Option<BTreeMap<i64, i32>>>, F3: Into<Option<BTreeMap<i64, i64>>>, F4: Into<Option<BTreeMap<i64, String>>>, F5: Into<Option<BTreeMap<i64, BTreeSet<String>>>>, F6: Into<Option<BTreeMap<i64, BTreeSet<i16>>>>, F7: Into<Option<BTreeMap<i64, BTreeSet<i32>>>>, F8: Into<Option<BTreeMap<i64, BTreeSet<i64>>>>, F9: Into<Option<BTreeMap<i64, BTreeMap<String, i32>>>>, F10: Into<Option<BTreeMap<i64, BTreeMap<i16, i32>>>>, F11: Into<Option<BTreeMap<i64, BTreeMap<i32, i32>>>>, F12: Into<Option<BTreeMap<i64, BTreeMap<i64, i32>>>>, F13: Into<Option<BTreeMap<i64, Vec<u8>>>>, F14: Into<Option<BTreeMap<i64, tensor::GeneralTensor>>>, F15: Into<Option<BTreeMap<i64, tensor::SparseTensor>>> {
CompactDataRecord {
binary_features: binary_features.into(),
continuous_features: continuous_features.into(),
discrete_features: discrete_features.into(),
string_features: string_features.into(),
sparse_binary_features: sparse_binary_features.into(),
sparse_binary_features_with16b_sparse_key: sparse_binary_features_with16b_sparse_key.into(),
sparse_binary_features_with32b_sparse_key: sparse_binary_features_with32b_sparse_key.into(),
sparse_binary_features_with64b_sparse_key: sparse_binary_features_with64b_sparse_key.into(),
sparse_continuous_features: sparse_continuous_features.into(),
sparse_continuous_features_with16b_sparse_key: sparse_continuous_features_with16b_sparse_key.into(),
sparse_continuous_features_with32b_sparse_key: sparse_continuous_features_with32b_sparse_key.into(),
sparse_continuous_features_with64b_sparse_key: sparse_continuous_features_with64b_sparse_key.into(),
blob_features: blob_features.into(),
tensors: tensors.into(),
sparse_tensors: sparse_tensors.into(),
}
}
}
impl TSerializable for CompactDataRecord {
// Decode a CompactDataRecord from a Thrift input protocol. Fields are
// dispatched on the numeric field id (1..=15); unknown ids are skipped for
// forward compatibility. Every field is optional and stays `None` when
// absent from the wire.
fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<CompactDataRecord> {
i_prot.read_struct_begin()?;
// One accumulator per Thrift field id.
let mut f_1: Option<BTreeSet<i64>> = None;
let mut f_2: Option<BTreeMap<i64, i32>> = None;
let mut f_3: Option<BTreeMap<i64, i64>> = None;
let mut f_4: Option<BTreeMap<i64, String>> = None;
let mut f_5: Option<BTreeMap<i64, BTreeSet<String>>> = None;
let mut f_6: Option<BTreeMap<i64, BTreeSet<i16>>> = None;
let mut f_7: Option<BTreeMap<i64, BTreeSet<i32>>> = None;
let mut f_8: Option<BTreeMap<i64, BTreeSet<i64>>> = None;
let mut f_9: Option<BTreeMap<i64, BTreeMap<String, i32>>> = None;
let mut f_10: Option<BTreeMap<i64, BTreeMap<i16, i32>>> = None;
let mut f_11: Option<BTreeMap<i64, BTreeMap<i32, i32>>> = None;
let mut f_12: Option<BTreeMap<i64, BTreeMap<i64, i32>>> = None;
let mut f_13: Option<BTreeMap<i64, Vec<u8>>> = None;
let mut f_14: Option<BTreeMap<i64, tensor::GeneralTensor>> = None;
let mut f_15: Option<BTreeMap<i64, tensor::SparseTensor>> = None;
loop {
let field_ident = i_prot.read_field_begin()?;
// TType::Stop terminates the struct's field list.
if field_ident.field_type == TType::Stop {
break;
}
let field_id = field_id(&field_ident)?;
match field_id {
// 1: binaryFeatures — set<i64>
1 => {
let set_ident = i_prot.read_set_begin()?;
let mut val: BTreeSet<i64> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_20 = i_prot.read_i64()?;
val.insert(set_elem_20);
}
i_prot.read_set_end()?;
f_1 = Some(val);
},
// 2: continuousFeatures — map<i64, i32> (compact encoding)
2 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, i32> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_21 = i_prot.read_i64()?;
let map_val_22 = i_prot.read_i32()?;
val.insert(map_key_21, map_val_22);
}
i_prot.read_map_end()?;
f_2 = Some(val);
},
// 3: discreteFeatures — map<i64, i64>
3 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, i64> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_23 = i_prot.read_i64()?;
let map_val_24 = i_prot.read_i64()?;
val.insert(map_key_23, map_val_24);
}
i_prot.read_map_end()?;
f_3 = Some(val);
},
// 4: stringFeatures — map<i64, string>
4 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, String> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_25 = i_prot.read_i64()?;
let map_val_26 = i_prot.read_string()?;
val.insert(map_key_25, map_val_26);
}
i_prot.read_map_end()?;
f_4 = Some(val);
},
// 5: sparseBinaryFeatures — map<i64, set<string>>
5 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeSet<String>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_27 = i_prot.read_i64()?;
let set_ident = i_prot.read_set_begin()?;
let mut map_val_28: BTreeSet<String> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_29 = i_prot.read_string()?;
map_val_28.insert(set_elem_29);
}
i_prot.read_set_end()?;
val.insert(map_key_27, map_val_28);
}
i_prot.read_map_end()?;
f_5 = Some(val);
},
// 6: sparseBinaryFeaturesWith16bSparseKey — map<i64, set<i16>>
6 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeSet<i16>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_30 = i_prot.read_i64()?;
let set_ident = i_prot.read_set_begin()?;
let mut map_val_31: BTreeSet<i16> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_32 = i_prot.read_i16()?;
map_val_31.insert(set_elem_32);
}
i_prot.read_set_end()?;
val.insert(map_key_30, map_val_31);
}
i_prot.read_map_end()?;
f_6 = Some(val);
},
// 7: sparseBinaryFeaturesWith32bSparseKey — map<i64, set<i32>>
7 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeSet<i32>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_33 = i_prot.read_i64()?;
let set_ident = i_prot.read_set_begin()?;
let mut map_val_34: BTreeSet<i32> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_35 = i_prot.read_i32()?;
map_val_34.insert(set_elem_35);
}
i_prot.read_set_end()?;
val.insert(map_key_33, map_val_34);
}
i_prot.read_map_end()?;
f_7 = Some(val);
},
// 8: sparseBinaryFeaturesWith64bSparseKey — map<i64, set<i64>>
8 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeSet<i64>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_36 = i_prot.read_i64()?;
let set_ident = i_prot.read_set_begin()?;
let mut map_val_37: BTreeSet<i64> = BTreeSet::new();
for _ in 0..set_ident.size {
let set_elem_38 = i_prot.read_i64()?;
map_val_37.insert(set_elem_38);
}
i_prot.read_set_end()?;
val.insert(map_key_36, map_val_37);
}
i_prot.read_map_end()?;
f_8 = Some(val);
},
// 9: sparseContinuousFeatures — map<i64, map<string, i32>>.
// Inner `map_ident` intentionally shadows the outer one.
9 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeMap<String, i32>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_39 = i_prot.read_i64()?;
let map_ident = i_prot.read_map_begin()?;
let mut map_val_40: BTreeMap<String, i32> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_41 = i_prot.read_string()?;
let map_val_42 = i_prot.read_i32()?;
map_val_40.insert(map_key_41, map_val_42);
}
i_prot.read_map_end()?;
val.insert(map_key_39, map_val_40);
}
i_prot.read_map_end()?;
f_9 = Some(val);
},
// 10: sparseContinuousFeaturesWith16bSparseKey — map<i64, map<i16, i32>>
10 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeMap<i16, i32>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_43 = i_prot.read_i64()?;
let map_ident = i_prot.read_map_begin()?;
let mut map_val_44: BTreeMap<i16, i32> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_45 = i_prot.read_i16()?;
let map_val_46 = i_prot.read_i32()?;
map_val_44.insert(map_key_45, map_val_46);
}
i_prot.read_map_end()?;
val.insert(map_key_43, map_val_44);
}
i_prot.read_map_end()?;
f_10 = Some(val);
},
// 11: sparseContinuousFeaturesWith32bSparseKey — map<i64, map<i32, i32>>
11 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeMap<i32, i32>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_47 = i_prot.read_i64()?;
let map_ident = i_prot.read_map_begin()?;
let mut map_val_48: BTreeMap<i32, i32> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_49 = i_prot.read_i32()?;
let map_val_50 = i_prot.read_i32()?;
map_val_48.insert(map_key_49, map_val_50);
}
i_prot.read_map_end()?;
val.insert(map_key_47, map_val_48);
}
i_prot.read_map_end()?;
f_11 = Some(val);
},
// 12: sparseContinuousFeaturesWith64bSparseKey — map<i64, map<i64, i32>>
12 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, BTreeMap<i64, i32>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_51 = i_prot.read_i64()?;
let map_ident = i_prot.read_map_begin()?;
let mut map_val_52: BTreeMap<i64, i32> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_53 = i_prot.read_i64()?;
let map_val_54 = i_prot.read_i32()?;
map_val_52.insert(map_key_53, map_val_54);
}
i_prot.read_map_end()?;
val.insert(map_key_51, map_val_52);
}
i_prot.read_map_end()?;
f_12 = Some(val);
},
// 13: blobFeatures — map<i64, binary>
13 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, Vec<u8>> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_55 = i_prot.read_i64()?;
let map_val_56 = i_prot.read_bytes()?;
val.insert(map_key_55, map_val_56);
}
i_prot.read_map_end()?;
f_13 = Some(val);
},
// 14: tensors — map<i64, GeneralTensor> (nested struct decode)
14 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, tensor::GeneralTensor> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_57 = i_prot.read_i64()?;
let map_val_58 = tensor::GeneralTensor::read_from_in_protocol(i_prot)?;
val.insert(map_key_57, map_val_58);
}
i_prot.read_map_end()?;
f_14 = Some(val);
},
// 15: sparseTensors — map<i64, SparseTensor> (nested struct decode)
15 => {
let map_ident = i_prot.read_map_begin()?;
let mut val: BTreeMap<i64, tensor::SparseTensor> = BTreeMap::new();
for _ in 0..map_ident.size {
let map_key_59 = i_prot.read_i64()?;
let map_val_60 = tensor::SparseTensor::read_from_in_protocol(i_prot)?;
val.insert(map_key_59, map_val_60);
}
i_prot.read_map_end()?;
f_15 = Some(val);
},
// Unknown field id: consume and discard for forward compatibility.
_ => {
i_prot.skip(field_ident.field_type)?;
},
};
i_prot.read_field_end()?;
}
i_prot.read_struct_end()?;
let ret = CompactDataRecord {
binary_features: f_1,
continuous_features: f_2,
discrete_features: f_3,
string_features: f_4,
sparse_binary_features: f_5,
sparse_binary_features_with16b_sparse_key: f_6,
sparse_binary_features_with32b_sparse_key: f_7,
sparse_binary_features_with64b_sparse_key: f_8,
sparse_continuous_features: f_9,
sparse_continuous_features_with16b_sparse_key: f_10,
sparse_continuous_features_with32b_sparse_key: f_11,
sparse_continuous_features_with64b_sparse_key: f_12,
blob_features: f_13,
tensors: f_14,
sparse_tensors: f_15,
};
Ok(ret)
}
// Encode this CompactDataRecord. Only fields that are `Some` are emitted;
// `None` fields are omitted entirely, per Thrift optional-field convention.
fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
let struct_ident = TStructIdentifier::new("CompactDataRecord");
o_prot.write_struct_begin(&struct_ident)?;
if let Some(ref fld_var) = self.binary_features {
o_prot.write_field_begin(&TFieldIdentifier::new("binaryFeatures", TType::Set, 1))?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::I64, fld_var.len() as i32))?;
for e in fld_var {
o_prot.write_i64(*e)?;
}
o_prot.write_set_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.continuous_features {
o_prot.write_field_begin(&TFieldIdentifier::new("continuousFeatures", TType::Map, 2))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::I32, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_i32(*v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.discrete_features {
o_prot.write_field_begin(&TFieldIdentifier::new("discreteFeatures", TType::Map, 3))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::I64, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_i64(*v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.string_features {
o_prot.write_field_begin(&TFieldIdentifier::new("stringFeatures", TType::Map, 4))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::String, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_string(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_binary_features {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseBinaryFeatures", TType::Map, 5))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Set, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::String, v.len() as i32))?;
for e in v {
o_prot.write_string(e)?;
}
o_prot.write_set_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_binary_features_with16b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseBinaryFeaturesWith16bSparseKey", TType::Map, 6))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Set, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::I16, v.len() as i32))?;
for e in v {
o_prot.write_i16(*e)?;
}
o_prot.write_set_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_binary_features_with32b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseBinaryFeaturesWith32bSparseKey", TType::Map, 7))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Set, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::I32, v.len() as i32))?;
for e in v {
o_prot.write_i32(*e)?;
}
o_prot.write_set_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_binary_features_with64b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseBinaryFeaturesWith64bSparseKey", TType::Map, 8))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Set, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_set_begin(&TSetIdentifier::new(TType::I64, v.len() as i32))?;
for e in v {
o_prot.write_i64(*e)?;
}
o_prot.write_set_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_continuous_features {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseContinuousFeatures", TType::Map, 9))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Map, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::String, TType::I32, v.len() as i32))?;
// Inner (k, v) shadow the outer pair for the nested map.
for (k, v) in v {
o_prot.write_string(k)?;
o_prot.write_i32(*v)?;
}
o_prot.write_map_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_continuous_features_with16b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseContinuousFeaturesWith16bSparseKey", TType::Map, 10))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Map, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I16, TType::I32, v.len() as i32))?;
for (k, v) in v {
o_prot.write_i16(*k)?;
o_prot.write_i32(*v)?;
}
o_prot.write_map_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_continuous_features_with32b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseContinuousFeaturesWith32bSparseKey", TType::Map, 11))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Map, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I32, TType::I32, v.len() as i32))?;
for (k, v) in v {
o_prot.write_i32(*k)?;
o_prot.write_i32(*v)?;
}
o_prot.write_map_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_continuous_features_with64b_sparse_key {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseContinuousFeaturesWith64bSparseKey", TType::Map, 12))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Map, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::I32, v.len() as i32))?;
for (k, v) in v {
o_prot.write_i64(*k)?;
o_prot.write_i32(*v)?;
}
o_prot.write_map_end()?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.blob_features {
o_prot.write_field_begin(&TFieldIdentifier::new("blobFeatures", TType::Map, 13))?;
// Thrift encodes `binary` values with the String ttype.
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::String, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
o_prot.write_bytes(v)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.tensors {
o_prot.write_field_begin(&TFieldIdentifier::new("tensors", TType::Map, 14))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
v.write_to_out_protocol(o_prot)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
if let Some(ref fld_var) = self.sparse_tensors {
o_prot.write_field_begin(&TFieldIdentifier::new("sparseTensors", TType::Map, 15))?;
o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
for (k, v) in fld_var {
o_prot.write_i64(*k)?;
v.write_to_out_protocol(o_prot)?;
}
o_prot.write_map_end()?;
o_prot.write_field_end()?
}
// Field-stop marker ends the struct on the wire.
o_prot.write_field_stop()?;
o_prot.write_struct_end()
}
}
impl Default for CompactDataRecord {
fn default() -> Self {
CompactDataRecord{
binary_features: Some(BTreeSet::new()),
continuous_features: Some(BTreeMap::new()),
discrete_features: Some(BTreeMap::new()),
string_features: Some(BTreeMap::new()),
sparse_binary_features: Some(BTreeMap::new()),
sparse_binary_features_with16b_sparse_key: Some(BTreeMap::new()),
sparse_binary_features_with32b_sparse_key: Some(BTreeMap::new()),
sparse_binary_features_with64b_sparse_key: Some(BTreeMap::new()),
sparse_continuous_features: Some(BTreeMap::new()),
sparse_continuous_features_with16b_sparse_key: Some(BTreeMap::new()),
sparse_continuous_features_with32b_sparse_key: Some(BTreeMap::new()),
sparse_continuous_features_with64b_sparse_key: Some(BTreeMap::new()),
blob_features: Some(BTreeMap::new()),
tensors: Some(BTreeMap::new()),
sparse_tensors: Some(BTreeMap::new()),
}
}
}
//
// TensorRecord
//
/// A record holding only tensor-valued features, keyed by i64 feature id.
/// Generated by the Thrift compiler; both fields are optional on the wire.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct TensorRecord {
  // Dense tensors keyed by feature id (Thrift field 1).
  pub tensors: Option<BTreeMap<i64, tensor::GeneralTensor>>,
  // Sparse tensors keyed by feature id (Thrift field 2).
  pub sparse_tensors: Option<BTreeMap<i64, tensor::SparseTensor>>,
}
impl TensorRecord {
  /// Builds a `TensorRecord` from anything convertible into its optional
  /// tensor maps — e.g. a bare `BTreeMap`, `Some(map)`, or `None`.
  pub fn new<F1, F2>(tensors: F1, sparse_tensors: F2) -> TensorRecord
  where
    F1: Into<Option<BTreeMap<i64, tensor::GeneralTensor>>>,
    F2: Into<Option<BTreeMap<i64, tensor::SparseTensor>>>,
  {
    let tensors = tensors.into();
    let sparse_tensors = sparse_tensors.into();
    TensorRecord { tensors, sparse_tensors }
  }
}
impl TSerializable for TensorRecord {
  /// Deserializes a `TensorRecord` from a Thrift input protocol.
  ///
  /// Fields absent from the wire are left as `None`; unrecognized field ids
  /// are skipped, so records written by newer schemas remain readable.
  fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<TensorRecord> {
    i_prot.read_struct_begin()?;
    // Accumulators for Thrift fields 1 (tensors) and 2 (sparseTensors).
    let mut f_1: Option<BTreeMap<i64, tensor::GeneralTensor>> = None;
    let mut f_2: Option<BTreeMap<i64, tensor::SparseTensor>> = None;
    loop {
      let field_ident = i_prot.read_field_begin()?;
      if field_ident.field_type == TType::Stop {
        break;
      }
      let field_id = field_id(&field_ident)?;
      match field_id {
        1 => {
          // map<i64, GeneralTensor>
          let map_ident = i_prot.read_map_begin()?;
          let mut val: BTreeMap<i64, tensor::GeneralTensor> = BTreeMap::new();
          for _ in 0..map_ident.size {
            let map_key_61 = i_prot.read_i64()?;
            let map_val_62 = tensor::GeneralTensor::read_from_in_protocol(i_prot)?;
            val.insert(map_key_61, map_val_62);
          }
          i_prot.read_map_end()?;
          f_1 = Some(val);
        },
        2 => {
          // map<i64, SparseTensor>
          let map_ident = i_prot.read_map_begin()?;
          let mut val: BTreeMap<i64, tensor::SparseTensor> = BTreeMap::new();
          for _ in 0..map_ident.size {
            let map_key_63 = i_prot.read_i64()?;
            let map_val_64 = tensor::SparseTensor::read_from_in_protocol(i_prot)?;
            val.insert(map_key_63, map_val_64);
          }
          i_prot.read_map_end()?;
          f_2 = Some(val);
        },
        _ => {
          // Unknown field id: skip its payload for forward compatibility.
          i_prot.skip(field_ident.field_type)?;
        },
      };
      i_prot.read_field_end()?;
    }
    i_prot.read_struct_end()?;
    let ret = TensorRecord {
      tensors: f_1,
      sparse_tensors: f_2,
    };
    Ok(ret)
  }
  /// Serializes this record to a Thrift output protocol.
  ///
  /// `None` fields are omitted entirely — no field header is written for
  /// them, which is what makes them optional on the wire.
  fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
    let struct_ident = TStructIdentifier::new("TensorRecord");
    o_prot.write_struct_begin(&struct_ident)?;
    if let Some(ref fld_var) = self.tensors {
      o_prot.write_field_begin(&TFieldIdentifier::new("tensors", TType::Map, 1))?;
      o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
      for (k, v) in fld_var {
        o_prot.write_i64(*k)?;
        v.write_to_out_protocol(o_prot)?;
      }
      o_prot.write_map_end()?;
      o_prot.write_field_end()?
    }
    if let Some(ref fld_var) = self.sparse_tensors {
      o_prot.write_field_begin(&TFieldIdentifier::new("sparseTensors", TType::Map, 2))?;
      o_prot.write_map_begin(&TMapIdentifier::new(TType::I64, TType::Struct, fld_var.len() as i32))?;
      for (k, v) in fld_var {
        o_prot.write_i64(*k)?;
        v.write_to_out_protocol(o_prot)?;
      }
      o_prot.write_map_end()?;
      o_prot.write_field_end()?
    }
    // Stop marker terminates the field list before the struct is closed.
    o_prot.write_field_stop()?;
    o_prot.write_struct_end()
  }
}
impl Default for TensorRecord {
fn default() -> Self {
TensorRecord{
tensors: Some(BTreeMap::new()),
sparse_tensors: Some(BTreeMap::new()),
}
}
}
//
// FeatureMetaInfo
//
/// Metadata describing a single feature: its numeric id, its fully qualified
/// name, and its `FeatureType`. Generated by the Thrift compiler; every
/// field is optional on the wire.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct FeatureMetaInfo {
  // Numeric feature id (Thrift field 1).
  pub feature_id: Option<i64>,
  // Fully qualified feature name (Thrift field 2).
  pub full_feature_name: Option<String>,
  // Feature type, serialized as an i32 enum (Thrift field 3).
  pub feature_type: Option<FeatureType>,
}
impl FeatureMetaInfo {
  /// Constructs a `FeatureMetaInfo`, accepting anything convertible into
  /// the optional id / name / type fields (bare values, `Some(..)`, or `None`).
  pub fn new<F1, F2, F3>(feature_id: F1, full_feature_name: F2, feature_type: F3) -> FeatureMetaInfo
  where
    F1: Into<Option<i64>>,
    F2: Into<Option<String>>,
    F3: Into<Option<FeatureType>>,
  {
    let feature_id = feature_id.into();
    let full_feature_name = full_feature_name.into();
    let feature_type = feature_type.into();
    FeatureMetaInfo { feature_id, full_feature_name, feature_type }
  }
}
impl TSerializable for FeatureMetaInfo {
  /// Deserializes a `FeatureMetaInfo` from a Thrift input protocol.
  ///
  /// Fields absent from the wire remain `None`; unknown field ids are
  /// skipped for forward compatibility.
  fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<FeatureMetaInfo> {
    i_prot.read_struct_begin()?;
    // Accumulators for fields 1 (featureId), 2 (fullFeatureName), 3 (featureType).
    let mut f_1: Option<i64> = None;
    let mut f_2: Option<String> = None;
    let mut f_3: Option<FeatureType> = None;
    loop {
      let field_ident = i_prot.read_field_begin()?;
      if field_ident.field_type == TType::Stop {
        break;
      }
      let field_id = field_id(&field_ident)?;
      match field_id {
        1 => {
          let val = i_prot.read_i64()?;
          f_1 = Some(val);
        },
        2 => {
          let val = i_prot.read_string()?;
          f_2 = Some(val);
        },
        3 => {
          // FeatureType reads itself from an i32 on the wire.
          let val = FeatureType::read_from_in_protocol(i_prot)?;
          f_3 = Some(val);
        },
        _ => {
          // Unknown field id: skip its payload.
          i_prot.skip(field_ident.field_type)?;
        },
      };
      i_prot.read_field_end()?;
    }
    i_prot.read_struct_end()?;
    let ret = FeatureMetaInfo {
      feature_id: f_1,
      full_feature_name: f_2,
      feature_type: f_3,
    };
    Ok(ret)
  }
  /// Serializes this struct; `None` fields are omitted from the output.
  fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
    let struct_ident = TStructIdentifier::new("FeatureMetaInfo");
    o_prot.write_struct_begin(&struct_ident)?;
    if let Some(fld_var) = self.feature_id {
      o_prot.write_field_begin(&TFieldIdentifier::new("featureId", TType::I64, 1))?;
      o_prot.write_i64(fld_var)?;
      o_prot.write_field_end()?
    }
    if let Some(ref fld_var) = self.full_feature_name {
      o_prot.write_field_begin(&TFieldIdentifier::new("fullFeatureName", TType::String, 2))?;
      o_prot.write_string(fld_var)?;
      o_prot.write_field_end()?
    }
    if let Some(ref fld_var) = self.feature_type {
      // The enum serializes as TType::I32.
      o_prot.write_field_begin(&TFieldIdentifier::new("featureType", TType::I32, 3))?;
      fld_var.write_to_out_protocol(o_prot)?;
      o_prot.write_field_end()?
    }
    o_prot.write_field_stop()?;
    o_prot.write_struct_end()
  }
}
impl Default for FeatureMetaInfo {
fn default() -> Self {
FeatureMetaInfo{
feature_id: Some(0),
full_feature_name: Some("".to_owned()),
feature_type: None,
}
}
}
//
// FeatureMetaInfoList
//
/// A wrapper struct carrying a list of `FeatureMetaInfo` entries.
/// Generated by the Thrift compiler; the list is optional on the wire.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct FeatureMetaInfoList {
  // Feature metadata entries (Thrift field 1).
  pub contents: Option<Vec<FeatureMetaInfo>>,
}
impl FeatureMetaInfoList {
  /// Wraps an optional list of feature metadata entries; accepts a bare
  /// `Vec`, `Some(vec)`, or `None`.
  pub fn new<F1>(contents: F1) -> FeatureMetaInfoList
  where
    F1: Into<Option<Vec<FeatureMetaInfo>>>,
  {
    FeatureMetaInfoList { contents: contents.into() }
  }
}
impl TSerializable for FeatureMetaInfoList {
  /// Deserializes a `FeatureMetaInfoList` from a Thrift input protocol.
  ///
  /// The `contents` field stays `None` if absent from the wire; unknown
  /// field ids are skipped for forward compatibility.
  fn read_from_in_protocol(i_prot: &mut dyn TInputProtocol) -> thrift::Result<FeatureMetaInfoList> {
    i_prot.read_struct_begin()?;
    let mut f_1: Option<Vec<FeatureMetaInfo>> = None;
    loop {
      let field_ident = i_prot.read_field_begin()?;
      if field_ident.field_type == TType::Stop {
        break;
      }
      let field_id = field_id(&field_ident)?;
      match field_id {
        1 => {
          let list_ident = i_prot.read_list_begin()?;
          // NOTE(review): capacity comes straight from the wire-declared
          // size, so a corrupt or hostile frame could request a very large
          // allocation up front — confirm upstream frame-size limits.
          let mut val: Vec<FeatureMetaInfo> = Vec::with_capacity(list_ident.size as usize);
          for _ in 0..list_ident.size {
            let list_elem_65 = FeatureMetaInfo::read_from_in_protocol(i_prot)?;
            val.push(list_elem_65);
          }
          i_prot.read_list_end()?;
          f_1 = Some(val);
        },
        _ => {
          // Unknown field id: skip its payload.
          i_prot.skip(field_ident.field_type)?;
        },
      };
      i_prot.read_field_end()?;
    }
    i_prot.read_struct_end()?;
    let ret = FeatureMetaInfoList {
      contents: f_1,
    };
    Ok(ret)
  }
  /// Serializes this struct; a `None` contents field is omitted entirely.
  fn write_to_out_protocol(&self, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {
    let struct_ident = TStructIdentifier::new("FeatureMetaInfoList");
    o_prot.write_struct_begin(&struct_ident)?;
    if let Some(ref fld_var) = self.contents {
      o_prot.write_field_begin(&TFieldIdentifier::new("contents", TType::List, 1))?;
      o_prot.write_list_begin(&TListIdentifier::new(TType::Struct, fld_var.len() as i32))?;
      for e in fld_var {
        e.write_to_out_protocol(o_prot)?;
      }
      o_prot.write_list_end()?;
      o_prot.write_field_end()?
    }
    o_prot.write_field_stop()?;
    o_prot.write_struct_end()
  }
}
impl Default for FeatureMetaInfoList {
fn default() -> Self {
FeatureMetaInfoList{
contents: Some(Vec::new()),
}
}
}