Mirror of https://github.com/twitter/the-algorithm.git, synced 2024-12-22 10:11:52 +01:00

Compare commits

No commits in common. "7f90d0ca342b928b479b512ec51ac2c3821f5922" and "ec83d01dcaebf369444d75ed04b3625a0a645eb9" have entirely different histories.

7f90d0ca34 ... ec83d01dca
@@ -10,7 +10,7 @@ These are the main components of the Recommendation Algorithm included in this r
 | Type | Component | Description |
 |------------|------------|------------|
-| Feature | [simclusters-ann](simclusters-ann/README.md) | Community detection and sparse embeddings into those communities. |
+| Feature | [SimClusters](src/scala/com/twitter/simclusters_v2/README.md) | Community detection and sparse embeddings into those communities. |
 | | [TwHIN](https://github.com/twitter/the-algorithm-ml/blob/main/projects/twhin/README.md) | Dense knowledge graph embeddings for Users and Tweets. |
 | | [trust-and-safety-models](trust_and_safety_models/README.md) | Models for detecting NSFW or abusive content. |
 | | [real-graph](src/scala/com/twitter/interaction_graph/README.md) | Model to predict likelihood of a Twitter User interacting with another User. |
@@ -24,10 +24,6 @@ class FollowRecommendationsServiceWarmupHandler @Inject() (warmup: ThriftWarmup)
     extends Handler
     with Logging {

-  /**
-   * this would need to be added to src/main/resources/client_whitelist.yml
-   * if we implement ClientId filtering in the future
-   */
   private val clientId = ClientId("thrift-warmup-client")

   override def handle(): Unit = {
@@ -48,7 +44,7 @@ class FollowRecommendationsServiceWarmupHandler @Inject() (warmup: ThriftWarmup)
       RecommendationRequest(
         clientContext = clientContext,
         displayLocation = displayLocation,
-        displayContext = Some(DisplayContext.Profile(Profile(12L))),
+        displayContext = None,
         maxResults = Some(3),
         fetchPromotedContent = Some(false),
         debugParams = Some(DebugParams(doNotLog = Some(true)))
@@ -25,7 +25,7 @@ class ServerController @Inject() (
       .andThen(Service.mk(serverGetIntersectionHandler))

   val getIntersection: Service[GetIntersection.Args, GfsIntersectionResponse] = { args =>
-    // TODO(yqian): Disable updateCache after HTL switch to use PresetIntersection endpoint.
+    // TODO: Disable updateCache after HTL switch to use PresetIntersection endpoint.
     getIntersectionService(
       GetIntersectionRequest.fromGfsIntersectionRequest(args.request, cacheable = true))
   }
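For context, the `.andThen(Service.mk(serverGetIntersectionHandler))` line in the hunk above composes a Finagle filter chain with a function-backed service. A minimal sketch of that composition, using hypothetical request/response types rather than the graph_feature_service thrift types:

import com.twitter.finagle.{Service, SimpleFilter}
import com.twitter.util.Future

// Hypothetical request/response types for illustration only.
case class Req(userId: Long)
case class Rep(size: Int)

// A pass-through filter; in practice this is where logging, stats, or deadlines would go.
val loggingFilter: SimpleFilter[Req, Rep] = new SimpleFilter[Req, Rep] {
  def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = {
    println(s"request for user ${request.userId}")
    service(request)
  }
}

// Service.mk lifts a plain function into a Service; andThen chains the filter in front of it.
val handler: Req => Future[Rep] = req => Future.value(Rep(size = 0))
val getIntersection: Service[Req, Rep] = loggingFilter.andThen(Service.mk(handler))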
@@ -35,7 +35,7 @@ class ServerController @Inject() (
     GetPresetIntersection.Args,
     GfsIntersectionResponse
   ] = { args =>
-    // TODO(yqian): Refactor after HTL switch to PresetIntersection
+    // TODO: Refactor after HTL switch to PresetIntersection
     val cacheable = args.request.presetFeatureTypes == PresetFeatureTypes.HtlTwoHop
     getIntersectionService(
       GetIntersectionRequest.fromGfsPresetIntersectionRequest(args.request, cacheable))
@@ -28,7 +28,7 @@ class ServerGetIntersectionHandler @Inject() (

   import ServerGetIntersectionHandler._

-  // TODO(yqian): Track all the stats based on PresetFeatureType and update the dashboard
+  // TODO: Track all the stats based on PresetFeatureType and update the dashboard
   private val stats: StatsReceiver = statsReceiver.scope("srv").scope("get_intersection")
   private val numCandidatesCount = stats.counter("total_num_candidates")
   private val numCandidatesStat = stats.stat("num_candidates")
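The unchanged stats lines above follow the usual util-stats pattern: scope a StatsReceiver once, create counters and stats at construction time, then update them per request. A rough sketch of that usage with an in-memory receiver (illustrative only, not the handler's actual code; the recordCandidates helper is hypothetical):

import com.twitter.finagle.stats.{InMemoryStatsReceiver, StatsReceiver}

val statsReceiver: StatsReceiver = new InMemoryStatsReceiver

// Create instruments once, under a nested scope, exactly as the handler does.
val stats = statsReceiver.scope("srv").scope("get_intersection")
val numCandidatesCount = stats.counter("total_num_candidates")
val numCandidatesStat = stats.stat("num_candidates")

// Per request: bump the running total and record the per-request distribution.
def recordCandidates(numCandidates: Int): Unit = {
  numCandidatesCount.incr(numCandidates)
  numCandidatesStat.add(numCandidates.toFloat)
}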
@@ -1,13 +1,17 @@
 package com.twitter.graph_feature_service.server.handlers

 import com.twitter.finatra.thrift.routing.ThriftWarmup
-import com.twitter.graph_feature_service.thriftscala.EdgeType.{FavoritedBy, FollowedBy, Following}
+import com.twitter.graph_feature_service.thriftscala.EdgeType.FavoritedBy
+import com.twitter.graph_feature_service.thriftscala.EdgeType.FollowedBy
+import com.twitter.graph_feature_service.thriftscala.EdgeType.Following
 import com.twitter.graph_feature_service.thriftscala.Server.GetIntersection
-import com.twitter.graph_feature_service.thriftscala.{FeatureType, GfsIntersectionRequest}
+import com.twitter.graph_feature_service.thriftscala.FeatureType
+import com.twitter.graph_feature_service.thriftscala.GfsIntersectionRequest
 import com.twitter.inject.utils.Handler
 import com.twitter.scrooge.Request
 import com.twitter.util.logging.Logger
-import javax.inject.{Inject, Singleton}
+import javax.inject.Inject
+import javax.inject.Singleton
 import scala.util.Random

 @Singleton
@@ -15,14 +19,8 @@ class ServerWarmupHandler @Inject() (warmup: ThriftWarmup) extends Handler {

   val logger: Logger = Logger("WarmupHandler")

-  private val testingAccounts: Array[Long] = {
-    Seq(
-      12L, //jack
-      21447363L, // KATY PERRY
-      42562446L, // Stephen Curry
-      813286L // Barack Obama
-    ).toArray
-  }
+  // TODO: Add the testing accounts to warm-up the service.
+  private val testingAccounts: Array[Long] = Seq.empty.toArray

   private def getRandomRequest: GfsIntersectionRequest = {
     GfsIntersectionRequest(
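On the old side, getRandomRequest presumably drew account IDs from testingAccounts using the scala.util.Random import kept in the previous hunk; the new revision keeps the mechanism but ships the list empty. A hedged sketch of that selection pattern (the helper below is a generic illustration, not the service's real request builder):

import scala.util.Random

// Stand-in for the warm-up account pool; the open-sourced code ships it empty.
val testingAccounts: Array[Long] = Seq.empty[Long].toArray

// Pick a random (source, destination) user pair for a synthetic warm-up call, if the pool is non-empty.
def randomUserPair(): Option[(Long, Long)] =
  if (testingAccounts.length < 2) None
  else {
    val src = testingAccounts(Random.nextInt(testingAccounts.length))
    val dst = testingAccounts(Random.nextInt(testingAccounts.length))
    Some((src, dst))
  }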
@@ -108,7 +108,7 @@ object IntersectionValueCalculator {
   }

   /**
-   * TODO(yaow): for now it only computes intersection size. Will add more feature types (e.g., dot
+   * TODO: for now it only computes intersection size. Will add more feature types (e.g., dot
    * product, maximum value).
    *
    * NOTE that this function assumes both x and y are SORTED arrays.
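The scaladoc above notes that the calculator currently computes only the intersection size and assumes both inputs are sorted. As an illustration of that technique (a minimal sketch, not the repository's actual IntersectionValueCalculator code), a two-pointer pass over two sorted arrays looks like this:

// Counts values present in both arrays; assumes x and y are sorted ascending.
def intersectionSize(x: Array[Long], y: Array[Long]): Int = {
  var i = 0
  var j = 0
  var count = 0
  while (i < x.length && j < y.length) {
    if (x(i) == y(j)) { count += 1; i += 1; j += 1 }
    else if (x(i) < y(j)) i += 1
    else j += 1
  }
  count
}

The dot product and maximum value mentioned in the TODO would replace the count accumulator with a running sum or max over the matched positions.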
@@ -220,30 +220,7 @@ object HomeTweetTypePredicates {
       _.getOrElse(ConversationModule2DisplayedTweetsFeature, false)),
     ("conversation_module_has_gap", _.getOrElse(ConversationModuleHasGapFeature, false)),
     ("served_in_recap_tweet_candidate_module_injection", _ => false),
-    ("served_in_threaded_conversation_module", _ => false),
-    (
-      "author_is_elon",
-      candidate =>
-        candidate
-          .getOrElse(AuthorIdFeature, None).contains(candidate.getOrElse(DDGStatsElonFeature, 0L))),
-    (
-      "author_is_power_user",
-      candidate =>
-        candidate
-          .getOrElse(AuthorIdFeature, None)
-          .exists(candidate.getOrElse(DDGStatsVitsFeature, Set.empty[Long]).contains)),
-    (
-      "author_is_democrat",
-      candidate =>
-        candidate
-          .getOrElse(AuthorIdFeature, None)
-          .exists(candidate.getOrElse(DDGStatsDemocratsFeature, Set.empty[Long]).contains)),
-    (
-      "author_is_republican",
-      candidate =>
-        candidate
-          .getOrElse(AuthorIdFeature, None)
-          .exists(candidate.getOrElse(DDGStatsRepublicansFeature, Set.empty[Long]).contains)),
+    ("served_in_threaded_conversation_module", _ => false)
   )

   val PredicateMap = CandidatePredicates.toMap
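For orientation, the list being trimmed above is a Seq of (name, candidate => Boolean) pairs that is flattened into PredicateMap for lookup by name. A simplified sketch of that pattern, using a plain Map[String, Any] as a stand-in for home-mixer's FeatureMap (hypothetical types and feature names, not the real API):

object TweetTypePredicatesSketch {
  // Toy stand-in for a candidate's hydrated features.
  type CandidateFeatures = Map[String, Any]

  // Named boolean predicates over a candidate, mirroring the (label, lambda) tuples above.
  val candidatePredicates: Seq[(String, CandidateFeatures => Boolean)] = Seq(
    ("is_reply", features => features.get("in_reply_to_tweet_id").isDefined),
    ("served_in_conversation_module", features => features.getOrElse("conversation_module", false) == true)
  )

  // Same shape as `val PredicateMap = CandidatePredicates.toMap` in the hunk above.
  val predicateMap: Map[String, CandidateFeatures => Boolean] = candidatePredicates.toMap

  // Evaluate one named predicate against a candidate, defaulting to false if the name is unknown.
  def evaluate(name: String, features: CandidateFeatures): Boolean =
    predicateMap.get(name).exists(_(features))
}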
@@ -1,12 +1,10 @@
 package com.twitter.home_mixer.functional_component.feature_hydrator

-import com.twitter.config.yaml.YamlMap
 import com.twitter.finagle.tracing.Annotation.BinaryAnnotation
 import com.twitter.finagle.tracing.ForwardAnnotation
 import com.twitter.home_mixer.model.HomeFeatures._
 import com.twitter.home_mixer.model.request.DeviceContext.RequestContext
 import com.twitter.home_mixer.model.request.HasDeviceContext
-import com.twitter.home_mixer.param.HomeMixerInjectionNames.DDGStatsAuthors
 import com.twitter.joinkey.context.RequestJoinKeyContext
 import com.twitter.product_mixer.component_library.model.cursor.UrtOrderedCursor
 import com.twitter.product_mixer.core.feature.Feature
@@ -24,22 +22,16 @@ import com.twitter.snowflake.id.SnowflakeId
 import com.twitter.stitch.Stitch
 import java.util.UUID
 import javax.inject.Inject
-import javax.inject.Named
 import javax.inject.Singleton

 @Singleton
 class RequestQueryFeatureHydrator[
   Query <: PipelineQuery with HasPipelineCursor[UrtOrderedCursor] with HasDeviceContext] @Inject() (
-  @Named(DDGStatsAuthors) ddgStatsAuthors: YamlMap)
-    extends QueryFeatureHydrator[Query] {
+) extends QueryFeatureHydrator[Query] {

   override val features: Set[Feature[_, _]] = Set(
     AccountAgeFeature,
     ClientIdFeature,
-    DDGStatsDemocratsFeature,
-    DDGStatsRepublicansFeature,
-    DDGStatsElonFeature,
-    DDGStatsVitsFeature,
     DeviceLanguageFeature,
     GetInitialFeature,
     GetMiddleFeature,
@@ -59,10 +51,6 @@ class RequestQueryFeatureHydrator[
   override val identifier: FeatureHydratorIdentifier = FeatureHydratorIdentifier("Request")

   private val DarkRequestAnnotation = "clnt/has_dark_request"
-  private val Democrats = "democrats"
-  private val Republicans = "republicans"
-  private val Elon = "elon"
-  private val Vits = "vits"

   // Convert Language code to ISO 639-3 format
   private def getLanguageISOFormatByCode(languageCode: String): String =
@@ -83,16 +71,6 @@ class RequestQueryFeatureHydrator[
     val featureMap = FeatureMapBuilder()
       .add(AccountAgeFeature, query.getOptionalUserId.flatMap(SnowflakeId.timeFromIdOpt))
       .add(ClientIdFeature, query.clientContext.appId)
-      /**
-       * These author ID lists are used purely for metrics collection. We track how often we are
-       * serving Tweets from these authors and how often their tweets are being impressed by users.
-       * This helps us validate in our A/B experimentation platform that we do not ship changes
-       * that negatively impacts one group over others.
-       */
-      .add(DDGStatsDemocratsFeature, ddgStatsAuthors.longSeq(Democrats).toSet)
-      .add(DDGStatsRepublicansFeature, ddgStatsAuthors.longSeq(Republicans).toSet)
-      .add(DDGStatsVitsFeature, ddgStatsAuthors.longSeq(Vits).toSet)
-      .add(DDGStatsElonFeature, ddgStatsAuthors.longValue(Elon))
       .add(DeviceLanguageFeature, query.getLanguageCode.map(getLanguageISOFormatByCode))
       .add(
         GetInitialFeature,
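The hydrator assembles its result with a chained FeatureMapBuilder().add(...).add(...) call, as seen above. A stripped-down sketch of that builder pattern with a hypothetical key type (not product-mixer's real FeatureMapBuilder API):

object FeatureMapBuilderSketch {
  // Hypothetical typed feature key; product-mixer's real Feature type is richer.
  final case class FeatureKey[T](name: String)

  // Immutable builder: each add returns a new builder carrying one more entry.
  final class FeatureMapBuilder private (entries: Map[FeatureKey[_], Any]) {
    def add[T](key: FeatureKey[T], value: T): FeatureMapBuilder =
      new FeatureMapBuilder(entries + (key -> value))
    def build(): Map[FeatureKey[_], Any] = entries
  }

  object FeatureMapBuilder {
    def apply(): FeatureMapBuilder = new FeatureMapBuilder(Map.empty)
  }

  val accountAge = FeatureKey[Option[Long]]("account_age")
  val deviceLanguage = FeatureKey[Option[String]]("device_language")

  // Mirrors the chained .add(...) style used by the hydrator above.
  val featureMap = FeatureMapBuilder()
    .add(accountAge, Some(1234567890L))
    .add(deviceLanguage, Some("eng"))
    .build()
}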
@@ -176,10 +176,6 @@ object HomeFeatures {
     override def personalDataTypes: Set[pd.PersonalDataType] = Set(pd.PersonalDataType.ClientType)
   }
   object CachedScoredTweetsFeature extends Feature[PipelineQuery, Seq[hmt.CachedScoredTweet]]
-  object DDGStatsElonFeature extends Feature[PipelineQuery, Long]
-  object DDGStatsVitsFeature extends Feature[PipelineQuery, Set[Long]]
-  object DDGStatsDemocratsFeature extends Feature[PipelineQuery, Set[Long]]
-  object DDGStatsRepublicansFeature extends Feature[PipelineQuery, Set[Long]]
   object DeviceLanguageFeature extends Feature[PipelineQuery, Option[String]]
   object DismissInfoFeature
       extends FeatureWithDefaultOnFailure[PipelineQuery, Map[st.SuggestType, Option[DismissInfo]]] {
@@ -1,18 +1,5 @@
 package com.twitter.home_mixer.module

-import com.google.inject.Provides
-import com.twitter.config.yaml.YamlMap
-import com.twitter.home_mixer.param.HomeMixerInjectionNames.DDGStatsAuthors
 import com.twitter.inject.TwitterModule
-import javax.inject.Named
-import javax.inject.Singleton

-object HomeMixerResourcesModule extends TwitterModule {
-
-  private val AuthorsFile = "/config/authors.yml"
-
-  @Provides
-  @Singleton
-  @Named(DDGStatsAuthors)
-  def providesDDGStatsAuthors(): YamlMap = YamlMap.load(AuthorsFile)
-}
+object HomeMixerResourcesModule extends TwitterModule {}
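The removed module wired a YAML resource into the object graph with @Provides @Singleton @Named(...), and the hydrator consumed it through the @Named(DDGStatsAuthors) constructor parameter removed in an earlier hunk. A generic sketch of that provider/consumer wiring, with hypothetical names rather than the home-mixer bindings:

import com.google.inject.{AbstractModule, Guice, Provides}
import javax.inject.{Inject, Named, Singleton}

// Hypothetical module providing a named configuration value.
class ResourcesModuleSketch extends AbstractModule {
  @Provides
  @Singleton
  @Named("GreetingPrefix")
  def providesGreetingPrefix(): String = "hello"
}

// Hypothetical consumer: the named value arrives through constructor injection.
@Singleton
class GreeterSketch @Inject() (@Named("GreetingPrefix") prefix: String) {
  def greet(name: String): String = s"$prefix, $name"
}

object NamedBindingSketch {
  def main(args: Array[String]): Unit = {
    val injector = Guice.createInjector(new ResourcesModuleSketch)
    println(injector.getInstance(classOf[GreeterSketch]).greet("world"))
  }
}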
@@ -4,7 +4,6 @@ object HomeMixerInjectionNames {
   final val AuthorFeatureRepository = "AuthorFeatureRepository"
   final val CandidateFeaturesScribeEventPublisher = "CandidateFeaturesScribeEventPublisher"
   final val CommonFeaturesScribeEventPublisher = "CommonFeaturesScribeEventPublisher"
-  final val DDGStatsAuthors = "DDGStatsAuthors"
   final val EarlybirdRepository = "EarlybirdRepository"
   final val EngagementsReceivedByAuthorCache = "EngagementsReceivedByAuthorCache"
   final val GraphTwoHopRepository = "GraphTwoHopRepository"
@@ -122,7 +122,7 @@ enum FullTypeId {
   // TFT_TENSOR[TFT_INT32, TFT_UNKNOWN]
   // is a Tensor of int32 element type and unknown shape.
   //
-  // TODO(mdan): Define TFT_SHAPE and add more examples.
+  // TODO: Define TFT_SHAPE and add more examples.
   TFT_TENSOR = 1000;

   // Array (or tensorflow::TensorList in the variant type registry).
@@ -178,7 +178,7 @@ enum FullTypeId {
   // object (for now).

   // The bool element type.
-  // TODO(mdan): Quantized types, legacy representations (e.g. ref)
+  // TODO
   TFT_BOOL = 200;
   // Integer element types.
   TFT_UINT8 = 201;
@@ -195,7 +195,7 @@ enum FullTypeId {
   TFT_DOUBLE = 211;
   TFT_BFLOAT16 = 215;
   // Complex element types.
-  // TODO(mdan): Represent as TFT_COMPLEX[TFT_DOUBLE] instead?
+  // TODO: Represent as TFT_COMPLEX[TFT_DOUBLE] instead?
   TFT_COMPLEX64 = 212;
   TFT_COMPLEX128 = 213;
   // The string element type.
@@ -240,7 +240,7 @@ enum FullTypeId {
   // ownership is in the true sense: "the op argument representing the lock is
   // available".
   // Mutex locks are the dynamic counterpart of control dependencies.
-  // TODO(mdan): Properly document this thing.
+  // TODO: Properly document this thing.
   //
   // Parametrization: TFT_MUTEX_LOCK[].
   TFT_MUTEX_LOCK = 10202;
@@ -271,6 +271,6 @@ message FullTypeDef {
   oneof attr {
     string s = 3;
     int64 i = 4;
-    // TODO(mdan): list/tensor, map? Need to reconcile with TFT_RECORD, etc.
+    // TODO: list/tensor, map? Need to reconcile with TFT_RECORD, etc.
   }
 }
@@ -23,7 +23,7 @@ message FunctionDefLibrary {
 // with a value. When a GraphDef has a call to a function, it must
 // have binding for every attr defined in the signature.
 //
-// TODO(zhifengc):
+// TODO:
 //   * device spec, etc.
 message FunctionDef {
   // The definition of the function's name, arguments, return values,
@@ -61,7 +61,7 @@ message NodeDef {
   // one of the names from the corresponding OpDef's attr field).
   // The values must have a type matching the corresponding OpDef
   // attr's type field.
-  // TODO(josh11b): Add some examples here showing best practices.
+  // TODO: Add some examples here showing best practices.
   map<string, AttrValue> attr = 5;

   message ExperimentalDebugInfo {
@@ -96,7 +96,7 @@ message OpDef {
     // Human-readable description.
     string description = 4;

-    // TODO(josh11b): bool is_optional?
+    // TODO: bool is_optional?

     // --- Constraints ---
     // These constraints are only in effect if specified. Default is no
@@ -139,7 +139,7 @@ message OpDef {
   // taking input from multiple devices with a tree of aggregate ops
   // that aggregate locally within each device (and possibly within
   // groups of nearby devices) before communicating.
-  // TODO(josh11b): Implement that optimization.
+  // TODO: Implement that optimization.
   bool is_aggregate = 16; // for things like add

   // Other optimizations go here, like
@@ -53,7 +53,7 @@ message MemoryStats {

 // Time/size stats recorded for a single execution of a graph node.
 message NodeExecStats {
-  // TODO(tucker): Use some more compact form of node identity than
+  // TODO: Use some more compact form of node identity than
   // the full string name. Either all processes should agree on a
   // global id (cost_id?) for each node, or we should use a hash of
   // the name.
@@ -16,7 +16,7 @@ option go_package = "github.com/tensorflow/tensorflow/tensorflow/go/core/framewo
 message TensorProto {
   DataType dtype = 1;

-  // Shape of the tensor. TODO(touts): sort out the 0-rank issues.
+  // Shape of the tensor. TODO: sort out the 0-rank issues.
   TensorShapeProto tensor_shape = 2;

   // Only one of the representations below is set, one of "tensor_contents" and
@@ -532,7 +532,7 @@ message ConfigProto {

   // We removed the flag client_handles_error_formatting. Marking the tag
   // number as reserved.
-  // TODO(shikharagarwal): Should we just remove this tag so that it can be
+  // TODO: Should we just remove this tag so that it can be
   // used in future for other purpose?
   reserved 2;

@@ -576,7 +576,7 @@ message ConfigProto {
   // - If isolate_session_state is true, session states are isolated.
   // - If isolate_session_state is false, session states are shared.
   //
-  // TODO(b/129330037): Add a single API that consistently treats
+  // TODO: Add a single API that consistently treats
   // isolate_session_state and ClusterSpec propagation.
   bool share_session_state_in_clusterspec_propagation = 8;

@@ -704,7 +704,7 @@ message ConfigProto {

 // Options for a single Run() call.
 message RunOptions {
-  // TODO(pbar) Turn this into a TraceOptions proto which allows
+  // TODO Turn this into a TraceOptions proto which allows
   // tracing to be controlled in a more orthogonal manner?
   enum TraceLevel {
     NO_TRACE = 0;
@@ -781,7 +781,7 @@ message RunMetadata {
   repeated GraphDef partition_graphs = 3;

   message FunctionGraphs {
-    // TODO(nareshmodi): Include some sort of function/cache-key identifier?
+    // TODO: Include some sort of function/cache-key identifier?
     repeated GraphDef partition_graphs = 1;

     GraphDef pre_optimization_graph = 2;
@@ -194,7 +194,7 @@ service CoordinationService {

   // Report error to the task. RPC sets the receiving instance of coordination
   // service agent to error state permanently.
-  // TODO(b/195990880): Consider splitting this into a different RPC service.
+  // TODO: Consider splitting this into a different RPC service.
   rpc ReportErrorToAgent(ReportErrorToAgentRequest)
       returns (ReportErrorToAgentResponse);

@@ -46,7 +46,7 @@ message DebugTensorWatch {
   // are to be debugged, the callers of Session::Run() must use distinct
   // debug_urls to make sure that the streamed or dumped events do not overlap
   // among the invocations.
-  // TODO(cais): More visible documentation of this in g3docs.
+  // TODO: More visible documentation of this in g3docs.
   repeated string debug_urls = 4;

   // Do not error out if debug op creation fails (e.g., due to dtype
@@ -12,7 +12,7 @@ option java_package = "org.tensorflow.util";
 option go_package = "github.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto";

 // Available modes for extracting debugging information from a Tensor.
-// TODO(cais): Document the detailed column names and semantics in a separate
+// TODO: Document the detailed column names and semantics in a separate
 // markdown file once the implementation settles.
 enum TensorDebugMode {
   UNSPECIFIED = 0;
@@ -223,7 +223,7 @@ message DebuggedDevice {
   // A debugger-generated ID for the device. Guaranteed to be unique within
   // the scope of the debugged TensorFlow program, including single-host and
   // multi-host settings.
-  // TODO(cais): Test the uniqueness guarantee in multi-host settings.
+  // TODO: Test the uniqueness guarantee in multi-host settings.
   int32 device_id = 2;
 }

@@ -264,7 +264,7 @@ message Execution {
   // field with the DebuggedDevice messages.
   repeated int32 output_tensor_device_ids = 9;

-  // TODO(cais): When backporting to V1 Session.run() support, add more fields
+  // TODO support, add more fields
   // such as fetches and feeds.
 }

@@ -7,7 +7,7 @@ option go_package = "github.com/tensorflow/tensorflow/tensorflow/go/core/protobu

 // Used to serialize and transmit tensorflow::Status payloads through
 // grpc::Status `error_details` since grpc::Status lacks payload API.
-// TODO(b/204231601): Use GRPC API once supported.
+// TODO: Use GRPC API once supported.
 message GrpcPayloadContainer {
   map<string, bytes> payloads = 1;
 }
@@ -172,7 +172,7 @@ message WaitQueueDoneRequest {
 }

 message WaitQueueDoneResponse {
-  // TODO(nareshmodi): Consider adding NodeExecStats here to be able to
+  // TODO: Consider adding NodeExecStats here to be able to
   // propagate some stats.
 }

@@ -94,7 +94,7 @@ message ExtendSessionRequest {
 }

 message ExtendSessionResponse {
-  // TODO(mrry): Return something about the operation?
+  // TODO: Return something about the operation?

   // The new version number for the extended graph, to be used in the next call
   // to ExtendSession.
@@ -176,7 +176,7 @@ message SavedBareConcreteFunction {
   // allows the ConcreteFunction to be called with nest structure inputs. This
   // field may not be populated. If this field is absent, the concrete function
   // can only be called with flat inputs.
-  // TODO(b/169361281): support calling saved ConcreteFunction with structured
+  // TODO: support calling saved ConcreteFunction with structured
   // inputs in C++ SavedModel API.
   FunctionSpec function_spec = 4;
 }
@@ -17,7 +17,7 @@ option go_package = "github.com/tensorflow/tensorflow/tensorflow/go/core/protobu

 // Special header that is associated with a bundle.
 //
-// TODO(zongheng,zhifengc): maybe in the future, we can add information about
+// TODO: maybe in the future, we can add information about
 // which binary produced this checkpoint, timestamp, etc. Sometime, these can be
 // valuable debugging information. And if needed, these can be used as defensive
 // information ensuring reader (binary version) of the checkpoint and the writer
@@ -188,7 +188,7 @@ message DeregisterGraphRequest {
 }

 message DeregisterGraphResponse {
-  // TODO(mrry): Optionally add summary stats for the graph.
+  // TODO: Optionally add summary stats for the graph.
 }

 ////////////////////////////////////////////////////////////////////////////////
@@ -294,7 +294,7 @@ message RunGraphResponse {

   // If the request asked for execution stats, the cost graph, or the partition
   // graphs, these are returned here.
-  // TODO(suharshs): Package these in a RunMetadata instead.
+  // TODO: Package these in a RunMetadata instead.
   StepStats step_stats = 2;
   CostGraphDef cost_graph = 3;
   repeated GraphDef partition_graph = 4;
@@ -13,5 +13,5 @@ message LogMetadata {
   SamplingConfig sampling_config = 2;
   // List of tags used to load the relevant MetaGraphDef from SavedModel.
   repeated string saved_model_tags = 3;
-  // TODO(b/33279154): Add more metadata as mentioned in the bug.
+  // TODO: Add more metadata as mentioned in the bug.
 }
@@ -58,7 +58,7 @@ message FileSystemStoragePathSourceConfig {

   // A single servable name/base_path pair to monitor.
   // DEPRECATED: Use 'servables' instead.
-  // TODO(b/30898016): Stop using these fields, and ultimately remove them here.
+  // TODO: Stop using these fields, and ultimately remove them here.
   string servable_name = 1 [deprecated = true];
   string base_path = 2 [deprecated = true];

@@ -76,7 +76,7 @@ message FileSystemStoragePathSourceConfig {
   // check for a version to appear later.)
   // DEPRECATED: Use 'servable_versions_always_present' instead, which includes
   // this behavior.
-  // TODO(b/30898016): Remove 2019-10-31 or later.
+  // TODO: Remove 2019-10-31 or later.
   bool fail_if_zero_versions_at_startup = 4 [deprecated = true];

   // If true, the servable is always expected to exist on the underlying
@@ -9,7 +9,7 @@ import "tensorflow_serving/config/logging_config.proto";
 option cc_enable_arenas = true;

 // The type of model.
-// TODO(b/31336131): DEPRECATED.
+// TODO: DEPRECATED.
 enum ModelType {
   MODEL_TYPE_UNSPECIFIED = 0 [deprecated = true];
   TENSORFLOW = 1 [deprecated = true];
@@ -31,7 +31,7 @@ message ModelConfig {
   string base_path = 2;

   // Type of model.
-  // TODO(b/31336131): DEPRECATED. Please use 'model_platform' instead.
+  // TODO: DEPRECATED. Please use 'model_platform' instead.
   ModelType model_type = 3 [deprecated = true];

   // Type of model (e.g. "tensorflow").
@@ -231,7 +231,7 @@ object TypeaheadEventCandidate {
 /**
  * Canonical TweetAnnotationQueryCandidate model. Always prefer this version over all other variants.
  *
- * TODO(jhara) Remove score from the candidate and use a Feature instead
+ * TODO Remove score from the candidate and use a Feature instead
  */
 final class TweetAnnotationQueryCandidate private (
   override val id: String,
@@ -8,7 +8,6 @@ import scala.collection.mutable.ListBuffer
  * The helper class encodes and decodes tweet ids with tweetypie's card information
  * when querying recos salsa library. Inside salsa library, all tweet ids are
  * encoded with card information for the purpose of inline filtering.
- * TODO (wenqih) change TweetIDMask to a mask interface for future extension
  */
 class BipartiteGraphHelper(graph: BipartiteGraph) {
   private val tweetIDMask = new TweetIDMask