Mirror of https://github.com/twitter/the-algorithm.git, synced 2024-11-16 00:25:11 +01:00

commit f0aa618947 (parent bc2f1fc165)

[docx] split commit for file 5400

Signed-off-by: Ari Archer <ari.web.xyz@gmail.com>
BIN
src/thrift/com/twitter/simclusters_v2/top_k_map.docx
Normal file
Binary file not shown.
@@ -1,14 +0,0 @@
namespace java com.twitter.simclusters_v2.thriftjava
namespace py gen.twitter.simclusters_v2.top_k_map
#@namespace scala com.twitter.simclusters_v2.thriftscala
#@namespace strato com.twitter.simclusters_v2

include "com/twitter/algebird_internal/algebird.thrift"

struct TopKClusters {
  1: required map<i32, algebird.DecayedValue> topK(personalDataTypeKey = 'InferredInterests')
}(hasPersonalData = 'true')

struct TopKTweets {
  1: required map<i64, algebird.DecayedValue> topK(personalDataTypeKey = 'TweetId')
}(hasPersonalData = 'true')
BIN
src/thrift/com/twitter/simclusters_v2/tweet_similarity.docx
Normal file
Binary file not shown.
@@ -1,16 +0,0 @@
namespace java com.twitter.simclusters_v2.thriftjava
namespace py gen.twitter.simclusters_v2.tweet_similarity
#@namespace scala com.twitter.simclusters_v2.thriftscala
#@namespace strato com.twitter.simclusters_v2

struct FeaturedTweet {
  1: required i64 tweetId(personalDataType = 'TweetId')
  # timestamp when the user engaged with or was impressed by the tweet
  2: required i64 timestamp(personalDataType = 'PrivateTimestamp')
}(persisted = 'true', hasPersonalData = 'true')

struct LabelledTweetPairs {
  1: required FeaturedTweet queryFeaturedTweet
  2: required FeaturedTweet candidateFeaturedTweet
  3: required bool label
}(persisted = 'true', hasPersonalData = 'true')
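As a small illustration (not part of the commit) of how these structs would be populated, assuming the Scrooge-generated Scala bindings implied by the `#@namespace scala` declaration above; the values below are made up:

```scala
import com.twitter.simclusters_v2.thriftscala.{FeaturedTweet, LabelledTweetPairs}

// Hypothetical example values; field names follow the Thrift definitions above.
val query = FeaturedTweet(tweetId = 1111L, timestamp = 1690000000000L)
val candidate = FeaturedTweet(tweetId = 2222L, timestamp = 1690000050000L)

// A labelled query/candidate pair, e.g. for tweet-similarity training data.
val pair = LabelledTweetPairs(
  queryFeaturedTweet = query,
  candidateFeaturedTweet = candidate,
  label = true
)
```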
BIN
timelineranker/README.docx
Normal file
Binary file not shown.
@@ -1,13 +0,0 @@
# TimelineRanker

**TimelineRanker** (TLR) is a legacy service that provides relevance-scored tweets from the Earlybird Search Index and the User Tweet Entity Graph (UTEG) service. Despite its name, it no longer performs heavy ranking or model-based ranking itself; it only uses relevance scores from the Search Index for ranked tweet endpoints.

The following is a list of major services that Timeline Ranker interacts with:

- **Earlybird-root-superroot (a.k.a. Search):** Timeline Ranker calls the Search Index's super root to fetch a list of Tweets.
- **User Tweet Entity Graph (UTEG):** Timeline Ranker calls UTEG to fetch a list of tweets liked by the users you follow.
- **Socialgraph:** Timeline Ranker calls the Social Graph Service to obtain the follow graph and user states such as blocked, muted, retweets muted, etc.
- **TweetyPie:** Timeline Ranker hydrates tweets by calling TweetyPie in order to post-filter tweets based on certain hydrated fields.
- **Manhattan:** Timeline Ranker hydrates some tweet features (e.g., user languages) from Manhattan.

**Home Mixer** calls Timeline Ranker to fetch tweets from the Earlybird Search Index and the UTEG service to power both the For You and Following Home Timelines. Timeline Ranker performs light ranking based on Earlybird tweet candidate scores and truncates to the number of candidates requested by Home Mixer based on those scores.
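To make the call pattern concrete, here is a minimal hypothetical sketch (not part of this commit) of how a caller such as Home Mixer might request candidates through the Scala client that appears later in this diff; only the query fields shown are set, everything else keeps its defaults:

```scala
import com.twitter.timelineranker.client.TimelineRankerClient
import com.twitter.timelineranker.model.{CandidateTweetsResult, RecapQuery}
import com.twitter.timelines.model.UserId
import com.twitter.util.Future

// Hypothetical helper: fetch Earlybird-backed candidates for one viewer.
def fetchCandidates(client: TimelineRankerClient, userId: UserId): Future[CandidateTweetsResult] = {
  // Minimal query: only the viewer id and a candidate cap; other fields keep their defaults.
  val query = RecapQuery(userId = userId, maxCount = Some(200))
  client.getRecycledTweetCandidates(query)
}
```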
@@ -1,6 +0,0 @@
target(
    tags = ["bazel-compatible"],
    dependencies = [
        "timelineranker/client/builder/src/main/scala",
    ],
)
BIN
timelineranker/client/builder/BUILD.docx
Normal file
Binary file not shown.
BIN
timelineranker/client/builder/README.docx
Normal file
Binary file not shown.
@@ -1,4 +0,0 @@
# TimelineRanker client

Library for creating a client to talk to TLR. It contains a ClientBuilder implementation
with some preferred settings for clients.
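A minimal sketch (hypothetical, not part of this commit) of what using this library looks like: wrap the thrift interface produced with the builder's settings in the `TimelineRankerClient` shown later in this diff. How the built Finagle client is turned into a `MethodPerEndpoint` is deployment-specific and omitted here.

```scala
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelineranker.client.TimelineRankerClient
import com.twitter.timelineranker.model.{Timeline, TimelineQuery}
import com.twitter.util.{Future, Try}

// Hypothetical wiring: `underlying` would come from the ClientBuilder settings described above.
def wrap(underlying: thrift.TimelineRanker.MethodPerEndpoint, stats: StatsReceiver): TimelineRankerClient =
  new TimelineRankerClient(underlying, stats)

// The wrapped client then speaks model objects rather than thrift ones, e.g.:
def fetch(client: TimelineRankerClient, query: TimelineQuery): Future[Try[Timeline]] =
  client.getTimeline(query)
```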
@@ -1,16 +0,0 @@
scala_library(
    sources = ["com/twitter/timelineranker/client/*.scala"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "finagle/finagle-core/src/main",
        "finagle/finagle-stats",
        "finagle/finagle-thrift/src/main/java",
        "servo/client/src/main/scala/com/twitter/servo/client",
        "src/thrift/com/twitter/timelineranker:thrift-scala",
        "src/thrift/com/twitter/timelineranker/server/model:thrift-scala",
        "timelineranker/common:model",
        "timelines/src/main/scala/com/twitter/timelines/util/stats",
        "util/util-stats/src/main/scala",
    ],
)
BIN
timelineranker/client/builder/src/main/scala/BUILD.docx
Normal file
Binary file not shown.
Binary file not shown.
@@ -1,195 +0,0 @@
package com.twitter.timelineranker.client

import com.twitter.finagle.SourcedException
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelineranker.model._
import com.twitter.timelines.util.stats.RequestStats
import com.twitter.timelines.util.stats.RequestStatsReceiver
import com.twitter.util.Future
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try

case class TimelineRankerException(message: String)
    extends Exception(message)
    with SourcedException {
  serviceName = "timelineranker"
}

/**
 * A timeline ranker client whose methods accept and produce model object instances
 * instead of thrift instances.
 */
class TimelineRankerClient(
  private val client: thrift.TimelineRanker.MethodPerEndpoint,
  statsReceiver: StatsReceiver)
    extends RequestStats {

  private[this] val baseScope = statsReceiver.scope("timelineRankerClient")
  private[this] val timelinesRequestStats = RequestStatsReceiver(baseScope.scope("timelines"))
  private[this] val recycledTweetRequestStats = RequestStatsReceiver(
    baseScope.scope("recycledTweet"))
  private[this] val recapHydrationRequestStats = RequestStatsReceiver(
    baseScope.scope("recapHydration"))
  private[this] val recapAuthorRequestStats = RequestStatsReceiver(baseScope.scope("recapAuthor"))
  private[this] val entityTweetsRequestStats = RequestStatsReceiver(baseScope.scope("entityTweets"))
  private[this] val utegLikedByTweetsRequestStats = RequestStatsReceiver(
    baseScope.scope("utegLikedByTweets"))

  private[this] def fetchRecapQueryResultHead(
    results: Seq[Try[CandidateTweetsResult]]
  ): CandidateTweetsResult = {
    results.head match {
      case Return(result) => result
      case Throw(e) => throw e
    }
  }

  private[this] def tryResults[Req, Rep](
    reqs: Seq[Req],
    stats: RequestStatsReceiver,
    findError: Req => Option[thrift.TimelineError],
  )(
    getRep: (Req, RequestStatsReceiver) => Try[Rep]
  ): Seq[Try[Rep]] = {
    reqs.map { req =>
      findError(req) match {
        case Some(error) if error.reason.exists { _ == thrift.ErrorReason.OverCapacity } =>
          // bubble up over capacity error, server shall handle it
          stats.onFailure(error)
          Throw(error)
        case Some(error) =>
          stats.onFailure(error)
          Throw(TimelineRankerException(error.message))
        case None =>
          getRep(req, stats)
      }
    }
  }

  private[this] def tryCandidateTweetsResults(
    responses: Seq[thrift.GetCandidateTweetsResponse],
    requestScopedStats: RequestStatsReceiver
  ): Seq[Try[CandidateTweetsResult]] = {
    def errorInResponse(
      response: thrift.GetCandidateTweetsResponse
    ): Option[thrift.TimelineError] = {
      response.error
    }

    tryResults(
      responses,
      requestScopedStats,
      errorInResponse
    ) { (response, stats) =>
      stats.onSuccess()
      Return(CandidateTweetsResult.fromThrift(response))
    }
  }

  def getTimeline(query: TimelineQuery): Future[Try[Timeline]] = {
    getTimelines(Seq(query)).map(_.head)
  }

  def getTimelines(queries: Seq[TimelineQuery]): Future[Seq[Try[Timeline]]] = {
    def errorInResponse(response: thrift.GetTimelineResponse): Option[thrift.TimelineError] = {
      response.error
    }
    val thriftQueries = queries.map(_.toThrift)
    timelinesRequestStats.latency {
      client.getTimelines(thriftQueries).map { responses =>
        tryResults(
          responses,
          timelinesRequestStats,
          errorInResponse
        ) { (response, stats) =>
          response.timeline match {
            case Some(timeline) =>
              stats.onSuccess()
              Return(Timeline.fromThrift(timeline))
            // Should not really happen.
            case None =>
              val tlrException =
                TimelineRankerException("No timeline returned even when no error occurred.")
              stats.onFailure(tlrException)
              Throw(tlrException)
          }
        }
      }
    }
  }

  def getRecycledTweetCandidates(query: RecapQuery): Future[CandidateTweetsResult] = {
    getRecycledTweetCandidates(Seq(query)).map(fetchRecapQueryResultHead)
  }

  def getRecycledTweetCandidates(
    queries: Seq[RecapQuery]
  ): Future[Seq[Try[CandidateTweetsResult]]] = {
    val thriftQueries = queries.map(_.toThriftRecapQuery)
    recycledTweetRequestStats.latency {
      client.getRecycledTweetCandidates(thriftQueries).map {
        tryCandidateTweetsResults(_, recycledTweetRequestStats)
      }
    }
  }

  def hydrateTweetCandidates(query: RecapQuery): Future[CandidateTweetsResult] = {
    hydrateTweetCandidates(Seq(query)).map(fetchRecapQueryResultHead)
  }

  def hydrateTweetCandidates(queries: Seq[RecapQuery]): Future[Seq[Try[CandidateTweetsResult]]] = {
    val thriftQueries = queries.map(_.toThriftRecapHydrationQuery)
    recapHydrationRequestStats.latency {
      client.hydrateTweetCandidates(thriftQueries).map {
        tryCandidateTweetsResults(_, recapHydrationRequestStats)
      }
    }
  }

  def getRecapCandidatesFromAuthors(query: RecapQuery): Future[CandidateTweetsResult] = {
    getRecapCandidatesFromAuthors(Seq(query)).map(fetchRecapQueryResultHead)
  }

  def getRecapCandidatesFromAuthors(
    queries: Seq[RecapQuery]
  ): Future[Seq[Try[CandidateTweetsResult]]] = {
    val thriftQueries = queries.map(_.toThriftRecapQuery)
    recapAuthorRequestStats.latency {
      client.getRecapCandidatesFromAuthors(thriftQueries).map {
        tryCandidateTweetsResults(_, recapAuthorRequestStats)
      }
    }
  }

  def getEntityTweetCandidates(query: RecapQuery): Future[CandidateTweetsResult] = {
    getEntityTweetCandidates(Seq(query)).map(fetchRecapQueryResultHead)
  }

  def getEntityTweetCandidates(
    queries: Seq[RecapQuery]
  ): Future[Seq[Try[CandidateTweetsResult]]] = {
    val thriftQueries = queries.map(_.toThriftEntityTweetsQuery)
    entityTweetsRequestStats.latency {
      client.getEntityTweetCandidates(thriftQueries).map {
        tryCandidateTweetsResults(_, entityTweetsRequestStats)
      }
    }
  }

  def getUtegLikedByTweetCandidates(query: RecapQuery): Future[CandidateTweetsResult] = {
    getUtegLikedByTweetCandidates(Seq(query)).map(fetchRecapQueryResultHead)
  }

  def getUtegLikedByTweetCandidates(
    queries: Seq[RecapQuery]
  ): Future[Seq[Try[CandidateTweetsResult]]] = {
    val thriftQueries = queries.map(_.toThriftUtegLikedByTweetsQuery)
    utegLikedByTweetsRequestStats.latency {
      client.getUtegLikedByTweetCandidates(thriftQueries).map {
        tryCandidateTweetsResults(_, utegLikedByTweetsRequestStats)
      }
    }
  }
}
Binary file not shown.
@@ -1,89 +0,0 @@
package com.twitter.timelineranker.client

import com.twitter.conversions.DurationOps._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.mtls.authentication.EmptyServiceIdentifier
import com.twitter.finagle.mtls.authentication.ServiceIdentifier
import com.twitter.finagle.mtls.client.MtlsClientBuilder._
import com.twitter.finagle.param.OppTls
import com.twitter.finagle.service.RetryPolicy
import com.twitter.finagle.service.RetryPolicy._
import com.twitter.finagle.ssl.OpportunisticTls
import com.twitter.finagle.thrift.ThriftClientRequest
import com.twitter.servo.client.Environment.Local
import com.twitter.servo.client.Environment.Staging
import com.twitter.servo.client.Environment.Production
import com.twitter.servo.client.Environment
import com.twitter.servo.client.FinagleClientBuilder
import com.twitter.util.Try
import com.twitter.util.Duration

sealed trait TimelineRankerClientBuilderBase {
  def DefaultName: String = "timelineranker"

  def DefaultProdDest: String

  def DefaultProdRequestTimeout: Duration = 2.seconds
  def DefaultProdTimeout: Duration = 3.seconds
  def DefaultProdRetryPolicy: RetryPolicy[Try[Nothing]] =
    tries(2, TimeoutAndWriteExceptionsOnly orElse ChannelClosedExceptionsOnly)

  def DefaultLocalTcpConnectTimeout: Duration = 1.second
  def DefaultLocalConnectTimeout: Duration = 1.second
  def DefaultLocalRetryPolicy: RetryPolicy[Try[Nothing]] = tries(2, TimeoutAndWriteExceptionsOnly)

  def apply(
    finagleClientBuilder: FinagleClientBuilder,
    environment: Environment,
    name: String = DefaultName,
    serviceIdentifier: ServiceIdentifier = EmptyServiceIdentifier,
    opportunisticTlsOpt: Option[OpportunisticTls.Level] = None,
  ): ClientBuilder.Complete[ThriftClientRequest, Array[Byte]] = {
    val defaultBuilder = finagleClientBuilder.thriftMuxClientBuilder(name)
    val destination = getDestOverride(environment)

    val partialClient = environment match {
      case Production | Staging =>
        defaultBuilder
          .requestTimeout(DefaultProdRequestTimeout)
          .timeout(DefaultProdTimeout)
          .retryPolicy(DefaultProdRetryPolicy)
          .daemon(daemonize = true)
          .dest(destination)
          .mutualTls(serviceIdentifier)
      case Local =>
        defaultBuilder
          .tcpConnectTimeout(DefaultLocalTcpConnectTimeout)
          .connectTimeout(DefaultLocalConnectTimeout)
          .retryPolicy(DefaultLocalRetryPolicy)
          .failFast(enabled = false)
          .daemon(daemonize = false)
          .dest(destination)
          .mutualTls(serviceIdentifier)
    }

    opportunisticTlsOpt match {
      case Some(_) =>
        val opportunisticTlsParam = OppTls(level = opportunisticTlsOpt)
        partialClient
          .configured(opportunisticTlsParam)
      case None => partialClient
    }
  }

  private def getDestOverride(environment: Environment): String = {
    val defaultDest = DefaultProdDest
    environment match {
      // Allow overriding the target TimelineRanker instance in staging.
      // This is typically useful for redline testing of TimelineRanker.
      case Staging =>
        sys.props.getOrElse("target.timelineranker.instance", defaultDest)
      case _ =>
        defaultDest
    }
  }
}

object TimelineRankerClientBuilder extends TimelineRankerClientBuilderBase {
  override def DefaultProdDest: String = "/s/timelineranker/timelineranker"
}
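A brief hypothetical call-site sketch for the builder above (the names at the call site are assumptions). In staging, the destination can additionally be overridden with the `target.timelineranker.instance` system property, as `getDestOverride` shows.

```scala
import com.twitter.finagle.mtls.authentication.ServiceIdentifier
import com.twitter.servo.client.{Environment, FinagleClientBuilder}
import com.twitter.timelineranker.client.TimelineRankerClientBuilder

// Hypothetical helper: the host service supplies its FinagleClientBuilder, environment, and identity.
def buildTlrClient(
  finagleClientBuilder: FinagleClientBuilder,
  env: Environment,
  serviceId: ServiceIdentifier
) =
  TimelineRankerClientBuilder(
    finagleClientBuilder = finagleClientBuilder,
    environment = env,
    serviceIdentifier = serviceId
  )
```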
@@ -1,17 +0,0 @@
target(
    name = "adapter",
    dependencies = ["timelineranker/common/src/main/scala/com/twitter/timelineranker/adapter"],
)

target(
    name = "model",
    dependencies = ["timelineranker/common/src/main/scala/com/twitter/timelineranker/model"],
)

target(
    tags = ["bazel-compatible"],
    dependencies = [
        ":adapter",
        ":model",
    ],
)
BIN
timelineranker/common/BUILD.docx
Normal file
Binary file not shown.
@@ -1,6 +0,0 @@
target(
    tags = ["bazel-compatible"],
    dependencies = [
        "timelineranker/common/src/main/scala/com/twitter/timelineranker/model",
    ],
)
BIN
timelineranker/common/src/main/scala/BUILD.docx
Normal file
Binary file not shown.
@@ -1,14 +0,0 @@
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "configapi/configapi-core",
        "src/thrift/com/twitter/timelineservice/server/internal:thrift-scala",
        "src/thrift/com/twitter/tweetypie:tweet-scala",
        "timelineranker/common:model",
        "timelines/src/main/scala/com/twitter/timelines/clientconfig",
        "timelines/src/main/scala/com/twitter/timelines/model/tweet",
    ],
)
Binary file not shown.
Binary file not shown.
@@ -1,139 +0,0 @@
package com.twitter.timelineranker.adapter

import com.twitter.timelineranker.model._
import com.twitter.timelines.model.tweet.HydratedTweet
import com.twitter.timelines.model.TweetId
import com.twitter.timelineservice.model.TimelineId
import com.twitter.timelineservice.model.core
import com.twitter.timelineservice.{model => tls}
import com.twitter.timelineservice.{thriftscala => tlsthrift}
import com.twitter.timelineservice.model.core._
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try

/**
 * Enables TLR model objects to be converted to/from TLS model/thrift objects.
 */
object TimelineServiceAdapter {
  def toTlrQuery(
    id: Long,
    tlsRange: tls.TimelineRange,
    getTweetsFromArchiveIndex: Boolean = true
  ): ReverseChronTimelineQuery = {
    val timelineId = TimelineId(id, TimelineKind.home)
    val maxCount = tlsRange.maxCount
    val tweetIdRange = tlsRange.cursor.map { cursor =>
      TweetIdRange(
        fromId = cursor.tweetIdBounds.bottom,
        toId = cursor.tweetIdBounds.top
      )
    }
    val options = ReverseChronTimelineQueryOptions(
      getTweetsFromArchiveIndex = getTweetsFromArchiveIndex
    )
    ReverseChronTimelineQuery(timelineId, Some(maxCount), tweetIdRange, Some(options))
  }

  def toTlsQuery(query: ReverseChronTimelineQuery): tls.TimelineQuery = {
    val tlsRange = toTlsRange(query.range, query.maxCount)
    tls.TimelineQuery(
      id = query.id.id,
      kind = query.id.kind,
      range = tlsRange
    )
  }

  def toTlsRange(range: Option[TimelineRange], maxCount: Option[Int]): tls.TimelineRange = {
    val cursor = range.map {
      case tweetIdRange: TweetIdRange =>
        RequestCursor(
          top = tweetIdRange.toId.map(CursorState.fromTweetId),
          bottom = tweetIdRange.fromId.map(core.CursorState.fromTweetId)
        )
      case _ =>
        throw new IllegalArgumentException(s"Only TweetIdRange is supported. Found: $range")
    }
    maxCount
      .map { count => tls.TimelineRange(cursor, count) }
      .getOrElse(tls.TimelineRange(cursor))
  }

  /**
   * Converts TLS timeline to a Try of TLR timeline.
   *
   * TLS timeline not only contains timeline entries/attributes but also the retrieval state;
   * whereas TLR timeline only has entries/attributes. Therefore, the TLS timeline is
   * mapped to a Try[Timeline] where the Try part captures retrieval state and
   * Timeline captures entries/attributes.
   */
  def toTlrTimelineTry(tlsTimeline: tls.Timeline[tls.TimelineEntry]): Try[Timeline] = {
    require(
      tlsTimeline.kind == TimelineKind.home,
      s"Only home timelines are supported. Found: ${tlsTimeline.kind}"
    )

    tlsTimeline.state match {
      case Some(TimelineHit) | None =>
        val tweetEnvelopes = tlsTimeline.entries.map {
          case tweet: tls.Tweet =>
            TimelineEntryEnvelope(Tweet(tweet.tweetId))
          case entry =>
            throw new Exception(s"Only tweet timelines are supported. Found: $entry")
        }
        Return(Timeline(TimelineId(tlsTimeline.id, tlsTimeline.kind), tweetEnvelopes))
      case Some(TimelineNotFound) | Some(TimelineUnavailable) =>
        Throw(new tls.core.TimelineUnavailableException(tlsTimeline.id, Some(tlsTimeline.kind)))
    }
  }

  def toTlsTimeline(timeline: Timeline): tls.Timeline[tls.Tweet] = {
    val entries = timeline.entries.map { entry =>
      entry.entry match {
        case tweet: Tweet => tls.Tweet(tweet.id)
        case entry: HydratedTweetEntry => tls.Tweet.fromThrift(entry.tweet)
        case _ =>
          throw new IllegalArgumentException(
            s"Only tweet timelines are supported. Found: ${entry.entry}"
          )
      }
    }
    tls.Timeline(
      id = timeline.id.id,
      kind = timeline.id.kind,
      entries = entries
    )
  }

  def toTweetIds(timeline: tlsthrift.Timeline): Seq[TweetId] = {
    timeline.entries.map {
      case tlsthrift.TimelineEntry.Tweet(tweet) =>
        tweet.statusId
      case entry =>
        throw new IllegalArgumentException(s"Only tweet timelines are supported. Found: ${entry}")
    }
  }

  def toTweetIds(timeline: Timeline): Seq[TweetId] = {
    timeline.entries.map { entry =>
      entry.entry match {
        case tweet: Tweet => tweet.id
        case entry: HydratedTweetEntry => entry.tweet.id
        case _ =>
          throw new IllegalArgumentException(
            s"Only tweet timelines are supported. Found: ${entry.entry}"
          )
      }
    }
  }

  def toHydratedTweets(timeline: Timeline): Seq[HydratedTweet] = {
    timeline.entries.map { entry =>
      entry.entry match {
        case hydratedTweet: HydratedTweet => hydratedTweet
        case _ =>
          throw new IllegalArgumentException(s"Expected hydrated tweet. Found: ${entry.entry}")
      }
    }
  }
}
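A small hypothetical usage sketch for `toTlrTimelineTry` above: the retrieval state surfaces in the `Try`, the entries in the `Timeline`.

```scala
import com.twitter.timelineranker.adapter.TimelineServiceAdapter
import com.twitter.timelineservice.{model => tls}
import com.twitter.util.{Return, Throw}

// Hypothetical caller: count entries when the TLS timeline was a hit, treat misses as empty.
def entryCount(tlsTimeline: tls.Timeline[tls.TimelineEntry]): Int =
  TimelineServiceAdapter.toTlrTimelineTry(tlsTimeline) match {
    case Return(timeline) => timeline.entries.size
    case Throw(_) => 0 // timeline not found or unavailable
  }
```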
@@ -1,23 +0,0 @@
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "src/java/com/twitter/common/text/language:locale-util",
        "src/thrift/com/twitter/search:earlybird-scala",
        "src/thrift/com/twitter/search/common:features-scala",
        "src/thrift/com/twitter/timelineranker/server/model:thrift-scala",
        "timelines:config-api-base",
        "timelines/src/main/scala/com/twitter/timelines/common/model",
        "timelines/src/main/scala/com/twitter/timelines/earlybird/common/options",
        "timelines/src/main/scala/com/twitter/timelines/earlybird/common/utils",
        "timelines/src/main/scala/com/twitter/timelines/model/candidate",
        "timelines/src/main/scala/com/twitter/timelines/model/tweet",
        "timelines/src/main/scala/com/twitter/timelines/util",
        "timelineservice/common/src/main/scala/com/twitter/timelineservice/model",
    ],
    exports = [
        "timelines:config-api-base",
    ],
)
Binary file not shown.
Binary file not shown.
@@ -1,35 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.search.common.features.thriftscala.ThriftTweetFeatures
import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.tweet.HydratedTweet
import com.twitter.tweetypie.thriftscala

object CandidateTweet {
  val DefaultFeatures: ThriftTweetFeatures = ThriftTweetFeatures()

  def fromThrift(candidate: thrift.CandidateTweet): CandidateTweet = {
    val tweet: thriftscala.Tweet = candidate.tweet.getOrElse(
      throw new IllegalArgumentException(s"CandidateTweet.tweet must have a value")
    )
    val features = candidate.features.getOrElse(
      throw new IllegalArgumentException(s"CandidateTweet.features must have a value")
    )

    CandidateTweet(HydratedTweet(tweet), features)
  }
}

/**
 * A candidate Tweet and associated information.
 * Model object for CandidateTweet thrift struct.
 */
case class CandidateTweet(hydratedTweet: HydratedTweet, features: ThriftTweetFeatures) {

  def toThrift: thrift.CandidateTweet = {
    thrift.CandidateTweet(
      tweet = Some(hydratedTweet.tweet),
      features = Some(features)
    )
  }
}
Binary file not shown.
@@ -1,37 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.util.Future

object CandidateTweetsResult {
  val Empty: CandidateTweetsResult = CandidateTweetsResult(Nil, Nil)
  val EmptyFuture: Future[CandidateTweetsResult] = Future.value(Empty)
  val EmptyCandidateTweet: Seq[CandidateTweet] = Seq.empty[CandidateTweet]

  def fromThrift(response: thrift.GetCandidateTweetsResponse): CandidateTweetsResult = {
    val candidates = response.candidates
      .map(_.map(CandidateTweet.fromThrift))
      .getOrElse(EmptyCandidateTweet)
    val sourceTweets = response.sourceTweets
      .map(_.map(CandidateTweet.fromThrift))
      .getOrElse(EmptyCandidateTweet)
    if (sourceTweets.nonEmpty) {
      require(candidates.nonEmpty, "sourceTweets cannot have a value if candidates list is empty.")
    }
    CandidateTweetsResult(candidates, sourceTweets)
  }
}

case class CandidateTweetsResult(
  candidates: Seq[CandidateTweet],
  sourceTweets: Seq[CandidateTweet]) {

  def toThrift: thrift.GetCandidateTweetsResponse = {
    val thriftCandidates = candidates.map(_.toThrift)
    val thriftSourceTweets = sourceTweets.map(_.toThrift)
    thrift.GetCandidateTweetsResponse(
      candidates = Some(thriftCandidates),
      sourceTweets = Some(thriftSourceTweets)
    )
  }
}
Binary file not shown.
@@ -1,21 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.tweet.HydratedTweet
import com.twitter.tweetypie.{thriftscala => tweetypie}

/**
 * Enables HydratedTweet entries to be included in a Timeline.
 */
class HydratedTweetEntry(tweet: tweetypie.Tweet) extends HydratedTweet(tweet) with TimelineEntry {

  def this(hydratedTweet: HydratedTweet) = this(hydratedTweet.tweet)

  override def toTimelineEntryThrift: thrift.TimelineEntry = {
    thrift.TimelineEntry.TweetypieTweet(tweet)
  }

  override def throwIfInvalid(): Unit = {
    // No validation performed.
  }
}
Binary file not shown.
@@ -1,31 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.common.text.language.LocaleUtil
import com.twitter.timelineranker.{thriftscala => thrift}

object Language {

  def fromThrift(lang: thrift.Language): Language = {
    require(lang.language.isDefined, "language can't be None")
    require(lang.scope.isDefined, "scope can't be None")
    Language(lang.language.get, LanguageScope.fromThrift(lang.scope.get))
  }
}

/**
 * Represents a language and the scope that it relates to.
 */
case class Language(language: String, scope: LanguageScope.Value) {

  throwIfInvalid()

  def toThrift: thrift.Language = {
    val scopeOption = Some(LanguageScope.toThrift(scope))
    thrift.Language(Some(language), scopeOption)
  }

  def throwIfInvalid(): Unit = {
    val result = LocaleUtil.getLocaleOf(language)
    require(result != LocaleUtil.UNKNOWN, s"Language ${language} is unsupported")
  }
}
Binary file not shown.
@@ -1,46 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

/**
 * Represents what this language is associated with.
 * For example, "user" is one of the scopes and "event"
 * could be another scope.
 */
object LanguageScope extends Enumeration {

  // User scope means that the language is the user's language.
  val User: Value = Value(thrift.LanguageScope.User.value)

  // Event scope means that the language is the event's language.
  val Event: Value = Value(thrift.LanguageScope.Event.value)

  // list of all LanguageScope values
  val All: ValueSet = LanguageScope.ValueSet(User, Event)

  def apply(scope: thrift.LanguageScope): LanguageScope.Value = {
    scope match {
      case thrift.LanguageScope.User =>
        User
      case thrift.LanguageScope.Event =>
        Event
      case _ =>
        throw new IllegalArgumentException(s"Unsupported language scope: $scope")
    }
  }

  def fromThrift(scope: thrift.LanguageScope): LanguageScope.Value = {
    apply(scope)
  }

  def toThrift(scope: LanguageScope.Value): thrift.LanguageScope = {
    scope match {
      case LanguageScope.User =>
        thrift.LanguageScope.User
      case LanguageScope.Event =>
        thrift.LanguageScope.Event
      case _ =>
        throw new IllegalArgumentException(s"Unsupported language scope: $scope")
    }
  }
}
Binary file not shown.
@@ -1,184 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.search.earlybird.thriftscala.ThriftSearchResult
import com.twitter.timelines.model.tweet.HydratedTweet
import com.twitter.timelines.model.TweetId
import com.twitter.timelines.model.UserId
import com.twitter.timelines.util.SnowflakeSortIndexHelper
import com.twitter.tweetypie.{thriftscala => tweetypie}

object PartiallyHydratedTweet {
  private val InvalidValue = "Invalid value"

  /**
   * Creates an instance of PartiallyHydratedTweet based on the given search result.
   */
  def fromSearchResult(result: ThriftSearchResult): PartiallyHydratedTweet = {
    val tweetId = result.id
    val metadata = result.metadata.getOrElse(
      throw new IllegalArgumentException(
        s"cannot initialize PartiallyHydratedTweet $tweetId without ThriftSearchResult metadata."
      )
    )

    val extraMetadataOpt = metadata.extraMetadata

    val userId = metadata.fromUserId

    // The value of referencedTweetAuthorId and sharedStatusId is only considered valid if it is greater than 0.
    val referencedTweetAuthorId =
      if (metadata.referencedTweetAuthorId > 0) Some(metadata.referencedTweetAuthorId) else None
    val sharedStatusId = if (metadata.sharedStatusId > 0) Some(metadata.sharedStatusId) else None

    val isRetweet = metadata.isRetweet.getOrElse(false)
    val retweetSourceTweetId = if (isRetweet) sharedStatusId else None
    val retweetSourceUserId = if (isRetweet) referencedTweetAuthorId else None

    // The fields sharedStatusId and referencedTweetAuthorId have overloaded meaning when
    // this tweet is not a retweet (for retweet, there is only 1 meaning).
    // When not a retweet,
    // if referencedTweetAuthorId and sharedStatusId are both set, it is considered a reply
    // if referencedTweetAuthorId is set and sharedStatusId is not set, it is a directed at tweet.
    // References: SEARCH-8561 and SEARCH-13142
    val inReplyToTweetId = if (!isRetweet) sharedStatusId else None
    val inReplyToUserId = if (!isRetweet) referencedTweetAuthorId else None
    val isReply = metadata.isReply.contains(true)

    val quotedTweetId = extraMetadataOpt.flatMap(_.quotedTweetId)
    val quotedUserId = extraMetadataOpt.flatMap(_.quotedUserId)

    val isNullcast = metadata.isNullcast.contains(true)

    val conversationId = extraMetadataOpt.flatMap(_.conversationId)

    // Root author id for the user who posts an exclusive tweet
    val exclusiveConversationAuthorId = extraMetadataOpt.flatMap(_.exclusiveConversationAuthorId)

    // Card URI associated with an attached card to this tweet, if it contains one
    val cardUri = extraMetadataOpt.flatMap(_.cardUri)

    val tweet = makeTweetyPieTweet(
      tweetId,
      userId,
      inReplyToTweetId,
      inReplyToUserId,
      retweetSourceTweetId,
      retweetSourceUserId,
      quotedTweetId,
      quotedUserId,
      isNullcast,
      isReply,
      conversationId,
      exclusiveConversationAuthorId,
      cardUri
    )
    new PartiallyHydratedTweet(tweet)
  }

  def makeTweetyPieTweet(
    tweetId: TweetId,
    userId: UserId,
    inReplyToTweetId: Option[TweetId],
    inReplyToUserId: Option[TweetId],
    retweetSourceTweetId: Option[TweetId],
    retweetSourceUserId: Option[UserId],
    quotedTweetId: Option[TweetId],
    quotedUserId: Option[UserId],
    isNullcast: Boolean,
    isReply: Boolean,
    conversationId: Option[Long],
    exclusiveConversationAuthorId: Option[Long] = None,
    cardUri: Option[String] = None
  ): tweetypie.Tweet = {
    val isDirectedAt = inReplyToUserId.isDefined
    val isRetweet = retweetSourceTweetId.isDefined && retweetSourceUserId.isDefined

    val reply = if (isReply) {
      Some(
        tweetypie.Reply(
          inReplyToStatusId = inReplyToTweetId,
          inReplyToUserId = inReplyToUserId.getOrElse(0L) // Required
        )
      )
    } else None

    val directedAt = if (isDirectedAt) {
      Some(
        tweetypie.DirectedAtUser(
          userId = inReplyToUserId.get,
          screenName = "" // not available from search
        )
      )
    } else None

    val share = if (isRetweet) {
      Some(
        tweetypie.Share(
          sourceStatusId = retweetSourceTweetId.get,
          sourceUserId = retweetSourceUserId.get,
          parentStatusId =
            retweetSourceTweetId.get // Not always correct (eg, retweet of a retweet).
        )
      )
    } else None

    val quotedTweet =
      for {
        tweetId <- quotedTweetId
        userId <- quotedUserId
      } yield tweetypie.QuotedTweet(tweetId = tweetId, userId = userId)

    val coreData = tweetypie.TweetCoreData(
      userId = userId,
      text = InvalidValue,
      createdVia = InvalidValue,
      createdAtSecs = SnowflakeSortIndexHelper.idToTimestamp(tweetId).inSeconds,
      directedAtUser = directedAt,
      reply = reply,
      share = share,
      nullcast = isNullcast,
      conversationId = conversationId
    )

    // Hydrate exclusiveTweetControl which determines whether the user is able to view an exclusive / SuperFollow tweet.
    val exclusiveTweetControl = exclusiveConversationAuthorId.map { authorId =>
      tweetypie.ExclusiveTweetControl(conversationAuthorId = authorId)
    }

    val cardReference = cardUri.map { cardUriFromEB =>
      tweetypie.CardReference(cardUri = cardUriFromEB)
    }

    tweetypie.Tweet(
      id = tweetId,
      quotedTweet = quotedTweet,
      coreData = Some(coreData),
      exclusiveTweetControl = exclusiveTweetControl,
      cardReference = cardReference
    )
  }
}

/**
 * Represents an instance of HydratedTweet that is hydrated using search result
 * (instead of being hydrated using TweetyPie service).
 *
 * Not all fields are available using search therefore such fields if accessed
 * throw UnsupportedOperationException to ensure that they are not inadvertently
 * accessed and relied upon.
 */
class PartiallyHydratedTweet(tweet: tweetypie.Tweet) extends HydratedTweet(tweet) {
  override def parentTweetId: Option[TweetId] = throw notSupported("parentTweetId")
  override def mentionedUserIds: Seq[UserId] = throw notSupported("mentionedUserIds")
  override def takedownCountryCodes: Set[String] = throw notSupported("takedownCountryCodes")
  override def hasMedia: Boolean = throw notSupported("hasMedia")
  override def isNarrowcast: Boolean = throw notSupported("isNarrowcast")
  override def hasTakedown: Boolean = throw notSupported("hasTakedown")
  override def isNsfw: Boolean = throw notSupported("isNsfw")
  override def isNsfwUser: Boolean = throw notSupported("isNsfwUser")
  override def isNsfwAdmin: Boolean = throw notSupported("isNsfwAdmin")

  private def notSupported(name: String): UnsupportedOperationException = {
    new UnsupportedOperationException(s"Not supported: $name")
  }
}
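A small hypothetical caller-side sketch for `PartiallyHydratedTweet` above: fields Earlybird cannot supply throw `UnsupportedOperationException`, so speculative access is wrapped rather than relied upon.

```scala
import com.twitter.search.earlybird.thriftscala.ThriftSearchResult
import com.twitter.timelineranker.model.PartiallyHydratedTweet
import com.twitter.util.Try

// Hypothetical caller: return the media flag only when the search-backed tweet can answer it.
def mediaFlagIfKnown(result: ThriftSearchResult): Option[Boolean] = {
  val tweet = PartiallyHydratedTweet.fromSearchResult(result)
  Try(tweet.hasMedia).toOption // None when the field is not supported by search hydration
}
```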
Binary file not shown.
@@ -1,23 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.TweetId

object PriorSeenEntries {
  def fromThrift(entries: thrift.PriorSeenEntries): PriorSeenEntries = {
    PriorSeenEntries(seenEntries = entries.seenEntries)
  }
}

case class PriorSeenEntries(seenEntries: Seq[TweetId]) {

  throwIfInvalid()

  def toThrift: thrift.PriorSeenEntries = {
    thrift.PriorSeenEntries(seenEntries = seenEntries)
  }

  def throwIfInvalid(): Unit = {
    // No validation performed.
  }
}
Binary file not shown.
@@ -1,14 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelineservice.model.TimelineId

case class RankedTimelineQuery(
  override val id: TimelineId,
  override val maxCount: Option[Int] = None,
  override val range: Option[TimelineRange] = None,
  override val options: Option[RankedTimelineQueryOptions] = None)
    extends TimelineQuery(thrift.TimelineQueryType.Ranked, id, maxCount, range, options) {

  throwIfInvalid()
}
Binary file not shown.
@@ -1,29 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object RankedTimelineQueryOptions {
  def fromThrift(options: thrift.RankedTimelineQueryOptions): RankedTimelineQueryOptions = {
    RankedTimelineQueryOptions(
      seenEntries = options.seenEntries.map(PriorSeenEntries.fromThrift)
    )
  }
}

case class RankedTimelineQueryOptions(seenEntries: Option[PriorSeenEntries])
    extends TimelineQueryOptions {

  throwIfInvalid()

  def toThrift: thrift.RankedTimelineQueryOptions = {
    thrift.RankedTimelineQueryOptions(seenEntries = seenEntries.map(_.toThrift))
  }

  def toTimelineQueryOptionsThrift: thrift.TimelineQueryOptions = {
    thrift.TimelineQueryOptions.RankedTimelineQueryOptions(toThrift)
  }

  def throwIfInvalid(): Unit = {
    seenEntries.foreach(_.throwIfInvalid)
  }
}
Binary file not shown.
@@ -1,278 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.servo.util.Gate
import com.twitter.timelines.model.candidate.CandidateTweetSourceId
import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.common.model._
import com.twitter.timelines.earlybird.common.options.EarlybirdOptions
import com.twitter.timelines.earlybird.common.utils.SearchOperator
import com.twitter.timelines.configapi.{
  DependencyProvider => ConfigApiDependencyProvider,
  FutureDependencyProvider => ConfigApiFutureDependencyProvider,
  _
}
import com.twitter.timelines.model.TweetId
import com.twitter.timelines.model.UserId
import com.twitter.timelineservice.DeviceContext

object RecapQuery {

  val EngagedTweetsSupportedTweetKindOption: TweetKindOption.ValueSet = TweetKindOption(
    includeReplies = false,
    includeRetweets = false,
    includeExtendedReplies = false,
    includeOriginalTweetsAndQuotes = true
  )

  val DefaultSearchOperator: SearchOperator.Value = SearchOperator.Exclude

  def fromThrift(query: thrift.RecapQuery): RecapQuery = {
    RecapQuery(
      userId = query.userId,
      maxCount = query.maxCount,
      range = query.range.map(TimelineRange.fromThrift),
      options = query.options
        .map(options => TweetKindOption.fromThrift(options.to[Set]))
        .getOrElse(TweetKindOption.None),
      searchOperator = query.searchOperator
        .map(SearchOperator.fromThrift)
        .getOrElse(DefaultSearchOperator),
      earlybirdOptions = query.earlybirdOptions.map(EarlybirdOptions.fromThrift),
      deviceContext = query.deviceContext.map(DeviceContext.fromThrift),
      authorIds = query.authorIds,
      excludedTweetIds = query.excludedTweetIds,
      searchClientSubId = query.searchClientSubId,
      candidateTweetSourceId =
        query.candidateTweetSourceId.flatMap(CandidateTweetSourceId.fromThrift),
      hydratesContentFeatures = query.hydratesContentFeatures
    )
  }

  def fromThrift(query: thrift.RecapHydrationQuery): RecapQuery = {
    require(query.tweetIds.nonEmpty, "tweetIds must be non-empty")

    RecapQuery(
      userId = query.userId,
      tweetIds = Some(query.tweetIds),
      searchOperator = DefaultSearchOperator,
      earlybirdOptions = query.earlybirdOptions.map(EarlybirdOptions.fromThrift),
      deviceContext = query.deviceContext.map(DeviceContext.fromThrift),
      candidateTweetSourceId =
        query.candidateTweetSourceId.flatMap(CandidateTweetSourceId.fromThrift),
      hydratesContentFeatures = query.hydratesContentFeatures
    )
  }

  def fromThrift(query: thrift.EngagedTweetsQuery): RecapQuery = {
    val options = query.tweetKindOptions
      .map(tweetKindOptions => TweetKindOption.fromThrift(tweetKindOptions.to[Set]))
      .getOrElse(TweetKindOption.None)

    if (!(options.isEmpty ||
        (options == EngagedTweetsSupportedTweetKindOption))) {
      throw new IllegalArgumentException(s"Unsupported TweetKindOption value: $options")
    }

    RecapQuery(
      userId = query.userId,
      maxCount = query.maxCount,
      range = query.range.map(TimelineRange.fromThrift),
      options = options,
      searchOperator = DefaultSearchOperator,
      earlybirdOptions = query.earlybirdOptions.map(EarlybirdOptions.fromThrift),
      deviceContext = query.deviceContext.map(DeviceContext.fromThrift),
      authorIds = query.userIds,
      excludedTweetIds = query.excludedTweetIds,
    )
  }

  def fromThrift(query: thrift.EntityTweetsQuery): RecapQuery = {
    require(
      query.semanticCoreIds.isDefined,
      "entities(semanticCoreIds) can't be None"
    )
    val options = query.tweetKindOptions
      .map(tweetKindOptions => TweetKindOption.fromThrift(tweetKindOptions.to[Set]))
      .getOrElse(TweetKindOption.None)

    RecapQuery(
      userId = query.userId,
      maxCount = query.maxCount,
      range = query.range.map(TimelineRange.fromThrift),
      options = options,
      searchOperator = DefaultSearchOperator,
      earlybirdOptions = query.earlybirdOptions.map(EarlybirdOptions.fromThrift),
      deviceContext = query.deviceContext.map(DeviceContext.fromThrift),
      excludedTweetIds = query.excludedTweetIds,
      semanticCoreIds = query.semanticCoreIds.map(_.map(SemanticCoreAnnotation.fromThrift).toSet),
      hashtags = query.hashtags.map(_.toSet),
      languages = query.languages.map(_.map(Language.fromThrift).toSet),
      candidateTweetSourceId =
        query.candidateTweetSourceId.flatMap(CandidateTweetSourceId.fromThrift),
      includeNullcastTweets = query.includeNullcastTweets,
      includeTweetsFromArchiveIndex = query.includeTweetsFromArchiveIndex,
      authorIds = query.authorIds,
      hydratesContentFeatures = query.hydratesContentFeatures
    )
  }

  def fromThrift(query: thrift.UtegLikedByTweetsQuery): RecapQuery = {
    val options = query.tweetKindOptions
      .map(tweetKindOptions => TweetKindOption.fromThrift(tweetKindOptions.to[Set]))
      .getOrElse(TweetKindOption.None)

    RecapQuery(
      userId = query.userId,
      maxCount = query.maxCount,
      range = query.range.map(TimelineRange.fromThrift),
      options = options,
      earlybirdOptions = query.earlybirdOptions.map(EarlybirdOptions.fromThrift),
      deviceContext = query.deviceContext.map(DeviceContext.fromThrift),
      excludedTweetIds = query.excludedTweetIds,
      utegLikedByTweetsOptions = for {
        utegCount <- query.utegCount
        weightedFollowings <- query.weightedFollowings.map(_.toMap)
      } yield {
        UtegLikedByTweetsOptions(
          utegCount = utegCount,
          isInNetwork = query.isInNetwork,
          weightedFollowings = weightedFollowings
        )
      },
      candidateTweetSourceId =
        query.candidateTweetSourceId.flatMap(CandidateTweetSourceId.fromThrift),
      hydratesContentFeatures = query.hydratesContentFeatures
    )
  }

  val paramGate: (Param[Boolean] => Gate[RecapQuery]) = HasParams.paramGate

  type DependencyProvider[+T] = ConfigApiDependencyProvider[RecapQuery, T]
  object DependencyProvider extends DependencyProviderFunctions[RecapQuery]

  type FutureDependencyProvider[+T] = ConfigApiFutureDependencyProvider[RecapQuery, T]
  object FutureDependencyProvider extends FutureDependencyProviderFunctions[RecapQuery]
}

/**
 * Model object corresponding to RecapQuery thrift struct.
 */
case class RecapQuery(
  userId: UserId,
  maxCount: Option[Int] = None,
  range: Option[TimelineRange] = None,
  options: TweetKindOption.ValueSet = TweetKindOption.None,
  searchOperator: SearchOperator.Value = RecapQuery.DefaultSearchOperator,
  earlybirdOptions: Option[EarlybirdOptions] = None,
  deviceContext: Option[DeviceContext] = None,
  authorIds: Option[Seq[UserId]] = None,
  tweetIds: Option[Seq[TweetId]] = None,
  semanticCoreIds: Option[Set[SemanticCoreAnnotation]] = None,
  hashtags: Option[Set[String]] = None,
  languages: Option[Set[Language]] = None,
  excludedTweetIds: Option[Seq[TweetId]] = None,
  // options used only for yml tweets
  utegLikedByTweetsOptions: Option[UtegLikedByTweetsOptions] = None,
  searchClientSubId: Option[String] = None,
  override val params: Params = Params.Empty,
  candidateTweetSourceId: Option[CandidateTweetSourceId.Value] = None,
  includeNullcastTweets: Option[Boolean] = None,
  includeTweetsFromArchiveIndex: Option[Boolean] = None,
  hydratesContentFeatures: Option[Boolean] = None)
    extends HasParams {

  override def toString: String = {
    s"RecapQuery(userId: $userId, maxCount: $maxCount, range: $range, options: $options, searchOperator: $searchOperator, " +
      s"earlybirdOptions: $earlybirdOptions, deviceContext: $deviceContext, authorIds: $authorIds, " +
      s"tweetIds: $tweetIds, semanticCoreIds: $semanticCoreIds, hashtags: $hashtags, languages: $languages, excludedTweetIds: $excludedTweetIds, " +
      s"utegLikedByTweetsOptions: $utegLikedByTweetsOptions, searchClientSubId: $searchClientSubId, " +
      s"params: $params, candidateTweetSourceId: $candidateTweetSourceId, includeNullcastTweets: $includeNullcastTweets, " +
      s"includeTweetsFromArchiveIndex: $includeTweetsFromArchiveIndex, hydratesContentFeatures: $hydratesContentFeatures)"
  }

  def throwIfInvalid(): Unit = {
    def noDuplicates[T <: Traversable[_]](elements: T) = {
      elements.toSet.size == elements.size
    }

    maxCount.foreach { max => require(max > 0, "maxCount must be a positive integer") }
    range.foreach(_.throwIfInvalid())
    earlybirdOptions.foreach(_.throwIfInvalid())
    tweetIds.foreach { ids => require(ids.nonEmpty, "tweetIds must be nonEmpty if present") }
    semanticCoreIds.foreach(_.foreach(_.throwIfInvalid()))
    languages.foreach(_.foreach(_.throwIfInvalid()))
    languages.foreach { langs =>
      require(langs.nonEmpty, "languages must be nonEmpty if present")
      require(noDuplicates(langs.map(_.language)), "languages must be unique")
    }
  }

  throwIfInvalid()

  def toThriftRecapQuery: thrift.RecapQuery = {
    val thriftOptions = Some(TweetKindOption.toThrift(options))
    thrift.RecapQuery(
      userId,
      maxCount,
      range.map(_.toTimelineRangeThrift),
      deprecatedMinCount = None,
      thriftOptions,
      earlybirdOptions.map(_.toThrift),
      deviceContext.map(_.toThrift),
      authorIds,
      excludedTweetIds,
      Some(SearchOperator.toThrift(searchOperator)),
      searchClientSubId,
      candidateTweetSourceId.flatMap(CandidateTweetSourceId.toThrift)
    )
  }

  def toThriftRecapHydrationQuery: thrift.RecapHydrationQuery = {
    require(tweetIds.isDefined && tweetIds.get.nonEmpty, "tweetIds must be present")
    thrift.RecapHydrationQuery(
      userId,
      tweetIds.get,
      earlybirdOptions.map(_.toThrift),
      deviceContext.map(_.toThrift),
      candidateTweetSourceId.flatMap(CandidateTweetSourceId.toThrift)
    )
  }

  def toThriftEntityTweetsQuery: thrift.EntityTweetsQuery = {
    val thriftTweetKindOptions = Some(TweetKindOption.toThrift(options))
    thrift.EntityTweetsQuery(
      userId = userId,
      maxCount = maxCount,
      range = range.map(_.toTimelineRangeThrift),
      tweetKindOptions = thriftTweetKindOptions,
      earlybirdOptions = earlybirdOptions.map(_.toThrift),
      deviceContext = deviceContext.map(_.toThrift),
      excludedTweetIds = excludedTweetIds,
      semanticCoreIds = semanticCoreIds.map(_.map(_.toThrift)),
      hashtags = hashtags,
      languages = languages.map(_.map(_.toThrift)),
      candidateTweetSourceId = candidateTweetSourceId.flatMap(CandidateTweetSourceId.toThrift),
      includeNullcastTweets = includeNullcastTweets,
      includeTweetsFromArchiveIndex = includeTweetsFromArchiveIndex,
      authorIds = authorIds
    )
  }

  def toThriftUtegLikedByTweetsQuery: thrift.UtegLikedByTweetsQuery = {
    val thriftTweetKindOptions = Some(TweetKindOption.toThrift(options))
    thrift.UtegLikedByTweetsQuery(
      userId = userId,
      maxCount = maxCount,
      utegCount = utegLikedByTweetsOptions.map(_.utegCount),
      range = range.map(_.toTimelineRangeThrift),
      tweetKindOptions = thriftTweetKindOptions,
      earlybirdOptions = earlybirdOptions.map(_.toThrift),
      deviceContext = deviceContext.map(_.toThrift),
      excludedTweetIds = excludedTweetIds,
      isInNetwork = utegLikedByTweetsOptions.map(_.isInNetwork).get,
      weightedFollowings = utegLikedByTweetsOptions.map(_.weightedFollowings),
      candidateTweetSourceId = candidateTweetSourceId.flatMap(CandidateTweetSourceId.toThrift)
    )
  }
}
Binary file not shown.
@@ -1,23 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelineservice.model.TimelineId

object ReverseChronTimelineQuery {
  def fromTimelineQuery(query: TimelineQuery): ReverseChronTimelineQuery = {
    query match {
      case q: ReverseChronTimelineQuery => q
      case _ => throw new IllegalArgumentException(s"Unsupported query type: $query")
    }
  }
}

case class ReverseChronTimelineQuery(
  override val id: TimelineId,
  override val maxCount: Option[Int] = None,
  override val range: Option[TimelineRange] = None,
  override val options: Option[ReverseChronTimelineQueryOptions] = None)
    extends TimelineQuery(thrift.TimelineQueryType.ReverseChron, id, maxCount, range, options) {

  throwIfInvalid()
}
Binary file not shown.
@@ -1,31 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object ReverseChronTimelineQueryOptions {
  val Default: ReverseChronTimelineQueryOptions = ReverseChronTimelineQueryOptions()

  def fromThrift(
    options: thrift.ReverseChronTimelineQueryOptions
  ): ReverseChronTimelineQueryOptions = {
    ReverseChronTimelineQueryOptions(
      getTweetsFromArchiveIndex = options.getTweetsFromArchiveIndex
    )
  }
}

case class ReverseChronTimelineQueryOptions(getTweetsFromArchiveIndex: Boolean = true)
    extends TimelineQueryOptions {

  throwIfInvalid()

  def toThrift: thrift.ReverseChronTimelineQueryOptions = {
    thrift.ReverseChronTimelineQueryOptions(getTweetsFromArchiveIndex = getTweetsFromArchiveIndex)
  }

  def toTimelineQueryOptionsThrift: thrift.TimelineQueryOptions = {
    thrift.TimelineQueryOptions.ReverseChronTimelineQueryOptions(toThrift)
  }

  def throwIfInvalid(): Unit = {}
}
Binary file not shown.
@@ -1,39 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.util.Time

object TimeRange {
  val default: TimeRange = TimeRange(None, None)

  def fromThrift(range: thrift.TimeRange): TimeRange = {
    TimeRange(
      from = range.fromMs.map(Time.fromMilliseconds),
      to = range.toMs.map(Time.fromMilliseconds)
    )
  }
}

case class TimeRange(from: Option[Time], to: Option[Time]) extends TimelineRange {

  throwIfInvalid()

  def throwIfInvalid(): Unit = {
    (from, to) match {
      case (Some(fromTime), Some(toTime)) =>
        require(fromTime <= toTime, "from-time must be less than or equal to to-time.")
      case _ => // valid, do nothing.
    }
  }

  def toThrift: thrift.TimeRange = {
    thrift.TimeRange(
      fromMs = from.map(_.inMilliseconds),
      toMs = to.map(_.inMilliseconds)
    )
  }

  def toTimelineRangeThrift: thrift.TimelineRange = {
    thrift.TimelineRange.TimeRange(toThrift)
  }
}
Binary file not shown.
@@ -1,46 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.UserId
import com.twitter.timelineservice.model.TimelineId
import com.twitter.timelineservice.model.core.TimelineKind

object Timeline {
  def empty(id: TimelineId): Timeline = {
    Timeline(id, Nil)
  }

  def fromThrift(timeline: thrift.Timeline): Timeline = {
    Timeline(
      id = TimelineId.fromThrift(timeline.id),
      entries = timeline.entries.map(TimelineEntryEnvelope.fromThrift)
    )
  }

  def throwIfIdInvalid(id: TimelineId): Unit = {
    // Note: if we support timelines other than TimelineKind.home, we need to update
    // the implementation of userId method here and in TimelineQuery class.
    require(id.kind == TimelineKind.home, s"Expected TimelineKind.home, found: ${id.kind}")
  }
}

case class Timeline(id: TimelineId, entries: Seq[TimelineEntryEnvelope]) {

  throwIfInvalid()

  def userId: UserId = {
    id.id
  }

  def throwIfInvalid(): Unit = {
    Timeline.throwIfIdInvalid(id)
    entries.foreach(_.throwIfInvalid())
  }

  def toThrift: thrift.Timeline = {
    thrift.Timeline(
      id = id.toThrift,
      entries = entries.map(_.toThrift)
    )
  }
}
Binary file not shown.
@ -1,18 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object TimelineEntry {
  def fromThrift(entry: thrift.TimelineEntry): TimelineEntry = {
    entry match {
      case thrift.TimelineEntry.Tweet(e) => Tweet.fromThrift(e)
      case thrift.TimelineEntry.TweetypieTweet(e) => new HydratedTweetEntry(e)
      case _ => throw new IllegalArgumentException(s"Unsupported type: $entry")
    }
  }
}

trait TimelineEntry {
  def toTimelineEntryThrift: thrift.TimelineEntry
  def throwIfInvalid(): Unit
}
Binary file not shown.
@ -1,24 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object TimelineEntryEnvelope {
  def fromThrift(entryEnvelope: thrift.TimelineEntryEnvelope): TimelineEntryEnvelope = {
    TimelineEntryEnvelope(
      entry = TimelineEntry.fromThrift(entryEnvelope.entry)
    )
  }
}

case class TimelineEntryEnvelope(entry: TimelineEntry) {

  throwIfInvalid()

  def toThrift: thrift.TimelineEntryEnvelope = {
    thrift.TimelineEntryEnvelope(entry.toTimelineEntryThrift)
  }

  def throwIfInvalid(): Unit = {
    entry.throwIfInvalid()
  }
}
Binary file not shown.
@ -1,82 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.UserId
import com.twitter.timelineservice.model.TimelineId

object TimelineQuery {
  def fromThrift(query: thrift.TimelineQuery): TimelineQuery = {
    val queryType = query.queryType
    val id = TimelineId.fromThrift(query.timelineId)
    val maxCount = query.maxCount
    val range = query.range.map(TimelineRange.fromThrift)
    val options = query.options.map(TimelineQueryOptions.fromThrift)

    queryType match {
      case thrift.TimelineQueryType.Ranked =>
        val rankedOptions = getRankedOptions(options)
        RankedTimelineQuery(id, maxCount, range, rankedOptions)

      case thrift.TimelineQueryType.ReverseChron =>
        val reverseChronOptions = getReverseChronOptions(options)
        ReverseChronTimelineQuery(id, maxCount, range, reverseChronOptions)

      case _ =>
        throw new IllegalArgumentException(s"Unsupported query type: $queryType")
    }
  }

  def getRankedOptions(
    options: Option[TimelineQueryOptions]
  ): Option[RankedTimelineQueryOptions] = {
    options.map {
      case o: RankedTimelineQueryOptions => o
      case _ =>
        throw new IllegalArgumentException(
          "Only RankedTimelineQueryOptions are supported when queryType is TimelineQueryType.Ranked"
        )
    }
  }

  def getReverseChronOptions(
    options: Option[TimelineQueryOptions]
  ): Option[ReverseChronTimelineQueryOptions] = {
    options.map {
      case o: ReverseChronTimelineQueryOptions => o
      case _ =>
        throw new IllegalArgumentException(
          "Only ReverseChronTimelineQueryOptions are supported when queryType is TimelineQueryType.ReverseChron"
        )
    }
  }
}

abstract class TimelineQuery(
  private val queryType: thrift.TimelineQueryType,
  val id: TimelineId,
  val maxCount: Option[Int],
  val range: Option[TimelineRange],
  val options: Option[TimelineQueryOptions]) {

  throwIfInvalid()

  def userId: UserId = {
    id.id
  }

  def throwIfInvalid(): Unit = {
    Timeline.throwIfIdInvalid(id)
    range.foreach(_.throwIfInvalid())
    options.foreach(_.throwIfInvalid())
  }

  def toThrift: thrift.TimelineQuery = {
    thrift.TimelineQuery(
      queryType = queryType,
      timelineId = id.toThrift,
      maxCount = maxCount,
      range = range.map(_.toTimelineRangeThrift),
      options = options.map(_.toTimelineQueryOptionsThrift)
    )
  }
}
Binary file not shown.
@ -1,20 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object TimelineQueryOptions {
  def fromThrift(options: thrift.TimelineQueryOptions): TimelineQueryOptions = {
    options match {
      case thrift.TimelineQueryOptions.RankedTimelineQueryOptions(r) =>
        RankedTimelineQueryOptions.fromThrift(r)
      case thrift.TimelineQueryOptions.ReverseChronTimelineQueryOptions(r) =>
        ReverseChronTimelineQueryOptions.fromThrift(r)
      case _ => throw new IllegalArgumentException(s"Unsupported type: $options")
    }
  }
}

trait TimelineQueryOptions {
  def toTimelineQueryOptionsThrift: thrift.TimelineQueryOptions
  def throwIfInvalid(): Unit
}
Binary file not shown.
@ -1,18 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}

object TimelineRange {
  def fromThrift(range: thrift.TimelineRange): TimelineRange = {
    range match {
      case thrift.TimelineRange.TimeRange(r) => TimeRange.fromThrift(r)
      case thrift.TimelineRange.TweetIdRange(r) => TweetIdRange.fromThrift(r)
      case _ => throw new IllegalArgumentException(s"Unsupported type: $range")
    }
  }
}

trait TimelineRange {
  def toTimelineRangeThrift: thrift.TimelineRange
  def throwIfInvalid(): Unit
}
Binary file not shown.
@ -1,62 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.search.earlybird.thriftscala._
import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.TweetId
import com.twitter.timelines.model.UserId

object Tweet {
  def fromThrift(tweet: thrift.Tweet): Tweet = {
    Tweet(id = tweet.id)
  }
}

case class Tweet(
  id: TweetId,
  userId: Option[UserId] = None,
  sourceTweetId: Option[TweetId] = None,
  sourceUserId: Option[UserId] = None)
    extends TimelineEntry {

  throwIfInvalid()

  def throwIfInvalid(): Unit = {}

  def toThrift: thrift.Tweet = {
    thrift.Tweet(
      id = id,
      userId = userId,
      sourceTweetId = sourceTweetId,
      sourceUserId = sourceUserId)
  }

  def toTimelineEntryThrift: thrift.TimelineEntry = {
    thrift.TimelineEntry.Tweet(toThrift)
  }

  def toThriftSearchResult: ThriftSearchResult = {
    val metadata = ThriftSearchResultMetadata(
      resultType = ThriftSearchResultType.Recency,
      fromUserId = userId match {
        case Some(id) => id
        case None => 0L
      },
      isRetweet =
        if (sourceTweetId.isDefined || sourceUserId.isDefined) Some(true)
        else None,
      sharedStatusId = sourceTweetId match {
        case Some(id) => id
        case None => 0L
      },
      referencedTweetAuthorId = sourceUserId match {
        case Some(id) => id
        case None => 0L
      }
    )
    ThriftSearchResult(
      id = id,
      metadata = Some(metadata)
    )
  }
}
Binary file not shown.
@ -1,53 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelineranker.{thriftscala => thrift}
import com.twitter.timelines.model.TweetId

object TweetIdRange {
  val default: TweetIdRange = TweetIdRange(None, None)
  val empty: TweetIdRange = TweetIdRange(Some(0L), Some(0L))

  def fromThrift(range: thrift.TweetIdRange): TweetIdRange = {
    TweetIdRange(fromId = range.fromId, toId = range.toId)
  }

  def fromTimelineRange(range: TimelineRange): TweetIdRange = {
    range match {
      case r: TweetIdRange => r
      case _ =>
        throw new IllegalArgumentException(s"Only Tweet ID range is supported. Found: $range")
    }
  }
}

/**
 * A range of Tweet IDs with exclusive bounds.
 */
case class TweetIdRange(fromId: Option[TweetId] = None, toId: Option[TweetId] = None)
    extends TimelineRange {

  throwIfInvalid()

  def throwIfInvalid(): Unit = {
    (fromId, toId) match {
      case (Some(fromTweetId), Some(toTweetId)) =>
        require(fromTweetId <= toTweetId, "fromId must be less than or equal to toId.")
      case _ => // valid, do nothing.
    }
  }

  def toThrift: thrift.TweetIdRange = {
    thrift.TweetIdRange(fromId = fromId, toId = toId)
  }

  def toTimelineRangeThrift: thrift.TimelineRange = {
    thrift.TimelineRange.TweetIdRange(toThrift)
  }

  def isEmpty: Boolean = {
    (fromId, toId) match {
      case (Some(fromId), Some(toId)) if fromId == toId => true
      case _ => false
    }
  }
}
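For illustration only, a minimal sketch of how the bounds validation and isEmpty check above behave; the TweetIdRangeSketch object and the literal IDs are hypothetical and assume the model classes above are on the classpath.

import com.twitter.timelineranker.model.TweetIdRange

object TweetIdRangeSketch {
  def main(args: Array[String]): Unit = {
    // Well-formed: fromId <= toId, so the require(...) in throwIfInvalid() passes at construction.
    val range = TweetIdRange(fromId = Some(100L), toId = Some(200L))
    assert(!range.isEmpty)

    // Equal exclusive bounds denote an empty range, matching TweetIdRange.empty.
    assert(TweetIdRange(Some(0L), Some(0L)).isEmpty)

    // Reversed bounds fail construction with an IllegalArgumentException:
    // TweetIdRange(fromId = Some(200L), toId = Some(100L))
  }
}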
Binary file not shown.
@ -1,8 +0,0 @@
package com.twitter.timelineranker.model

import com.twitter.timelines.model.UserId

case class UtegLikedByTweetsOptions(
  utegCount: Int,
  isInNetwork: Boolean,
  weightedFollowings: Map[UserId, Double])
@ -1,17 +0,0 @@
target(
    dependencies = [
        "timelineranker/config",
        "timelineranker/server/src/main/scala",
    ],
)

jvm_app(
    name = "bundle",
    basename = "timelineranker-server-package-dist",
    binary = "timelineranker/server/src/main/scala:bin",
    bundles = [bundle(
        fileset = ["config/**/*"],
        owning_target = "timelineranker/server/config:files",
    )],
    tags = ["bazel-compatible"],
)
BIN
timelineranker/server/BUILD.docx
Normal file
Binary file not shown.
@ -1,14 +0,0 @@
resources(
    sources = ["**/*.yml"],
)

# Created for Bazel compatibility.
# In Bazel, loose files must be part of a target to be included into a bundle.
# See also http://go/bazel-compatibility/bundle_does_not_match_any_files
files(
    name = "files",
    sources = [
        "!BUILD",
        "**/*",
    ],
)
BIN
timelineranker/server/config/BUILD.docx
Normal file
Binary file not shown.
BIN
timelineranker/server/config/decider.docx
Normal file
Binary file not shown.
@ -1,153 +0,0 @@
# Deciders that can be used to control load on TLR or its backends.
enable_max_concurrency_limiting:
  comment: "When enabled, limit maxConcurrency filter. Note: Requires system property maxConcurrency to be set."
  default_availability: 0

# Deciders related to testing / debugging.
enable_routing_to_ranker_dev_proxy:
  comment: "Route dark traffic to the TimelineRanker development proxy. 100% means ~100% of requests to a host."
  default_availability: 0

# Deciders related to authorization.
client_request_authorization:
  comment: "Enable client request authorization and rate limiting"
  default_availability: 10000
client_write_whitelist:
  comment: "Enable authorization of write protected requests from only whitelisted clients"
  default_availability: 0
allow_timeline_mixer_recap_prod:
  comment: "Allow requests from production TimelineMixer/recap"
  default_availability: 10000
allow_timeline_mixer_recycled_prod:
  comment: "Allow requests from production TimelineMixer/recycled"
  default_availability: 10000
allow_timeline_mixer_hydrate_prod:
  comment: "Allow requests from production TimelineMixer/hydrate"
  default_availability: 10000
allow_timeline_mixer_hydrate_recos_prod:
  comment: "Allow requests from production TimelineMixer/hydrate_recos"
  default_availability: 10000
allow_timeline_mixer_seed_authors_prod:
  comment: "Allow requests from production TimelineMixer/seed_author_ids"
  default_availability: 10000
allow_timeline_mixer_simcluster_prod:
  comment: "Allow requests from production TimelineMixer/simcluster"
  default_availability: 10000
allow_timeline_mixer_entity_tweets_prod:
  comment: "Allow requests from production TimelineMixer/entity_tweets"
  default_availability: 10000
allow_timeline_mixer_list_prod:
  comment: "Allow requests from production TimelineMixer/list"
  default_availability: 10000
allow_timeline_mixer_list_tweet_prod:
  comment: "Allow requests from production TimelineMixer/list_tweet"
  default_availability: 10000
allow_timeline_mixer_uteg_liked_by_tweets_prod:
  comment: "Allow requests from production TimelineMixer/uteg_liked_by_tweets"
  default_availability: 10000
allow_timeline_mixer_community_prod:
  comment: "Allow requests from production TimelineMixer/community"
  default_availability: 10000
allow_timeline_mixer_community_tweet_prod:
  comment: "Allow requests from production TimelineMixer/community_tweet"
  default_availability: 10000
allow_timeline_scorer_recommended_trend_tweet_prod:
  comment: "Allow requests from production TimelineMixer/recommended_trend_tweet"
  default_availability: 10000

allow_timeline_scorer_rec_topic_tweets_prod:
  comment: "Allow requests from production TimelineScorer/rec_topic_tweets"
  default_availability: 10000
allow_timeline_scorer_popular_topic_tweets_prod:
  comment: "Allow requests from production TimelineScorer/popular_topic_tweets"
  default_availability: 10000

allow_timelinescorer_hydrate_tweet_scoring_prod:
  comment: "Allow requests from production TimelineScorer/hydrate_tweet_scoring"
  default_availability: 10000

allow_timeline_mixer_staging:
  comment: "Allow requests from staging TimelineMixer"
  default_availability: 10000
allow_timeline_ranker_warmup:
  comment: "Allow warmup requests from the TLR cluster"
  default_availability: 10000
allow_timeline_ranker_proxy:
  comment: "Allow warmup requests from the TimelineRanker proxy"
  default_availability: 10000
allow_timeline_service_prod:
  comment: "Allow requests from production TimelineService"
  default_availability: 10000
allow_timeline_service_staging:
  comment: "Allow requests from staging TimelineService"
  default_availability: 10000
rate_limit_override_unknown:
  comment: "Override the rate limit for unknown clients"
  default_availability: 0

# Deciders related to reverse-chron home timeline materialization.
multiplier_of_materialization_tweets_fetched:
  comment: "Multiplier applied to the number of tweets fetched from search, expressed as a percentage. 100 means 100%. It can be used to fetch more than the number of tweets requested by a caller (to improve similarity) or to fetch fewer than requested to reduce load."
  default_availability: 100
enable_backfill_filtered_entries:
  comment: "Controls whether to back-fill timeline entries that get filtered out by TweetsPostFilter during home timeline materialization."
  default_availability: 0
tweets_filtering_lossage_threshold:
  comment: "If back-filling filtered entries is enabled and the percentage of tweets that get filtered out exceeds this value, then we will issue a second call to get more tweets. Default value 2000 == 20%."
  default_availability: 2000
tweets_filtering_lossage_limit:
  comment: "We need to ensure that the number of tweets requested by the second call is not unbounded (for example, if everything is filtered out in the first call); therefore we limit the actual filtered-out percentage to be no greater than the value below. Default value 6000 == 60%. That is, even if the actual lossage is 90%, we consider it to be only 60% for the purpose of back-filling."
  default_availability: 6000
supplement_follows_with_real_graph:
  comment: "Whether to fetch additional follows from RealGraph for users with more than the max follows fetched from SGS during home timeline materialization."
  default_availability: 0

# Deciders related to recap.
recap_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for recap Tweets. Otherwise those features are not set."
  default_availability: 10000
recap_max_count_multiplier:
  comment: "We multiply maxCount (the caller-supplied value) by this multiplier and fetch that many candidates from search so that we are left with a sufficient number of candidates after hydration and filtering. 100 == 1.0."
  default_availability: 100
recap_enable_extra_sorting_in_results:
  comment: "Whether TLR will do extra sorting of search results."
  default_availability: 10000

# Deciders related to recycled tweets.
recycled_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for recycled Tweets. Otherwise those features are not set."
  default_availability: 0
recycled_max_count_multiplier:
  comment: "We multiply maxCount (the caller-supplied value) by this multiplier and fetch that many candidates from search so that we are left with a sufficient number of candidates after hydration and filtering. 100 == 1.0."
  default_availability: 100

# Deciders related to entity tweets.
entity_tweets_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for entity Tweets. Otherwise those features are not set."
  default_availability: 10000

# Deciders related to both recap and recycled tweets.
enable_real_graph_users:
  comment: "This is used only if the user follows >= 1000 users. If true, expands the user seedset with real graph users and recently followed users. Otherwise, the user seedset only includes followed users."
  default_availability: 0
max_real_graph_and_followed_users:
  comment: "Maximum number of combined real graph users and recently followed users in the user seedset for recap and recycled tweets if enable_real_graph_users is true and only_real_graph_users is false. This is upper-bounded by 2000."
  default_availability: 1000

# Deciders related to recap author.
recap_author_enable_new_pipeline:
  comment: "Enable new recap author pipeline"
  default_availability: 0
recap_author_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for PYLE Tweets. Otherwise those features are not set."
  default_availability: 0

# Deciders related to recap hydration (rectweet + ranked organic).
recap_hydration_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for rectweet + ranked organic Tweets. Otherwise those features are not set."
  default_availability: 0

# Deciders related to uteg liked-by tweets.
uteg_liked_by_tweets_enable_content_features_hydration:
  comment: "If true, semantic core, penguin, and tweetypie based expensive features will be hydrated for rectweet + recycled utegLikedBy Tweets. Otherwise those features are not set."
  default_availability: 0
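To make the 100-equals-1.0 convention used by the multiplier deciders above concrete, here is a hypothetical sketch of how such a value would scale a caller-supplied maxCount; the helper name and call sites are illustrative, not code from this repository.

object MultiplierSketch {
  // Deciders such as recap_max_count_multiplier express a multiplier as a
  // percentage of 100, so 100 == 1.0 and 150 == 1.5.
  def candidatesToFetch(maxCount: Int, multiplierPercent: Int): Int =
    (maxCount * multiplierPercent) / 100

  def main(args: Array[String]): Unit = {
    println(candidatesToFetch(200, 100)) // 200: a multiplier of 100 leaves the count unchanged
    println(candidatesToFetch(200, 150)) // 300: fetch 1.5x so enough candidates survive hydration and filtering
  }
}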
@ -1,5 +0,0 @@
resources(
    sources = [
        "*.xml",
    ],
)
BIN
timelineranker/server/src/main/resources/BUILD.docx
Normal file
Binary file not shown.
Binary file not shown.
@ -1,124 +0,0 @@
<configuration>
  <shutdownHook class="ch.qos.logback.core.hook.DelayingShutdownHook"/>
  <property name="async_queue_size" value="${queue.size:-50000}"/>
  <property name="async_max_flush_time" value="${max.flush.time:-0}"/>
  <property name="SERVICE_OUTPUT" value="${log.service.output:-server.log}"/>
  <property name="DEBUG_TRANSCRIPTS_OUTPUT"
            value="${log.debug_transcripts.output:-debug_transcripts.log}"/>
  <property name="DEFAULT_SERVICE_PATTERN"
            value="%5p [%d{yyyyMMdd-HH:mm:ss.SSS}] %logger{0}: %m%n"/>

  <!-- JUL/JDK14 to Logback bridge -->
  <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
    <resetJUL>true</resetJUL>
  </contextListener>

  <!-- Service Log -->
  <appender name="SERVICE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>${SERVICE_OUTPUT}</file>
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <!-- daily rollover -->
      <fileNamePattern>${SERVICE_OUTPUT}.%d.%i.gz</fileNamePattern>
      <maxFileSize>500MB</maxFileSize>
      <!-- keep 21 days' worth of history -->
      <maxHistory>21</maxHistory>
      <cleanHistoryOnStart>true</cleanHistoryOnStart>
    </rollingPolicy>
    <encoder>
      <pattern>${DEFAULT_SERVICE_PATTERN}</pattern>
    </encoder>
  </appender>

  <!-- Debug transcripts -->
  <appender name="DEBUG-TRANSCRIPTS" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>${DEBUG_TRANSCRIPTS_OUTPUT}</file>
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
      <!-- daily rollover -->
      <fileNamePattern>${DEBUG_TRANSCRIPTS_OUTPUT}.%d.%i.gz</fileNamePattern>
      <maxFileSize>500MB</maxFileSize>
      <!-- keep 21 days' worth of history -->
      <maxHistory>21</maxHistory>
      <cleanHistoryOnStart>true</cleanHistoryOnStart>
    </rollingPolicy>
    <encoder>
      <pattern>${DEFAULT_SERVICE_PATTERN}</pattern>
    </encoder>
  </appender>

  <!-- LogLens/Splunk -->
  <appender name="LOGLENS" class="com.twitter.loglens.logback.LoglensAppender">
    <mdcAdditionalContext>true</mdcAdditionalContext>
    <category>loglens</category>
    <index>${log.lens.index:-timelineranker}</index>
    <tag>${log.lens.tag}</tag>
    <encoder>
      <pattern>%msg%n</pattern>
    </encoder>
    <filter class="com.twitter.strato.logging.logback.RegexFilter">
      <forLogger>manhattan-client</forLogger>
      <excludeRegex>.*InvalidRequest.*</excludeRegex>
    </filter>
  </appender>

  <!-- ===================================================== -->
  <!-- Primary Async Appenders -->
  <!-- ===================================================== -->

  <appender name="ASYNC-SERVICE" class="ch.qos.logback.classic.AsyncAppender">
    <queueSize>${async_queue_size}</queueSize>
    <maxFlushTime>${async_max_flush_time}</maxFlushTime>
    <appender-ref ref="SERVICE"/>
  </appender>

  <appender name="ASYNC-DEBUG-TRANSCRIPTS" class="ch.qos.logback.classic.AsyncAppender">
    <queueSize>${async_queue_size}</queueSize>
    <maxFlushTime>${async_max_flush_time}</maxFlushTime>
    <appender-ref ref="DEBUG-TRANSCRIPTS"/>
  </appender>

  <appender name="ASYNC-LOGLENS" class="ch.qos.logback.classic.AsyncAppender">
    <queueSize>${async_queue_size}</queueSize>
    <maxFlushTime>${async_max_flush_time}</maxFlushTime>
    <appender-ref ref="LOGLENS"/>
  </appender>

  <!-- ===================================================== -->
  <!-- Package Config -->
  <!-- ===================================================== -->

  <!-- Per-Package Config -->
  <logger name="OptimisticLockingCache" level="off"/>
  <logger name="ZkSession" level="info"/>
  <logger name="com.twitter" level="info"/>
  <logger name="com.twitter.decider.StoreDecider" level="warn"/>
  <logger name="com.twitter.distributedlog.client" level="warn"/>
  <logger name="com.twitter.finagle.liveness" level="warn"/>
  <logger name="com.twitter.finagle.mtls.authorization.config.AccessControlListConfiguration" level="warn"/>
  <logger name="com.twitter.finagle.mux" level="warn"/>
  <logger name="com.twitter.finagle.serverset2" level="warn"/>
  <logger name="com.twitter.finatra.kafka.common.kerberoshelpers" level="warn"/>
  <logger name="com.twitter.finatra.kafka.utils.BootstrapServerUtils" level="warn"/>
  <logger name="com.twitter.logging.ScribeHandler" level="warn"/>
  <logger name="com.twitter.server.coordinate" level="error"/>
  <logger name="com.twitter.wilyns" level="warn"/>
  <logger name="com.twitter.zookeeper.client" level="info"/>
  <logger name="com.twitter.zookeeper.client.internal" level="warn"/>
  <logger name="manhattan-client" level="warn"/>
  <logger name="org.apache.kafka.clients.NetworkClient" level="error"/>
  <logger name="org.apache.kafka.clients.consumer.internals" level="error"/>
  <logger name="org.apache.kafka.clients.producer.internals" level="error"/>
  <logger name="org.apache.kafka.common.network" level="warn"/>
  <logger name="org.apache.zookeeper" level="error"/>
  <logger name="org.apache.zookeeper.ClientCnxn" level="warn"/>

  <!-- Root Config -->
  <root level="${log_level:-INFO}">
    <appender-ref ref="ASYNC-SERVICE"/>
    <appender-ref ref="ASYNC-LOGLENS"/>
  </root>

  <!-- Debug transcripts: logger name MUST be c.t.timelines.util.debuglog.DebugLog.DebugTranscriptsLog -->
  <logger name="DebugTranscripts" level="info">
    <appender-ref ref="ASYNC-DEBUG-TRANSCRIPTS"/>
    <appender-ref ref="ASYNC-LOGLENS"/>
  </logger>
</configuration>
@ -1,32 +0,0 @@
target(
    dependencies = [
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/repository",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/server",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/source",
    ],
)

jvm_binary(
    name = "bin",
    basename = "timelineranker-server",
    main = "com.twitter.timelineranker.server.Main",
    runtime_platform = "java11",
    tags = ["bazel-compatible"],
    dependencies = [
        ":scala",
        "3rdparty/jvm/org/slf4j:jcl-over-slf4j",  # [1]
        "3rdparty/jvm/org/slf4j:log4j-over-slf4j",  # [1]
        "loglens/loglens-logback/src/main/scala/com/twitter/loglens/logback",  # [2]
        "strato/src/main/scala/com/twitter/strato/logging/logback",  # [2]
        "timelineranker/server/src/main/resources",  # [2]
        "twitter-server/logback-classic/src/main/scala",  # [2]
    ],
)

# [1] Bridge other logging implementations to slf4j-api in addition to JUL.
#     https://docbird.twitter.biz/core_libraries_guide/logging/twitter_server.html
#     Without these, c.t.l.Logger becomes a silent/null logger since no proper
#     configuration can be found. This can be removed once there is no
#     dependency from the service to c.t.l.Logger.
#
# [2] Incur the logback implementation.
BIN
timelineranker/server/src/main/scala/BUILD.docx
Normal file
Binary file not shown.
@ -1,22 +0,0 @@
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    strict_deps = True,
    tags = ["bazel-compatible"],
    dependencies = [
        "cortex-core/thrift/src/main/thrift:thrift-scala",
        "cortex-tweet-annotate/service/src/main/thrift:thrift-scala",
        "finagle/finagle-memcached/src/main/scala",
        "mediaservices/commons/src/main/thrift:thrift-scala",
        "servo/repo",
        "servo/util/src/main/scala",
        "src/thrift/com/twitter/ml/api:data-scala",
        "src/thrift/com/twitter/ml/prediction_service:prediction_service-scala",
        "timelines/src/main/scala/com/twitter/timelines/model/types",
        "timelines/src/main/scala/com/twitter/timelines/util",
        "timelines/src/main/scala/com/twitter/timelines/util/stats",
        "util/util-core:util-core-util",
        "util/util-logging/src/main/scala",
        "util/util-stats/src/main/scala",
    ],
)
Binary file not shown.
Binary file not shown.
@ -1,113 +0,0 @@
package com.twitter.timelineranker.clients

import com.twitter.cortex_core.thriftscala.ModelName
import com.twitter.cortex_tweet_annotate.thriftscala._
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.logging.Logger
import com.twitter.mediaservices.commons.mediainformation.thriftscala.CalibrationLevel
import com.twitter.timelines.model.TweetId
import com.twitter.timelines.util.stats.RequestScope
import com.twitter.timelines.util.stats.RequestStats
import com.twitter.timelines.util.stats.ScopedFactory
import com.twitter.timelines.util.FailOpenHandler
import com.twitter.util.Future

object CortexTweetQueryServiceClient {
  private[this] val logger = Logger.get(getClass.getSimpleName)

  /**
   * A tweet is considered safe if the Cortex NSFA model gives it a score that is above the threshold.
   * Both the score and the threshold are returned in a response from the getTweetSignalByIds endpoint.
   */
  private def getSafeTweet(
    request: TweetSignalRequest,
    response: ModelResponseResult
  ): Option[TweetId] = {
    val tweetId = request.tweetId
    response match {
      case ModelResponseResult(ModelResponseState.Success, Some(tid), Some(modelResponse), _) =>
        val prediction = modelResponse.predictions.flatMap(_.headOption)
        val score = prediction.map(_.score.score)
        val highRecallBucket = prediction.flatMap(_.calibrationBuckets).flatMap { buckets =>
          buckets.find(_.description.contains(CalibrationLevel.HighRecall))
        }
        val threshold = highRecallBucket.map(_.threshold)
        (score, threshold) match {
          case (Some(s), Some(t)) if (s > t) =>
            Some(tid)
          case (Some(s), Some(t)) =>
            logger.ifDebug(
              s"Cortex NSFA score for tweet $tweetId is $s (threshold is $t), removing as unsafe."
            )
            None
          case _ =>
            logger.ifDebug(s"Unexpected response, removing tweet $tweetId as unsafe.")
            None
        }
      case _ =>
        logger.ifWarning(
          s"Cortex tweet NSFA call was not successful, removing tweet $tweetId as unsafe."
        )
        None
    }
  }
}

/**
 * Enables calling the Cortex tweet query service to get NSFA scores for tweets.
 */
class CortexTweetQueryServiceClient(
  cortexClient: CortexTweetQueryService.MethodPerEndpoint,
  requestScope: RequestScope,
  statsReceiver: StatsReceiver)
    extends RequestStats {
  import CortexTweetQueryServiceClient._

  private[this] val logger = Logger.get(getClass.getSimpleName)

  private[this] val getTweetSignalByIdsRequestStats =
    requestScope.stats("cortex", statsReceiver, suffix = Some("getTweetSignalByIds"))
  private[this] val getTweetSignalByIdsRequestScopedStatsReceiver =
    getTweetSignalByIdsRequestStats.scopedStatsReceiver

  private[this] val failedCortexTweetQueryServiceScope =
    getTweetSignalByIdsRequestScopedStatsReceiver.scope(Failures)
  private[this] val failedCortexTweetQueryServiceCallCounter =
    failedCortexTweetQueryServiceScope.counter("failOpen")

  private[this] val cortexTweetQueryServiceFailOpenHandler = new FailOpenHandler(
    getTweetSignalByIdsRequestScopedStatsReceiver
  )

  def getSafeTweets(tweetIds: Seq[TweetId]): Future[Seq[TweetId]] = {
    val requests = tweetIds.map { id => TweetSignalRequest(id, ModelName.TweetToNsfa) }
    val results = cortexClient
      .getTweetSignalByIds(
        GetTweetSignalByIdsRequest(requests)
      )
      .map(_.results)

    cortexTweetQueryServiceFailOpenHandler(
      results.map { responses =>
        requests.zip(responses).flatMap {
          case (request, response) =>
            getSafeTweet(request, response)
        }
      }
    ) { _ =>
      failedCortexTweetQueryServiceCallCounter.incr()
      logger.ifWarning(s"Cortex tweet NSFA call failed, considering tweets $tweetIds as unsafe.")
      Future.value(Seq())
    }
  }
}

class ScopedCortexTweetQueryServiceClientFactory(
  cortexClient: CortexTweetQueryService.MethodPerEndpoint,
  statsReceiver: StatsReceiver)
    extends ScopedFactory[CortexTweetQueryServiceClient] {

  override def scope(scope: RequestScope): CortexTweetQueryServiceClient = {
    new CortexTweetQueryServiceClient(cortexClient, scope, statsReceiver)
  }
}
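A minimal usage sketch of the scoped client above; the cortexClient, statsReceiver, and requestScope values are placeholders assumed to be wired up elsewhere.

// Placeholders assumed to exist:
//   cortexClient: CortexTweetQueryService.MethodPerEndpoint
//   statsReceiver: StatsReceiver
//   requestScope: RequestScope
val factory = new ScopedCortexTweetQueryServiceClientFactory(cortexClient, statsReceiver)
val client = factory.scope(requestScope)

// Returns only the tweet IDs whose NSFA score clears the high-recall threshold; if the
// Cortex call fails, the fail-open handler yields an empty sequence (all tweets treated as unsafe).
val safeIds: Future[Seq[TweetId]] = client.getSafeTweets(Seq(1L, 2L, 3L))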
Binary file not shown.
@ -1,48 +0,0 @@
package com.twitter.timelineranker.clients

import com.twitter.finagle.memcached.{Client => FinagleMemcacheClient}
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.logging.Logger
import com.twitter.servo.cache.FinagleMemcache
import com.twitter.servo.cache.MemcacheCache
import com.twitter.servo.cache.ObservableMemcache
import com.twitter.servo.cache.Serializer
import com.twitter.servo.cache.StatsReceiverCacheObserver
import com.twitter.timelines.util.stats.RequestScope
import com.twitter.timelines.util.stats.ScopedFactory
import com.twitter.util.Duration

/**
 * Factory to create a servo Memcache-backed Cache object. Clients are required to provide a
 * serializer/deserializer for keys and values.
 */
class MemcacheFactory(memcacheClient: FinagleMemcacheClient, statsReceiver: StatsReceiver) {
  private[this] val logger = Logger.get(getClass.getSimpleName)

  def apply[K, V](
    keySerializer: K => String,
    valueSerializer: Serializer[V],
    ttl: Duration
  ): MemcacheCache[K, V] = {
    new MemcacheCache[K, V](
      memcache = new ObservableMemcache(
        new FinagleMemcache(memcacheClient),
        new StatsReceiverCacheObserver(statsReceiver, 1000, logger)
      ),
      ttl = ttl,
      serializer = valueSerializer,
      transformKey = keySerializer
    )
  }
}

class ScopedMemcacheFactory(memcacheClient: FinagleMemcacheClient, statsReceiver: StatsReceiver)
    extends ScopedFactory[MemcacheFactory] {

  override def scope(scope: RequestScope): MemcacheFactory = {
    new MemcacheFactory(
      memcacheClient,
      statsReceiver.scope("memcache", scope.scope)
    )
  }
}
@ -1,24 +0,0 @@
scala_library(
    sources = ["*.scala"],
    platform = "java8",
    strict_deps = True,
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/twitter/algebird:bijection",
        "3rdparty/jvm/com/twitter/bijection:core",
        "3rdparty/jvm/com/twitter/bijection:netty",
        "3rdparty/jvm/com/twitter/bijection:scrooge",
        "3rdparty/jvm/com/twitter/bijection:thrift",
        "3rdparty/jvm/com/twitter/bijection:util",
        "3rdparty/jvm/com/twitter/storehaus:core",
        "finagle/finagle-stats",
        "scrooge/scrooge-core/src/main/scala",
        "src/scala/com/twitter/summingbird_internal/bijection:bijection-implicits",
        "src/thrift/com/twitter/timelines/content_features:thrift-scala",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/recap/model",
        "timelines/src/main/scala/com/twitter/timelines/clients/memcache_common",
        "timelines/src/main/scala/com/twitter/timelines/model/types",
        "util/util-core:util-core-util",
        "util/util-stats/src/main/scala/com/twitter/finagle/stats",
    ],
)
Binary file not shown.
Binary file not shown.
@ -1,39 +0,0 @@
package com.twitter.timelineranker.clients.content_features_cache

import com.twitter.bijection.Injection
import com.twitter.bijection.scrooge.CompactScalaCodec
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.storehaus.Store
import com.twitter.timelineranker.recap.model.ContentFeatures
import com.twitter.timelines.clients.memcache_common._
import com.twitter.timelines.content_features.{thriftscala => thrift}
import com.twitter.timelines.model.TweetId
import com.twitter.util.Duration

/**
 * Content features will be stored by tweetId.
 */
class ContentFeaturesMemcacheBuilder(
  config: StorehausMemcacheConfig,
  ttl: Duration,
  statsReceiver: StatsReceiver) {
  private[this] val scalaToThriftInjection: Injection[ContentFeatures, thrift.ContentFeatures] =
    Injection.build[ContentFeatures, thrift.ContentFeatures](_.toThrift)(
      ContentFeatures.tryFromThrift)

  private[this] val thriftToBytesInjection: Injection[thrift.ContentFeatures, Array[Byte]] =
    CompactScalaCodec(thrift.ContentFeatures)

  private[this] implicit val valueInjection: Injection[ContentFeatures, Array[Byte]] =
    scalaToThriftInjection.andThen(thriftToBytesInjection)

  private[this] val underlyingBuilder =
    new MemcacheStoreBuilder[TweetId, ContentFeatures](
      config = config,
      scopeName = "contentFeaturesCache",
      statsReceiver = statsReceiver,
      ttl = ttl
    )

  def build(): Store[TweetId, ContentFeatures] = underlyingBuilder.build()
}
@ -1,43 +0,0 @@
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    strict_deps = True,
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/twitter/storehaus:core",
        "configapi/configapi-core/src/main/scala/com/twitter/timelines/configapi",
        "finagle/finagle-core/src/main",
        "servo/util/src/main/scala",
        "src/thrift/com/twitter/search:earlybird-scala",
        "src/thrift/com/twitter/search/common:constants-scala",
        "src/thrift/com/twitter/search/common:features-scala",
        "src/thrift/com/twitter/service/metastore/gen:thrift-scala",
        "timelineranker/common/src/main/scala/com/twitter/timelineranker/model",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/contentfeatures",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/core",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/parameters/in_network_tweets",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/parameters/recap",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/parameters/uteg_liked_by_tweets",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/recap/model",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/util",
        "timelineranker/server/src/main/scala/com/twitter/timelineranker/visibility",
        "timelines/src/main/scala/com/twitter/timelines/clients/gizmoduck",
        "timelines/src/main/scala/com/twitter/timelines/clients/manhattan",
        "timelines/src/main/scala/com/twitter/timelines/clients/relevance_search",
        "timelines/src/main/scala/com/twitter/timelines/clients/tweetypie",
        "timelines/src/main/scala/com/twitter/timelines/common/model",
        "timelines/src/main/scala/com/twitter/timelines/earlybird/common/options",
        "timelines/src/main/scala/com/twitter/timelines/earlybird/common/utils",
        "timelines/src/main/scala/com/twitter/timelines/model/candidate",
        "timelines/src/main/scala/com/twitter/timelines/model/tweet",
        "timelines/src/main/scala/com/twitter/timelines/util",
        "timelines/src/main/scala/com/twitter/timelines/util/bounds",
        "timelines/src/main/scala/com/twitter/timelines/util/stats",
        "timelines/src/main/scala/com/twitter/timelines/visibility",
        "timelines/src/main/scala/com/twitter/timelines/visibility/model",
        "util/util-core:util-core-util",
        "util/util-core/src/main/scala/com/twitter/conversions",
        "util/util-logging/src/main/scala/com/twitter/logging",
        "util/util-stats/src/main/scala",
    ],
)
Binary file not shown.
Binary file not shown.
@ -1,40 +0,0 @@
package com.twitter.timelineranker.common

import com.twitter.finagle.stats.StatsReceiver
import com.twitter.servo.util.FutureArrow
import com.twitter.timelineranker.core.HydratedCandidatesAndFeaturesEnvelope
import com.twitter.timelineranker.model.CandidateTweet
import com.twitter.timelineranker.model.CandidateTweetsResult
import com.twitter.util.Future

class CandidateGenerationTransform(statsReceiver: StatsReceiver)
    extends FutureArrow[HydratedCandidatesAndFeaturesEnvelope, CandidateTweetsResult] {
  private[this] val numCandidateTweetsStat = statsReceiver.stat("numCandidateTweets")
  private[this] val numSourceTweetsStat = statsReceiver.stat("numSourceTweets")

  override def apply(
    candidatesAndFeaturesEnvelope: HydratedCandidatesAndFeaturesEnvelope
  ): Future[CandidateTweetsResult] = {
    val hydratedTweets = candidatesAndFeaturesEnvelope.candidateEnvelope.hydratedTweets.outerTweets

    if (hydratedTweets.nonEmpty) {
      val candidates = hydratedTweets.map { hydratedTweet =>
        CandidateTweet(hydratedTweet, candidatesAndFeaturesEnvelope.features(hydratedTweet.tweetId))
      }
      numCandidateTweetsStat.add(candidates.size)

      val sourceTweets =
        candidatesAndFeaturesEnvelope.candidateEnvelope.sourceHydratedTweets.outerTweets.map {
          hydratedTweet =>
            CandidateTweet(
              hydratedTweet,
              candidatesAndFeaturesEnvelope.features(hydratedTweet.tweetId))
        }
      numSourceTweetsStat.add(sourceTweets.size)

      Future.value(CandidateTweetsResult(candidates, sourceTweets))
    } else {
      Future.value(CandidateTweetsResult.Empty)
    }
  }
}
Binary file not shown.
@ -1,112 +0,0 @@
package com.twitter.timelineranker.common

import com.twitter.finagle.stats.StatsReceiver
import com.twitter.servo.util.FutureArrow
import com.twitter.servo.util.Gate
import com.twitter.storehaus.Store
import com.twitter.timelineranker.contentfeatures.ContentFeaturesProvider
import com.twitter.timelineranker.core.FutureDependencyTransformer
import com.twitter.timelineranker.core.HydratedCandidatesAndFeaturesEnvelope
import com.twitter.timelineranker.model.RecapQuery
import com.twitter.timelineranker.recap.model.ContentFeatures
import com.twitter.timelineranker.util.SearchResultUtil._
import com.twitter.timelineranker.util.CachingContentFeaturesProvider
import com.twitter.timelineranker.util.TweetHydrator
import com.twitter.timelineranker.util.TweetypieContentFeaturesProvider
import com.twitter.timelines.clients.tweetypie.TweetyPieClient
import com.twitter.timelines.model.TweetId
import com.twitter.util.Future
import com.twitter.timelines.configapi
import com.twitter.timelines.util.FutureUtils

class ContentFeaturesHydrationTransformBuilder(
  tweetyPieClient: TweetyPieClient,
  contentFeaturesCache: Store[TweetId, ContentFeatures],
  enableContentFeaturesGate: Gate[RecapQuery],
  enableTokensInContentFeaturesGate: Gate[RecapQuery],
  enableTweetTextInContentFeaturesGate: Gate[RecapQuery],
  enableConversationControlContentFeaturesGate: Gate[RecapQuery],
  enableTweetMediaHydrationGate: Gate[RecapQuery],
  hydrateInReplyToTweets: Boolean,
  statsReceiver: StatsReceiver) {
  val scopedStatsReceiver: StatsReceiver = statsReceiver.scope("ContentFeaturesHydrationTransform")
  val tweetHydrator: TweetHydrator = new TweetHydrator(tweetyPieClient, scopedStatsReceiver)
  val tweetypieContentFeaturesProvider: ContentFeaturesProvider =
    new TweetypieContentFeaturesProvider(
      tweetHydrator,
      enableContentFeaturesGate,
      enableTokensInContentFeaturesGate,
      enableTweetTextInContentFeaturesGate,
      enableConversationControlContentFeaturesGate,
      enableTweetMediaHydrationGate,
      scopedStatsReceiver
    )

  val cachingContentFeaturesProvider: ContentFeaturesProvider = new CachingContentFeaturesProvider(
    underlying = tweetypieContentFeaturesProvider,
    contentFeaturesCache = contentFeaturesCache,
    statsReceiver = scopedStatsReceiver
  )

  val contentFeaturesProvider: configapi.FutureDependencyTransformer[RecapQuery, Seq[TweetId], Map[
    TweetId,
    ContentFeatures
  ]] = FutureDependencyTransformer.partition(
    gate = enableContentFeaturesGate,
    ifTrue = cachingContentFeaturesProvider,
    ifFalse = tweetypieContentFeaturesProvider
  )

  lazy val contentFeaturesHydrationTransform: ContentFeaturesHydrationTransform =
    new ContentFeaturesHydrationTransform(
      contentFeaturesProvider,
      enableContentFeaturesGate,
      hydrateInReplyToTweets
    )

  def build(): ContentFeaturesHydrationTransform = contentFeaturesHydrationTransform
}

class ContentFeaturesHydrationTransform(
  contentFeaturesProvider: ContentFeaturesProvider,
  enableContentFeaturesGate: Gate[RecapQuery],
  hydrateInReplyToTweets: Boolean)
    extends FutureArrow[
      HydratedCandidatesAndFeaturesEnvelope,
      HydratedCandidatesAndFeaturesEnvelope
    ] {
  override def apply(
    request: HydratedCandidatesAndFeaturesEnvelope
  ): Future[HydratedCandidatesAndFeaturesEnvelope] = {
    if (enableContentFeaturesGate(request.candidateEnvelope.query)) {
      val searchResults = request.candidateEnvelope.searchResults

      val sourceTweetIdMap = searchResults.map { searchResult =>
        (searchResult.id, getRetweetSourceTweetId(searchResult).getOrElse(searchResult.id))
      }.toMap

      val inReplyToTweetIds = if (hydrateInReplyToTweets) {
        searchResults.flatMap(getInReplyToTweetId)
      } else {
        Seq.empty
      }

      val tweetIdsToHydrate = (sourceTweetIdMap.values ++ inReplyToTweetIds).toSeq.distinct

      val contentFeaturesMapFuture = if (tweetIdsToHydrate.nonEmpty) {
        contentFeaturesProvider(request.candidateEnvelope.query, tweetIdsToHydrate)
      } else {
        FutureUtils.EmptyMap[TweetId, ContentFeatures]
      }

      Future.value(
        request.copy(
          contentFeaturesFuture = contentFeaturesMapFuture,
          tweetSourceTweetMap = sourceTweetIdMap,
          inReplyToTweetIds = inReplyToTweetIds.toSet
        )
      )
    } else {
      Future.value(request)
    }
  }
}
Binary file not shown.
@ -1,15 +0,0 @@
package com.twitter.timelineranker.common

import com.twitter.servo.util.FutureArrow
import com.twitter.timelineranker.core.CandidateEnvelope
import com.twitter.timelineranker.model.RecapQuery
import com.twitter.util.Future

/**
 * Create a CandidateEnvelope based on the incoming RecapQuery.
 */
object CreateCandidateEnvelopeTransform extends FutureArrow[RecapQuery, CandidateEnvelope] {
  override def apply(query: RecapQuery): Future[CandidateEnvelope] = {
    Future.value(CandidateEnvelope(query))
  }
}
Some files were not shown because too many files have changed in this diff.