2017-06-25 19:49:39 -07:00
const os = require ( 'os' ) ;
const throng = require ( 'throng' ) ;
const dotenv = require ( 'dotenv' ) ;
const express = require ( 'express' ) ;
const http = require ( 'http' ) ;
const redis = require ( 'redis' ) ;
const pg = require ( 'pg' ) ;
const log = require ( 'npmlog' ) ;
const url = require ( 'url' ) ;
const WebSocket = require ( 'uws' ) ;
const uuid = require ( 'uuid' ) ;
2017-05-20 08:31:47 -07:00
// Resolve the runtime environment, load the matching dotenv file before
// anything else reads process.env, and configure log verbosity.
const env = process.env.NODE_ENV || 'development';

dotenv.config({
  path: env === 'production' ? '.env.production' : '.env',
});

log.level = process.env.LOG_LEVEL || 'verbose';
/**
 * Convert a postgres:// connection URL into a node-postgres config object.
 * Missing components are simply omitted so pg's own defaults apply.
 *
 * @param {string|undefined} dbUrl - e.g. postgres://user:pass@host:5432/db?ssl=true
 * @returns {Object} partial pg config ({} when no URL is given)
 */
const dbUrlToConfig = (dbUrl) => {
  if (!dbUrl) {
    return {};
  }

  // Parse with parseQueryString=true so params.query is an object; without
  // it params.query is a raw string and the ?ssl= flag below is unreachable.
  const params = url.parse(dbUrl, true);
  const config = {};

  if (params.auth) {
    [config.user, config.password] = params.auth.split(':');
  }

  if (params.hostname) {
    config.host = params.hostname;
  }

  if (params.port) {
    config.port = params.port;
  }

  if (params.pathname) {
    // pathname is "/database_name"; take the segment after the leading slash.
    config.database = params.pathname.split('/')[1];
  }

  const ssl = params.query && params.query.ssl;

  if (ssl) {
    config.ssl = ssl === 'true' || ssl === '1';
  }

  return config;
};
/**
 * Build a redis client from a base config plus an optional REDIS_URL.
 * Supports unix:// socket URLs as well as regular redis:// URLs.
 *
 * @param {Object} defaultConfig - base connection parameters (host/port/db/password)
 * @param {string|undefined} redisUrl - optional URL overriding the defaults
 * @returns {Object} a redis client
 */
const redisUrlToClient = (defaultConfig, redisUrl) => {
  // Copy the config so we never mutate the caller's shared object: this
  // function is called twice with the same redisParams, and the previous
  // in-place Object.assign leaked the url key into it.
  const config = Object.assign({}, defaultConfig);

  if (!redisUrl) {
    return redis.createClient(config);
  }

  if (redisUrl.startsWith('unix://')) {
    // Strip the scheme; createClient takes a bare socket path.
    return redis.createClient(redisUrl.slice(7), config);
  }

  return redis.createClient(Object.assign(config, {
    url: redisUrl,
  }));
};
2017-05-28 07:25:26 -07:00
// Worker count: explicit STREAMING_CLUSTER_NUM override, otherwise a single
// worker in development and one per spare CPU core (minimum 1) elsewhere.
const numWorkers = +process.env.STREAMING_CLUSTER_NUM || (env === 'development' ? 1 : Math.max(os.cpus().length - 1, 1));
// Runs once in the throng master process; the workers do the actual serving.
const startMaster = () => {
  log.info(`Starting streaming API server master with ${numWorkers} workers`);
};
/**
 * Worker entry point: wires up Postgres, Redis pub/sub, the Express HTTP
 * streaming endpoints and the WebSocket server for one worker process.
 *
 * @param {number} workerId - identifier assigned by throng
 */
const startWorker = (workerId) => {
  log.info(`Starting worker ${workerId}`);

  // Per-environment Postgres pool settings; DATABASE_URL (if set) overrides
  // these via dbUrlToConfig below.
  const pgConfigs = {
    development: {
      user:     process.env.DB_USER || pg.defaults.user,
      password: process.env.DB_PASS || pg.defaults.password,
      database: process.env.DB_NAME || 'mastodon_development',
      host:     process.env.DB_HOST || pg.defaults.host,
      port:     process.env.DB_PORT || pg.defaults.port,
      max:      10,
    },

    production: {
      user:     process.env.DB_USER || 'mastodon',
      password: process.env.DB_PASS || '',
      database: process.env.DB_NAME || 'mastodon_production',
      host:     process.env.DB_HOST || 'localhost',
      port:     process.env.DB_PORT || 5432,
      max:      10,
    },
  };

  const app = express();

  // The Express setting is named "trust proxy"; the previous "trusted proxy"
  // spelling was silently ignored, so X-Forwarded-For was never honoured.
  app.set('trust proxy', process.env.TRUSTED_PROXY_IP || 'loopback,uniquelocal');

  const pgPool = new pg.Pool(Object.assign(pgConfigs[env], dbUrlToConfig(process.env.DATABASE_URL)));
  const server = http.createServer(app);
  const redisNamespace = process.env.REDIS_NAMESPACE || null;

  const redisParams = {
    host:     process.env.REDIS_HOST || '127.0.0.1',
    port:     process.env.REDIS_PORT || 6379,
    db:       process.env.REDIS_DB || 0,
    password: process.env.REDIS_PASSWORD,
  };

  if (redisNamespace) {
    redisParams.namespace = redisNamespace;
  }

  const redisPrefix = redisNamespace ? `${redisNamespace}:` : '';

  // Two clients: a redis connection in subscriber mode cannot issue regular
  // commands, so redisClient handles SET etc. while redisSubscribeClient
  // only SUBSCRIBEs.
  const redisSubscribeClient = redisUrlToClient(redisParams, process.env.REDIS_URL);
  const redisClient = redisUrlToClient(redisParams, process.env.REDIS_URL);

  // channel name -> array of listener callbacks for fan-out.
  const subs = {};

  redisSubscribeClient.on('message', (channel, message) => {
    const callbacks = subs[channel];

    log.silly(`New message on channel ${channel}`);

    if (!callbacks) {
      return;
    }

    callbacks.forEach(callback => callback(message));
  });

  // Periodically refresh a "subscribed:<channel>" key so the Rails side
  // knows someone is listening; returns a cleanup function that stops the
  // refresh timer.
  const subscriptionHeartbeat = (channel) => {
    const interval = 6 * 60;

    const tellSubscribed = () => {
      redisClient.set(`${redisPrefix}subscribed:${channel}`, '1', 'EX', interval * 3);
    };

    tellSubscribed();

    const heartbeat = setInterval(tellSubscribed, interval * 1000);

    return () => {
      clearInterval(heartbeat);
    };
  };

  const subscribe = (channel, callback) => {
    log.silly(`Adding listener for ${channel}`);
    subs[channel] = subs[channel] || [];

    // Only issue the redis SUBSCRIBE for the first listener on a channel.
    if (subs[channel].length === 0) {
      log.verbose(`Subscribe ${channel}`);
      redisSubscribeClient.subscribe(channel);
    }

    subs[channel].push(callback);
  };

  const unsubscribe = (channel, callback) => {
    log.silly(`Removing listener for ${channel}`);
    subs[channel] = subs[channel].filter(item => item !== callback);

    // Drop the redis subscription once the last listener is gone.
    if (subs[channel].length === 0) {
      log.verbose(`Unsubscribe ${channel}`);
      redisSubscribeClient.unsubscribe(channel);
    }
  };

  const allowCrossDomain = (req, res, next) => {
    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Headers', 'Authorization, Accept, Cache-Control');
    res.header('Access-Control-Allow-Methods', 'GET, OPTIONS');

    next();
  };

  const setRequestId = (req, res, next) => {
    req.requestId = uuid.v4();
    res.header('X-Request-Id', req.requestId);

    next();
  };

  const setRemoteAddress = (req, res, next) => {
    req.remoteAddress = req.connection.remoteAddress;
    next();
  };

  // Resolve an OAuth access token to an account. On success sets
  // req.accountId and req.filteredLanguages; otherwise calls next with a
  // 401 error.
  const accountFromToken = (token, req, next) => {
    pgPool.connect((err, client, done) => {
      if (err) {
        next(err);
        return;
      }

      client.query('SELECT oauth_access_tokens.resource_owner_id, users.account_id, users.filtered_languages FROM oauth_access_tokens INNER JOIN users ON oauth_access_tokens.resource_owner_id = users.id WHERE oauth_access_tokens.token = $1 AND oauth_access_tokens.revoked_at IS NULL LIMIT 1', [token], (err, result) => {
        done();

        if (err) {
          next(err);
          return;
        }

        if (result.rows.length === 0) {
          err = new Error('Invalid access token');
          err.statusCode = 401;

          next(err);
          return;
        }

        req.accountId = result.rows[0].account_id;
        req.filteredLanguages = result.rows[0].filtered_languages;

        next();
      });
    });
  };

  // Pull the access token from the Authorization header or the access_token
  // query parameter. When `required` is false an anonymous request passes
  // through without an account.
  const accountFromRequest = (req, next, required = true) => {
    const authorization = req.headers.authorization;
    const location = url.parse(req.url, true);
    const accessToken = location.query.access_token;

    if (!authorization && !accessToken) {
      if (required) {
        const err = new Error('Missing access token');
        err.statusCode = 401;

        next(err);
        return;
      } else {
        next();
        return;
      }
    }

    const token = authorization ? authorization.replace(/^Bearer /, '') : accessToken;

    accountFromToken(token, req, next);
  };

  // WebSocket stream names that may be consumed without authentication.
  const PUBLIC_STREAMS = [
    'public',
    'public:local',
    'hashtag',
    'hashtag:local',
  ];

  const wsVerifyClient = (info, cb) => {
    const location = url.parse(info.req.url, true);
    const authRequired = !PUBLIC_STREAMS.some(stream => stream === location.query.stream);

    accountFromRequest(info.req, err => {
      if (!err) {
        cb(true, undefined, undefined);
      } else {
        log.error(info.req.requestId, err.toString());
        cb(false, 401, 'Unauthorized');
      }
    }, authRequired);
  };

  // HTTP endpoints that may be consumed without authentication.
  const PUBLIC_ENDPOINTS = [
    '/api/v1/streaming/public',
    '/api/v1/streaming/public/local',
    '/api/v1/streaming/hashtag',
    '/api/v1/streaming/hashtag/local',
  ];

  const authenticationMiddleware = (req, res, next) => {
    if (req.method === 'OPTIONS') {
      next();
      return;
    }

    const authRequired = !PUBLIC_ENDPOINTS.some(endpoint => endpoint === req.path);

    accountFromRequest(req, next, authRequired);
  };

  // Final error handler. The unused destructured 4th parameter keeps the
  // 4-arity signature Express uses to recognise error middleware.
  const errorMiddleware = (err, req, res, {}) => {
    log.error(req.requestId, err.toString());
    res.writeHead(err.statusCode || 500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: err.statusCode ? err.toString() : 'An unexpected error occurred' }));
  };

  // Build "$2, $3, ..." placeholder lists for parameterized IN clauses.
  const placeholders = (arr, shift = 0) => arr.map((_, i) => `$${i + 1 + shift}`).join(', ');

  // Check that the requesting account owns the given list; calls next(true)
  // only on a confirmed match.
  const authorizeListAccess = (id, req, next) => {
    pgPool.connect((err, client, done) => {
      if (err) {
        next(false);
        return;
      }

      client.query('SELECT id, account_id FROM lists WHERE id = $1 LIMIT 1', [id], (err, result) => {
        done();

        if (err || result.rows.length === 0 || result.rows[0].account_id !== req.accountId) {
          next(false);
          return;
        }

        next(true);
      });
    });
  };

  /**
   * Subscribe to a redis channel and forward its messages to `output`,
   * applying per-account filtering (language, blocks, mutes, domain blocks)
   * for 'update' events when `needsFiltering` is set.
   */
  const streamFrom = (id, req, output, attachCloseHandler, needsFiltering = false, notificationOnly = false) => {
    const accountId = req.accountId || req.remoteAddress;
    const streamType = notificationOnly ? ' (notification)' : '';

    log.verbose(req.requestId, `Starting stream from ${id} for ${accountId}${streamType}`);

    const listener = message => {
      const { event, payload, queued_at } = JSON.parse(message);

      const transmit = () => {
        const now = new Date().getTime();
        const delta = now - queued_at;
        const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;

        log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
        output(event, encodedPayload);
      };

      if (notificationOnly && event !== 'notification') {
        return;
      }

      // Only messages that may require filtering are statuses, since notifications
      // are already personalized and deletes do not matter
      if (!needsFiltering || event !== 'update') {
        transmit();
        return;
      }

      const unpackedPayload = payload;
      const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
      const accountDomain = unpackedPayload.account.acct.split('@')[1];

      if (Array.isArray(req.filteredLanguages) && req.filteredLanguages.indexOf(unpackedPayload.language) !== -1) {
        log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
        return;
      }

      // When the account is not logged in, it is not necessary to confirm the block or mute
      if (!req.accountId) {
        transmit();
        return;
      }

      pgPool.connect((err, client, done) => {
        if (err) {
          log.error(err);
          return;
        }

        const queries = [
          client.query(`SELECT 1 FROM blocks WHERE (account_id = $1 AND target_account_id IN (${placeholders(targetAccountIds, 2)})) OR (account_id = $2 AND target_account_id = $1) UNION SELECT 1 FROM mutes WHERE account_id = $1 AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
        ];

        if (accountDomain) {
          queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
        }

        Promise.all(queries).then(values => {
          done();

          // Any matching row means the author is blocked/muted/domain-blocked.
          if (values[0].rows.length > 0 || (values.length > 1 && values[1].rows.length > 0)) {
            return;
          }

          transmit();
        }).catch(err => {
          done();
          log.error(err);
        });
      });
    };

    subscribe(`${redisPrefix}${id}`, listener);
    attachCloseHandler(`${redisPrefix}${id}`, listener);
  };

  // Setup stream output to HTTP
  const streamToHttp = (req, res) => {
    const accountId = req.accountId || req.remoteAddress;

    res.setHeader('Content-Type', 'text/event-stream');
    res.setHeader('Transfer-Encoding', 'chunked');

    // SSE comment line keeps intermediaries from timing out the connection.
    const heartbeat = setInterval(() => res.write(':thump\n'), 15000);

    req.on('close', () => {
      log.verbose(req.requestId, `Ending stream for ${accountId}`);
      clearInterval(heartbeat);
    });

    return (event, payload) => {
      res.write(`event: ${event}\n`);
      res.write(`data: ${payload}\n\n`);
    };
  };

  // Setup stream end for HTTP
  const streamHttpEnd = (req, closeHandler = false) => (id, listener) => {
    req.on('close', () => {
      unsubscribe(id, listener);

      if (closeHandler) {
        closeHandler();
      }
    });
  };

  // Setup stream output to WebSockets
  const streamToWs = (req, ws) => (event, payload) => {
    if (ws.readyState !== ws.OPEN) {
      log.error(req.requestId, 'Tried writing to closed socket');
      return;
    }

    ws.send(JSON.stringify({ event, payload }));
  };

  // Setup stream end for WebSockets
  const streamWsEnd = (req, ws, closeHandler = false) => (id, listener) => {
    const accountId = req.accountId || req.remoteAddress;

    ws.on('close', () => {
      log.verbose(req.requestId, `Ending stream for ${accountId}`);
      unsubscribe(id, listener);

      if (closeHandler) {
        closeHandler();
      }
    });

    ws.on('error', () => {
      log.verbose(req.requestId, `Ending stream for ${accountId}`);
      unsubscribe(id, listener);

      if (closeHandler) {
        closeHandler();
      }
    });
  };

  app.use(setRequestId);
  app.use(setRemoteAddress);
  app.use(allowCrossDomain);
  app.use(authenticationMiddleware);
  app.use(errorMiddleware);

  app.get('/api/v1/streaming/user', (req, res) => {
    const channel = `timeline:${req.accountId}`;
    streamFrom(channel, req, streamToHttp(req, res), streamHttpEnd(req, subscriptionHeartbeat(channel)));
  });

  app.get('/api/v1/streaming/user/notification', (req, res) => {
    streamFrom(`timeline:${req.accountId}`, req, streamToHttp(req, res), streamHttpEnd(req), false, true);
  });

  app.get('/api/v1/streaming/public', (req, res) => {
    streamFrom('timeline:public', req, streamToHttp(req, res), streamHttpEnd(req), true);
  });

  app.get('/api/v1/streaming/public/local', (req, res) => {
    streamFrom('timeline:public:local', req, streamToHttp(req, res), streamHttpEnd(req), true);
  });

  app.get('/api/v1/streaming/hashtag', (req, res) => {
    streamFrom(`timeline:hashtag:${req.query.tag.toLowerCase()}`, req, streamToHttp(req, res), streamHttpEnd(req), true);
  });

  app.get('/api/v1/streaming/hashtag/local', (req, res) => {
    streamFrom(`timeline:hashtag:${req.query.tag.toLowerCase()}:local`, req, streamToHttp(req, res), streamHttpEnd(req), true);
  });

  app.get('/api/v1/streaming/list', (req, res) => {
    const listId = req.query.list;

    authorizeListAccess(listId, req, authorized => {
      if (!authorized) {
        res.writeHead(404, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Not found' }));
        return;
      }

      const channel = `timeline:list:${listId}`;
      streamFrom(channel, req, streamToHttp(req, res), streamHttpEnd(req, subscriptionHeartbeat(channel)));
    });
  });

  const wss = new WebSocket.Server({ server, verifyClient: wsVerifyClient });

  wss.on('connection', ws => {
    const req = ws.upgradeReq;
    const location = url.parse(req.url, true);
    req.requestId = uuid.v4();
    req.remoteAddress = ws._socket.remoteAddress;

    // Liveness tracking for the ping sweep below.
    ws.isAlive = true;

    ws.on('pong', () => {
      ws.isAlive = true;
    });

    switch (location.query.stream) {
    case 'user':
      const channel = `timeline:${req.accountId}`;
      streamFrom(channel, req, streamToWs(req, ws), streamWsEnd(req, ws, subscriptionHeartbeat(channel)));
      break;
    case 'user:notification':
      streamFrom(`timeline:${req.accountId}`, req, streamToWs(req, ws), streamWsEnd(req, ws), false, true);
      break;
    case 'public':
      streamFrom('timeline:public', req, streamToWs(req, ws), streamWsEnd(req, ws), true);
      break;
    case 'public:local':
      streamFrom('timeline:public:local', req, streamToWs(req, ws), streamWsEnd(req, ws), true);
      break;
    case 'hashtag':
      streamFrom(`timeline:hashtag:${location.query.tag.toLowerCase()}`, req, streamToWs(req, ws), streamWsEnd(req, ws), true);
      break;
    case 'hashtag:local':
      streamFrom(`timeline:hashtag:${location.query.tag.toLowerCase()}:local`, req, streamToWs(req, ws), streamWsEnd(req, ws), true);
      break;
    case 'list':
      const listId = location.query.list;

      authorizeListAccess(listId, req, authorized => {
        if (!authorized) {
          ws.close();
          return;
        }

        const channel = `timeline:list:${listId}`;
        streamFrom(channel, req, streamToWs(req, ws), streamWsEnd(req, ws, subscriptionHeartbeat(channel)));
      });
      break;
    default:
      ws.close();
    }
  });

  // Terminate clients that did not answer the previous ping within 30s.
  setInterval(() => {
    wss.clients.forEach(ws => {
      if (ws.isAlive === false) {
        ws.terminate();
        return;
      }

      ws.isAlive = false;
      ws.ping('', false, true);
    });
  }, 30000);

  server.listen(process.env.PORT || 4000, process.env.BIND || '0.0.0.0', () => {
    log.info(`Worker ${workerId} now listening on ${server.address().address}:${server.address().port}`);
  });

  const onExit = () => {
    log.info(`Worker ${workerId} exiting, bye bye`);
    server.close();
    process.exit(0);
  };

  // NOTE(review): exiting with code 0 on an uncaught exception hides the
  // crash from process supervisors — consider a non-zero code. Kept as-is
  // to preserve observable behavior.
  const onError = (err) => {
    log.error(err);
    server.close();
    process.exit(0);
  };

  process.on('SIGINT', onExit);
  process.on('SIGTERM', onExit);
  process.on('exit', onExit);
  process.on('uncaughtException', onError);
};
// Fork the configured number of workers and keep them alive forever.
throng({
  workers: numWorkers,
  lifetime: Infinity,
  start: startWorker,
  master: startMaster,
});