2021-08-27 08:32:44 -04:00
|
|
|
import Bluebird from 'bluebird'
|
2020-01-31 10:56:52 -05:00
|
|
|
import { URL } from 'url'
|
2021-08-27 08:32:44 -04:00
|
|
|
import { retryTransactionWrapper } from '@server/helpers/database-utils'
|
2021-03-08 08:24:11 -05:00
|
|
|
import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
|
|
|
|
import { logger } from '../../helpers/logger'
|
|
|
|
import { doJSONRequest } from '../../helpers/requests'
|
2021-06-14 12:06:58 -04:00
|
|
|
import { ACTIVITY_PUB, WEBSERVER } from '../../initializers/constants'
|
2018-05-25 10:21:16 -04:00
|
|
|
|
2019-03-19 11:23:02 -04:00
|
|
|
// Consumer called with the `orderedItems` of each crawled page; may return a native Promise or a Bluebird one
type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
|
2021-08-27 08:32:44 -04:00
|
|
|
// Post-crawl cleanup hook; receives the Date at which the crawl started
type CleanerFunction = (startedDate: Date) => Promise<any>
|
2019-03-19 11:23:02 -04:00
|
|
|
|
2021-03-08 08:24:11 -05:00
|
|
|
async function crawlCollectionPage <T> (argUrl: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
|
|
|
|
let url = argUrl
|
|
|
|
|
|
|
|
logger.info('Crawling ActivityPub data on %s.', url)
|
2018-05-25 10:21:16 -04:00
|
|
|
|
2021-06-14 12:06:58 -04:00
|
|
|
const options = { activityPub: true }
|
2018-05-25 10:21:16 -04:00
|
|
|
|
2019-03-19 11:23:02 -04:00
|
|
|
const startDate = new Date()
|
|
|
|
|
2021-03-08 08:24:11 -05:00
|
|
|
const response = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
|
2018-05-25 10:21:16 -04:00
|
|
|
const firstBody = response.body
|
|
|
|
|
2020-01-31 10:56:52 -05:00
|
|
|
const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
|
2018-05-25 10:21:16 -04:00
|
|
|
let i = 0
|
|
|
|
let nextLink = firstBody.first
|
|
|
|
while (nextLink && i < limit) {
|
2019-05-31 09:14:40 -04:00
|
|
|
let body: any
|
2019-04-25 08:23:15 -04:00
|
|
|
|
2019-05-31 09:14:40 -04:00
|
|
|
if (typeof nextLink === 'string') {
|
|
|
|
// Don't crawl ourselves
|
2020-01-31 10:56:52 -05:00
|
|
|
const remoteHost = new URL(nextLink).host
|
2019-05-31 09:14:40 -04:00
|
|
|
if (remoteHost === WEBSERVER.HOST) continue
|
|
|
|
|
2021-03-08 08:24:11 -05:00
|
|
|
url = nextLink
|
2019-05-31 09:14:40 -04:00
|
|
|
|
2021-03-08 08:24:11 -05:00
|
|
|
const res = await doJSONRequest<ActivityPubOrderedCollection<T>>(url, options)
|
2019-05-31 09:14:40 -04:00
|
|
|
body = res.body
|
|
|
|
} else {
|
|
|
|
// nextLink is already the object we want
|
|
|
|
body = nextLink
|
|
|
|
}
|
2018-05-25 10:21:16 -04:00
|
|
|
|
|
|
|
nextLink = body.next
|
|
|
|
i++
|
|
|
|
|
|
|
|
if (Array.isArray(body.orderedItems)) {
|
|
|
|
const items = body.orderedItems
|
2021-03-08 08:24:11 -05:00
|
|
|
logger.info('Processing %i ActivityPub items for %s.', items.length, url)
|
2018-05-25 10:21:16 -04:00
|
|
|
|
|
|
|
await handler(items)
|
|
|
|
}
|
|
|
|
}
|
2019-03-19 11:23:02 -04:00
|
|
|
|
2021-07-20 08:15:15 -04:00
|
|
|
if (cleaner) await retryTransactionWrapper(cleaner, startDate)
|
2018-05-25 10:21:16 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------

export {
  crawlCollectionPage
}
|