mirror of
https://github.com/DIYgod/RSSHub.git
synced 2025-12-16 02:42:57 +08:00
feat: Add the Vulture router. (#3581)
This commit is contained in:
@@ -85,3 +85,14 @@ Compared to the official one, this feed:
|
|||||||
Provides a better reading experience (full text articles) over the official one.
|
Provides a better reading experience (full text articles) over the official one.
|
||||||
|
|
||||||
</RouteEn>
|
</RouteEn>
|
||||||
|
|
||||||
|
## Vulture
|
||||||
|
|
||||||
|
<RouteEn author="loganrockmore" example="/vulture/movies" path="/vulture/:type" :paramsDesc="['The sub-site name']">
|
||||||
|
|
||||||
|
Supported sub-sites:
|
||||||
|
| TV | Movies | Comedy | Music | TV Recaps | Books | Theater | Art | Awards | Video |
|
||||||
|
| ----- | ------ | ------ | ------ | ------ | ------ | ------ | ------ | ------ | ------ |
|
||||||
|
| tv | movies | comedy | music | tvrecaps | books | theater | art | awards | video |
|
||||||
|
|
||||||
|
</RouteEn>
|
||||||
|
|||||||
@@ -2036,6 +2036,9 @@ router.get('/mastodon/timeline/:site/:only_media?', require('./routes/mastodon/t
|
|||||||
// Kernel Aliyun
|
// Kernel Aliyun
|
||||||
router.get('/aliyun-kernel/index', require('./routes/aliyun-kernel/index'));
|
router.get('/aliyun-kernel/index', require('./routes/aliyun-kernel/index'));
|
||||||
|
|
||||||
|
// Vulture
|
||||||
|
router.get('/vulture/:type', require('./routes/vulture/index'));
|
||||||
|
|
||||||
// xinwenlianbo
|
// xinwenlianbo
|
||||||
router.get('/xinwenlianbo/index', require('./routes/xinwenlianbo/index'));
|
router.get('/xinwenlianbo/index', require('./routes/xinwenlianbo/index'));
|
||||||
|
|
||||||
|
|||||||
8
lib/routes/vulture/index.js
Normal file
8
lib/routes/vulture/index.js
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
const utils = require('./utils');
|
||||||
|
|
||||||
|
module.exports = async (ctx) => {
|
||||||
|
const url = `https://www.vulture.com/${ctx.params.type}/`;
|
||||||
|
const title = `Vulture - ${ctx.params.type}`;
|
||||||
|
|
||||||
|
ctx.state.data = await utils.getData(ctx, url, title);
|
||||||
|
};
|
||||||
93
lib/routes/vulture/utils.js
Normal file
93
lib/routes/vulture/utils.js
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
const got = require('@/utils/got');
|
||||||
|
const cheerio = require('cheerio');
|
||||||
|
|
||||||
|
// Fetch a single article page and return its cleaned-up body HTML.
// Used as the cache-miss loader in ProcessFeed.
async function load(link) {
    const response = await got.get(link);
    const $ = cheerio.load(response.data);

    const content = $('div.article-content');

    // Strip page furniture we don't want in the feed output.
    const unwantedSelectors = [
        'aside.related',
        'aside.article-details_heading-with-paragraph',
        'section.package-list',
        'div.source-links h2',
        'div.source-links svg',
        'div.mobile-secondary-area',
        'aside.newsletter-flex-text',
    ];
    for (const selector of unwantedSelectors) {
        content.find(selector).remove();
    }

    return {
        description: content.html(),
    };
}
|
||||||
|
|
||||||
|
/**
 * Turn raw Vulture article records into feed items, filling in the full
 * article body (via `load`) through the cache.
 *
 * Fixes over the original: drops the redundant `return await` on
 * Promise.all, the redundant `Promise.resolve(...)` wrapper inside an
 * async callback, and the redundant `async () => await load(...)`
 * indirection passed to the cache; uses a template literal and object
 * spread. Behavior and output shape are unchanged.
 *
 * @param {Array<Object>} list   raw article objects from the data endpoint
 * @param {Object}        caches RSSHub cache (`ctx.cache`) with `tryGet`
 * @returns {Promise<Array<Object>>} resolved feed items
 */
async function ProcessFeed(list, caches) {
    return Promise.all(
        list.map(async (item) => {
            const itemUrl = item.canonicalUrl;

            // Build an author string like "by Jane Doe, John Roe" when
            // byline data exists; otherwise leave it empty.
            let bylineString = '';
            if (item.byline) {
                const names = item.byline[0].names.map((name) => name.text);
                bylineString = `by ${names.join(', ')}`;
            }

            const single = {
                title: item.primaryHeadline,
                link: itemUrl,
                author: bylineString,
                guid: itemUrl,
                pubDate: item.date,
            };

            // Full article HTML is expensive to fetch, so cache it per URL.
            const other = await caches.tryGet(itemUrl, () => load(itemUrl));

            return { ...single, ...other };
        })
    );
}
|
||||||
|
|
||||||
|
/**
 * Build the complete feed data object for a Vulture listing page.
 *
 * Loads the HTML listing page, reads the JSON feed endpoint that the page
 * embeds in `section.paginated-feed`'s `data-uri` attribute, fetches that
 * endpoint, and expands up to 25 articles into full feed items.
 *
 * @param {Object} ctx   Koa context (used for its cache)
 * @param {string} url   listing page URL, e.g. https://www.vulture.com/movies/
 * @param {string} title feed title
 * @returns {Promise<Object>} feed data for ctx.state.data
 */
const getData = async (ctx, url, title) => {
    const htmlResponse = await got({
        method: 'get',
        url,
        headers: {
            Referer: url,
        },
    });

    const $ = cheerio.load(htmlResponse.data);

    // The listing page embeds its JSON feed endpoint in a data attribute.
    const dataUri = $('section.paginated-feed').attr('data-uri');

    // Fetch the raw article data from that endpoint.
    const response = await got({
        method: 'get',
        url: dataUri,
        headers: {
            Referer: dataUri,
        },
    });

    // Limit to 25 articles so full-text fetching keeps load times reasonable.
    const list = response.data.articles.slice(0, 25);

    const result = await ProcessFeed(list, ctx.cache);

    return {
        title,
        link: url,
        description: $('meta[name="description"]').attr('content'),
        item: result,
    };
};
|
||||||
|
|
||||||
|
// Public surface of this module: only the feed builder is exported.
module.exports = { getData };
|
||||||
Reference in New Issue
Block a user