
Commit

clean: tidy up the retry handling
yamadashy committed May 1, 2022
1 parent 3c831e0 commit a697ba1
Showing 3 changed files with 24 additions and 36 deletions.
8 changes: 8 additions & 0 deletions src/feed/utils/common-util.ts
@@ -1,5 +1,6 @@
import * as v8 from 'v8';
import * as crypto from 'crypto';
+import retry from 'async-retry';

export const objectDeepCopy = <T>(data: T): T => {
  // TODO: use structuredClone once we are on Node.js 17+
@@ -34,3 +35,10 @@ export const isValidHttpUrl = (url: string) => {

  return urlObject.protocol === 'http:' || urlObject.protocol === 'https:';
};
+
+export const backoff = async <A>(retrier: retry.RetryFunction<A>): Promise<A> => {
+  return await retry(retrier, {
+    retries: 3,
+    factor: 2,
+  });
+};
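
For context: backoff wraps async-retry with an explicit policy of up to 3 retries (4 attempts in total), with waits that grow by a factor of 2 between attempts. Under async-retry's documented default minTimeout of 1000 ms that works out to roughly 1 s, 2 s, and 4 s between attempts before the final error is rethrown. A minimal usage sketch follows; the function name, URL, and 404 handling are illustrative, not part of this commit.

import axios from 'axios';
import { backoff } from './common-util';

// Illustrative caller: retry transient HTTP failures, but stop early on a 404.
async function fetchFeedXml(url: string): Promise<string> {
  return backoff(async (bail) => {
    try {
      const response = await axios.get<string>(url, { responseType: 'text' });
      return response.data;
    } catch (error) {
      if (axios.isAxiosError(error) && error.response?.status === 404) {
        bail(new Error(`feed not found: ${url}`)); // non-transient: abort retrying
        return '';
      }
      throw error; // transient: async-retry waits, then tries again
    }
  });
}

The bail callback comes from async-retry's retrier signature (the same retry.RetryFunction type the helper accepts) and rejects immediately instead of burning the remaining attempts.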
36 changes: 10 additions & 26 deletions src/feed/utils/feed-crawler.ts
@@ -4,8 +4,7 @@ import { FeedInfo } from '../../resources/feed-info-list';
import dayjs from 'dayjs';
import axios from 'axios';
import { URL } from 'url';
-import retry from 'async-retry';
-import { isValidHttpUrl, objectDeepCopy, urlRemoveQueryParams } from './common-util';
+import { backoff, isValidHttpUrl, objectDeepCopy, urlRemoveQueryParams } from './common-util';
import { logger } from './logger';
import constants from '../../common/constants';
const ogs = require('open-graph-scraper');
@@ -67,14 +66,9 @@ export class FeedCrawler {
        logger.trace(error);
      })
      .process(async (feedInfo) => {
-        const feed = await retry(
-          async () => {
-            return (await this.rssParser.parseURL(feedInfo.url)) as CustomRssParserFeed;
-          },
-          {
-            retries: 3,
-          },
-        );
+        const feed = await backoff(async () => {
+          return this.rssParser.parseURL(feedInfo.url) as Promise<CustomRssParserFeed>;
+        });
        const postProcessedFeed = FeedCrawler.postProcessFeed(feedInfo, feed);
        feeds.push(postProcessedFeed);
        logger.info('[fetch-feed] fetched', `${fetchProcessCounter++}/${feedInfoListLength}`, feedInfo.label);
@@ -220,14 +214,9 @@ export class FeedCrawler {
        logger.trace(error);
      })
      .process(async (feedItem) => {
-        const ogsResult = await retry(
-          async () => {
-            return await FeedCrawler.fetchOgsResult(feedItem.link);
-          },
-          {
-            retries: 3,
-          },
-        );
+        const ogsResult = await backoff(async () => {
+          return FeedCrawler.fetchOgsResult(feedItem.link);
+        });
        feedItemOgsResultMap.set(feedItem.link, ogsResult);
        logger.info('[fetch-feed-item-ogp] fetched', `${fetchProcessCounter++}/${feedItemsLength}`, feedItem.title);
      });
@@ -247,14 +236,9 @@
        logger.trace(error);
      })
      .process(async (feed) => {
-        const ogsResult = await retry(
-          async () => {
-            return await FeedCrawler.fetchOgsResult(feed.link);
-          },
-          {
-            retries: 3,
-          },
-        );
+        const ogsResult = await backoff(async () => {
+          return FeedCrawler.fetchOgsResult(feed.link);
+        });
        feedOgsResultMap.set(feed.link, ogsResult);
        logger.info('[fetch-feed-ogp] fetched', `${fetchProcessCounter++}/${feedsLength}`, feed.title);
      });
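All three call sites in this file share one shape: a concurrency-limited pool in which each item gets its own backoff, while handleError logs and skips any item whose retries are exhausted rather than aborting the whole crawl. A standalone sketch of that shape, assuming @supercharge/promise-pool (the handleError/process chain above resembles its API, but the import, the concurrency value, and the use of axios here are assumptions):

import axios from 'axios';
import { PromisePool } from '@supercharge/promise-pool';
import { backoff } from './common-util';
import { logger } from './logger';

async function crawlAll(urls: string[]): Promise<Map<string, string>> {
  const bodies = new Map<string, string>();
  await PromisePool.for(urls)
    .withConcurrency(5) // assumed value; not shown in the commit
    .handleError((error, url) => {
      // Reached only after backoff has exhausted its retries;
      // the failed item is skipped and the pool keeps going.
      logger.error('[fetch] failed', url);
      logger.trace(error);
    })
    .process(async (url) => {
      const body = await backoff(async () => (await axios.get<string>(url)).data);
      bodies.set(url, body);
    });
  return bodies;
}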
16 changes: 6 additions & 10 deletions tests/feed-info-list.test.ts
@@ -1,8 +1,8 @@
import { FEED_INFO_LIST, FeedInfo } from '../src/resources/feed-info-list';
import { FeedCrawler } from '../src/feed/utils/feed-crawler';
import { describe, it, expect } from 'vitest';
+import { backoff } from '../src/feed/utils/common-util';
const RssParser = require('rss-parser');
-const retry = require('async-retry');

const rssParser = new RssParser();

@@ -18,17 +18,13 @@ describe('FEED_INFO_LIST', () => {
  // Feed fetch test
  describe('feeds can be fetched', () => {
    FEED_INFO_LIST.map((feedInfo: FeedInfo) => {
+      const testTitle = `${feedInfo.label} / ${feedInfo.url}`;
      it.concurrent(
-        `${feedInfo.label} / ${feedInfo.url}`,
+        testTitle,
        async () => {
-          const feed = await retry(
-            async () => {
-              return rssParser.parseURL(feedInfo.url);
-            },
-            {
-              retries: 3,
-            },
-          );
+          const feed = await backoff(async () => {
+            return rssParser.parseURL(feedInfo.url);
+          });
          expect(feed.items.length).toBeGreaterThanOrEqual(0);
        },
        60 * 1000,
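Two notes on the test. The trailing 60 * 1000 is vitest's per-test timeout in milliseconds, which comfortably covers the worst-case retry schedule plus network time. And the refactor as a whole should be behavior-preserving: factor: 2 is already the default exponential factor in the retry package that async-retry builds on, so the four call sites change shape, not semantics. Schematically (a sketch, not lines from the commit):

// Before: retry options repeated at every call site.
const feed = await retry(async () => rssParser.parseURL(url), { retries: 3 });

// After: the policy lives in one place, the backoff helper.
const feed = await backoff(async () => rssParser.parseURL(url));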
