4 Commits

SHA1        Message                                         Date
08bd36c133  feat: support skipping based on labels          2019-02-28 17:34:00 -07:00
e8959eea14  feat: support multiple matching labels          2019-02-28 17:33:34 -07:00
            must match all labels to pass
7bf6ff373d  feat: allow customizing history json filename   2019-02-28 17:15:31 -07:00
ad3ebecb62  feat: make comment page count configurable      2019-02-28 17:15:15 -07:00
            and fetch twice as many pages as before
3 changed files with 36 additions and 14 deletions

View File

@@ -23,6 +23,7 @@ async function ghActionBot() {
     COMMENT_BODY_REGEXP_FLAGS,
     COMMENT_ACTOR,
     PULL_LABEL_FILTER,
+    PULL_LABEL_EXCLUDE,
     PULL_AUTHOR_FILTER,
     PULL_RETEST_BODY,
     ACTION_RETRY_DELAY = 3000,
@@ -62,7 +63,7 @@ async function ghActionBot() {
       }
       // filter on label
-      if (PULL_LABEL_FILTER && !pull.labels.includes(PULL_LABEL_FILTER)) {
+      if (!PULL_LABEL_FILTER.split(',').every(label => pull.labels.includes(label))) {
         logger.debug(
           `SKIP PULL #${
             pull.number
@@ -71,6 +72,17 @@ async function ghActionBot() {
         return false;
       }
+      // filter out matching exclusion tags
+      if (PULL_LABEL_EXCLUDE) {
+        const exclude = PULL_LABEL_EXCLUDE.split(',').some(label => pull.labels.includes(label));
+        if (exclude) {
+          logger.debug(
+            `SKIP PULL #${pull.number}: label matches exclusion labels '${PULL_LABEL_EXCLUDE}'`
+          );
+          return false;
+        }
+      }
       return { comment, pull };
     })
   )).filter(Boolean);
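
With both settings read as comma-separated lists, the label checks above are now symmetric: a pull must carry every label in PULL_LABEL_FILTER and none of the labels in PULL_LABEL_EXCLUDE. Note that the rewritten filter calls PULL_LABEL_FILTER.split(',') unconditionally, so it assumes that variable is always set. A minimal sketch of the two checks with made-up label names:

    // Hypothetical values: PULL_LABEL_FILTER='ready-for-review,ci-approved',
    // PULL_LABEL_EXCLUDE='wip,do-not-merge'.
    const labels = ['ready-for-review', 'ci-approved', 'wip'];
    'ready-for-review,ci-approved'.split(',').every(l => labels.includes(l)); // true: all required labels present
    'wip,do-not-merge'.split(',').some(l => labels.includes(l));              // true: 'wip' matches, so the pull is skipped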

View File

@@ -5,20 +5,30 @@ function truncate(str, len = 26) {
   return `${str.slice(0, len)}...`;
 }
 
-export default async function getEvents(repo, { body, actor } = {}) {
-  const events = await repo.events.fetch();
-  const items = await (async () => {
-    // process 2 pages of events
-    if (events.nextPage) {
-      const moreEvents = await events.nextPage.fetch();
-      return [...events.items, ...moreEvents.items];
-    }
-    return events.items;
-  })();
+const getCommentEvents = async (repo, lastEvents) => {
+  // just fetch and return the results
+  if (lastEvents == null) return repo.events.fetch();
+  // if there is no next page, just return the passed in object
+  if (!lastEvents.nextPage) return lastEvents;
+  // fetch next items and append previous ones
+  const newEvents = await lastEvents.nextPage.fetch();
+  newEvents.items = [...lastEvents.items, ...newEvents.items];
+  return newEvents;
+};
 
-  return items
+export default async function getEvents(repo, { body, actor, pages = 4 } = {}) {
+  // create a task array that we can reduce into a promise chain
+  const chain = [];
+  while (chain.length < pages) chain.push('page');
+  const events = await chain.reduce(
+    acc => acc.then(res => getCommentEvents(repo, res)),
+    Promise.resolve()
+  );
+
+  return events.items
     .map(comment => {
       let skip = false;
       const isComment = comment.type === 'IssueCommentEvent';
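
getEvents now pages by reducing a fixed-length array into a promise chain: each step passes the previous result back to getCommentEvents, which either performs the initial fetch, returns early when there is no next page, or fetches the next page and accumulates the earlier items in front of the new ones. The pages option defaults to 4, twice the previously hard-coded two pages. A hypothetical call site (all option values are examples, not defaults):

    const events = await getEvents(repo, {
      body: 'retest',   // hypothetical comment-body filter
      actor: 'ci-bot',  // hypothetical actor filter
      pages: 8,         // walk up to 8 pages instead of the default 4
    });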

View File

@@ -5,8 +5,8 @@ const MAX_ITEMS = 1000; // max entries in history
 const TRUNCATE_AMOUNT = 200; // count of entries to remove when max is hit
 
 export default class History {
-  constructor() {
-    this.filePath = path.resolve('data', 'history.json');
+  constructor(filename = 'history.json') {
+    this.filePath = path.resolve('data', filename);
     try {
       const data = fs.readFileSync(this.filePath);
       this.db = data.length === 0 ? [] : JSON.parse(data);
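
Because the filename is now a constructor argument (still defaulting to 'history.json'), separate tasks can keep separate history files under data/. A short sketch, with the second filename made up for illustration:

    const history = new History();                // reads/writes data/history.json, as before
    const retests = new History('retests.json');  // hypothetical task-specific file: data/retests.json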