Skip to content

Commit

Permalink
refactor: dynamic tesla URL
Browse files Browse the repository at this point in the history
  • Loading branch information
Kikobeats committed Nov 25, 2023
1 parent 762894f commit ba40a0a
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 8 deletions.
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@
"contributors": "(npx git-authors-cli && npx finepack && git add package.json && git commit -m 'build: contributors' --no-verify) || true",
"cronjob": "npm run update:crawler-agents && npm run healthcheck",
"healthcheck": "curl -fsS -m 10 --retry 5 -o /dev/null https://hc-ping.com/b7995d4b-75cb-4434-9338-8c1aa4679dc6",
"lint": "standard-markdown README.md",
"postrelease": "npm run release:tags && npm run release:github && (ci-publish || npm publish --access=public)",
"prerelease": "npm run update:check",
"pretest": "npm run lint",
Expand All @@ -73,7 +73,7 @@
"test": "exit 0",
"update": "ncu -u",
"update:check": "ncu -- --error-level 2",
"update:crawler-agents": "(node scripts/generate.mjs && git add index.json && git commit -m 'build(update): crawlers' --no-verify && git push) || true"
},
"license": "MIT",
"commitlint": {
Expand Down
18 changes: 12 additions & 6 deletions scripts/generate.js → scripts/generate.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,27 @@

'use strict'

const crawlers = require('crawler-user-agents')
const { writeFile } = require('fs/promises')
const { load } = require('cheerio')
const pFilter = require('p-filter')
const pEvery = require('p-every')
import { createRequire } from 'module'
import { writeFile } from 'fs/promises'
import { load } from 'cheerio'
import pFilter from 'p-filter'
import pEvery from 'p-every'

const crawlers = createRequire(import.meta.url)('crawler-user-agents/crawler-user-agents.json')

// Emoji markers used when rendering whether a crawler URL check passed.
const CHECK = { true: '✅', false: '❌' }
// Upper bound on simultaneous URL verifications.
const MAX_CONCURRENCY = 10
// Per-request timeout, in milliseconds.
const REQ_TIMEOUT = 10_000

// Collect every crawler's example user-agent instances into one flat,
// deduplicated list of candidate UA strings.
const candidates = [...new Set(crawlers.flatMap(({ instances }) => instances))]

// Resolve a live Tesla order URL at run time: the previously hard-coded
// tesla.com order link went stale, so fetch a current car listing from the
// teslahunt API instead. Requires TESLAHUNT_API_KEY in the environment.
const teslaUrl = await fetch('https://api.teslahunt.io/cars?maxRecords=1', {
  headers: { 'x-api-key': process.env.TESLAHUNT_API_KEY }
})
  .then(res => {
    // Fail loudly on a non-OK response (e.g. missing/invalid API key);
    // otherwise `detailsUrl` would silently come back as `undefined` and an
    // unusable entry would be pushed into URLS below.
    if (!res.ok) throw new Error(`teslahunt API request failed: ${res.status}`)
    return res.json()
  })
  .then(payload => payload.detailsUrl)

const URLS = [
  'https://twitter.com/Kikobeats/status/1687837848802578432',
  teslaUrl
]

const verifyUrl = userAgent => async url => {
Expand Down

0 comments on commit ba40a0a

Please sign in to comment.