Update scripts

This commit is contained in:
freearhey 2023-10-07 05:14:21 +03:00
parent 66ec908b6e
commit 179ef6a41d
28 changed files with 958 additions and 866 deletions

5
scripts/constants.ts Normal file
View file

@@ -0,0 +1,5 @@
// GitHub repository that approved issues are loaded from.
export const OWNER = 'iptv-org'
export const REPO = 'database'

// Input/output directories, overridable via environment variables.
export const DATA_DIR = process.env.DATA_DIR || './data'
export const API_DIR = process.env.API_DIR || './.api'

// True when running under the test runner (NODE_ENV=test).
// FIX: dropped the redundant `? true : false` around a boolean expression.
export const TESTING = process.env.NODE_ENV === 'test'

View file

@@ -1,105 +0,0 @@
const csv2json = require('csvtojson')
const chalk = require('chalk')
const logger = require('./logger')
const fs = require('mz/fs')
const {
  Parser,
  transforms: { flatten },
  formatters: { stringQuoteOnlyIfNecessary }
} = require('json2csv')

// NOTE(review): `flatten`, `chalk` and `logger` are required above but never
// used in this module — candidates for removal.

// csvtojson options: strict column checking, CRLF line endings and per-column
// converters (semicolon lists, TRUE/FALSE booleans, ''-to-null). The parser
// functions referenced here are hoisted declarations at the bottom of the file.
const csv2jsonOptions = {
  checkColumn: true,
  trim: true,
  delimiter: ',',
  eol: '\r\n',
  colParser: {
    alt_names: listParser,
    network: nullable,
    owners: listParser,
    subdivision: nullable,
    city: nullable,
    broadcast_area: listParser,
    languages: listParser,
    categories: listParser,
    is_nsfw: boolParser,
    launched: nullable,
    closed: nullable,
    replaced_by: nullable,
    website: nullable,
    logo: nullable,
    countries: listParser
  }
}

// Shared JSON -> CSV serializer (CRLF line endings, quotes only where required).
const json2csv = new Parser({
  transforms: [flattenArray, formatBool],
  formatters: {
    string: stringQuoteOnlyIfNecessary()
  },
  eol: '\r\n'
})

// Public facade: CSV <-> JSON conversion helpers.
const csv = {}

// Parses a CSV file into an array of row objects.
csv.fromFile = async function (filepath) {
  return csv2json(csv2jsonOptions).fromFile(filepath)
}

// Parses a CSV string into an array of row objects.
// NOTE(review): the parameter holds CSV content, not a path — the name
// `filepath` is misleading.
csv.fromString = async function (filepath) {
  return csv2json(csv2jsonOptions).fromString(filepath)
}

// Serializes `data` to CSV and writes it to `filepath` (async).
csv.save = async function (filepath, data) {
  const string = json2csv.parse(data)
  return fs.writeFile(filepath, string)
}

// Synchronous variant of save().
csv.saveSync = function (filepath, data) {
  const string = json2csv.parse(data)
  return fs.writeFileSync(filepath, string)
}

module.exports = csv
// json2csv transform: joins array-valued fields into ";"-separated strings (in place).
function flattenArray(row) {
  Object.keys(row).forEach(key => {
    const value = row[key]
    if (Array.isArray(value)) row[key] = value.join(';')
  })
  return row
}

// json2csv transform: renders boolean fields as the literal strings TRUE/FALSE (in place).
function formatBool(row) {
  Object.keys(row).forEach(key => {
    if (row[key] === true) row[key] = 'TRUE'
    else if (row[key] === false) row[key] = 'FALSE'
  })
  return row
}
// Splits a semicolon-separated cell into its non-empty parts.
function listParser(value) {
  return value.split(';').filter(Boolean)
}

// Maps the literal strings TRUE/FALSE to booleans; anything else passes through.
function boolParser(value) {
  if (value === 'TRUE') return true
  if (value === 'FALSE') return false
  return value
}

// Normalizes empty cells to null.
function nullable(value) {
  if (value === '') return null
  return value
}

44
scripts/core/csv.ts Normal file
View file

@@ -0,0 +1,44 @@
import { Collection } from '@freearhey/core'
import { Parser } from '@json2csv/plainjs'
import { stringQuoteOnlyIfNecessary } from '@json2csv/formatters'
export class CSV {
items: Collection
constructor({ items }: { items: Collection }) {
this.items = items
}
toString(): string {
const parser = new Parser({
transforms: [flattenArray, formatBool],
formatters: {
string: stringQuoteOnlyIfNecessary()
},
eol: '\r\n'
})
return parser.parse(this.items.all())
}
}
type Row = { [key: string]: string[] | string | boolean }

/** json2csv transform: joins array-valued fields into ";"-separated strings (in place). */
function flattenArray(row: Row) {
  Object.keys(row).forEach(key => {
    const value = row[key]
    if (Array.isArray(value)) row[key] = value.join(';')
  })
  return row
}

/** json2csv transform: renders boolean fields as the literal strings TRUE/FALSE (in place). */
function formatBool(row: Row) {
  Object.keys(row).forEach(key => {
    if (row[key] === true) row[key] = 'TRUE'
    else if (row[key] === false) row[key] = 'FALSE'
  })
  return row
}

53
scripts/core/csvParser.ts Normal file
View file

@@ -0,0 +1,53 @@
import { Collection } from '@freearhey/core'
import csv2json from 'csvtojson'
// csvtojson options shared by CSVParser: strict column checking, CRLF line
// endings and per-column converters (semicolon lists, TRUE/FALSE booleans,
// ''-to-null). The parser functions referenced here are hoisted declarations
// defined at the bottom of this file.
const opts = {
  checkColumn: true,
  trim: true,
  delimiter: ',',
  eol: '\r\n',
  colParser: {
    alt_names: listParser,
    network: nullable,
    owners: listParser,
    subdivision: nullable,
    city: nullable,
    broadcast_area: listParser,
    languages: listParser,
    categories: listParser,
    is_nsfw: boolParser,
    launched: nullable,
    closed: nullable,
    replaced_by: nullable,
    website: nullable,
    logo: nullable,
    countries: listParser
  }
}
/** Parses raw CSV text into a Collection of typed row objects (see `opts`). */
export class CSVParser {
  async parse(data: string): Promise<Collection> {
    const rows = await csv2json(opts).fromString(data)

    return new Collection(rows)
  }
}
/** Splits a semicolon-separated cell into its non-empty parts. */
function listParser(value: string): string[] {
  return value.split(';').filter(Boolean)
}

/** Maps the literal strings TRUE/FALSE to booleans; anything else passes through. */
function boolParser(value: string): boolean | string {
  if (value === 'TRUE') return true
  if (value === 'FALSE') return false
  return value
}

/** Normalizes empty cells to null. */
function nullable(value: string): string | null {
  if (value === '') return null
  return value
}

View file

@@ -1,78 +0,0 @@
const path = require('path')
const glob = require('glob')
const fs = require('mz/fs')
const crlf = require('crlf')

// Thin promise-based wrapper around common file-system operations.
const file = {}

// Resolves with the file paths matching the glob pattern.
// NOTE(review): a glob error is silently ignored and the promise resolves
// with `files` undefined in that case — verify callers can cope.
file.list = function (pattern) {
  return new Promise(resolve => {
    glob(pattern, function (err, files) {
      resolve(files)
    })
  })
}

// Returns the file name without extension ("data/channels.csv" -> "channels").
file.getFilename = function (filepath) {
  return path.parse(filepath).name
}

// Creates the directory (and any missing parents) unless it already exists.
file.createDir = async function (dir) {
  if (await file.exists(dir)) return
  return fs.mkdir(dir, { recursive: true }).catch(console.error)
}

file.exists = function (filepath) {
  return fs.exists(path.resolve(filepath))
}

// Reads a file as UTF-8 text; logs and resolves undefined on failure.
file.read = function (filepath) {
  return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error)
}

file.append = function (filepath, data) {
  return fs.appendFile(path.resolve(filepath), data).catch(console.error)
}

// Creates the parent directory if needed, then writes the file.
file.create = function (filepath, data = '') {
  filepath = path.resolve(filepath)
  const dir = path.dirname(filepath)
  return file
    .createDir(dir)
    .then(() => file.write(filepath, data))
    .catch(console.error)
}

file.write = function (filepath, data = '') {
  return fs.writeFile(path.resolve(filepath), data, { encoding: 'utf8' }).catch(console.error)
}

// Truncates the file if it exists; resolves true when there is nothing to do.
file.clear = async function (filepath) {
  if (await file.exists(filepath)) return file.write(filepath, '')
  return true
}

file.resolve = function (filepath) {
  return path.resolve(filepath)
}

file.dirname = function (filepath) {
  return path.dirname(filepath)
}

file.basename = function (filepath) {
  return path.basename(filepath)
}

// Detects the line-ending style of a file via the crlf package.
// NOTE(review): on error this rejects but then still calls resolve(endingType)
// — looks like a missing `return` after reject(err); harmless (first
// settlement wins) but worth fixing.
file.eol = function (filepath) {
  return new Promise((resolve, reject) => {
    crlf.get(filepath, null, function (err, endingType) {
      if (err) reject(err)
      resolve(endingType)
    })
  })
}

module.exports = file

View file

@@ -1,3 +0,0 @@
// Barrel file: re-exports the shared core helpers.
exports.csv = require('./csv')
exports.file = require('./file')
exports.logger = require('./logger')

4
scripts/core/index.ts Normal file
View file

@@ -0,0 +1,4 @@
// Barrel file: re-exports the shared core helpers.
export * from './csv'
export * from './issueParser'
export * from './issueLoader'
export * from './csvParser'

View file

@@ -0,0 +1,49 @@
import { Collection } from '@freearhey/core'
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { Octokit } from '@octokit/core'
// NOTE(review): imports from the package index ('./'), which itself
// re-exports this module — looks like a circular import; verify it
// resolves cleanly under the project's module setup.
import { IssueParser } from './'
import { TESTING, OWNER, REPO } from '../constants'

// Octokit client extended with pagination + typed REST endpoint helpers.
const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()
/**
 * Loads GitHub issues carrying the given labels and parses each one into an
 * Issue model. Under NODE_ENV=test, canned fixtures are returned instead of
 * hitting the GitHub API.
 */
export class IssueLoader {
  async load({ labels }: { labels: string[] | string }) {
    const query = Array.isArray(labels) ? labels.join(',') : labels
    let issues: object[] = []

    if (TESTING) {
      // Label set -> fixture module with the corresponding canned issues.
      const fixtures: { [label: string]: string } = {
        'channels:add,approved': '../../tests/__data__/input/issues/channels_add_approved.js',
        'channels:edit,approved': '../../tests/__data__/input/issues/channels_edit_approved.js',
        'channels:remove,approved': '../../tests/__data__/input/issues/channels_remove_approved.js',
        'blocklist:add,approved': '../../tests/__data__/input/issues/blocklist_add_approved.js',
        'blocklist:remove,approved': '../../tests/__data__/input/issues/blocklist_remove_approved.js'
      }
      if (fixtures[query]) {
        issues = require(fixtures[query])
      }
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels: query,
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}

View file

@@ -0,0 +1,66 @@
import { Dictionary } from '@freearhey/core'
import { Issue } from '../models'

// Maps issue-form field labels (as rendered in the issue body) to internal
// data keys. Several labels map to the same key because the "(optional)" /
// "(required)" suffix varies between the different issue forms.
const FIELDS = new Dictionary({
  'Channel ID': 'channel_id',
  'Channel ID (required)': 'channel_id',
  'Channel ID (optional)': 'channel_id',
  'Channel Name': 'name',
  'Alternative Names': 'alt_names',
  'Alternative Names (optional)': 'alt_names',
  Network: 'network',
  'Network (optional)': 'network',
  Owners: 'owners',
  'Owners (optional)': 'owners',
  Country: 'country',
  Subdivision: 'subdivision',
  'Subdivision (optional)': 'subdivision',
  City: 'city',
  'City (optional)': 'city',
  'Broadcast Area': 'broadcast_area',
  Languages: 'languages',
  Categories: 'categories',
  'Categories (optional)': 'categories',
  NSFW: 'is_nsfw',
  Launched: 'launched',
  'Launched (optional)': 'launched',
  Closed: 'closed',
  'Closed (optional)': 'closed',
  'Replaced By': 'replaced_by',
  'Replaced By (optional)': 'replaced_by',
  Website: 'website',
  'Website (optional)': 'website',
  Logo: 'logo',
  Reason: 'reason',
  Notes: 'notes',
  'Notes (optional)': 'notes',
  Reference: 'ref',
  'Reference (optional)': 'ref',
  'Reference (required)': 'ref'
})
export class IssueParser {
parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
const fields = issue.body.split('###')
const data = new Dictionary()
fields.forEach((field: string) => {
let [_label, , _value] = field.split(/\r?\n/)
_label = _label ? _label.trim() : ''
_value = _value ? _value.trim() : ''
if (!_label || !_value) return data
const id: string = FIELDS.get(_label)
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
if (!id) return
data.set(id, value)
})
const labels = issue.labels.map(label => label.name)
return new Issue({ number: issue.number, labels, data })
}
}

View file

@@ -1,13 +0,0 @@
const { Signale } = require('signale')

const options = {}

// Shared CLI logger: signale with its label/scope/badge decorations disabled,
// so output is plain text.
const logger = new Signale(options)
logger.config({
  displayLabel: false,
  displayScope: false,
  displayBadge: false
})

module.exports = logger

View file

@@ -1,19 +0,0 @@
const { csv, file, logger } = require('../core')
const chalk = require('chalk')

const DATA_DIR = process.env.DATA_DIR || './data'
const OUTPUT_DIR = process.env.OUTPUT_DIR || './.api'

/**
 * Converts every CSV file in DATA_DIR into a same-named JSON file in
 * OUTPUT_DIR (e.g. data/channels.csv -> .api/channels.json). Exits with
 * code 1 on the first parse error.
 */
async function main() {
  const files = await file.list(`${DATA_DIR}/*.csv`)
  for (const filepath of files) {
    const filename = file.getFilename(filepath)
    const json = await csv.fromFile(filepath).catch(err => {
      logger.error(chalk.red(`\n${err.message} (${filepath})`))
      process.exit(1)
    })
    // FIX: the output path contained the literal text "$(unknown).json";
    // `filename` was computed above but never used.
    await file.create(`${OUTPUT_DIR}/${filename}.json`, JSON.stringify(json))
  }
}

main()

21
scripts/db/export.ts Normal file
View file

@@ -0,0 +1,21 @@
import { Storage, File } from '@freearhey/core'
import { DATA_DIR, API_DIR } from '../constants'
import { CSVParser } from '../core'

/**
 * Converts every CSV file in DATA_DIR into a same-named JSON file in
 * API_DIR (e.g. data/channels.csv -> .api/channels.json).
 */
async function main() {
  const dataStorage = new Storage(DATA_DIR)
  const apiStorage = new Storage(API_DIR)
  const parser = new CSVParser()

  const files = await dataStorage.list('*.csv')
  for (const filepath of files) {
    const file = new File(filepath)
    const filename = file.name()
    const data = await dataStorage.load(file.basename())
    const items = await parser.parse(data)

    // FIX: the output path contained the literal text "$(unknown).json";
    // `filename` was computed above but never used.
    await apiStorage.save(`${filename}.json`, items.toJSON())
  }
}

main()

View file

@@ -1,172 +0,0 @@
const { csv, file } = require('../core')
const channelScheme = require('../db/schemes/channels')
const { Octokit } = require('@octokit/core')
const { paginateRest } = require('@octokit/plugin-paginate-rest')
// Octokit client extended with automatic pagination support.
const CustomOctokit = Octokit.plugin(paginateRest)
const _ = require('lodash')
const octokit = new CustomOctokit()

const DATA_DIR = process.env.DATA_DIR || './data'
// GitHub repository whose approved issues drive the database updates.
const OWNER = 'iptv-org'
const REPO = 'database'

// Module-level state shared by the handlers below:
// channels — rows of data/channels.csv, mutated in place;
// processedIssues — issues applied in this run, reported via OUTPUT.
let channels = []
let processedIssues = []
/**
 * Entry point: applies approved "channels:*" issues to data/channels.csv.
 * Order matters: removals first, then edits, then additions.
 */
async function main() {
  try {
    const filepath = `${DATA_DIR}/channels.csv`
    channels = await csv.fromFile(filepath)

    await removeChannels()
    await editChannels()
    await addChannels()

    // Keep the file sorted case-insensitively by channel id.
    channels = _.orderBy(channels, [channels => channels.id.toLowerCase()], ['asc'])
    await csv.save(filepath, channels)

    // Consumed by the CI workflow to close the processed issues.
    const output = processedIssues.map(issue => `closes #${issue.number}`).join(', ')
    console.log(`OUTPUT=${output}`)
  } catch (err) {
    console.log(err.message)
  }
}

main()
// Applies "channels:remove,approved" issues by deleting the matching rows.
async function removeChannels() {
  const issues = await fetchIssues('channels:remove,approved')
  for (const { issue, channel } of issues.map(parseIssue)) {
    if (!channel) continue
    const index = _.findIndex(channels, { id: channel.id })
    if (index < 0) continue
    channels.splice(index, 1)
    processedIssues.push(issue)
  }
}
/**
 * Applies "channels:edit,approved" issues: merges the submitted fields into
 * the existing row, then regenerates the channel id.
 */
async function editChannels() {
  const issues = await fetchIssues('channels:edit,approved')
  issues.map(parseIssue).forEach(({ issue, channel }) => {
    if (!channel) return
    const index = _.findIndex(channels, { id: channel.id })
    if (index < 0) return
    const found = channels[index]
    // Only overwrite fields that were actually provided in the issue.
    for (let prop in channel) {
      if (channel[prop] !== undefined) {
        found[prop] = channel[prop]
      }
    }
    // NOTE(review): the id is regenerated unconditionally, even when neither
    // name nor country changed; generateChannelId() returns null when either
    // part is missing — confirm that cannot happen for existing rows.
    found.id = generateChannelId(found.name, found.country)
    channels.splice(index, 1, found)
    processedIssues.push(issue)
  })
}
// Applies "channels:add,approved" issues, skipping ids that already exist.
async function addChannels() {
  const issues = await fetchIssues('channels:add,approved')
  for (const { issue, channel } of issues.map(parseIssue)) {
    if (!channel) continue
    if (channels.some(c => c.id === channel.id)) continue
    channels.push(channel)
    processedIssues.push(issue)
  }
}
// Fetches every issue carrying all of the given (comma-separated) labels,
// following pagination until exhausted.
async function fetchIssues(labels) {
  return octokit.paginate('GET /repos/{owner}/{repo}/issues', {
    owner: OWNER,
    repo: REPO,
    per_page: 100,
    labels,
    headers: {
      'X-GitHub-Api-Version': '2022-11-28'
    }
  })
}
/**
 * Extracts a channel object from a GitHub issue created via the issue form.
 * Returns { issue, channel } — channel contains only the columns defined by
 * the channels scheme, with unsubmitted fields left undefined.
 */
function parseIssue(issue) {
  const buffer = {}
  const channel = {}

  // Issue-form labels -> channels.csv column names. Several labels map to the
  // same column because the "(optional)" suffix varies between forms.
  const fieldLabels = {
    'Channel ID (required)': 'id',
    'Channel Name': 'name',
    'Alternative Names': 'alt_names',
    'Alternative Names (optional)': 'alt_names',
    Network: 'network',
    'Network (optional)': 'network',
    Owners: 'owners',
    'Owners (optional)': 'owners',
    Country: 'country',
    Subdivision: 'subdivision',
    'Subdivision (optional)': 'subdivision',
    City: 'city',
    'City (optional)': 'city',
    'Broadcast Area': 'broadcast_area',
    Languages: 'languages',
    Categories: 'categories',
    'Categories (optional)': 'categories',
    NSFW: 'is_nsfw',
    Launched: 'launched',
    'Launched (optional)': 'launched',
    Closed: 'closed',
    'Closed (optional)': 'closed',
    'Replaced By': 'replaced_by',
    'Replaced By (optional)': 'replaced_by',
    Website: 'website',
    'Website (optional)': 'website',
    Logo: 'logo'
  }

  // Each "###" heading starts one form section: label line, blank line, value line.
  const fields = issue.body.split('###')

  // NOTE(review): String.split always yields at least one element, so this
  // guard can never trigger.
  if (!fields.length) return { issue, channel: null }

  fields.forEach(item => {
    const [fieldLabel, , value] = item.split(/\r?\n/)
    const field = fieldLabel ? fieldLabels[fieldLabel.trim()] : null
    if (!field) return
    // NOTE(review): assumes a value line is always present for known labels;
    // `value.includes` would throw on undefined — verify against the forms.
    buffer[field] = value.includes('_No response_') ? undefined : value.trim()
  })

  // Copy only the columns defined by the channels scheme, in scheme order.
  for (let field of Object.keys(channelScheme)) {
    channel[field] = buffer[field]
  }

  // Derive the id when the form did not provide one.
  if (!channel.id) {
    channel.id = generateChannelId(channel.name, channel.country)
  }

  return { issue, channel }
}
/**
 * Builds a channel id of the form "<NameSlug>.<countrycode>": "+" becomes
 * "Plus", a leading "@" becomes "At" and all other non-alphanumeric
 * characters are stripped. Returns null when either part is missing.
 */
function generateChannelId(name, country) {
  if (!name || !country) return null

  const slug = name
    .replace(/\+/gi, 'Plus')
    .replace(/^@/gi, 'At')
    .replace(/[^a-z\d]+/gi, '')

  return `${slug}.${country.toLowerCase()}`
}

182
scripts/db/update.ts Normal file
View file

@@ -0,0 +1,182 @@
import { CSV, IssueLoader, CSVParser } from '../core'
import { Channel, Blocked, Issue } from '../models'
import { DATA_DIR } from '../constants'
import { Storage, Collection } from '@freearhey/core'
// Module-level state shared by the handlers below; channels/blocklist are
// (re)assigned in main() after loading the CSV files.
let blocklist = new Collection()
let channels = new Collection()
// Issues applied in this run; reported via OUTPUT for the CI workflow.
const processedIssues = new Collection()
/**
 * Entry point: loads channels.csv and blocklist.csv, applies all approved
 * issues (removals before edits before additions), then writes both files
 * back sorted and reports processed issue numbers via OUTPUT.
 */
async function main() {
  const dataStorage = new Storage(DATA_DIR)
  const parser = new CSVParser()

  const _channels = await dataStorage.load('channels.csv')
  channels = (await parser.parse(_channels)).map(data => new Channel(data))

  const _blocklist = await dataStorage.load('blocklist.csv')
  blocklist = (await parser.parse(_blocklist)).map(data => new Blocked(data))

  const loader = new IssueLoader()

  await removeChannels({ loader })
  await editChannels({ loader })
  await addChannels({ loader })
  await blockChannels({ loader })
  await unblockChannels({ loader })

  // NOTE(review): channels are ordered by raw id while blocklist is ordered
  // by lowercased channel id (the old script lowercased both) — confirm the
  // case-sensitivity difference is intended.
  channels = channels.orderBy([(channel: Channel) => channel.id], ['asc'])
  const channelsOutput = new CSV({ items: channels }).toString()
  await dataStorage.save('channels.csv', channelsOutput)

  blocklist = blocklist.orderBy([record => record.channel.toLowerCase()], ['asc'])
  const blocklistOutput = new CSV({ items: blocklist }).toString()
  await dataStorage.save('blocklist.csv', blocklistOutput)

  // Consumed by the CI workflow to close the processed issues.
  const output = processedIssues.map((issue: Issue) => `closes #${issue.number}`).join(', ')
  process.stdout.write(`OUTPUT=${output}`)
}

main()
async function removeChannels({ loader }: { loader: IssueLoader }) {
const issues = await loader.load({ labels: ['channels:remove,approved'] })
issues.forEach((issue: Issue) => {
if (issue.data.missing('channel_id')) return
const found = channels.first((channel: Channel) => channel.id === issue.data.get('channel_id'))
if (!found) return
channels.remove((channel: Channel) => channel.id === found.id)
processedIssues.push(issue)
})
}
/**
 * Applies "channels:edit,approved" issues: updates the matching channel row
 * with the submitted fields and regenerates its id when name and/or country
 * changed.
 */
async function editChannels({ loader }: { loader: IssueLoader }) {
  const issues = await loader.load({ labels: ['channels:edit,approved'] })
  issues.forEach((issue: Issue) => {
    const data = issue.data
    if (data.missing('channel_id')) return

    const found: Channel = channels.first(
      (channel: Channel) => channel.id === data.get('channel_id')
    )
    if (!found) return

    // Recompute the id only when the issue changes name and/or country,
    // falling back to the current values for the unchanged part.
    let channelId = found.id
    if (data.has('name') || data.has('country')) {
      const name = data.get('name') || found.name
      const country = data.get('country') || found.country
      channelId = generateChannelId(name, country)
    }

    found.update({
      id: channelId,
      name: data.get('name'),
      alt_names: data.get('alt_names'),
      network: data.get('network'),
      owners: data.get('owners'),
      country: data.get('country'),
      subdivision: data.get('subdivision'),
      city: data.get('city'),
      broadcast_area: data.get('broadcast_area'),
      languages: data.get('languages'),
      categories: data.get('categories'),
      is_nsfw: data.get('is_nsfw'),
      launched: data.get('launched'),
      closed: data.get('closed'),
      replaced_by: data.get('replaced_by'),
      website: data.get('website'),
      logo: data.get('logo')
    })
    processedIssues.push(issue)
  })
}
/**
 * Applies "channels:add,approved" issues: derives the new channel id from
 * name + country and appends the row unless that id already exists.
 */
async function addChannels({ loader }: { loader: IssueLoader }) {
  const issues = await loader.load({ labels: ['channels:add,approved'] })
  issues.forEach((issue: Issue) => {
    const data = issue.data
    if (data.missing('name') || data.missing('country')) return

    const channelId = generateChannelId(data.get('name'), data.get('country'))

    const found: Channel = channels.first((channel: Channel) => channel.id === channelId)
    if (found) return

    channels.push(
      new Channel({
        id: channelId,
        name: data.get('name'),
        alt_names: data.get('alt_names'),
        network: data.get('network'),
        owners: data.get('owners'),
        country: data.get('country'),
        subdivision: data.get('subdivision'),
        city: data.get('city'),
        broadcast_area: data.get('broadcast_area'),
        languages: data.get('languages'),
        categories: data.get('categories'),
        is_nsfw: data.get('is_nsfw'),
        launched: data.get('launched'),
        closed: data.get('closed'),
        replaced_by: data.get('replaced_by'),
        website: data.get('website'),
        logo: data.get('logo')
      })
    )
    processedIssues.push(issue)
  })
}
/** Applies "blocklist:remove,approved" issues by dropping matching blocklist rows. */
async function unblockChannels({ loader }: { loader: IssueLoader }) {
  const issues = await loader.load({ labels: ['blocklist:remove,approved'] })
  issues.forEach((issue: Issue) => {
    if (issue.data.missing('channel_id')) return

    const channelId = issue.data.get('channel_id')
    const target: Blocked = blocklist.first((blocked: Blocked) => blocked.channel === channelId)
    if (!target) return

    blocklist.remove((blocked: Blocked) => blocked.channel === target.channel)
    processedIssues.push(issue)
  })
}
/** Applies "blocklist:add,approved" issues by appending new blocklist rows. */
async function blockChannels({ loader }: { loader: IssueLoader }) {
  const issues = await loader.load({ labels: ['blocklist:add,approved'] })
  issues.forEach((issue: Issue) => {
    if (issue.data.missing('channel_id')) return

    const channelId = issue.data.get('channel_id')
    const existing: Blocked = blocklist.first((blocked: Blocked) => blocked.channel === channelId)
    if (existing) return

    blocklist.push(
      new Blocked({
        channel: channelId,
        ref: issue.data.get('ref')
      })
    )
    processedIssues.push(issue)
  })
}
/**
 * Builds a channel id of the form "<NameSlug>.<countrycode>": "+" becomes
 * "Plus", a leading "@" becomes "At" and all other non-alphanumeric
 * characters are stripped. Callers guard that name and country are present.
 */
function generateChannelId(name: string, country: string): string {
  const slug = name.replace(/\+/gi, 'Plus').replace(/^@/gi, 'At').replace(/[^a-z\d]+/gi, '')

  return `${slug}.${country.toLowerCase()}`
}

View file

@@ -1,334 +0,0 @@
const { transliterate } = require('transliteration')
const { logger, file, csv } = require('../core')
const { program } = require('commander')
const schemes = require('./schemes')
const chalk = require('chalk')
const Joi = require('joi')
const _ = require('lodash')

program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

// Every data file the validator knows about; all of them are always loaded
// (for cross-file checks), and all are validated unless a filepath argument
// narrows the second pass.
const allFiles = [
  'data/blocklist.csv',
  'data/categories.csv',
  'data/channels.csv',
  'data/countries.csv',
  'data/languages.csv',
  'data/regions.csv',
  'data/subdivisions.csv'
]

// db: rows keyed by their lookup field (id/code/channel), per filename.
// files: raw row arrays, per filename. Both populated by main().
let db = {}
let files = {}
/**
 * Validates all data/*.csv files: structural checks (CRLF line endings,
 * column counts, no trailing blank lines), cross-file reference checks and
 * per-row Joi scheme validation. Exits with code 1 on the first structural
 * error or, after a full run, when any validation error was collected.
 */
async function main() {
  let globalErrors = []

  // First pass: structural checks + load every file into db/files.
  for (let filepath of allFiles) {
    if (!filepath.endsWith('.csv')) continue

    const csvString = await file.read(filepath)
    if (/\s+$/.test(csvString))
      return handleError(`Error: empty lines at the end of file not allowed (${filepath})`)

    const rows = csvString.split(/\r\n/)
    const headers = rows[0].split(',')
    for (let [i, line] of rows.entries()) {
      if (line.indexOf('\n') > -1)
        return handleError(
          `Error: row ${i + 1} has the wrong line ending character, should be CRLF (${filepath})`
        )
      // Split on commas that are outside of quoted fields.
      if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length)
        return handleError(`Error: row ${i + 1} has the wrong number of columns (${filepath})`)
    }

    const filename = file.getFilename(filepath)
    let data = await csv
      .fromString(csvString)
      .catch(err => handleError(`${err.message} (${filepath})`))

    // Index each file by its natural key for the cross-reference checks below.
    let grouped
    switch (filename) {
      case 'blocklist':
        grouped = _.keyBy(data, 'channel')
        break
      case 'categories':
      case 'channels':
        grouped = _.keyBy(data, 'id')
        break
      default:
        grouped = _.keyBy(data, 'code')
        break
    }

    db[filename] = grouped
    files[filename] = data
  }

  // Second pass: validate either the requested files or all of them.
  const toCheck = program.args.length ? program.args : allFiles
  for (const filepath of toCheck) {
    const filename = file.getFilename(filepath)
    // FIX: the message previously contained the literal placeholder
    // "$(unknown)" instead of the interpolated filename.
    if (!schemes[filename]) return handleError(`Error: "${filename}" scheme is missing`)

    const rows = files[filename]
    const rowsCopy = JSON.parse(JSON.stringify(rows))

    let fileErrors = []
    if (filename === 'channels') {
      fileErrors = fileErrors.concat(findDuplicatesById(rowsCopy))
      // fileErrors = fileErrors.concat(findDuplicatesByName(rowsCopy))
      for (const [i, row] of rowsCopy.entries()) {
        fileErrors = fileErrors.concat(validateChannelId(row, i))
        fileErrors = fileErrors.concat(validateChannelBroadcastArea(row, i))
        fileErrors = fileErrors.concat(validateChannelSubdivision(row, i))
        fileErrors = fileErrors.concat(validateChannelCategories(row, i))
        fileErrors = fileErrors.concat(validateChannelReplacedBy(row, i))
        fileErrors = fileErrors.concat(validateChannelLanguages(row, i))
        fileErrors = fileErrors.concat(validateChannelCountry(row, i))
      }
    } else if (filename === 'blocklist') {
      for (const [i, row] of rowsCopy.entries()) {
        fileErrors = fileErrors.concat(validateChannel(row, i))
      }
    } else if (filename === 'countries') {
      for (const [i, row] of rowsCopy.entries()) {
        fileErrors = fileErrors.concat(validateCountryLanguages(row, i))
      }
    } else if (filename === 'subdivisions') {
      for (const [i, row] of rowsCopy.entries()) {
        fileErrors = fileErrors.concat(validateSubdivisionCountry(row, i))
      }
    } else if (filename === 'regions') {
      for (const [i, row] of rowsCopy.entries()) {
        fileErrors = fileErrors.concat(validateRegionCountries(row, i))
      }
    }

    // Per-row scheme validation (runs against the unmodified rows).
    const schema = Joi.object(schemes[filename])
    rows.forEach((row, i) => {
      const { error } = schema.validate(row, { abortEarly: false })
      if (error) {
        error.details.forEach(detail => {
          // +2: one for the CSV header row, one for 1-based line numbers.
          fileErrors.push({ line: i + 2, message: detail.message })
        })
      }
    })

    if (fileErrors.length) {
      logger.info(`\n${chalk.underline(filepath)}`)
      fileErrors.forEach(err => {
        const position = err.line.toString().padEnd(6, ' ')
        logger.info(` ${chalk.gray(position)} ${err.message}`)
      })
      globalErrors = globalErrors.concat(fileErrors)
    }
  }

  if (globalErrors.length) return handleError(`${globalErrors.length} error(s)`)
}

main()
// Reports rows whose id (case-insensitive) was already seen on an earlier row.
function findDuplicatesById(rows) {
  const errors = []
  const seen = new Set()
  rows.forEach((row, i) => {
    const id = row.id.toLowerCase()
    if (seen.has(id)) {
      errors.push({
        line: i + 2,
        message: `entry with the id "${row.id}" already exists`
      })
    }
    seen.add(id)
  })
  return errors
}
// Reports rows whose name (case-insensitive) was already seen on an earlier
// row. Note: currently only referenced from a commented-out call in main().
function findDuplicatesByName(rows) {
  const errors = []
  const seen = new Set()
  rows.forEach((row, i) => {
    const name = row.name.toLowerCase()
    if (seen.has(name)) {
      errors.push({
        line: i + 2,
        message: `entry with the name "${row.name}" already exists`
      })
    }
    seen.add(name)
  })
  return errors
}
// Checks that row.id equals "<normalized name>.<lowercased country>".
function validateChannelId(row, i) {
  const errors = []
  let name = normalize(row.name)
  let code = row.country.toLowerCase()
  let expected = `${name}.${code}`
  if (expected !== row.id) {
    errors.push({
      line: i + 2,
      message: `"${row.id}" must be derived from the channel name "${row.name}" and the country code "${row.country}"`
    })
  }
  // Transliterates to ASCII, applies the special-character replacements
  // (leading @ -> At, leading & -> And, + -> Plus, " -N" -> " MinusN"),
  // then strips all remaining non-alphanumeric characters.
  function normalize(name) {
    let translit = transliterate(name)
    return translit
      .replace(/^@/i, 'At')
      .replace(/^&/i, 'And')
      .replace(/\+/gi, 'Plus')
      .replace(/\s\-(\d)/gi, ' Minus$1')
      .replace(/[^a-z\d]+/gi, '')
  }
  return errors
}
// Checks every category of the channel against the known categories.
function validateChannelCategories(row, i) {
  const errors = []
  for (const category of row.categories) {
    if (db.categories[category]) continue
    errors.push({
      line: i + 2,
      message: `"${row.id}" has the wrong category "${category}"`
    })
  }
  return errors
}
// Checks that the channel's country is a known country code.
function validateChannelCountry(row, i) {
  if (db.countries[row.country]) return []
  return [
    {
      line: i + 2,
      message: `"${row.id}" has the wrong country "${row.country}"`
    }
  ]
}
// Checks that replaced_by (when set) points at an existing channel id.
function validateChannelReplacedBy(row, i) {
  if (!row.replaced_by || db.channels[row.replaced_by]) return []
  return [
    {
      line: i + 2,
      message: `"${row.id}" has the wrong replaced_by "${row.replaced_by}"`
    }
  ]
}
// Checks that the subdivision (when set) is a known subdivision code.
function validateChannelSubdivision(row, i) {
  if (!row.subdivision || db.subdivisions[row.subdivision]) return []
  return [
    {
      line: i + 2,
      message: `"${row.id}" has the wrong subdivision "${row.subdivision}"`
    }
  ]
}
// Checks each broadcast_area entry ("r/<region>", "c/<country>" or
// "s/<subdivision>") against the corresponding lookup table.
function validateChannelBroadcastArea(row, i) {
  const errors = []
  const tables = { r: 'regions', c: 'countries', s: 'subdivisions' }
  for (const area of row.broadcast_area) {
    const [type, code] = area.split('/')
    if (tables[type] && !db[tables[type]][code]) {
      errors.push({
        line: i + 2,
        message: `"${row.id}" has the wrong broadcast_area "${area}"`
      })
    }
  }
  return errors
}
// Checks every language of the channel against the known languages.
function validateChannelLanguages(row, i) {
  const errors = []
  for (const language of row.languages) {
    if (db.languages[language]) continue
    errors.push({
      line: i + 2,
      message: `"${row.id}" has the wrong language "${language}"`
    })
  }
  return errors
}
// Checks that a blocklist row references an existing channel.
function validateChannel(row, i) {
  if (db.channels[row.channel]) return []
  return [
    {
      line: i + 2,
      message: `"${row.channel}" is missing in the channels.csv`
    }
  ]
}
// Checks every language of the country against the known languages.
function validateCountryLanguages(row, i) {
  const errors = []
  for (const lang of row.languages) {
    if (db.languages[lang]) continue
    errors.push({
      line: i + 2,
      message: `"${row.code}" has the wrong language "${lang}"`
    })
  }
  return errors
}
// Checks that the subdivision's country is a known country code.
function validateSubdivisionCountry(row, i) {
  if (db.countries[row.country]) return []
  return [
    {
      line: i + 2,
      message: `"${row.code}" has the wrong country "${row.country}"`
    }
  ]
}
// Checks every country of the region against the known countries.
function validateRegionCountries(row, i) {
  const errors = []
  for (const country of row.countries) {
    if (db.countries[country]) continue
    errors.push({
      line: i + 2,
      message: `"${row.code}" has the wrong country "${country}"`
    })
  }
  return errors
}
// Prints the message in red and aborts the whole run with exit code 1.
function handleError(message) {
  logger.error(chalk.red(`\n${message}`))
  process.exit(1)
}

271
scripts/db/validate.ts Normal file
View file

@@ -0,0 +1,271 @@
import { Collection, Storage, File, Dictionary, Logger } from '@freearhey/core'
import { DATA_DIR } from '../constants'
import { transliterate } from 'transliteration'
import { program } from 'commander'
import Joi from 'joi'
import { CSVParser } from '../core'
import chalk from 'chalk'
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)

const logger = new Logger()

// buffer: rows keyed by their lookup field (id/code/channel), per filename.
// files: parsed row Collections, per filename. Both populated by main().
const buffer = new Dictionary()
const files = new Dictionary()

// Joi validation schemes, one per data file name.
// NOTE(review): CommonJS require inside an ES-style module — presumably fine
// under the project's CommonJS/ts-node setup; verify.
const schemes: { [key: string]: object } = require('../schemes')
/**
 * Validates all data/*.csv files: structural checks (CRLF line endings,
 * column counts, no trailing blank lines), cross-file reference checks and
 * per-row Joi scheme validation. Exits with code 1 on the first structural
 * error or, after a full run, when any validation error was collected.
 */
async function main() {
  const dataStorage = new Storage(DATA_DIR)
  const _files = await dataStorage.list('*.csv')
  let globalErrors = new Collection()
  const parser = new CSVParser()

  // First pass: structural checks + load every file into buffer/files.
  for (const filepath of _files) {
    const file = new File(filepath)
    if (file.extension() !== 'csv') continue

    const csv = await dataStorage.load(file.basename())
    if (/\s+$/.test(csv))
      return handleError(`Error: empty lines at the end of file not allowed (${filepath})`)

    const rows = csv.split(/\r\n/)
    const headers = rows[0].split(',')
    for (const [i, line] of rows.entries()) {
      if (line.indexOf('\n') > -1)
        return handleError(
          `Error: row ${i + 1} has the wrong line ending character, should be CRLF (${filepath})`
        )
      // Split on commas that are outside of quoted fields.
      if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length)
        return handleError(`Error: row ${i + 1} has the wrong number of columns (${filepath})`)
    }

    const data = await parser.parse(csv)
    const filename = file.name()

    // Index each file by its natural key for the cross-reference checks below.
    let grouped
    switch (filename) {
      case 'blocklist':
        grouped = data.keyBy(item => item.channel)
        break
      case 'categories':
      case 'channels':
        grouped = data.keyBy(item => item.id)
        break
      default:
        grouped = data.keyBy(item => item.code)
        break
    }

    buffer.set(filename, grouped)
    files.set(filename, data)
  }

  // Second pass: validate either the requested files or all of them.
  const filesToCheck = program.args.length ? program.args : _files
  for (const filepath of filesToCheck) {
    const file = new File(filepath)
    const filename = file.name()
    // FIX: the message previously contained the literal placeholder
    // "$(unknown)" instead of the interpolated filename.
    if (!schemes[filename]) return handleError(`Error: "${filename}" scheme is missing`)

    const rows: Collection = files.get(filename)
    const rowsCopy = JSON.parse(JSON.stringify(rows.all()))

    let fileErrors = new Collection()
    switch (filename) {
      case 'channels':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'id'))
        for (const [i, row] of rowsCopy.entries()) {
          fileErrors = fileErrors.concat(validateChannelId(row, i))
          fileErrors = fileErrors.concat(validateChannelBroadcastArea(row, i))
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'id', 'subdivision', buffer.get('subdivisions'))
          )
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'id', 'categories', buffer.get('categories'))
          )
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'id', 'replaced_by', buffer.get('channels'))
          )
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'id', 'languages', buffer.get('languages'))
          )
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'id', 'country', buffer.get('countries'))
          )
        }
        break
      case 'blocklist':
        for (const [i, row] of rowsCopy.entries()) {
          fileErrors = fileErrors.concat(validateChannel(row.channel, i))
        }
        break
      case 'countries':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code'))
        for (const [i, row] of rowsCopy.entries()) {
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'code', 'languages', buffer.get('languages'))
          )
        }
        break
      case 'subdivisions':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code'))
        for (const [i, row] of rowsCopy.entries()) {
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'code', 'country', buffer.get('countries'))
          )
        }
        break
      case 'regions':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code'))
        for (const [i, row] of rowsCopy.entries()) {
          fileErrors = fileErrors.concat(
            checkValue(i, row, 'code', 'countries', buffer.get('countries'))
          )
        }
        break
      case 'categories':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'id'))
        break
      case 'languages':
        fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code'))
        break
    }

    // Per-row scheme validation (runs against the unmodified rows).
    const schema = Joi.object(schemes[filename])
    rows.forEach((row: string | string[] | boolean, i: number) => {
      const { error } = schema.validate(row, { abortEarly: false })
      if (error) {
        error.details.forEach(detail => {
          // +2: one for the CSV header row, one for 1-based line numbers.
          fileErrors.push({ line: i + 2, message: detail.message })
        })
      }
    })

    if (fileErrors.count()) {
      logger.info(`\n${chalk.underline(filepath)}`)
      fileErrors.forEach(err => {
        const position = err.line.toString().padEnd(6, ' ')
        logger.info(` ${chalk.gray(position)} ${err.message}`)
      })
      globalErrors = globalErrors.concat(fileErrors)
    }
  }

  if (globalErrors.count()) return handleError(`${globalErrors.count()} error(s)`)
}

main()
/**
 * Checks that every value stored under `row[field]` exists in `collection`.
 * Returns a Collection of error objects ({ line, message }); empty when valid.
 *
 * `i` is the zero-based row index; reported line numbers add 2 to account
 * for the CSV header row and 1-based numbering.
 */
function checkValue(
  i: number,
  row: { [key: string]: string[] | string | boolean },
  key: string,
  field: string,
  collection: Collection
) {
  const errors = new Collection()

  // Normalize the cell to a list: arrays pass through, a single string is
  // wrapped, anything else (e.g. a boolean) yields nothing to check.
  const cell = row[field]
  let candidates: string[]
  if (Array.isArray(cell)) {
    candidates = cell
  } else if (typeof cell === 'string') {
    candidates = [cell]
  } else {
    candidates = []
  }

  for (const candidate of candidates) {
    if (collection.missing(candidate)) {
      errors.push({
        line: i + 2,
        message: `"${row[key]}" has an invalid ${field} "${candidate}"`
      })
    }
  }

  return errors
}
/**
 * Reports an error when `channelId` is not present in channels.csv.
 * Returns a Collection with at most one { line, message } entry.
 */
function validateChannel(channelId: string, i: number) {
  const errors = new Collection()
  const knownChannels = buffer.get('channels')

  if (knownChannels.missing(channelId)) {
    errors.push({
      line: i + 2,
      message: `"${channelId}" is missing in the channels.csv`
    })
  }

  return errors
}
/**
 * Flags every row whose `key` value repeats an earlier row's value,
 * compared case-insensitively. Returns a Collection of { line, message }
 * errors; the first occurrence of a value is never flagged.
 */
function findDuplicatesBy(rows: { [key: string]: string }[], key: string) {
  const errors = new Collection()
  const seen = new Set<string>()

  rows.forEach((row, i) => {
    const normalized = row[key].toLowerCase()
    if (seen.has(normalized)) {
      errors.push({
        line: i + 2,
        message: `entry with the ${key} "${row[key]}" already exists`
      })
    }
    seen.add(normalized)
  })

  return errors
}
/**
 * Verifies that `row.id` equals the normalized channel name joined with the
 * lower-cased country code, i.e. "<NormalizedName>.<cc>".
 * Returns a Collection with at most one { line, message } entry.
 */
function validateChannelId(row: { [key: string]: string }, i: number) {
  const errors = new Collection()

  // Reduce the display name to transliterated alphanumerics, with named
  // replacements for a few leading/embedded symbols.
  const normalize = (name: string) =>
    transliterate(name)
      .replace(/^@/i, 'At')
      .replace(/^&/i, 'And')
      .replace(/\+/gi, 'Plus')
      .replace(/\s-(\d)/gi, ' Minus$1')
      .replace(/[^a-z\d]+/gi, '')

  const expectedId = `${normalize(row.name)}.${row.country.toLowerCase()}`
  if (expectedId !== row.id) {
    errors.push({
      line: i + 2,
      message: `"${row.id}" must be derived from the channel name "${row.name}" and the country code "${row.country}"`
    })
  }

  return errors
}
/**
 * Validates every entry of `row.broadcast_area`.
 *
 * Each entry must have the form "<type>/<code>" where type is one of:
 *   r — region, c — country, s — subdivision
 * and the code must exist in the matching collection loaded into `buffer`.
 *
 * Fix: previously an entry with an unknown type prefix — or with no "/"
 * separator at all — matched none of the three checks and was silently
 * accepted; such entries are now reported as invalid too.
 */
function validateChannelBroadcastArea(row: { [key: string]: string[] }, i: number) {
  const errors = new Collection()
  const regions = buffer.get('regions')
  const countries = buffer.get('countries')
  const subdivisions = buffer.get('subdivisions')

  row.broadcast_area.forEach((areaCode: string) => {
    const [type, code] = areaCode.split('/')

    let invalid: boolean
    switch (type) {
      case 'r':
        invalid = regions.missing(code)
        break
      case 'c':
        invalid = countries.missing(code)
        break
      case 's':
        invalid = subdivisions.missing(code)
        break
      default:
        // unknown or missing type prefix is always an error
        invalid = true
    }

    if (invalid) {
      errors.push({
        line: i + 2,
        message: `"${row.id}" has the wrong broadcast_area "${areaCode}"`
      })
    }
  })

  return errors
}
/** Logs `message` in red and terminates the process with exit code 1. */
function handleError(message: string) {
  const formatted = chalk.red(message)
  logger.error(formatted)
  process.exit(1)
}

14
scripts/models/blocked.ts Normal file
View file

@@ -0,0 +1,14 @@
type BlockedProps = {
  channel: string
  ref: string
}

/**
 * A single blocklist entry: the blocked channel's id plus the reference
 * (e.g. a URL) that documents why it is blocked.
 */
export class Blocked {
  channel: string
  ref: string

  constructor(props: BlockedProps) {
    this.channel = props.channel
    this.ref = props.ref
  }
}

85
scripts/models/channel.ts Normal file
View file

@@ -0,0 +1,85 @@
type ChannelProps = {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: string[]
  country: string
  subdivision: string
  city: string
  broadcast_area: string[]
  languages: string[]
  categories: string[]
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
  logo: string
}

/**
 * A single row of channels.csv. All fields are copied verbatim from the
 * parsed CSV record; list-valued columns arrive as string arrays.
 */
export class Channel {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: string[]
  country: string
  subdivision: string
  city: string
  broadcast_area: string[]
  languages: string[]
  categories: string[]
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
  logo: string

  constructor({
    id,
    name,
    alt_names,
    network,
    owners,
    country,
    subdivision,
    city,
    broadcast_area,
    languages,
    categories,
    is_nsfw,
    launched,
    closed,
    replaced_by,
    website,
    logo
  }: ChannelProps) {
    this.id = id
    this.name = name
    this.alt_names = alt_names
    this.network = network
    this.owners = owners
    this.country = country
    this.subdivision = subdivision
    this.city = city
    this.broadcast_area = broadcast_area
    this.languages = languages
    this.categories = categories
    this.is_nsfw = is_nsfw
    this.launched = launched
    this.closed = closed
    this.replaced_by = replaced_by
    this.website = website
    this.logo = logo
  }

  /**
   * Overwrites declared fields with the non-empty values from `data`.
   * Keys not declared on the channel are ignored, as are empty/undefined
   * incoming values.
   *
   * Fix: the guard used to require the CURRENT value to be truthy
   * (`this[key] && data[key]`), which made it impossible to fill in a
   * field that was still empty (e.g. a blank `closed` date). We now only
   * require the key to exist on the instance.
   */
  update(data: { [key: string]: string }) {
    for (const key in data) {
      if (key in this && data[key]) {
        this[key] = data[key]
      }
    }
  }
}

3
scripts/models/index.ts Normal file
View file

@@ -0,0 +1,3 @@
export * from './channel'
export * from './issue'
export * from './blocked'

19
scripts/models/issue.ts Normal file
View file

@@ -0,0 +1,19 @@
import { Dictionary } from '@freearhey/core'
type IssueProps = {
  number: number
  labels: string[]
  data: Dictionary
}

/**
 * A GitHub issue as consumed by the scripts: its number, the labels
 * attached to it, and the data parsed from its body.
 */
export class Issue {
  number: number
  labels: string[]
  data: Dictionary

  constructor(props: IssueProps) {
    this.number = props.number
    this.labels = props.labels
    this.data = props.data
  }
}