diff --git a/scripts/constants.ts b/scripts/constants.ts new file mode 100644 index 00000000..258e056a --- /dev/null +++ b/scripts/constants.ts @@ -0,0 +1,5 @@ +export const OWNER = 'iptv-org' +export const REPO = 'database' +export const DATA_DIR = process.env.DATA_DIR || './data' +export const API_DIR = process.env.API_DIR || './.api' +export const TESTING = process.env.NODE_ENV === 'test' ? true : false diff --git a/scripts/core/csv.js b/scripts/core/csv.js deleted file mode 100644 index a2493bf8..00000000 --- a/scripts/core/csv.js +++ /dev/null @@ -1,105 +0,0 @@ -const csv2json = require('csvtojson') -const chalk = require('chalk') -const logger = require('./logger') -const fs = require('mz/fs') -const { - Parser, - transforms: { flatten }, - formatters: { stringQuoteOnlyIfNecessary } -} = require('json2csv') - -const csv2jsonOptions = { - checkColumn: true, - trim: true, - delimiter: ',', - eol: '\r\n', - colParser: { - alt_names: listParser, - network: nullable, - owners: listParser, - subdivision: nullable, - city: nullable, - broadcast_area: listParser, - languages: listParser, - categories: listParser, - is_nsfw: boolParser, - launched: nullable, - closed: nullable, - replaced_by: nullable, - website: nullable, - logo: nullable, - countries: listParser - } -} - -const json2csv = new Parser({ - transforms: [flattenArray, formatBool], - formatters: { - string: stringQuoteOnlyIfNecessary() - }, - eol: '\r\n' -}) - -const csv = {} - -csv.fromFile = async function (filepath) { - return csv2json(csv2jsonOptions).fromFile(filepath) -} - -csv.fromString = async function (filepath) { - return csv2json(csv2jsonOptions).fromString(filepath) -} - -csv.save = async function (filepath, data) { - const string = json2csv.parse(data) - - return fs.writeFile(filepath, string) -} - -csv.saveSync = function (filepath, data) { - const string = json2csv.parse(data) - - return fs.writeFileSync(filepath, string) -} - -module.exports = csv - -function flattenArray(item) { - for 
(let prop in item) { - const value = item[prop] - item[prop] = Array.isArray(value) ? value.join(';') : value - } - - return item -} - -function formatBool(item) { - for (let prop in item) { - if (item[prop] === false) { - item[prop] = 'FALSE' - } else if (item[prop] === true) { - item[prop] = 'TRUE' - } - } - - return item -} - -function listParser(value) { - return value.split(';').filter(i => i) -} - -function boolParser(value) { - switch (value) { - case 'TRUE': - return true - case 'FALSE': - return false - default: - return value - } -} - -function nullable(value) { - return value === '' ? null : value -} diff --git a/scripts/core/csv.ts b/scripts/core/csv.ts new file mode 100644 index 00000000..a5edb74a --- /dev/null +++ b/scripts/core/csv.ts @@ -0,0 +1,44 @@ +import { Collection } from '@freearhey/core' +import { Parser } from '@json2csv/plainjs' +import { stringQuoteOnlyIfNecessary } from '@json2csv/formatters' + +export class CSV { + items: Collection + + constructor({ items }: { items: Collection }) { + this.items = items + } + + toString(): string { + const parser = new Parser({ + transforms: [flattenArray, formatBool], + formatters: { + string: stringQuoteOnlyIfNecessary() + }, + eol: '\r\n' + }) + + return parser.parse(this.items.all()) + } +} + +function flattenArray(item: { [key: string]: string[] | string | boolean }) { + for (const prop in item) { + const value = item[prop] + item[prop] = Array.isArray(value) ? 
value.join(';') : value + } + + return item +} + +function formatBool(item: { [key: string]: string[] | string | boolean }) { + for (const prop in item) { + if (item[prop] === false) { + item[prop] = 'FALSE' + } else if (item[prop] === true) { + item[prop] = 'TRUE' + } + } + + return item +} diff --git a/scripts/core/csvParser.ts b/scripts/core/csvParser.ts new file mode 100644 index 00000000..15451a06 --- /dev/null +++ b/scripts/core/csvParser.ts @@ -0,0 +1,53 @@ +import { Collection } from '@freearhey/core' +import csv2json from 'csvtojson' + +const opts = { + checkColumn: true, + trim: true, + delimiter: ',', + eol: '\r\n', + colParser: { + alt_names: listParser, + network: nullable, + owners: listParser, + subdivision: nullable, + city: nullable, + broadcast_area: listParser, + languages: listParser, + categories: listParser, + is_nsfw: boolParser, + launched: nullable, + closed: nullable, + replaced_by: nullable, + website: nullable, + logo: nullable, + countries: listParser + } +} + +export class CSVParser { + async parse(data: string): Promise { + const items = await csv2json(opts).fromString(data) + + return new Collection(items) + } +} + +function listParser(value: string) { + return value.split(';').filter(i => i) +} + +function boolParser(value: string) { + switch (value) { + case 'TRUE': + return true + case 'FALSE': + return false + default: + return value + } +} + +function nullable(value: string) { + return value === '' ? 
null : value +} diff --git a/scripts/core/file.js b/scripts/core/file.js deleted file mode 100644 index 440b3928..00000000 --- a/scripts/core/file.js +++ /dev/null @@ -1,78 +0,0 @@ -const path = require('path') -const glob = require('glob') -const fs = require('mz/fs') -const crlf = require('crlf') - -const file = {} - -file.list = function (pattern) { - return new Promise(resolve => { - glob(pattern, function (err, files) { - resolve(files) - }) - }) -} - -file.getFilename = function (filepath) { - return path.parse(filepath).name -} - -file.createDir = async function (dir) { - if (await file.exists(dir)) return - - return fs.mkdir(dir, { recursive: true }).catch(console.error) -} - -file.exists = function (filepath) { - return fs.exists(path.resolve(filepath)) -} - -file.read = function (filepath) { - return fs.readFile(path.resolve(filepath), { encoding: 'utf8' }).catch(console.error) -} - -file.append = function (filepath, data) { - return fs.appendFile(path.resolve(filepath), data).catch(console.error) -} - -file.create = function (filepath, data = '') { - filepath = path.resolve(filepath) - const dir = path.dirname(filepath) - - return file - .createDir(dir) - .then(() => file.write(filepath, data)) - .catch(console.error) -} - -file.write = function (filepath, data = '') { - return fs.writeFile(path.resolve(filepath), data, { encoding: 'utf8' }).catch(console.error) -} - -file.clear = async function (filepath) { - if (await file.exists(filepath)) return file.write(filepath, '') - return true -} - -file.resolve = function (filepath) { - return path.resolve(filepath) -} - -file.dirname = function (filepath) { - return path.dirname(filepath) -} - -file.basename = function (filepath) { - return path.basename(filepath) -} - -file.eol = function (filepath) { - return new Promise((resolve, reject) => { - crlf.get(filepath, null, function (err, endingType) { - if (err) reject(err) - resolve(endingType) - }) - }) -} - -module.exports = file diff --git 
a/scripts/core/index.js b/scripts/core/index.js deleted file mode 100644 index dee10491..00000000 --- a/scripts/core/index.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.csv = require('./csv') -exports.file = require('./file') -exports.logger = require('./logger') diff --git a/scripts/core/index.ts b/scripts/core/index.ts new file mode 100644 index 00000000..b52c75ce --- /dev/null +++ b/scripts/core/index.ts @@ -0,0 +1,4 @@ +export * from './csv' +export * from './issueParser' +export * from './issueLoader' +export * from './csvParser' diff --git a/scripts/core/issueLoader.ts b/scripts/core/issueLoader.ts new file mode 100644 index 00000000..bc0791d3 --- /dev/null +++ b/scripts/core/issueLoader.ts @@ -0,0 +1,49 @@ +import { Collection } from '@freearhey/core' +import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods' +import { paginateRest } from '@octokit/plugin-paginate-rest' +import { Octokit } from '@octokit/core' +import { IssueParser } from './' +import { TESTING, OWNER, REPO } from '../constants' + +const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods) +const octokit = new CustomOctokit() + +export class IssueLoader { + async load({ labels }: { labels: string[] | string }) { + labels = Array.isArray(labels) ? 
labels.join(',') : labels + let issues: object[] = [] + if (TESTING) { + switch (labels) { + case 'channels:add,approved': + issues = require('../../tests/__data__/input/issues/channels_add_approved.js') + break + case 'channels:edit,approved': + issues = require('../../tests/__data__/input/issues/channels_edit_approved.js') + break + case 'channels:remove,approved': + issues = require('../../tests/__data__/input/issues/channels_remove_approved.js') + break + case 'blocklist:add,approved': + issues = require('../../tests/__data__/input/issues/blocklist_add_approved.js') + break + case 'blocklist:remove,approved': + issues = require('../../tests/__data__/input/issues/blocklist_remove_approved.js') + break + } + } else { + issues = await octokit.paginate(octokit.rest.issues.listForRepo, { + owner: OWNER, + repo: REPO, + per_page: 100, + labels, + headers: { + 'X-GitHub-Api-Version': '2022-11-28' + } + }) + } + + const parser = new IssueParser() + + return new Collection(issues).map(parser.parse) + } +} diff --git a/scripts/core/issueParser.ts b/scripts/core/issueParser.ts new file mode 100644 index 00000000..9ea6cfde --- /dev/null +++ b/scripts/core/issueParser.ts @@ -0,0 +1,66 @@ +import { Dictionary } from '@freearhey/core' +import { Issue } from '../models' + +const FIELDS = new Dictionary({ + 'Channel ID': 'channel_id', + 'Channel ID (required)': 'channel_id', + 'Channel ID (optional)': 'channel_id', + 'Channel Name': 'name', + 'Alternative Names': 'alt_names', + 'Alternative Names (optional)': 'alt_names', + Network: 'network', + 'Network (optional)': 'network', + Owners: 'owners', + 'Owners (optional)': 'owners', + Country: 'country', + Subdivision: 'subdivision', + 'Subdivision (optional)': 'subdivision', + City: 'city', + 'City (optional)': 'city', + 'Broadcast Area': 'broadcast_area', + Languages: 'languages', + Categories: 'categories', + 'Categories (optional)': 'categories', + NSFW: 'is_nsfw', + Launched: 'launched', + 'Launched (optional)': 'launched', + 
Closed: 'closed', + 'Closed (optional)': 'closed', + 'Replaced By': 'replaced_by', + 'Replaced By (optional)': 'replaced_by', + Website: 'website', + 'Website (optional)': 'website', + Logo: 'logo', + Reason: 'reason', + Notes: 'notes', + 'Notes (optional)': 'notes', + Reference: 'ref', + 'Reference (optional)': 'ref', + 'Reference (required)': 'ref' +}) + +export class IssueParser { + parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue { + const fields = issue.body.split('###') + + const data = new Dictionary() + fields.forEach((field: string) => { + let [_label, , _value] = field.split(/\r?\n/) + _label = _label ? _label.trim() : '' + _value = _value ? _value.trim() : '' + + if (!_label || !_value) return data + + const id: string = FIELDS.get(_label) + const value: string = _value === '_No response_' || _value === 'None' ? '' : _value + + if (!id) return + + data.set(id, value) + }) + + const labels = issue.labels.map(label => label.name) + + return new Issue({ number: issue.number, labels, data }) + } +} diff --git a/scripts/core/logger.js b/scripts/core/logger.js deleted file mode 100644 index 2be5eda2..00000000 --- a/scripts/core/logger.js +++ /dev/null @@ -1,13 +0,0 @@ -const { Signale } = require('signale') - -const options = {} - -const logger = new Signale(options) - -logger.config({ - displayLabel: false, - displayScope: false, - displayBadge: false -}) - -module.exports = logger diff --git a/scripts/db/export.js b/scripts/db/export.js deleted file mode 100644 index 97e5d347..00000000 --- a/scripts/db/export.js +++ /dev/null @@ -1,19 +0,0 @@ -const { csv, file, logger } = require('../core') -const chalk = require('chalk') - -const DATA_DIR = process.env.DATA_DIR || './data' -const OUTPUT_DIR = process.env.OUTPUT_DIR || './.api' - -async function main() { - const files = await file.list(`${DATA_DIR}/*.csv`) - for (const filepath of files) { - const filename = file.getFilename(filepath) - const json = await 
csv.fromFile(filepath).catch(err => { - logger.error(chalk.red(`\n${err.message} (${filepath})`)) - process.exit(1) - }) - await file.create(`${OUTPUT_DIR}/${filename}.json`, JSON.stringify(json)) - } -} - -main() diff --git a/scripts/db/export.ts b/scripts/db/export.ts new file mode 100644 index 00000000..b221fe78 --- /dev/null +++ b/scripts/db/export.ts @@ -0,0 +1,21 @@ +import { Storage, File } from '@freearhey/core' +import { DATA_DIR, API_DIR } from '../constants' +import { CSVParser } from '../core' + +async function main() { + const dataStorage = new Storage(DATA_DIR) + const apiStorage = new Storage(API_DIR) + const parser = new CSVParser() + + const files = await dataStorage.list('*.csv') + for (const filepath of files) { + const file = new File(filepath) + const filename = file.name() + const data = await dataStorage.load(file.basename()) + const items = await parser.parse(data) + + await apiStorage.save(`${filename}.json`, items.toJSON()) + } +} + +main() diff --git a/scripts/db/update.js b/scripts/db/update.js deleted file mode 100644 index 071eaab4..00000000 --- a/scripts/db/update.js +++ /dev/null @@ -1,172 +0,0 @@ -const { csv, file } = require('../core') -const channelScheme = require('../db/schemes/channels') -const { Octokit } = require('@octokit/core') -const { paginateRest } = require('@octokit/plugin-paginate-rest') -const CustomOctokit = Octokit.plugin(paginateRest) -const _ = require('lodash') - -const octokit = new CustomOctokit() - -const DATA_DIR = process.env.DATA_DIR || './data' -const OWNER = 'iptv-org' -const REPO = 'database' - -let channels = [] -let processedIssues = [] - -async function main() { - try { - const filepath = `${DATA_DIR}/channels.csv` - channels = await csv.fromFile(filepath) - - await removeChannels() - await editChannels() - await addChannels() - - channels = _.orderBy(channels, [channels => channels.id.toLowerCase()], ['asc']) - await csv.save(filepath, channels) - - const output = processedIssues.map(issue => 
`closes #${issue.number}`).join(', ') - console.log(`OUTPUT=${output}`) - } catch (err) { - console.log(err.message) - } -} - -main() - -async function removeChannels() { - const issues = await fetchIssues('channels:remove,approved') - issues.map(parseIssue).forEach(({ issue, channel }) => { - if (!channel) return - - const index = _.findIndex(channels, { id: channel.id }) - if (index < 0) return - - channels.splice(index, 1) - - processedIssues.push(issue) - }) -} - -async function editChannels() { - const issues = await fetchIssues('channels:edit,approved') - issues.map(parseIssue).forEach(({ issue, channel }) => { - if (!channel) return - - const index = _.findIndex(channels, { id: channel.id }) - if (index < 0) return - - const found = channels[index] - - for (let prop in channel) { - if (channel[prop] !== undefined) { - found[prop] = channel[prop] - } - } - - found.id = generateChannelId(found.name, found.country) - - channels.splice(index, 1, found) - - processedIssues.push(issue) - }) -} - -async function addChannels() { - const issues = await fetchIssues('channels:add,approved') - issues.map(parseIssue).forEach(({ issue, channel }) => { - if (!channel) return - - const found = channels.find(c => c.id === channel.id) - if (found) return - - channels.push(channel) - processedIssues.push(issue) - }) -} - -async function fetchIssues(labels) { - const issues = await octokit.paginate('GET /repos/{owner}/{repo}/issues', { - owner: OWNER, - repo: REPO, - per_page: 100, - labels, - headers: { - 'X-GitHub-Api-Version': '2022-11-28' - } - }) - - return issues -} - -function parseIssue(issue) { - const buffer = {} - const channel = {} - const fieldLabels = { - 'Channel ID (required)': 'id', - 'Channel Name': 'name', - 'Alternative Names': 'alt_names', - 'Alternative Names (optional)': 'alt_names', - Network: 'network', - 'Network (optional)': 'network', - Owners: 'owners', - 'Owners (optional)': 'owners', - Country: 'country', - Subdivision: 'subdivision', - 
'Subdivision (optional)': 'subdivision', - City: 'city', - 'City (optional)': 'city', - 'Broadcast Area': 'broadcast_area', - Languages: 'languages', - Categories: 'categories', - 'Categories (optional)': 'categories', - NSFW: 'is_nsfw', - Launched: 'launched', - 'Launched (optional)': 'launched', - Closed: 'closed', - 'Closed (optional)': 'closed', - 'Replaced By': 'replaced_by', - 'Replaced By (optional)': 'replaced_by', - Website: 'website', - 'Website (optional)': 'website', - Logo: 'logo' - } - - const fields = issue.body.split('###') - - if (!fields.length) return { issue, channel: null } - - fields.forEach(item => { - const [fieldLabel, , value] = item.split(/\r?\n/) - const field = fieldLabel ? fieldLabels[fieldLabel.trim()] : null - - if (!field) return - - buffer[field] = value.includes('_No response_') ? undefined : value.trim() - }) - - for (let field of Object.keys(channelScheme)) { - channel[field] = buffer[field] - } - - if (!channel.id) { - channel.id = generateChannelId(channel.name, channel.country) - } - - return { issue, channel } -} - -function generateChannelId(name, country) { - if (name && country) { - const slug = name - .replace(/\+/gi, 'Plus') - .replace(/^@/gi, 'At') - .replace(/[^a-z\d]+/gi, '') - country = country.toLowerCase() - - return `${slug}.${country}` - } - - return null -} diff --git a/scripts/db/update.ts b/scripts/db/update.ts new file mode 100644 index 00000000..8f616008 --- /dev/null +++ b/scripts/db/update.ts @@ -0,0 +1,182 @@ +import { CSV, IssueLoader, CSVParser } from '../core' +import { Channel, Blocked, Issue } from '../models' +import { DATA_DIR } from '../constants' +import { Storage, Collection } from '@freearhey/core' + +let blocklist = new Collection() +let channels = new Collection() +const processedIssues = new Collection() + +async function main() { + const dataStorage = new Storage(DATA_DIR) + const parser = new CSVParser() + + const _channels = await dataStorage.load('channels.csv') + channels = (await 
parser.parse(_channels)).map(data => new Channel(data)) + + const _blocklist = await dataStorage.load('blocklist.csv') + blocklist = (await parser.parse(_blocklist)).map(data => new Blocked(data)) + + const loader = new IssueLoader() + + await removeChannels({ loader }) + await editChannels({ loader }) + await addChannels({ loader }) + await blockChannels({ loader }) + await unblockChannels({ loader }) + + channels = channels.orderBy([(channel: Channel) => channel.id], ['asc']) + const channelsOutput = new CSV({ items: channels }).toString() + await dataStorage.save('channels.csv', channelsOutput) + + blocklist = blocklist.orderBy([record => record.channel.toLowerCase()], ['asc']) + const blocklistOutput = new CSV({ items: blocklist }).toString() + await dataStorage.save('blocklist.csv', blocklistOutput) + + const output = processedIssues.map((issue: Issue) => `closes #${issue.number}`).join(', ') + process.stdout.write(`OUTPUT=${output}`) +} + +main() + +async function removeChannels({ loader }: { loader: IssueLoader }) { + const issues = await loader.load({ labels: ['channels:remove,approved'] }) + issues.forEach((issue: Issue) => { + if (issue.data.missing('channel_id')) return + + const found = channels.first((channel: Channel) => channel.id === issue.data.get('channel_id')) + if (!found) return + + channels.remove((channel: Channel) => channel.id === found.id) + + processedIssues.push(issue) + }) +} + +async function editChannels({ loader }: { loader: IssueLoader }) { + const issues = await loader.load({ labels: ['channels:edit,approved'] }) + issues.forEach((issue: Issue) => { + const data = issue.data + if (data.missing('channel_id')) return + + const found: Channel = channels.first( + (channel: Channel) => channel.id === data.get('channel_id') + ) + if (!found) return + + let channelId = found.id + if (data.has('name') || data.has('country')) { + const name = data.get('name') || found.name + const country = data.get('country') || found.country + channelId = 
generateChannelId(name, country) + } + + found.update({ + id: channelId, + name: data.get('name'), + alt_names: data.get('alt_names'), + network: data.get('network'), + owners: data.get('owners'), + country: data.get('country'), + subdivision: data.get('subdivision'), + city: data.get('city'), + broadcast_area: data.get('broadcast_area'), + languages: data.get('languages'), + categories: data.get('categories'), + is_nsfw: data.get('is_nsfw'), + launched: data.get('launched'), + closed: data.get('closed'), + replaced_by: data.get('replaced_by'), + website: data.get('website'), + logo: data.get('logo') + }) + + processedIssues.push(issue) + }) +} + +async function addChannels({ loader }: { loader: IssueLoader }) { + const issues = await loader.load({ labels: ['channels:add,approved'] }) + issues.forEach((issue: Issue) => { + const data = issue.data + if (data.missing('name') || data.missing('country')) return + + const channelId = generateChannelId(data.get('name'), data.get('country')) + + const found: Channel = channels.first((channel: Channel) => channel.id === channelId) + if (found) return + + channels.push( + new Channel({ + id: channelId, + name: data.get('name'), + alt_names: data.get('alt_names'), + network: data.get('network'), + owners: data.get('owners'), + country: data.get('country'), + subdivision: data.get('subdivision'), + city: data.get('city'), + broadcast_area: data.get('broadcast_area'), + languages: data.get('languages'), + categories: data.get('categories'), + is_nsfw: data.get('is_nsfw'), + launched: data.get('launched'), + closed: data.get('closed'), + replaced_by: data.get('replaced_by'), + website: data.get('website'), + logo: data.get('logo') + }) + ) + + processedIssues.push(issue) + }) +} + +async function unblockChannels({ loader }: { loader: IssueLoader }) { + const issues = await loader.load({ labels: ['blocklist:remove,approved'] }) + issues.forEach((issue: Issue) => { + const data = issue.data + if (data.missing('channel_id')) 
return + + const found: Blocked = blocklist.first( + (blocked: Blocked) => blocked.channel === data.get('channel_id') + ) + if (!found) return + + blocklist.remove((blocked: Blocked) => blocked.channel === found.channel) + + processedIssues.push(issue) + }) +} + +async function blockChannels({ loader }: { loader: IssueLoader }) { + const issues = await loader.load({ labels: ['blocklist:add,approved'] }) + issues.forEach((issue: Issue) => { + const data = issue.data + if (data.missing('channel_id')) return + + const found: Blocked = blocklist.first( + (blocked: Blocked) => blocked.channel === data.get('channel_id') + ) + if (found) return + + blocklist.push( + new Blocked({ + channel: data.get('channel_id'), + ref: data.get('ref') + }) + ) + + processedIssues.push(issue) + }) +} + +function generateChannelId(name: string, country: string): string { + const slug = name + .replace(/\+/gi, 'Plus') + .replace(/^@/gi, 'At') + .replace(/[^a-z\d]+/gi, '') + country = country.toLowerCase() + + return `${slug}.${country}` +} diff --git a/scripts/db/validate.js b/scripts/db/validate.js deleted file mode 100644 index 0395db46..00000000 --- a/scripts/db/validate.js +++ /dev/null @@ -1,334 +0,0 @@ -const { transliterate } = require('transliteration') -const { logger, file, csv } = require('../core') -const { program } = require('commander') -const schemes = require('./schemes') -const chalk = require('chalk') -const Joi = require('joi') -const _ = require('lodash') - -program.argument('[filepath]', 'Path to file to validate').parse(process.argv) - -const allFiles = [ - 'data/blocklist.csv', - 'data/categories.csv', - 'data/channels.csv', - 'data/countries.csv', - 'data/languages.csv', - 'data/regions.csv', - 'data/subdivisions.csv' -] - -let db = {} -let files = {} - -async function main() { - let globalErrors = [] - - for (let filepath of allFiles) { - if (!filepath.endsWith('.csv')) continue - - const csvString = await file.read(filepath) - if (/\s+$/.test(csvString)) - return 
handleError(`Error: empty lines at the end of file not allowed (${filepath})`) - - const rows = csvString.split(/\r\n/) - const headers = rows[0].split(',') - for (let [i, line] of rows.entries()) { - if (line.indexOf('\n') > -1) - return handleError( - `Error: row ${i + 1} has the wrong line ending character, should be CRLF (${filepath})` - ) - if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length) - return handleError(`Error: row ${i + 1} has the wrong number of columns (${filepath})`) - } - - const filename = file.getFilename(filepath) - let data = await csv - .fromString(csvString) - .catch(err => handleError(`${err.message} (${filepath})`)) - - let grouped - switch (filename) { - case 'blocklist': - grouped = _.keyBy(data, 'channel') - break - case 'categories': - case 'channels': - grouped = _.keyBy(data, 'id') - break - default: - grouped = _.keyBy(data, 'code') - break - } - - db[filename] = grouped - files[filename] = data - } - - const toCheck = program.args.length ? 
program.args : allFiles - for (const filepath of toCheck) { - const filename = file.getFilename(filepath) - if (!schemes[filename]) return handleError(`Error: "${filename}" scheme is missing`) - - const rows = files[filename] - const rowsCopy = JSON.parse(JSON.stringify(rows)) - - let fileErrors = [] - if (filename === 'channels') { - fileErrors = fileErrors.concat(findDuplicatesById(rowsCopy)) - // fileErrors = fileErrors.concat(findDuplicatesByName(rowsCopy)) - for (const [i, row] of rowsCopy.entries()) { - fileErrors = fileErrors.concat(validateChannelId(row, i)) - fileErrors = fileErrors.concat(validateChannelBroadcastArea(row, i)) - fileErrors = fileErrors.concat(validateChannelSubdivision(row, i)) - fileErrors = fileErrors.concat(validateChannelCategories(row, i)) - fileErrors = fileErrors.concat(validateChannelReplacedBy(row, i)) - fileErrors = fileErrors.concat(validateChannelLanguages(row, i)) - fileErrors = fileErrors.concat(validateChannelCountry(row, i)) - } - } else if (filename === 'blocklist') { - for (const [i, row] of rowsCopy.entries()) { - fileErrors = fileErrors.concat(validateChannel(row, i)) - } - } else if (filename === 'countries') { - for (const [i, row] of rowsCopy.entries()) { - fileErrors = fileErrors.concat(validateCountryLanguages(row, i)) - } - } else if (filename === 'subdivisions') { - for (const [i, row] of rowsCopy.entries()) { - fileErrors = fileErrors.concat(validateSubdivisionCountry(row, i)) - } - } else if (filename === 'regions') { - for (const [i, row] of rowsCopy.entries()) { - fileErrors = fileErrors.concat(validateRegionCountries(row, i)) - } - } - - const schema = Joi.object(schemes[filename]) - rows.forEach((row, i) => { - const { error } = schema.validate(row, { abortEarly: false }) - if (error) { - error.details.forEach(detail => { - fileErrors.push({ line: i + 2, message: detail.message }) - }) - } - }) - - if (fileErrors.length) { - logger.info(`\n${chalk.underline(filepath)}`) - fileErrors.forEach(err => { - const 
position = err.line.toString().padEnd(6, ' ') - logger.info(` ${chalk.gray(position)} ${err.message}`) - }) - globalErrors = globalErrors.concat(fileErrors) - } - } - - if (globalErrors.length) return handleError(`${globalErrors.length} error(s)`) -} - -main() - -function findDuplicatesById(rows) { - const errors = [] - const buffer = {} - rows.forEach((row, i) => { - const normId = row.id.toLowerCase() - if (buffer[normId]) { - errors.push({ - line: i + 2, - message: `entry with the id "${row.id}" already exists` - }) - } - - buffer[normId] = true - }) - - return errors -} - -function findDuplicatesByName(rows) { - const errors = [] - const buffer = {} - rows.forEach((row, i) => { - const normName = row.name.toLowerCase() - if (buffer[normName]) { - errors.push({ - line: i + 2, - message: `entry with the name "${row.name}" already exists` - }) - } - - buffer[normName] = true - }) - - return errors -} - -function validateChannelId(row, i) { - const errors = [] - - let name = normalize(row.name) - let code = row.country.toLowerCase() - let expected = `${name}.${code}` - - if (expected !== row.id) { - errors.push({ - line: i + 2, - message: `"${row.id}" must be derived from the channel name "${row.name}" and the country code "${row.country}"` - }) - } - - function normalize(name) { - let translit = transliterate(name) - - return translit - .replace(/^@/i, 'At') - .replace(/^&/i, 'And') - .replace(/\+/gi, 'Plus') - .replace(/\s\-(\d)/gi, ' Minus$1') - .replace(/[^a-z\d]+/gi, '') - } - - return errors -} - -function validateChannelCategories(row, i) { - const errors = [] - row.categories.forEach(category => { - if (!db.categories[category]) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong category "${category}"` - }) - } - }) - - return errors -} - -function validateChannelCountry(row, i) { - const errors = [] - if (!db.countries[row.country]) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong country "${row.country}"` - }) - } 
- - return errors -} - -function validateChannelReplacedBy(row, i) { - const errors = [] - if (row.replaced_by && !db.channels[row.replaced_by]) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong replaced_by "${row.replaced_by}"` - }) - } - - return errors -} - -function validateChannelSubdivision(row, i) { - const errors = [] - if (row.subdivision && !db.subdivisions[row.subdivision]) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong subdivision "${row.subdivision}"` - }) - } - - return errors -} - -function validateChannelBroadcastArea(row, i) { - const errors = [] - row.broadcast_area.forEach(area => { - const [type, code] = area.split('/') - if ( - (type === 'r' && !db.regions[code]) || - (type === 'c' && !db.countries[code]) || - (type === 's' && !db.subdivisions[code]) - ) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong broadcast_area "${area}"` - }) - } - }) - - return errors -} - -function validateChannelLanguages(row, i) { - const errors = [] - row.languages.forEach(language => { - if (!db.languages[language]) { - errors.push({ - line: i + 2, - message: `"${row.id}" has the wrong language "${language}"` - }) - } - }) - - return errors -} - -function validateChannel(row, i) { - const errors = [] - if (!db.channels[row.channel]) { - errors.push({ - line: i + 2, - message: `"${row.channel}" is missing in the channels.csv` - }) - } - - return errors -} - -function validateCountryLanguages(row, i) { - const errors = [] - for (let lang of row.languages) { - if (!db.languages[lang]) { - errors.push({ - line: i + 2, - message: `"${row.code}" has the wrong language "${lang}"` - }) - } - } - - return errors -} - -function validateSubdivisionCountry(row, i) { - const errors = [] - if (!db.countries[row.country]) { - errors.push({ - line: i + 2, - message: `"${row.code}" has the wrong country "${row.country}"` - }) - } - - return errors -} - -function validateRegionCountries(row, i) { - const errors = [] 
- row.countries.forEach(country => { - if (!db.countries[country]) { - errors.push({ - line: i + 2, - message: `"${row.code}" has the wrong country "${country}"` - }) - } - }) - - return errors -} - -function handleError(message) { - logger.error(chalk.red(`\n${message}`)) - process.exit(1) -} diff --git a/scripts/db/validate.ts b/scripts/db/validate.ts new file mode 100644 index 00000000..174461c9 --- /dev/null +++ b/scripts/db/validate.ts @@ -0,0 +1,271 @@ +import { Collection, Storage, File, Dictionary, Logger } from '@freearhey/core' +import { DATA_DIR } from '../constants' +import { transliterate } from 'transliteration' +import { program } from 'commander' +import Joi from 'joi' +import { CSVParser } from '../core' +import chalk from 'chalk' + +program.argument('[filepath]', 'Path to file to validate').parse(process.argv) + +const logger = new Logger() +const buffer = new Dictionary() +const files = new Dictionary() +const schemes: { [key: string]: object } = require('../schemes') + +async function main() { + const dataStorage = new Storage(DATA_DIR) + const _files = await dataStorage.list('*.csv') + let globalErrors = new Collection() + const parser = new CSVParser() + + for (const filepath of _files) { + const file = new File(filepath) + if (file.extension() !== 'csv') continue + + const csv = await dataStorage.load(file.basename()) + if (/\s+$/.test(csv)) + return handleError(`Error: empty lines at the end of file not allowed (${filepath})`) + + const rows = csv.split(/\r\n/) + const headers = rows[0].split(',') + for (const [i, line] of rows.entries()) { + if (line.indexOf('\n') > -1) + return handleError( + `Error: row ${i + 1} has the wrong line ending character, should be CRLF (${filepath})` + ) + if (line.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/).length !== headers.length) + return handleError(`Error: row ${i + 1} has the wrong number of columns (${filepath})`) + } + + const data = await parser.parse(csv) + const filename = file.name() + + let grouped + 
 switch (filename) { + case 'blocklist': + grouped = data.keyBy(item => item.channel) + break + case 'categories': + case 'channels': + grouped = data.keyBy(item => item.id) + break + default: + grouped = data.keyBy(item => item.code) + break + } + + buffer.set(filename, grouped) + files.set(filename, data) + } + + const filesToCheck = program.args.length ? program.args : _files + for (const filepath of filesToCheck) { + const file = new File(filepath) + const filename = file.name() + if (!schemes[filename]) return handleError(`Error: "${filename}" scheme is missing`) + + const rows: Collection = files.get(filename) + const rowsCopy = JSON.parse(JSON.stringify(rows.all())) + + let fileErrors = new Collection() + switch (filename) { + case 'channels': + fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'id')) + for (const [i, row] of rowsCopy.entries()) { + fileErrors = fileErrors.concat(validateChannelId(row, i)) + fileErrors = fileErrors.concat(validateChannelBroadcastArea(row, i)) + fileErrors = fileErrors.concat( + checkValue(i, row, 'id', 'subdivision', buffer.get('subdivisions')) + ) + fileErrors = fileErrors.concat( + checkValue(i, row, 'id', 'categories', buffer.get('categories')) + ) + fileErrors = fileErrors.concat( + checkValue(i, row, 'id', 'replaced_by', buffer.get('channels')) + ) + fileErrors = fileErrors.concat( + checkValue(i, row, 'id', 'languages', buffer.get('languages')) + ) + fileErrors = fileErrors.concat( + checkValue(i, row, 'id', 'country', buffer.get('countries')) + ) + } + break + case 'blocklist': + for (const [i, row] of rowsCopy.entries()) { + fileErrors = fileErrors.concat(validateChannel(row.channel, i)) + } + break + case 'countries': + fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code')) + for (const [i, row] of rowsCopy.entries()) { + fileErrors = fileErrors.concat( + checkValue(i, row, 'code', 'languages', buffer.get('languages')) + ) + } + break + case 'subdivisions': + fileErrors = 
fileErrors.concat(findDuplicatesBy(rowsCopy, 'code')) + for (const [i, row] of rowsCopy.entries()) { + fileErrors = fileErrors.concat( + checkValue(i, row, 'code', 'country', buffer.get('countries')) + ) + } + break + case 'regions': + fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code')) + for (const [i, row] of rowsCopy.entries()) { + fileErrors = fileErrors.concat( + checkValue(i, row, 'code', 'countries', buffer.get('countries')) + ) + } + break + case 'categories': + fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'id')) + break + case 'languages': + fileErrors = fileErrors.concat(findDuplicatesBy(rowsCopy, 'code')) + break + } + + const schema = Joi.object(schemes[filename]) + rows.forEach((row: string | string[] | boolean, i: number) => { + const { error } = schema.validate(row, { abortEarly: false }) + if (error) { + error.details.forEach(detail => { + fileErrors.push({ line: i + 2, message: detail.message }) + }) + } + }) + + if (fileErrors.count()) { + logger.info(`\n${chalk.underline(filepath)}`) + fileErrors.forEach(err => { + const position = err.line.toString().padEnd(6, ' ') + logger.info(` ${chalk.gray(position)} ${err.message}`) + }) + globalErrors = globalErrors.concat(fileErrors) + } + } + + if (globalErrors.count()) return handleError(`${globalErrors.count()} error(s)`) +} + +main() + +function checkValue( + i: number, + row: { [key: string]: string[] | string | boolean }, + key: string, + field: string, + collection: Collection +) { + const errors = new Collection() + let values: string[] = [] + if (Array.isArray(row[field])) { + values = row[field] as string[] + } else if (typeof row[field] === 'string') { + values = new Array(row[field]) as string[] + } + + values.forEach((value: string) => { + if (collection.missing(value)) { + errors.push({ + line: i + 2, + message: `"${row[key]}" has an invalid ${field} "${value}"` + }) + } + }) + + return errors +} + +function validateChannel(channelId: string, i: number) { + const 
errors = new Collection() + const channels = buffer.get('channels') + + if (channels.missing(channelId)) { + errors.push({ + line: i + 2, + message: `"${channelId}" is missing in the channels.csv` + }) + } + + return errors +} + +function findDuplicatesBy(rows: { [key: string]: string }[], key: string) { + const errors = new Collection() + const buffer = new Dictionary() + + rows.forEach((row, i) => { + const normId = row[key].toLowerCase() + if (buffer.has(normId)) { + errors.push({ + line: i + 2, + message: `entry with the ${key} "${row[key]}" already exists` + }) + } + + buffer.set(normId, true) + }) + + return errors +} + +function validateChannelId(row: { [key: string]: string }, i: number) { + const errors = new Collection() + + const name = normalize(row.name) + const code = row.country.toLowerCase() + const expected = `${name}.${code}` + + if (expected !== row.id) { + errors.push({ + line: i + 2, + message: `"${row.id}" must be derived from the channel name "${row.name}" and the country code "${row.country}"` + }) + } + + function normalize(name: string) { + const translit = transliterate(name) + + return translit + .replace(/^@/i, 'At') + .replace(/^&/i, 'And') + .replace(/\+/gi, 'Plus') + .replace(/\s-(\d)/gi, ' Minus$1') + .replace(/[^a-z\d]+/gi, '') + } + + return errors +} + +function validateChannelBroadcastArea(row: { [key: string]: string[] }, i: number) { + const errors = new Collection() + const regions = buffer.get('regions') + const countries = buffer.get('countries') + const subdivisions = buffer.get('subdivisions') + + row.broadcast_area.forEach((areaCode: string) => { + const [type, code] = areaCode.split('/') + if ( + (type === 'r' && regions.missing(code)) || + (type === 'c' && countries.missing(code)) || + (type === 's' && subdivisions.missing(code)) + ) { + errors.push({ + line: i + 2, + message: `"${row.id}" has the wrong broadcast_area "${areaCode}"` + }) + } + }) + + return errors +} + +function handleError(message: string) { + 
logger.error(chalk.red(message)) + process.exit(1) +} diff --git a/scripts/models/blocked.ts b/scripts/models/blocked.ts new file mode 100644 index 00000000..1de7a198 --- /dev/null +++ b/scripts/models/blocked.ts @@ -0,0 +1,14 @@ +type BlockedProps = { + channel: string + ref: string +} + +export class Blocked { + channel: string + ref: string + + constructor({ ref, channel }: BlockedProps) { + this.channel = channel + this.ref = ref + } +} diff --git a/scripts/models/channel.ts b/scripts/models/channel.ts new file mode 100644 index 00000000..e9cf5846 --- /dev/null +++ b/scripts/models/channel.ts @@ -0,0 +1,85 @@ +type ChannelProps = { + id: string + name: string + alt_names: string[] + network: string + owners: string[] + country: string + subdivision: string + city: string + broadcast_area: string[] + languages: string[] + categories: string[] + is_nsfw: boolean + launched: string + closed: string + replaced_by: string + website: string + logo: string +} + +export class Channel { + id: string + name: string + alt_names: string[] + network: string + owners: string[] + country: string + subdivision: string + city: string + broadcast_area: string[] + languages: string[] + categories: string[] + is_nsfw: boolean + launched: string + closed: string + replaced_by: string + website: string + logo: string + + constructor({ + id, + name, + alt_names, + network, + owners, + country, + subdivision, + city, + broadcast_area, + languages, + categories, + is_nsfw, + launched, + closed, + replaced_by, + website, + logo + }: ChannelProps) { + this.id = id + this.name = name + this.alt_names = alt_names + this.network = network + this.owners = owners + this.country = country + this.subdivision = subdivision + this.city = city + this.broadcast_area = broadcast_area + this.languages = languages + this.categories = categories + this.is_nsfw = is_nsfw + this.launched = launched + this.closed = closed + this.replaced_by = replaced_by + this.website = website + this.logo = logo + } + + 
update(data: { [key: string]: string }) { + for (const key in data) { + if (this[key] && data[key]) { + this[key] = data[key] + } + } + } +} diff --git a/scripts/models/index.ts b/scripts/models/index.ts new file mode 100644 index 00000000..c3258404 --- /dev/null +++ b/scripts/models/index.ts @@ -0,0 +1,3 @@ +export * from './channel' +export * from './issue' +export * from './blocked' diff --git a/scripts/models/issue.ts b/scripts/models/issue.ts new file mode 100644 index 00000000..fecb1fde --- /dev/null +++ b/scripts/models/issue.ts @@ -0,0 +1,19 @@ +import { Dictionary } from '@freearhey/core' + +type IssueProps = { + number: number + labels: string[] + data: Dictionary +} + +export class Issue { + number: number + labels: string[] + data: Dictionary + + constructor({ number, labels, data }: IssueProps) { + this.number = number + this.labels = labels + this.data = data + } +} diff --git a/scripts/db/schemes/blocklist.js b/scripts/schemes/blocklist.js similarity index 95% rename from scripts/db/schemes/blocklist.js rename to scripts/schemes/blocklist.js index 29e57d75..f629a85c 100644 --- a/scripts/db/schemes/blocklist.js +++ b/scripts/schemes/blocklist.js @@ -1,8 +1,8 @@ -const Joi = require('joi') - -module.exports = { - channel: Joi.string() - .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) - .required(), - ref: Joi.string().uri().required() -} +const Joi = require('joi') + +module.exports = { + channel: Joi.string() + .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) + .required(), + ref: Joi.string().uri().required() +} diff --git a/scripts/db/schemes/categories.js b/scripts/schemes/categories.js similarity index 94% rename from scripts/db/schemes/categories.js rename to scripts/schemes/categories.js index 4d655c97..7986ecd4 100644 --- a/scripts/db/schemes/categories.js +++ b/scripts/schemes/categories.js @@ -1,10 +1,10 @@ -const Joi = require('joi') - -module.exports = { - id: Joi.string() - .regex(/^[a-z]+$/) - .required(), - name: Joi.string() - .regex(/^[A-Z]+$/i) - .required() -} 
+const Joi = require('joi') + +module.exports = { + id: Joi.string() + .regex(/^[a-z]+$/) + .required(), + name: Joi.string() + .regex(/^[A-Z]+$/i) + .required() +} diff --git a/scripts/db/schemes/channels.js b/scripts/schemes/channels.js similarity index 96% rename from scripts/db/schemes/channels.js rename to scripts/schemes/channels.js index d6c12636..ea6e17ec 100644 --- a/scripts/db/schemes/channels.js +++ b/scripts/schemes/channels.js @@ -1,65 +1,65 @@ -const Joi = require('joi').extend(require('@joi/date')) -const path = require('path') -const url = require('url') - -module.exports = { - id: Joi.string() - .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) - .required(), - name: Joi.string() - .regex(/^[a-z0-9-!:&.+'/»#%°$@?\s]+$/i) - .required(), - alt_names: Joi.array().items( - Joi.string() - .regex(/^[^",]+$/) - .invalid(Joi.ref('name')) - ), - network: Joi.string() - .regex(/^[^",]+$/) - .allow(null), - owners: Joi.array().items(Joi.string().regex(/^[^",]+$/)), - country: Joi.string() - .regex(/^[A-Z]{2}$/) - .required(), - subdivision: Joi.string() - .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/) - .allow(null), - city: Joi.string() - .regex(/^[^",]+$/) - .allow(null), - broadcast_area: Joi.array().items( - Joi.string() - .regex(/^(s\/[A-Z]{2}-[A-Z0-9]{1,3}|c\/[A-Z]{2}|r\/[A-Z0-9]{3,7})$/) - .required() - ), - languages: Joi.array().items( - Joi.string() - .regex(/^[a-z]{3}$/) - .required() - ), - categories: Joi.array().items(Joi.string().regex(/^[a-z]+$/)), - is_nsfw: Joi.boolean().strict().required(), - launched: Joi.date().format('YYYY-MM-DD').raw().allow(null), - closed: Joi.date().format('YYYY-MM-DD').raw().allow(null).greater(Joi.ref('launched')), - replaced_by: Joi.string() - .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) - .allow(null), - website: Joi.string() - .uri({ - scheme: ['http', 'https'] - }) - .allow(null), - logo: Joi.string() - .uri({ - scheme: ['https'] - }) - .custom((value, helper) => { - const ext = path.extname(url.parse(value).pathname) - if (!ext || 
/(\.png|\.jpeg|\.jpg)/i.test(ext)) { - return true - } else { - return helper.message(`"logo" has an invalid file extension "${ext}"`) - } - }) - .required() -} +const Joi = require('joi').extend(require('@joi/date')) +const path = require('path') +const url = require('url') + +module.exports = { + id: Joi.string() + .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) + .required(), + name: Joi.string() + .regex(/^[a-z0-9-!:&.+'/»#%°$@?\s]+$/i) + .required(), + alt_names: Joi.array().items( + Joi.string() + .regex(/^[^",]+$/) + .invalid(Joi.ref('name')) + ), + network: Joi.string() + .regex(/^[^",]+$/) + .allow(null), + owners: Joi.array().items(Joi.string().regex(/^[^",]+$/)), + country: Joi.string() + .regex(/^[A-Z]{2}$/) + .required(), + subdivision: Joi.string() + .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/) + .allow(null), + city: Joi.string() + .regex(/^[^",]+$/) + .allow(null), + broadcast_area: Joi.array().items( + Joi.string() + .regex(/^(s\/[A-Z]{2}-[A-Z0-9]{1,3}|c\/[A-Z]{2}|r\/[A-Z0-9]{3,7})$/) + .required() + ), + languages: Joi.array().items( + Joi.string() + .regex(/^[a-z]{3}$/) + .required() + ), + categories: Joi.array().items(Joi.string().regex(/^[a-z]+$/)), + is_nsfw: Joi.boolean().strict().required(), + launched: Joi.date().format('YYYY-MM-DD').raw().allow(null), + closed: Joi.date().format('YYYY-MM-DD').raw().allow(null).greater(Joi.ref('launched')), + replaced_by: Joi.string() + .regex(/^[A-Za-z0-9]+\.[a-z]{2}$/) + .allow(null), + website: Joi.string() + .uri({ + scheme: ['http', 'https'] + }) + .allow(null), + logo: Joi.string() + .uri({ + scheme: ['https'] + }) + .custom((value, helper) => { + const ext = path.extname(url.parse(value).pathname) + if (!ext || /(\.png|\.jpeg|\.jpg)/i.test(ext)) { + return true + } else { + return helper.message(`"logo" has an invalid file extension "${ext}"`) + } + }) + .required() +} diff --git a/scripts/db/schemes/countries.js b/scripts/schemes/countries.js similarity index 95% rename from scripts/db/schemes/countries.js rename to 
scripts/schemes/countries.js index 31e737ac..4db75d24 100644 --- a/scripts/db/schemes/countries.js +++ b/scripts/schemes/countries.js @@ -1,18 +1,18 @@ -const Joi = require('joi') - -module.exports = { - name: Joi.string() - .regex(/^[\sA-Z\u00C0-\u00FF().-]+$/i) - .required(), - code: Joi.string() - .regex(/^[A-Z]{2}$/) - .required(), - languages: Joi.array().items( - Joi.string() - .regex(/^[a-z]{3}$/) - .required() - ), - flag: Joi.string() - .regex(/^[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]$/) - .required() -} +const Joi = require('joi') + +module.exports = { + name: Joi.string() + .regex(/^[\sA-Z\u00C0-\u00FF().-]+$/i) + .required(), + code: Joi.string() + .regex(/^[A-Z]{2}$/) + .required(), + languages: Joi.array().items( + Joi.string() + .regex(/^[a-z]{3}$/) + .required() + ), + flag: Joi.string() + .regex(/^[\uD83C][\uDDE6-\uDDFF][\uD83C][\uDDE6-\uDDFF]$/) + .required() +} diff --git a/scripts/db/schemes/index.js b/scripts/schemes/index.js similarity index 97% rename from scripts/db/schemes/index.js rename to scripts/schemes/index.js index d8b765e9..f803aadf 100644 --- a/scripts/db/schemes/index.js +++ b/scripts/schemes/index.js @@ -1,7 +1,7 @@ -exports.channels = require('./channels') -exports.categories = require('./categories') -exports.countries = require('./countries') -exports.languages = require('./languages') -exports.regions = require('./regions') -exports.subdivisions = require('./subdivisions') -exports.blocklist = require('./blocklist') +exports.channels = require('./channels') +exports.categories = require('./categories') +exports.countries = require('./countries') +exports.languages = require('./languages') +exports.regions = require('./regions') +exports.subdivisions = require('./subdivisions') +exports.blocklist = require('./blocklist') diff --git a/scripts/db/schemes/languages.js b/scripts/schemes/languages.js similarity index 94% rename from scripts/db/schemes/languages.js rename to scripts/schemes/languages.js index 
f7ca07ac..4c773ade 100644 --- a/scripts/db/schemes/languages.js +++ b/scripts/schemes/languages.js @@ -1,8 +1,8 @@ -const Joi = require('joi') - -module.exports = { - code: Joi.string() - .regex(/^[a-z]{3}$/) - .required(), - name: Joi.string().required() -} +const Joi = require('joi') + +module.exports = { + code: Joi.string() + .regex(/^[a-z]{3}$/) + .required(), + name: Joi.string().required() +} diff --git a/scripts/db/schemes/regions.js b/scripts/schemes/regions.js similarity index 94% rename from scripts/db/schemes/regions.js rename to scripts/schemes/regions.js index 2560aa9d..16721af2 100644 --- a/scripts/db/schemes/regions.js +++ b/scripts/schemes/regions.js @@ -1,15 +1,15 @@ -const Joi = require('joi') - -module.exports = { - name: Joi.string() - .regex(/^[\sA-Z\u00C0-\u00FF().,-]+$/i) - .required(), - code: Joi.string() - .regex(/^[A-Z]{3,7}$/) - .required(), - countries: Joi.array().items( - Joi.string() - .regex(/^[A-Z]{2}$/) - .required() - ) -} +const Joi = require('joi') + +module.exports = { + name: Joi.string() + .regex(/^[\sA-Z\u00C0-\u00FF().,-]+$/i) + .required(), + code: Joi.string() + .regex(/^[A-Z]{3,7}$/) + .required(), + countries: Joi.array().items( + Joi.string() + .regex(/^[A-Z]{2}$/) + .required() + ) +} diff --git a/scripts/db/schemes/subdivisions.js b/scripts/schemes/subdivisions.js similarity index 95% rename from scripts/db/schemes/subdivisions.js rename to scripts/schemes/subdivisions.js index 43624a30..763eb28c 100644 --- a/scripts/db/schemes/subdivisions.js +++ b/scripts/schemes/subdivisions.js @@ -1,11 +1,11 @@ -const Joi = require('joi') - -module.exports = { - country: Joi.string() - .regex(/^[A-Z]{2}$/) - .required(), - name: Joi.string().required(), - code: Joi.string() - .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/) - .required() -} +const Joi = require('joi') + +module.exports = { + country: Joi.string() + .regex(/^[A-Z]{2}$/) + .required(), + name: Joi.string().required(), + code: Joi.string() + .regex(/^[A-Z]{2}-[A-Z0-9]{1,3}$/) + 
.required() +}