Better display & error counts
Signed-off-by: Tom <[email protected]>
tdelmas committed Jul 12, 2023
1 parent 299a5b3 commit 013f100
Showing 7 changed files with 297 additions and 250 deletions.
2 changes: 1 addition & 1 deletion gbfs-validator/__test__/fixtures/v3.0-RC/exaustive.js
@@ -340,7 +340,7 @@ class MockRequests {
parking_type: 'underground_parking',
parking_hoop: false,
contact_phone: '+33109874321',
capacity: 10,
capacity: 20,
vehicle_type_area_capacity: [
{
vehicle_type_id: 'abc123',
18 changes: 9 additions & 9 deletions gbfs-validator/__test__/gbfs.v3.0-RC.test.js
@@ -7,16 +7,16 @@ const serverOpts = {

function get_errors(result) {
let errors = []
let otherErrors = []
let nonSchemaErrors = []
let warnings = []

result.files?.map((f) => {
if (f.errors?.length) {
errors.push({ file: f.file, errors: f.errors })
}

if (f.otherErrors?.length) {
otherErrors.push({ file: f.file, errors: f.otherErrors })
if (f.nonSchemaErrors?.length) {
nonSchemaErrors.push({ file: f.file, errors: f.nonSchemaErrors })
}

if (f.warnings?.length) {
@@ -28,8 +28,8 @@ function get_errors(result) {
errors.push({ file: f.file, lang: l.lang, errors: l.errors })
}

if (l.otherErrors?.length) {
otherErrors.push({ file: f.file, lang: l.lang, errors: l.otherErrors })
if (l.nonSchemaErrors?.length) {
nonSchemaErrors.push({ file: f.file, lang: l.lang, errors: l.nonSchemaErrors })
}

if (l.warnings?.length) {
@@ -38,7 +38,7 @@
})
})

return { errors, otherErrors, warnings }
return { errors, nonSchemaErrors, warnings }
}

describe('default feed', () => {
@@ -158,7 +158,7 @@ describe('exaustive feed', () => {
return gbfs.validation().then((result) => {
expect(get_errors(result)).toEqual({
errors: [],
otherErrors: [],
nonSchemaErrors: [],
warnings: []
})

@@ -239,11 +239,11 @@ describe('default_reserve_time REQUIRED if reservation_price_per_min or reservat
expect.assertions(3)

return gbfs.validation().then((result) => {
let { errors, otherErrors, warnings } = get_errors(result)
let { errors, nonSchemaErrors, warnings } = get_errors(result)

expect(errors).toEqual([])
expect(warnings).toEqual([])
expect(otherErrors).toEqual([
expect(nonSchemaErrors).toEqual([
{
errors: [
{
177 changes: 151 additions & 26 deletions gbfs-validator/gbfs.js
@@ -15,7 +15,7 @@ function hasErrors(data, required) {
return true
}

if (el.hasErrors || el.errors?.length || el.otherErrors?.length) {
if (el.hasErrors || el.errors?.length || el.nonSchemaErrors?.length) {
return true
}
}
@@ -32,7 +32,7 @@ function hasWarnings(data) {
}
}

if (el.otherWarnings?.length) {
if (el.nonSchemaWarnings?.length) {
return true
}
}
@@ -42,16 +42,115 @@ function countErrors(file) {
let errors = 0

errors += file.errors?.length || 0
errors += file.otherErrors?.length || 0
errors += file.nonSchemaErrors?.length || 0

for (const lang of file.languages || []) {
errors += lang.errors?.length || 0
errors += lang.otherErrors?.length || 0
errors += lang.nonSchemaErrors?.length || 0
}

return errors
}

function jsonSchemaSummary(errors) {
if (!errors) {
return []
}

let summary = {}

for (const error of errors) {
let message = error.message

if (error.params && error.params.additionalProperty) {
message += `: ${error.params.additionalProperty}`
}

let instancePath = error.instancePath
.replace(/\/\d+\//g, '/#/')
.replace(/\/\d+$/g, '/#')

if (!summary[message]) {
summary[message] = {}
}

if (!summary[message][instancePath]) {
summary[message][instancePath] = 1
} else {
summary[message][instancePath]++
}
}

return Object.entries(summary).map(([message, value]) => {
let values = Object.entries(value).map(([path, count]) => {
return {
path,
message,
count
}
})

return {
key: message,
message: message,
values
}
})
}

function otherSummary(nonSchemas) {
if (!nonSchemas) {
return []
}

let summary = {}
for (const nonSchema of nonSchemas) {
let key = nonSchema.key
let message = nonSchema.message

if (nonSchema.additionalProperty) {
message = message.replace(/\.$/, '') + `: '${nonSchema.additionalProperty}'`
}

if (!summary[message]) {
summary[message] = {}
}

let path = nonSchema.path.replace(/\/\d+\//g, '/#/').replace(/\/\d+$/g, '/#')

if (!summary[message][path]) {
summary[message][path] = { count: 1, key }
} else {
summary[message][path].count++
}
}

return Object.entries(summary).map(([message, value]) => {
let values = Object.entries(value).map(([path, { count, key }]) => {
return {
key,
path,
message,
count
}
})

return {
key: values[0].key,
message,
values
}
})
}

function getSummary(errors, nonSchemaErrors, nonSchemaWarnings) {
return {
errors: jsonSchemaSummary(errors),
nonSchemaErrors: otherSummary(nonSchemaErrors),
nonSchemaWarnings: otherSummary(nonSchemaWarnings)
}
}
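
// Illustration only (not part of this commit): jsonSchemaSummary, and therefore
// getSummary, groups AJV-style errors by message and collapses array indices in
// instancePath to '#'. With hypothetical input:
//
//   const sample = [
//     { instancePath: '/data/stations/0/capacity', message: 'must be integer', params: {} },
//     { instancePath: '/data/stations/4/capacity', message: 'must be integer', params: {} }
//   ]
//
//   getSummary(sample, [], []).errors would yield:
//   [ { key: 'must be integer',
//       message: 'must be integer',
//       values: [ { path: '/data/stations/#/capacity', message: 'must be integer', count: 2 } ] } ]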

function getPartialSchema(version, partial, data = {}) {
let partialSchema

@@ -217,18 +316,22 @@ class GBFS {
}

this.autoDiscovery = body
const { errors, schema, otherWarnings, otherErrors } =

const { errors, schema, nonSchemaWarnings, nonSchemaErrors } =
this.validateFile(
this.options.version || body.version || '1.0',
'gbfs',
this.autoDiscovery
)

const summary = getSummary(errors, nonSchemaErrors, nonSchemaWarnings)

return {
schema,
errors,
otherWarnings,
otherErrors,
nonSchemaWarnings,
nonSchemaErrors,
summary,
url,
version: body.version,
recommended: true,
@@ -266,18 +369,21 @@ class GBFS {

this.autoDiscovery = body

const { errors, schema, otherWarnings, otherErrors } =
const { errors, schema, nonSchemaWarnings, nonSchemaErrors } =
this.validateFile(
this.options.version || body.version || '1.0',
'gbfs',
this.autoDiscovery
)

const summary = getSummary(errors, nonSchemaErrors, nonSchemaWarnings)

return {
schema,
errors,
otherWarnings,
otherErrors,
nonSchemaWarnings,
nonSchemaErrors,
summary,
url: this.url,
version: body.version || '1.0',
recommended: true,
@@ -320,7 +426,7 @@ class GBFS {

let { schema, errors } = validate(s, data, options)

let others = { errors: [], warnings: [] }
let nonSchema = { errors: [], warnings: [] }
try {
const files = getVersionConfiguration(version).files(this.options)

@@ -331,8 +437,8 @@
for (const fn of fns) {
try {
fn({
errors: others.errors,
warnings: others.warnings,
errors: nonSchema.errors,
warnings: nonSchema.warnings,
version,
file,
data,
@@ -351,27 +457,28 @@
return {
schema,
errors,
otherErrors: others.errors,
otherWarnings: others.warnings
nonSchemaErrors: nonSchema.errors,
nonSchemaWarnings: nonSchema.warnings,
summary: getSummary(errors, nonSchema.errors, nonSchema.warnings)
}
}

getFile(type, required) {
if (this.autoDiscovery) {
let urls
let urls = []

let version = this.options.version || this.autoDiscovery.version

// 3.0-RC , 3.0 and upcoming minor versions
if (/^3\.\d/.test(version)) {
urls =
this.autoDiscovery.data.feeds?.filter((f) => f.name === type) || []
} else {
urls = Object.entries(this.autoDiscovery.data).map((key) => {
return Object.assign(
{ lang: key[0] },
this.autoDiscovery.data[key[0]].feeds.find((f) => f.name === type)
)
urls = this.autoDiscovery.data?.feeds?.filter((f) => f.name === type)
} else if (this.autoDiscovery.data) {
Object.entries(this.autoDiscovery.data).map(([key, value]) => {
let feed = value.feeds?.find((f) => f.name === type)

if (feed) {
urls.push(Object.assign({ lang: key }, feed))
}
})
}

@@ -699,14 +806,31 @@ class GBFS {
})

let errorsCount = 0
let summaryErrorCount = 0
const filesResult = result.map((file) => {
let errorsCountFile = countErrors(file)

let fileSummaryErrorCount = 0
if (file.summary) {
fileSummaryErrorCount +=
file.summary.errors.length + file.summary.nonSchemaErrors.length
} else if (file.languages) {
file.languages.forEach((language) => {
if (language.summary) {
fileSummaryErrorCount +=
language.summary.errors.length +
language.summary.nonSchemaErrors.length
}
})
}

errorsCount += errorsCountFile
summaryErrorCount += fileSummaryErrorCount

return {
...file,
errorsCount: errorsCountFile
errorsCount: errorsCountFile,
summaryErrorCount: fileSummaryErrorCount
}
})

@@ -719,7 +843,8 @@
},
hasErrors: hasErrors(result),
hasWarnings: hasWarnings(result),
errorsCount
errorsCount,
summaryErrorCount
},
files: filesResult
}
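
// Note (an interpretation of the code above, not text from the commit):
// errorsCount adds up every individual error, while summaryErrorCount counts the
// grouped entries produced by getSummary. For example, 50 identical
// 'must be integer' errors under /data/stations/<n>/capacity add 50 to
// errorsCount but produce a single grouped entry (count: 50), hence 1 to
// summaryErrorCount.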
12 changes: 12 additions & 0 deletions gbfs-validator/otherValidation/README.md
@@ -31,6 +31,14 @@ If a pricing is used (like in `default_pricing_plan_id` or `pricing_plan_ids` of

If the `default_pricing_plan_id` is not present in the `vehicle_types.json` file when pricing plans are present, the validator will return an error.

# missing_station_information

If a station is declared in `station_status.json` but not in `station_information.json`, the validator will return an error.

# missing_station_status

If a station is declared in `station_information.json` but not in `station_status.json`, the validator will return an error.
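
For illustration, a minimal sketch of how these two cross-checks could be written (a hypothetical helper, not the validator's actual implementation; it assumes the parsed `station_information.json` and `station_status.json` bodies are available):

    function checkStationCrossReference(information, status, errors) {
      const infoIds = new Set(information.data.stations.map((s) => s.station_id))
      const statusIds = new Set(status.data.stations.map((s) => s.station_id))

      for (const id of statusIds) {
        if (!infoIds.has(id)) {
          // station appears in station_status.json only
          errors.push({
            key: 'missing_station_information',
            path: '/data/stations/#',
            message: `Station '${id}' is in station_status.json but not in station_information.json`
          })
        }
      }

      for (const id of infoIds) {
        if (!statusIds.has(id)) {
          // station appears in station_information.json only
          errors.push({
            key: 'missing_station_status',
            path: '/data/stations/#',
            message: `Station '${id}' is in station_information.json but not in station_status.json`
          })
        }
      }
    }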

# unknown_plan_id

If a pricing plan id is used (like in `default_pricing_plan_id` or `pricing_plan_ids` of the `vehicle_types.json` file), but is not present in the `system_pricing_plans.json` file, the validator will return an error.
@@ -55,6 +63,10 @@ If `num_docks_available` on a station is unusually high validator will return a

If `num_vehicles_available` on a station is unusually high, the validator will return a warning.

# station_capacity_too_low

If `capacity` on a station is lower than the sum of `num_vehicles_available` and `num_docks_available`, the validator will return an error.
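
As a sketch only (hypothetical helper, not the validator's code), the rule amounts to comparing the `capacity` declared in `station_information.json` with the counts reported in `station_status.json` for the same station:

    function checkStationCapacity(info, status, errors) {
      // info comes from station_information.json, status from station_status.json
      if (
        typeof info.capacity === 'number' &&
        typeof status.num_vehicles_available === 'number' &&
        typeof status.num_docks_available === 'number' &&
        info.capacity < status.num_vehicles_available + status.num_docks_available
      ) {
        errors.push({
          key: 'station_capacity_too_low',
          path: '/data/stations/#',
          message: `Station '${info.station_id}' declares a capacity lower than num_vehicles_available + num_docks_available`
        })
      }
    }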

# unclosed_polygon

If the last coordinate of the LineString of a polygon is not the same as the first coordinate, the validator will return an error.