fix: add robots.txt to forbidden url schemas
@@ -29,4 +29,4 @@ export type ErrorDTO = {
 // Used to check against reserved names.
 export const disallowedUriSchema = z
   .string()
-  .regex(/^(about|assets|healthcheck|kttydocs|panel)/);
+  .regex(/^(about|assets|healthcheck|kttydocs|panel|robots\.txt)/);
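For context, a minimal sketch of how this Zod schema might be consumed; the isReservedSlug helper and the sample slugs are assumptions for illustration, not part of this commit:

// Sketch (assumed usage): a name that matches disallowedUriSchema collides
// with a reserved path and should be rejected.
import { z } from "zod";

const disallowedUriSchema = z
  .string()
  .regex(/^(about|assets|healthcheck|kttydocs|panel|robots\.txt)/);

// Hypothetical helper: true when a user-chosen slug is reserved.
function isReservedSlug(slug: string): boolean {
  return disallowedUriSchema.safeParse(slug).success;
}

isReservedSlug("robots.txt"); // true  -> rejected, newly covered by this commit
isReservedSlug("my-page");    // false -> allowed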