diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 31bcfd96..0b791742 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,50 +1,91 @@
repos:
+
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- - id: check-added-large-files
+
+- repo: https://github.com/psf/black
+ rev: stable
+ hooks:
+ - id: black
+ files: server/szurubooru/
+ language_version: python3.8
+
+- repo: https://github.com/timothycrosley/isort
+ rev: '4.3.21-2'
+ hooks:
+ - id: isort
+ files: server/szurubooru/
+ exclude: server/szurubooru/migrations/env.py
+ additional_dependencies:
+ - toml
+
+- repo: https://github.com/prettier/prettier
+ rev: '2.0.5'
+ hooks:
+ - id: prettier
+ files: client/js/
+ exclude: client/js/.gitignore
+ args: ['--config', 'client/.prettierrc.yml']
+
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v7.1.0
hooks:
- id: eslint
files: client/js/
args: ['--fix']
+ additional_dependencies:
+ - eslint-config-prettier
+
- repo: https://gitlab.com/pycqa/flake8
rev: '3.8.2'
hooks:
- id: flake8
files: server/szurubooru/
additional_dependencies:
- - flake8-print
- args: ['--config=server/setup.cfg']
+ - flake8-print
+ args: ['--config=server/.flake8']
+
- repo: local
hooks:
- - id: pytest
- name: pytest
- entry: >-
- bash -c
- 'docker build -f server/Dockerfile.test -t $(git rev-parse --short HEAD)-test server/
- && docker run --rm -t $(git rev-parse --short HEAD)-test szurubooru/
- && docker rmi --no-prune $(git rev-parse --short HEAD)-test'
- language: system
- types: [python]
- files: server/szurubooru/
- pass_filenames: false
- id: docker-build-client
- name: Test building the client in Docker
- entry: bash -c 'docker build -t szurubooru-client:$(git rev-parse --short HEAD) client/'
+ name: Docker - build client
+ entry: bash -c 'docker build client/'
language: system
types: [file]
files: client/
pass_filenames: false
+
- id: docker-build-server
- name: Test building the server in Docker
- entry: bash -c 'docker build -t szurubooru-server:$(git rev-parse --short HEAD) server/'
+ name: Docker - build server
+ entry: bash -c 'docker build server/'
language: system
types: [file]
files: server/
pass_filenames: false
+
+ - id: pytest
+ name: pytest
+ entry: bash -c 'docker run --rm -t $(docker build -f server/Dockerfile.test -q server/) szurubooru/'
+ language: system
+ types: [python]
+ files: server/szurubooru/
+ exclude: server/szurubooru/migrations/
+ pass_filenames: false
+
+ - id: pytest-cov
+    name: pytest-cov
+ entry: bash -c 'docker run --rm -t $(docker build -f server/Dockerfile.test -q server/) --cov-report=term-missing:skip-covered --cov=szurubooru szurubooru/'
+ language: system
+ types: [python]
+ files: server/szurubooru/
+ exclude: server/szurubooru/migrations/
+ pass_filenames: false
+ verbose: true
+ stages: [manual]
+
+fail_fast: true
exclude: LICENSE.md
diff --git a/client/.eslintrc.yml b/client/.eslintrc.yml
index a176248f..a9d703ff 100644
--- a/client/.eslintrc.yml
+++ b/client/.eslintrc.yml
@@ -2,7 +2,7 @@ env:
browser: true
commonjs: true
es6: true
-extends: 'eslint:recommended'
+extends: 'prettier'
globals:
Atomics: readonly
SharedArrayBuffer: readonly
@@ -10,284 +10,3 @@ ignorePatterns:
- build.js
parserOptions:
ecmaVersion: 11
-rules:
- accessor-pairs: error
- array-bracket-newline: error
- array-bracket-spacing:
- - error
- - never
- array-callback-return: error
- array-element-newline: 'off'
- arrow-body-style: 'off'
- arrow-parens:
- - error
- - as-needed
- arrow-spacing:
- - error
- - after: true
- before: true
- block-scoped-var: error
- block-spacing: error
- brace-style:
- - error
- - 1tbs
- callback-return: 'off'
- camelcase: error
- class-methods-use-this: 'off'
- comma-dangle: 'off'
- comma-spacing:
- - error
- - after: true
- before: false
- comma-style:
- - error
- - last
- complexity: 'off'
- computed-property-spacing:
- - error
- - never
- consistent-return: 'off'
- consistent-this: 'off'
- curly: error
- default-case: error
- default-case-last: error
- default-param-last: error
- dot-location:
- - error
- - property
- dot-notation:
- - error
- - allowKeywords: true
- eol-last: error
- eqeqeq: error
- func-call-spacing: error
- func-name-matching: error
- func-names: error
- func-style:
- - error
- - declaration
- - allowArrowFunctions: true
- function-call-argument-newline:
- - error
- - consistent
- function-paren-newline: 'off'
- generator-star-spacing: error
- global-require: 'off'
- grouped-accessor-pairs: 'off'
- guard-for-in: error
- handle-callback-err: error
- id-blacklist: error
- id-length: 'off'
- id-match: error
- implicit-arrow-linebreak:
- - error
- - beside
- indent:
- - error
- - 4
- indent-legacy: 'off'
- init-declarations: error
- jsx-quotes: error
- key-spacing: error
- keyword-spacing:
- - error
- - after: true
- before: true
- line-comment-position: 'off'
- linebreak-style:
- - error
- - unix
- lines-around-comment: error
- lines-around-directive: error
- lines-between-class-members:
- - error
- - always
- max-classes-per-file: 'off'
- max-depth: error
- max-len: 'off'
- max-lines: 'off'
- max-lines-per-function: 'off'
- max-nested-callbacks: error
- max-params: 'off'
- max-statements: 'off'
- max-statements-per-line: error
- multiline-comment-style:
- - error
- - separate-lines
- multiline-ternary: 'off'
- new-cap: error
- new-parens: error
- newline-after-var: 'off'
- newline-before-return: 'off'
- newline-per-chained-call: 'off'
- no-alert: 'off'
- no-array-constructor: error
- no-await-in-loop: error
- no-bitwise: 'off'
- no-buffer-constructor: 'off'
- no-caller: error
- no-catch-shadow: error
- no-confusing-arrow: error
- no-console: error
- no-constructor-return: error
- no-continue: 'off'
- no-div-regex: 'off'
- no-duplicate-imports: error
- no-else-return: 'off'
- no-empty-function: 'off'
- no-eq-null: error
- no-eval: error
- no-extend-native: error
- no-extra-bind: error
- no-extra-label: error
- no-extra-parens: 'off'
- no-floating-decimal: error
- no-implicit-globals: error
- no-implied-eval: error
- no-inline-comments: 'off'
- no-invalid-this: error
- no-iterator: error
- no-label-var: error
- no-labels: error
- no-lone-blocks: error
- no-lonely-if: error
- no-loop-func: 'off'
- no-loss-of-precision: error
- no-magic-numbers: 'off'
- no-mixed-operators: error
- no-mixed-requires: error
- no-multi-assign: error
- no-multi-spaces:
- - error
- - ignoreEOLComments: true
- no-multi-str: error
- no-multiple-empty-lines: error
- no-native-reassign: error
- no-negated-condition: 'off'
- no-negated-in-lhs: error
- no-nested-ternary: error
- no-new: 'off'
- no-new-func: error
- no-new-object: error
- no-new-require: error
- no-new-wrappers: error
- no-octal-escape: error
- no-param-reassign: 'off'
- no-path-concat: error
- no-plusplus: 'off'
- no-process-env: error
- no-process-exit: error
- no-proto: error
- no-restricted-exports: error
- no-restricted-globals: error
- no-restricted-imports: error
- no-restricted-modules: error
- no-restricted-properties: error
- no-restricted-syntax: error
- no-return-assign: error
- no-return-await: error
- no-script-url: error
- no-self-compare: error
- no-sequences: error
- no-shadow: 'off'
- no-spaced-func: error
- no-sync: error
- no-tabs: error
- no-template-curly-in-string: error
- no-ternary: 'off'
- no-throw-literal: 'off'
- no-trailing-spaces: error
- no-undef-init: error
- no-undefined: 'off'
- no-underscore-dangle: 'off'
- no-unmodified-loop-condition: error
- no-unneeded-ternary: error
- no-unused-expressions: error
- no-unused-vars: 'off'
- no-use-before-define: 'off'
- no-useless-backreference: error
- no-useless-call: error
- no-useless-computed-key: error
- no-useless-concat: error
- no-useless-constructor: error
- no-useless-escape: 'off'
- no-useless-rename: error
- no-useless-return: error
- no-var: 'off'
- no-void: error
- no-warning-comments: warn
- no-whitespace-before-property: error
- nonblock-statement-body-position: error
- object-curly-newline: error
- object-curly-spacing:
- - error
- - never
- object-shorthand: 'off'
- one-var: 'off'
- one-var-declaration-per-line: error
- operator-assignment:
- - error
- - always
- operator-linebreak: 'off'
- padded-blocks: 'off'
- padding-line-between-statements: error
- prefer-arrow-callback: error
- prefer-const: 'off'
- prefer-destructuring: 'off'
- prefer-exponentiation-operator: 'off'
- prefer-named-capture-group: 'off'
- prefer-numeric-literals: error
- prefer-object-spread: 'off'
- prefer-promise-reject-errors: 'off'
- prefer-reflect: 'off'
- prefer-regex-literals: warn
- prefer-rest-params: 'off'
- prefer-spread: 'off'
- prefer-template: 'off'
- quote-props: 'off'
- quotes: 'off'
- radix:
- - error
- - as-needed
- require-atomic-updates: error
- require-await: error
- require-jsdoc: 'off'
- require-unicode-regexp: 'off'
- rest-spread-spacing: error
- semi: 'off'
- semi-spacing:
- - error
- - after: true
- before: false
- semi-style:
- - error
- - last
- sort-imports: error
- sort-keys: 'off'
- sort-vars: error
- space-before-blocks: error
- space-before-function-paren: 'off'
- space-in-parens:
- - error
- - never
- space-infix-ops: error
- space-unary-ops: error
- spaced-comment:
- - error
- - always
- strict: error
- switch-colon-spacing: error
- symbol-description: error
- template-curly-spacing:
- - error
- - never
- template-tag-spacing: error
- unicode-bom:
- - error
- - never
- valid-jsdoc: error
- vars-on-top: error
- wrap-iife: error
- wrap-regex: error
- yield-star-spacing: error
- yoda: 'off'
diff --git a/client/.prettierrc.yml b/client/.prettierrc.yml
new file mode 100644
index 00000000..925bde80
--- /dev/null
+++ b/client/.prettierrc.yml
@@ -0,0 +1,4 @@
+parser: babel
+printWidth: 79
+tabWidth: 4
+quoteProps: consistent
diff --git a/client/js/api.js b/client/js/api.js
index befe02fe..5bde6d81 100644
--- a/client/js/api.js
+++ b/client/js/api.js
@@ -1,10 +1,10 @@
-'use strict';
+"use strict";
-const cookies = require('js-cookie');
-const request = require('superagent');
-const events = require('./events.js');
-const progress = require('./util/progress.js');
-const uri = require('./util/uri.js');
+const cookies = require("js-cookie");
+const request = require("superagent");
+const events = require("./events.js");
+const progress = require("./util/progress.js");
+const uri = require("./util/uri.js");
let fileTokens = {};
let remoteConfig = null;
@@ -18,22 +18,22 @@ class Api extends events.EventTarget {
this.token = null;
this.cache = {};
this.allRanks = [
- 'anonymous',
- 'restricted',
- 'regular',
- 'power',
- 'moderator',
- 'administrator',
- 'nobody',
+ "anonymous",
+ "restricted",
+ "regular",
+ "power",
+ "moderator",
+ "administrator",
+ "nobody",
];
this.rankNames = new Map([
- ['anonymous', 'Anonymous'],
- ['restricted', 'Restricted user'],
- ['regular', 'Regular user'],
- ['power', 'Power user'],
- ['moderator', 'Moderator'],
- ['administrator', 'Administrator'],
- ['nobody', 'Nobody'],
+ ["anonymous", "Anonymous"],
+ ["restricted", "Restricted user"],
+ ["regular", "Regular user"],
+ ["power", "Power user"],
+ ["moderator", "Moderator"],
+ ["administrator", "Administrator"],
+ ["nobody", "Nobody"],
]);
}
@@ -43,11 +43,12 @@ class Api extends events.EventTarget {
resolve(this.cache[url]);
});
}
- return this._wrappedRequest(url, request.get, {}, {}, options)
- .then(response => {
+ return this._wrappedRequest(url, request.get, {}, {}, options).then(
+ (response) => {
this.cache[url] = response;
return Promise.resolve(response);
- });
+ }
+ );
}
post(url, data, files, options) {
@@ -67,10 +68,9 @@ class Api extends events.EventTarget {
fetchConfig() {
if (remoteConfig === null) {
- return this.get(uri.formatApiLink('info'))
- .then(response => {
- remoteConfig = response.config;
- });
+ return this.get(uri.formatApiLink("info")).then((response) => {
+ remoteConfig = response.config;
+ });
} else {
return Promise.resolve();
}
@@ -115,7 +115,8 @@ class Api extends events.EventTarget {
continue;
}
const rankIndex = this.allRanks.indexOf(
- remoteConfig.privileges[p]);
+ remoteConfig.privileges[p]
+ );
if (minViableRank === null || rankIndex < minViableRank) {
minViableRank = rankIndex;
}
@@ -123,17 +124,16 @@ class Api extends events.EventTarget {
if (minViableRank === null) {
throw `Bad privilege name: ${lookup}`;
}
- let myRank = this.user !== null ?
- this.allRanks.indexOf(this.user.rank) :
- 0;
+ let myRank =
+ this.user !== null ? this.allRanks.indexOf(this.user.rank) : 0;
return myRank >= minViableRank;
}
loginFromCookies() {
- const auth = cookies.getJSON('auth');
- return auth && auth.user && auth.token ?
- this.loginWithToken(auth.user, auth.token, true) :
- Promise.resolve();
+ const auth = cookies.getJSON("auth");
+ return auth && auth.user && auth.token
+ ? this.loginWithToken(auth.user, auth.token, true)
+ : Promise.resolve();
}
loginWithToken(userName, token, doRemember) {
@@ -141,63 +141,74 @@ class Api extends events.EventTarget {
return new Promise((resolve, reject) => {
this.userName = userName;
this.token = token;
- this.get('/user/' + userName + '?bump-login=true')
- .then(response => {
+ this.get("/user/" + userName + "?bump-login=true").then(
+ (response) => {
const options = {};
if (doRemember) {
options.expires = 365;
}
cookies.set(
- 'auth',
- {'user': userName, 'token': token},
- options);
+ "auth",
+ { user: userName, token: token },
+ options
+ );
this.user = response;
resolve();
- this.dispatchEvent(new CustomEvent('login'));
- }, error => {
+ this.dispatchEvent(new CustomEvent("login"));
+ },
+ (error) => {
reject(error);
this.logout();
- });
+ }
+ );
});
}
createToken(userName, options) {
let userTokenRequest = {
enabled: true,
- note: 'Web Login Token'
+ note: "Web Login Token",
};
- if (typeof options.expires !== 'undefined') {
- userTokenRequest.expirationTime = new Date().addDays(options.expires).toISOString()
+ if (typeof options.expires !== "undefined") {
+ userTokenRequest.expirationTime = new Date()
+ .addDays(options.expires)
+ .toISOString();
}
return new Promise((resolve, reject) => {
- this.post('/user-token/' + userName, userTokenRequest)
- .then(response => {
+ this.post("/user-token/" + userName, userTokenRequest).then(
+ (response) => {
cookies.set(
- 'auth',
- {'user': userName, 'token': response.token},
- options);
+ "auth",
+ { user: userName, token: response.token },
+ options
+ );
this.userName = userName;
this.token = response.token;
this.userPassword = null;
- }, error => {
+ },
+ (error) => {
reject(error);
- });
+ }
+ );
});
}
deleteToken(userName, userToken) {
return new Promise((resolve, reject) => {
- this.delete('/user-token/' + userName + '/' + userToken, {})
- .then(response => {
+ this.delete("/user-token/" + userName + "/" + userToken, {}).then(
+ (response) => {
const options = {};
cookies.set(
- 'auth',
- {'user': userName, 'token': null},
- options);
+ "auth",
+ { user: userName, token: null },
+ options
+ );
resolve();
- }, error => {
+ },
+ (error) => {
reject(error);
- });
+ }
+ );
});
}
@@ -206,8 +217,8 @@ class Api extends events.EventTarget {
return new Promise((resolve, reject) => {
this.userName = userName;
this.userPassword = userPassword;
- this.get('/user/' + userName + '?bump-login=true')
- .then(response => {
+ this.get("/user/" + userName + "?bump-login=true").then(
+ (response) => {
const options = {};
if (doRemember) {
options.expires = 365;
@@ -215,22 +226,26 @@ class Api extends events.EventTarget {
this.createToken(this.userName, options);
this.user = response;
resolve();
- this.dispatchEvent(new CustomEvent('login'));
- }, error => {
+ this.dispatchEvent(new CustomEvent("login"));
+ },
+ (error) => {
reject(error);
this.logout();
- });
+ }
+ );
});
}
logout() {
let self = this;
- this.deleteToken(this.userName, this.token)
- .then(response => {
+ this.deleteToken(this.userName, this.token).then(
+ (response) => {
self._logout();
- }, error => {
+ },
+ (error) => {
self._logout();
- });
+ }
+ );
}
_logout() {
@@ -238,17 +253,19 @@ class Api extends events.EventTarget {
this.userName = null;
this.userPassword = null;
this.token = null;
- this.dispatchEvent(new CustomEvent('logout'));
+ this.dispatchEvent(new CustomEvent("logout"));
}
forget() {
- cookies.remove('auth');
+ cookies.remove("auth");
}
isLoggedIn(user) {
if (user) {
- return this.userName !== null &&
- this.userName.toLowerCase() === user.name.toLowerCase();
+ return (
+ this.userName !== null &&
+ this.userName.toLowerCase() === user.name.toLowerCase()
+ );
} else {
return this.userName !== null;
}
@@ -259,8 +276,7 @@ class Api extends events.EventTarget {
}
_getFullUrl(url) {
- const fullUrl =
- ('api/' + url).replace(/([^:])\/+/g, '$1/');
+ const fullUrl = ("api/" + url).replace(/([^:])\/+/g, "$1/");
const matches = fullUrl.match(/^([^?]*)\??(.*)$/);
const baseUrl = matches[1];
const request = matches[2];
@@ -285,7 +301,7 @@ class Api extends events.EventTarget {
const file = files[key];
const fileId = this._getFileId(file);
if (fileTokens[fileId]) {
- data[key + 'Token'] = fileTokens[fileId];
+ data[key + "Token"] = fileTokens[fileId];
} else {
promise = promise
.then(() => {
@@ -293,33 +309,40 @@ class Api extends events.EventTarget {
abortFunction = () => uploadPromise.abort();
return uploadPromise;
})
- .then(token => {
+ .then((token) => {
abortFunction = () => {};
fileTokens[fileId] = token;
- data[key + 'Token'] = token;
+ data[key + "Token"] = token;
return Promise.resolve();
});
}
}
}
- promise = promise.then(
- () => {
+ promise = promise
+ .then(() => {
let requestPromise = this._rawRequest(
- url, requestFactory, data, {}, options);
+ url,
+ requestFactory,
+ data,
+ {},
+ options
+ );
abortFunction = () => requestPromise.abort();
return requestPromise;
})
- .catch(error => {
- if (error.response && error.response.name ===
- 'MissingOrExpiredRequiredFileError') {
+ .catch((error) => {
+ if (
+ error.response &&
+ error.response.name === "MissingOrExpiredRequiredFileError"
+ ) {
for (let key of Object.keys(files)) {
const file = files[key];
const fileId = this._getFileId(file);
fileTokens[fileId] = null;
}
error.message =
- 'The uploaded file has expired; ' +
- 'please resend the form to reupload.';
+ "The uploaded file has expired; " +
+ "please resend the form to reupload.";
}
return Promise.reject(error);
});
@@ -331,13 +354,17 @@ class Api extends events.EventTarget {
let abortFunction = () => {};
let returnedPromise = new Promise((resolve, reject) => {
let uploadPromise = this._rawRequest(
- 'uploads', request.post, {}, {content: file}, options);
+ "uploads",
+ request.post,
+ {},
+ { content: file },
+ options
+ );
abortFunction = () => uploadPromise.abort();
- return uploadPromise.then(
- response => {
- abortFunction = () => {};
- return resolve(response.token);
- }, reject);
+ return uploadPromise.then((response) => {
+ abortFunction = () => {};
+ return resolve(response.token);
+ }, reject);
});
returnedPromise.abort = () => abortFunction();
return returnedPromise;
@@ -352,7 +379,7 @@ class Api extends events.EventTarget {
let returnedPromise = new Promise((resolve, reject) => {
let req = requestFactory(fullUrl);
- req.set('Accept', 'application/json');
+ req.set("Accept", "application/json");
if (query) {
req.query(query);
@@ -362,7 +389,7 @@ class Api extends events.EventTarget {
for (let key of Object.keys(files)) {
const value = files[key];
if (value.constructor === String) {
- data[key + 'Url'] = value;
+ data[key + "Url"] = value;
} else {
req.attach(key, value || new Blob());
}
@@ -371,9 +398,9 @@ class Api extends events.EventTarget {
if (data) {
if (files && Object.keys(files).length) {
- req.attach('metadata', new Blob([JSON.stringify(data)]));
+ req.attach("metadata", new Blob([JSON.stringify(data)]));
} else {
- req.set('Content-Type', 'application/json');
+ req.set("Content-Type", "application/json");
req.send(data);
}
}
@@ -382,19 +409,28 @@ class Api extends events.EventTarget {
if (this.userName && this.token) {
req.auth = null;
// eslint-disable-next-line no-undef
- req.set('Authorization', 'Token ' + new Buffer(
- this.userName + ":" + this.token).toString('base64'))
+ req.set(
+ "Authorization",
+ "Token " +
+ new Buffer(
+ this.userName + ":" + this.token
+ ).toString("base64")
+ );
} else if (this.userName && this.userPassword) {
req.auth(
this.userName,
- encodeURIComponent(this.userPassword)
- .replace(/%([0-9A-F]{2})/g, (match, p1) => {
- return String.fromCharCode('0x' + p1);
- }));
+ encodeURIComponent(this.userPassword).replace(
+ /%([0-9A-F]{2})/g,
+ (match, p1) => {
+ return String.fromCharCode("0x" + p1);
+ }
+ )
+ );
}
} catch (e) {
reject(
- new Error('Authentication error (malformed credentials)'));
+ new Error("Authentication error (malformed credentials)")
+ );
}
if (!options.noProgress) {
@@ -402,10 +438,11 @@ class Api extends events.EventTarget {
}
abortFunction = () => {
- req.abort(); // does *NOT* call the callback passed in .end()
+ req.abort(); // does *NOT* call the callback passed in .end()
progress.done();
reject(
- new Error('The request was aborted due to user cancel.'));
+ new Error("The request was aborted due to user cancel.")
+ );
};
req.end((error, response) => {
@@ -414,7 +451,8 @@ class Api extends events.EventTarget {
if (error) {
if (response && response.body) {
error = new Error(
- response.body.description || 'Unknown error');
+ response.body.description || "Unknown error"
+ );
error.response = response.body;
}
reject(error);
diff --git a/client/js/config.js b/client/js/config.js
index 4524be6e..da8fb8fc 100644
--- a/client/js/config.js
+++ b/client/js/config.js
@@ -1,4 +1,4 @@
-'use strict';
+"use strict";
-const config = require('./.config.autogen.json');
+const config = require("./.config.autogen.json");
module.exports = config;
diff --git a/client/js/controllers/auth_controller.js b/client/js/controllers/auth_controller.js
index a6ec97dc..e01fb9bf 100644
--- a/client/js/controllers/auth_controller.js
+++ b/client/js/controllers/auth_controller.js
@@ -1,38 +1,40 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const tags = require('../tags.js');
-const pools = require('../pools.js');
-const uri = require('../util/uri.js');
-const topNavigation = require('../models/top_navigation.js');
-const LoginView = require('../views/login_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const tags = require("../tags.js");
+const pools = require("../pools.js");
+const uri = require("../util/uri.js");
+const topNavigation = require("../models/top_navigation.js");
+const LoginView = require("../views/login_view.js");
class LoginController {
constructor() {
api.forget();
- topNavigation.activate('login');
- topNavigation.setTitle('Login');
+ topNavigation.activate("login");
+ topNavigation.setTitle("Login");
this._loginView = new LoginView();
- this._loginView.addEventListener('submit', e => this._evtLogin(e));
+ this._loginView.addEventListener("submit", (e) => this._evtLogin(e));
}
_evtLogin(e) {
this._loginView.clearMessages();
this._loginView.disableForm();
api.forget();
- api.login(e.detail.name, e.detail.password, e.detail.remember)
- .then(() => {
+ api.login(e.detail.name, e.detail.password, e.detail.remember).then(
+ () => {
const ctx = router.show(uri.formatClientLink());
- ctx.controller.showSuccess('Logged in');
+ ctx.controller.showSuccess("Logged in");
// reload tag category color map, this is required when `tag_categories:list` has a permission other than anonymous
tags.refreshCategoryColorMap();
pools.refreshCategoryColorMap();
- }, error => {
+ },
+ (error) => {
this._loginView.showError(error.message);
this._loginView.enableForm();
- });
+ }
+ );
}
}
@@ -41,15 +43,15 @@ class LogoutController {
api.forget();
api.logout();
const ctx = router.show(uri.formatClientLink());
- ctx.controller.showSuccess('Logged out');
+ ctx.controller.showSuccess("Logged out");
}
}
-module.exports = router => {
- router.enter(['login'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["login"], (ctx, next) => {
ctx.controller = new LoginController();
});
- router.enter(['logout'], (ctx, next) => {
+ router.enter(["logout"], (ctx, next) => {
ctx.controller = new LogoutController();
});
};
diff --git a/client/js/controllers/base_post_controller.js b/client/js/controllers/base_post_controller.js
index b86aea72..7a21449d 100644
--- a/client/js/controllers/base_post_controller.js
+++ b/client/js/controllers/base_post_controller.js
@@ -1,19 +1,19 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const topNavigation = require('../models/top_navigation.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const topNavigation = require("../models/top_navigation.js");
+const EmptyView = require("../views/empty_view.js");
class BasePostController {
constructor(ctx) {
- if (!api.hasPrivilege('posts:view')) {
+ if (!api.hasPrivilege("posts:view")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view posts.');
+ this._view.showError("You don't have privileges to view posts.");
return;
}
- topNavigation.activate('posts');
- topNavigation.setTitle('Post #' + ctx.parameters.id.toString());
+ topNavigation.activate("posts");
+ topNavigation.setTitle("Post #" + ctx.parameters.id.toString());
}
}
diff --git a/client/js/controllers/comments_controller.js b/client/js/controllers/comments_controller.js
index 67e44ac1..d54059e8 100644
--- a/client/js/controllers/comments_controller.js
+++ b/client/js/controllers/comments_controller.js
@@ -1,51 +1,55 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const PostList = require('../models/post_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PageController = require('../controllers/page_controller.js');
-const CommentsPageView = require('../views/comments_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const PostList = require("../models/post_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PageController = require("../controllers/page_controller.js");
+const CommentsPageView = require("../views/comments_page_view.js");
+const EmptyView = require("../views/empty_view.js");
-const fields = ['id', 'comments', 'commentCount', 'thumbnailUrl'];
+const fields = ["id", "comments", "commentCount", "thumbnailUrl"];
class CommentsController {
constructor(ctx) {
- if (!api.hasPrivilege('comments:list')) {
+ if (!api.hasPrivilege("comments:list")) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to view comments.');
+ "You don't have privileges to view comments."
+ );
return;
}
- topNavigation.activate('comments');
- topNavigation.setTitle('Listing comments');
+ topNavigation.activate("comments");
+ topNavigation.setTitle("Listing comments");
this._pageController = new PageController();
this._pageController.run({
parameters: ctx.parameters,
defaultLimit: 10,
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('comments', parameters);
+ const parameters = Object.assign({}, ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("comments", parameters);
},
requestPage: (offset, limit) => {
return PostList.search(
- 'sort:comment-date comment-count-min:1',
+ "sort:comment-date comment-count-min:1",
offset,
limit,
- fields);
+ fields
+ );
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
Object.assign(pageCtx, {
- canViewPosts: api.hasPrivilege('posts:view'),
+ canViewPosts: api.hasPrivilege("posts:view"),
});
const view = new CommentsPageView(pageCtx);
- view.addEventListener('submit', e => this._evtUpdate(e));
- view.addEventListener('score', e => this._evtScore(e));
- view.addEventListener('delete', e => this._evtDelete(e));
+ view.addEventListener("submit", (e) => this._evtUpdate(e));
+ view.addEventListener("score", (e) => this._evtScore(e));
+ view.addEventListener("delete", (e) => this._evtDelete(e));
return view;
},
});
@@ -54,26 +58,27 @@ class CommentsController {
_evtUpdate(e) {
// TODO: disable form
e.detail.comment.text = e.detail.text;
- e.detail.comment.save()
- .catch(error => {
- e.detail.target.showError(error.message);
- // TODO: enable form
- });
+ e.detail.comment.save().catch((error) => {
+ e.detail.target.showError(error.message);
+ // TODO: enable form
+ });
}
_evtScore(e) {
- e.detail.comment.setScore(e.detail.score)
- .catch(error => window.alert(error.message));
+ e.detail.comment
+ .setScore(e.detail.score)
+ .catch((error) => window.alert(error.message));
}
_evtDelete(e) {
- e.detail.comment.delete()
- .catch(error => window.alert(error.message));
+ e.detail.comment
+ .delete()
+ .catch((error) => window.alert(error.message));
}
}
-module.exports = router => {
- router.enter(['comments'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["comments"], (ctx, next) => {
new CommentsController(ctx);
});
-}
+};
diff --git a/client/js/controllers/help_controller.js b/client/js/controllers/help_controller.js
index 8e65346b..c290a6be 100644
--- a/client/js/controllers/help_controller.js
+++ b/client/js/controllers/help_controller.js
@@ -1,24 +1,24 @@
-'use strict';
+"use strict";
-const topNavigation = require('../models/top_navigation.js');
-const HelpView = require('../views/help_view.js');
+const topNavigation = require("../models/top_navigation.js");
+const HelpView = require("../views/help_view.js");
class HelpController {
constructor(section, subsection) {
- topNavigation.activate('help');
- topNavigation.setTitle('Help');
+ topNavigation.activate("help");
+ topNavigation.setTitle("Help");
this._helpView = new HelpView(section, subsection);
}
}
-module.exports = router => {
- router.enter(['help'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["help"], (ctx, next) => {
new HelpController();
});
- router.enter(['help', ':section'], (ctx, next) => {
+ router.enter(["help", ":section"], (ctx, next) => {
new HelpController(ctx.parameters.section);
});
- router.enter(['help', ':section', ':subsection'], (ctx, next) => {
+ router.enter(["help", ":section", ":subsection"], (ctx, next) => {
new HelpController(ctx.parameters.section, ctx.parameters.subsection);
});
};
diff --git a/client/js/controllers/home_controller.js b/client/js/controllers/home_controller.js
index 590da672..cf56e27a 100644
--- a/client/js/controllers/home_controller.js
+++ b/client/js/controllers/home_controller.js
@@ -1,26 +1,26 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const config = require('../config.js');
-const Info = require('../models/info.js');
-const topNavigation = require('../models/top_navigation.js');
-const HomeView = require('../views/home_view.js');
+const api = require("../api.js");
+const config = require("../config.js");
+const Info = require("../models/info.js");
+const topNavigation = require("../models/top_navigation.js");
+const HomeView = require("../views/home_view.js");
class HomeController {
constructor() {
- topNavigation.activate('home');
- topNavigation.setTitle('Home');
+ topNavigation.activate("home");
+ topNavigation.setTitle("Home");
this._homeView = new HomeView({
name: api.getName(),
version: config.meta.version,
buildDate: config.meta.buildDate,
- canListSnapshots: api.hasPrivilege('snapshots:list'),
- canListPosts: api.hasPrivilege('posts:list'),
+ canListSnapshots: api.hasPrivilege("snapshots:list"),
+ canListPosts: api.hasPrivilege("posts:list"),
});
- Info.get()
- .then(info => {
+ Info.get().then(
+ (info) => {
this._homeView.setStats({
diskUsage: info.diskUsage,
postCount: info.postCount,
@@ -31,7 +31,8 @@ class HomeController {
featuringTime: info.featuringTime,
});
},
- error => this._homeView.showError(error.message));
+ (error) => this._homeView.showError(error.message)
+ );
}
showSuccess(message) {
@@ -43,8 +44,8 @@ class HomeController {
}
}
-module.exports = router => {
+module.exports = (router) => {
router.enter([], (ctx, next) => {
ctx.controller = new HomeController();
});
-}
+};
diff --git a/client/js/controllers/not_found_controller.js b/client/js/controllers/not_found_controller.js
index 8ceeef0e..c99e3087 100644
--- a/client/js/controllers/not_found_controller.js
+++ b/client/js/controllers/not_found_controller.js
@@ -1,18 +1,18 @@
-'use strict';
+"use strict";
-const topNavigation = require('../models/top_navigation.js');
-const NotFoundView = require('../views/not_found_view.js');
+const topNavigation = require("../models/top_navigation.js");
+const NotFoundView = require("../views/not_found_view.js");
class NotFoundController {
constructor(path) {
- topNavigation.activate('');
- topNavigation.setTitle('Not found');
+ topNavigation.activate("");
+ topNavigation.setTitle("Not found");
this._notFoundView = new NotFoundView(path);
}
}
-module.exports = router => {
+module.exports = (router) => {
router.enter(null, (ctx, next) => {
ctx.controller = new NotFoundController(ctx.canonicalPath);
});
-}
+};
diff --git a/client/js/controllers/page_controller.js b/client/js/controllers/page_controller.js
index ae55804f..d3ee7e93 100644
--- a/client/js/controllers/page_controller.js
+++ b/client/js/controllers/page_controller.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const settings = require('../models/settings.js');
-const EndlessPageView = require('../views/endless_page_view.js');
-const ManualPageView = require('../views/manual_page_view.js');
+const settings = require("../models/settings.js");
+const EndlessPageView = require("../views/endless_page_view.js");
+const ManualPageView = require("../views/manual_page_view.js");
class PageController {
constructor(ctx) {
diff --git a/client/js/controllers/password_reset_controller.js b/client/js/controllers/password_reset_controller.js
index e0a9801f..aa66c03f 100644
--- a/client/js/controllers/password_reset_controller.js
+++ b/client/js/controllers/password_reset_controller.js
@@ -1,19 +1,20 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const topNavigation = require('../models/top_navigation.js');
-const PasswordResetView = require('../views/password_reset_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const topNavigation = require("../models/top_navigation.js");
+const PasswordResetView = require("../views/password_reset_view.js");
class PasswordResetController {
constructor() {
- topNavigation.activate('login');
- topNavigation.setTitle('Password reminder');
+ topNavigation.activate("login");
+ topNavigation.setTitle("Password reminder");
this._passwordResetView = new PasswordResetView();
- this._passwordResetView.addEventListener(
- 'submit', e => this._evtReset(e));
+ this._passwordResetView.addEventListener("submit", (e) =>
+ this._evtReset(e)
+ );
}
_evtReset(e) {
@@ -21,15 +22,20 @@ class PasswordResetController {
this._passwordResetView.disableForm();
api.forget();
api.logout();
- api.get(uri.formatApiLink('password-reset', e.detail.userNameOrEmail))
- .then(() => {
+ api.get(
+ uri.formatApiLink("password-reset", e.detail.userNameOrEmail)
+ ).then(
+ () => {
this._passwordResetView.showSuccess(
- 'E-mail has been sent. To finish the procedure, ' +
- 'please click the link it contains.');
- }, error => {
+ "E-mail has been sent. To finish the procedure, " +
+ "please click the link it contains."
+ );
+ },
+ (error) => {
this._passwordResetView.showError(error.message);
this._passwordResetView.enableForm();
- });
+ }
+ );
}
}
@@ -38,26 +44,30 @@ class PasswordResetFinishController {
api.forget();
api.logout();
let password = null;
- api.post(uri.formatApiLink('password-reset', name), {token: token})
- .then(response => {
+ api.post(uri.formatApiLink("password-reset", name), { token: token })
+ .then((response) => {
password = response.password;
return api.login(name, password, false);
- }).then(() => {
- const ctx = router.show(uri.formatClientLink());
- ctx.controller.showSuccess('New password: ' + password);
- }, error => {
- const ctx = router.show(uri.formatClientLink());
- ctx.controller.showError(error.message);
- });
+ })
+ .then(
+ () => {
+ const ctx = router.show(uri.formatClientLink());
+ ctx.controller.showSuccess("New password: " + password);
+ },
+ (error) => {
+ const ctx = router.show(uri.formatClientLink());
+ ctx.controller.showError(error.message);
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['password-reset'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["password-reset"], (ctx, next) => {
ctx.controller = new PasswordResetController();
});
- router.enter(['password-reset', ':descriptor'], (ctx, next) => {
- const [name, token] = ctx.parameters.descriptor.split(':', 2);
+ router.enter(["password-reset", ":descriptor"], (ctx, next) => {
+ const [name, token] = ctx.parameters.descriptor.split(":", 2);
ctx.controller = new PasswordResetFinishController(name, token);
});
};
diff --git a/client/js/controllers/pool_categories_controller.js b/client/js/controllers/pool_categories_controller.js
index 4c2c3b7a..4db725ca 100644
--- a/client/js/controllers/pool_categories_controller.js
+++ b/client/js/controllers/pool_categories_controller.js
@@ -1,57 +1,69 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const pools = require('../pools.js');
-const PoolCategoryList = require('../models/pool_category_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PoolCategoriesView = require('../views/pool_categories_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const pools = require("../pools.js");
+const PoolCategoryList = require("../models/pool_category_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PoolCategoriesView = require("../views/pool_categories_view.js");
+const EmptyView = require("../views/empty_view.js");
class PoolCategoriesController {
constructor() {
- if (!api.hasPrivilege('poolCategories:list')) {
+ if (!api.hasPrivilege("poolCategories:list")) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to view pool categories.');
+ "You don't have privileges to view pool categories."
+ );
return;
}
- topNavigation.activate('pools');
- topNavigation.setTitle('Listing pools');
- PoolCategoryList.get().then(response => {
- this._poolCategories = response.results;
- this._view = new PoolCategoriesView({
- poolCategories: this._poolCategories,
- canEditName: api.hasPrivilege('poolCategories:edit:name'),
- canEditColor: api.hasPrivilege('poolCategories:edit:color'),
- canDelete: api.hasPrivilege('poolCategories:delete'),
- canCreate: api.hasPrivilege('poolCategories:create'),
- canSetDefault: api.hasPrivilege('poolCategories:setDefault'),
- });
- this._view.addEventListener('submit', e => this._evtSubmit(e));
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ topNavigation.activate("pools");
+ topNavigation.setTitle("Listing pools");
+ PoolCategoryList.get().then(
+ (response) => {
+ this._poolCategories = response.results;
+ this._view = new PoolCategoriesView({
+ poolCategories: this._poolCategories,
+ canEditName: api.hasPrivilege("poolCategories:edit:name"),
+ canEditColor: api.hasPrivilege(
+ "poolCategories:edit:color"
+ ),
+ canDelete: api.hasPrivilege("poolCategories:delete"),
+ canCreate: api.hasPrivilege("poolCategories:create"),
+ canSetDefault: api.hasPrivilege(
+ "poolCategories:setDefault"
+ ),
+ });
+ this._view.addEventListener("submit", (e) =>
+ this._evtSubmit(e)
+ );
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
+ }
+ );
}
_evtSubmit(e) {
this._view.clearMessages();
this._view.disableForm();
- this._poolCategories.save()
- .then(() => {
+ this._poolCategories.save().then(
+ () => {
pools.refreshCategoryColorMap();
this._view.enableForm();
- this._view.showSuccess('Changes saved.');
- }, error => {
+ this._view.showSuccess("Changes saved.");
+ },
+ (error) => {
this._view.enableForm();
this._view.showError(error.message);
- });
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['pool-categories'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["pool-categories"], (ctx, next) => {
ctx.controller = new PoolCategoriesController(ctx, next);
});
};
diff --git a/client/js/controllers/pool_controller.js b/client/js/controllers/pool_controller.js
index a06ba5c2..68441712 100644
--- a/client/js/controllers/pool_controller.js
+++ b/client/js/controllers/pool_controller.js
@@ -1,63 +1,76 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const Pool = require('../models/pool.js');
-const Post = require('../models/post.js');
-const PoolCategoryList = require('../models/pool_category_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PoolView = require('../views/pool_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const Pool = require("../models/pool.js");
+const Post = require("../models/post.js");
+const PoolCategoryList = require("../models/pool_category_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PoolView = require("../views/pool_view.js");
+const EmptyView = require("../views/empty_view.js");
class PoolController {
constructor(ctx, section) {
- if (!api.hasPrivilege('pools:view')) {
+ if (!api.hasPrivilege("pools:view")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view pools.');
+ this._view.showError("You don't have privileges to view pools.");
return;
}
Promise.all([
PoolCategoryList.get(),
- Pool.get(ctx.parameters.id)
- ]).then(responses => {
- const [poolCategoriesResponse, pool] = responses;
+ Pool.get(ctx.parameters.id),
+ ]).then(
+ (responses) => {
+ const [poolCategoriesResponse, pool] = responses;
- topNavigation.activate('pools');
- topNavigation.setTitle('Pool #' + pool.names[0]);
+ topNavigation.activate("pools");
+ topNavigation.setTitle("Pool #" + pool.names[0]);
- this._name = ctx.parameters.name;
- pool.addEventListener('change', e => this._evtSaved(e, section));
+ this._name = ctx.parameters.name;
+ pool.addEventListener("change", (e) =>
+ this._evtSaved(e, section)
+ );
- const categories = {};
- for (let category of poolCategoriesResponse.results) {
- categories[category.name] = category.name;
+ const categories = {};
+ for (let category of poolCategoriesResponse.results) {
+ categories[category.name] = category.name;
+ }
+
+ this._view = new PoolView({
+ pool: pool,
+ section: section,
+ canEditAnything: api.hasPrivilege("pools:edit"),
+ canEditNames: api.hasPrivilege("pools:edit:names"),
+ canEditCategory: api.hasPrivilege("pools:edit:category"),
+ canEditDescription: api.hasPrivilege(
+ "pools:edit:description"
+ ),
+ canEditPosts: api.hasPrivilege("pools:edit:posts"),
+ canMerge: api.hasPrivilege("pools:merge"),
+ canDelete: api.hasPrivilege("pools:delete"),
+ categories: categories,
+ escapeColons: uri.escapeColons,
+ });
+
+ this._view.addEventListener("change", (e) =>
+ this._evtChange(e)
+ );
+ this._view.addEventListener("submit", (e) =>
+ this._evtUpdate(e)
+ );
+ this._view.addEventListener("merge", (e) => this._evtMerge(e));
+ this._view.addEventListener("delete", (e) =>
+ this._evtDelete(e)
+ );
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
}
-
- this._view = new PoolView({
- pool: pool,
- section: section,
- canEditAnything: api.hasPrivilege('pools:edit'),
- canEditNames: api.hasPrivilege('pools:edit:names'),
- canEditCategory: api.hasPrivilege('pools:edit:category'),
- canEditDescription: api.hasPrivilege('pools:edit:description'),
- canEditPosts: api.hasPrivilege('pools:edit:posts'),
- canMerge: api.hasPrivilege('pools:merge'),
- canDelete: api.hasPrivilege('pools:delete'),
- categories: categories,
- escapeColons: uri.escapeColons,
- });
-
- this._view.addEventListener('change', e => this._evtChange(e));
- this._view.addEventListener('submit', e => this._evtUpdate(e));
- this._view.addEventListener('merge', e => this._evtMerge(e));
- this._view.addEventListener('delete', e => this._evtDelete(e));
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ );
}
_evtChange(e) {
@@ -67,7 +80,11 @@ class PoolController {
_evtSaved(e, section) {
misc.disableExitConfirmation();
if (this._name !== e.detail.pool.names[0]) {
- router.replace(uri.formatClientLink('pool', e.detail.pool.id, section), null, false);
+ router.replace(
+ uri.formatClientLink("pool", e.detail.pool.id, section),
+ null,
+ false
+ );
}
}
@@ -86,62 +103,74 @@ class PoolController {
if (e.detail.posts !== undefined) {
e.detail.pool.posts.clear();
for (let postId of e.detail.posts) {
- e.detail.pool.posts.add(Post.fromResponse({id: parseInt(postId)}));
+ e.detail.pool.posts.add(
+ Post.fromResponse({ id: parseInt(postId) })
+ );
}
}
- e.detail.pool.save().then(() => {
- this._view.showSuccess('Pool saved.');
- this._view.enableForm();
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ e.detail.pool.save().then(
+ () => {
+ this._view.showSuccess("Pool saved.");
+ this._view.enableForm();
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
_evtMerge(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.pool
- .merge(e.detail.targetPoolId, e.detail.addAlias)
- .then(() => {
- this._view.showSuccess('Pool merged.');
+ e.detail.pool.merge(e.detail.targetPoolId, e.detail.addAlias).then(
+ () => {
+ this._view.showSuccess("Pool merged.");
this._view.enableForm();
router.replace(
uri.formatClientLink(
- 'pool', e.detail.targetPoolId, 'merge'),
+ "pool",
+ e.detail.targetPoolId,
+ "merge"
+ ),
null,
- false);
- }, error => {
+ false
+ );
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
_evtDelete(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.pool.delete()
- .then(() => {
- const ctx = router.show(uri.formatClientLink('pools'));
- ctx.controller.showSuccess('Pool deleted.');
- }, error => {
+ e.detail.pool.delete().then(
+ () => {
+ const ctx = router.show(uri.formatClientLink("pools"));
+ ctx.controller.showSuccess("Pool deleted.");
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['pool', ':id', 'edit'], (ctx, next) => {
- ctx.controller = new PoolController(ctx, 'edit');
+module.exports = (router) => {
+ router.enter(["pool", ":id", "edit"], (ctx, next) => {
+ ctx.controller = new PoolController(ctx, "edit");
});
- router.enter(['pool', ':id', 'merge'], (ctx, next) => {
- ctx.controller = new PoolController(ctx, 'merge');
+ router.enter(["pool", ":id", "merge"], (ctx, next) => {
+ ctx.controller = new PoolController(ctx, "merge");
});
- router.enter(['pool', ':id', 'delete'], (ctx, next) => {
- ctx.controller = new PoolController(ctx, 'delete');
+ router.enter(["pool", ":id", "delete"], (ctx, next) => {
+ ctx.controller = new PoolController(ctx, "delete");
});
- router.enter(['pool', ':id'], (ctx, next) => {
- ctx.controller = new PoolController(ctx, 'summary');
+ router.enter(["pool", ":id"], (ctx, next) => {
+ ctx.controller = new PoolController(ctx, "summary");
});
};
diff --git a/client/js/controllers/pool_create_controller.js b/client/js/controllers/pool_create_controller.js
index 2b8e7e60..7140aac8 100644
--- a/client/js/controllers/pool_create_controller.js
+++ b/client/js/controllers/pool_create_controller.js
@@ -1,58 +1,65 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const PoolCategoryList = require('../models/pool_category_list.js');
-const PoolCreateView = require('../views/pool_create_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const PoolCategoryList = require("../models/pool_category_list.js");
+const PoolCreateView = require("../views/pool_create_view.js");
+const EmptyView = require("../views/empty_view.js");
class PoolCreateController {
constructor(ctx) {
- if (!api.hasPrivilege('pools:create')) {
+ if (!api.hasPrivilege("pools:create")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to create pools.');
+ this._view.showError("You don't have privileges to create pools.");
return;
}
- PoolCategoryList.get().then(poolCategoriesResponse => {
- const categories = {};
- for (let category of poolCategoriesResponse.results) {
- categories[category.name] = category.name;
+ PoolCategoryList.get().then(
+ (poolCategoriesResponse) => {
+ const categories = {};
+ for (let category of poolCategoriesResponse.results) {
+ categories[category.name] = category.name;
+ }
+
+ this._view = new PoolCreateView({
+ canCreate: api.hasPrivilege("pools:create"),
+ categories: categories,
+ escapeColons: uri.escapeColons,
+ });
+
+ this._view.addEventListener("submit", (e) =>
+ this._evtCreate(e)
+ );
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
}
-
- this._view = new PoolCreateView({
- canCreate: api.hasPrivilege('pools:create'),
- categories: categories,
- escapeColons: uri.escapeColons,
- });
-
- this._view.addEventListener('submit', e => this._evtCreate(e));
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ );
}
_evtCreate(e) {
this._view.clearMessages();
this._view.disableForm();
- api.post(uri.formatApiLink('pool'), e.detail)
- .then(() => {
+ api.post(uri.formatApiLink("pool"), e.detail).then(
+ () => {
this._view.clearMessages();
misc.disableExitConfirmation();
- const ctx = router.show(uri.formatClientLink('pools'));
- ctx.controller.showSuccess('Pool created.');
- }, error => {
+ const ctx = router.show(uri.formatClientLink("pools"));
+ ctx.controller.showSuccess("Pool created.");
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['pool', 'create'], (ctx, next) => {
- ctx.controller = new PoolCreateController(ctx, 'create');
+module.exports = (router) => {
+ router.enter(["pool", "create"], (ctx, next) => {
+ ctx.controller = new PoolCreateController(ctx, "create");
});
};
diff --git a/client/js/controllers/pool_list_controller.js b/client/js/controllers/pool_list_controller.js
index dff649f9..91d655c5 100644
--- a/client/js/controllers/pool_list_controller.js
+++ b/client/js/controllers/pool_list_controller.js
@@ -1,47 +1,51 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const PoolList = require('../models/pool_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PageController = require('../controllers/page_controller.js');
-const PoolsHeaderView = require('../views/pools_header_view.js');
-const PoolsPageView = require('../views/pools_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const PoolList = require("../models/pool_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PageController = require("../controllers/page_controller.js");
+const PoolsHeaderView = require("../views/pools_header_view.js");
+const PoolsPageView = require("../views/pools_page_view.js");
+const EmptyView = require("../views/empty_view.js");
const fields = [
- 'id',
- 'names',
- 'posts',
- 'creationTime',
- 'postCount',
- 'category'
+ "id",
+ "names",
+ "posts",
+ "creationTime",
+ "postCount",
+ "category",
];
class PoolListController {
constructor(ctx) {
this._pageController = new PageController();
- if (!api.hasPrivilege('pools:list')) {
+ if (!api.hasPrivilege("pools:list")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view pools.');
+ this._view.showError("You don't have privileges to view pools.");
return;
}
this._ctx = ctx;
- topNavigation.activate('pools');
- topNavigation.setTitle('Listing pools');
+ topNavigation.activate("pools");
+ topNavigation.setTitle("Listing pools");
this._headerView = new PoolsHeaderView({
hostNode: this._pageController.view.pageHeaderHolderNode,
parameters: ctx.parameters,
- canCreate: api.hasPrivilege('pools:create'),
- canEditPoolCategories: api.hasPrivilege('poolCategories:edit'),
+ canCreate: api.hasPrivilege("pools:create"),
+ canEditPoolCategories: api.hasPrivilege("poolCategories:edit"),
});
this._headerView.addEventListener(
- 'submit', e => this._evtSubmit(e), 'navigate', e => this._evtNavigate(e));
+ "submit",
+ (e) => this._evtSubmit(e),
+ "navigate",
+ (e) => this._evtNavigate(e)
+ );
this._syncPageController();
}
@@ -57,24 +61,27 @@ class PoolListController {
_evtSubmit(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.pool.save()
- .then(() => {
- this._installView(e.detail.pool, 'edit');
- this._view.showSuccess('Pool created.');
+ e.detail.pool.save().then(
+ () => {
+ this._installView(e.detail.pool, "edit");
+ this._view.showSuccess("Pool created.");
router.replace(
- uri.formatClientLink(
- 'pool', e.detail.pool.id, 'edit'),
+ uri.formatClientLink("pool", e.detail.pool.id, "edit"),
null,
- false);
- }, error => {
+ false
+ );
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
_evtNavigate(e) {
router.showNoDispatch(
- uri.formatClientLink('pools', e.detail.parameters));
+ uri.formatClientLink("pools", e.detail.parameters)
+ );
Object.assign(this._ctx.parameters, e.detail.parameters);
this._syncPageController();
}
@@ -84,25 +91,29 @@ class PoolListController {
parameters: this._ctx.parameters,
defaultLimit: 50,
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, this._ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('pools', parameters);
+ const parameters = Object.assign({}, this._ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("pools", parameters);
},
requestPage: (offset, limit) => {
return PoolList.search(
- this._ctx.parameters.query, offset, limit, fields);
+ this._ctx.parameters.query,
+ offset,
+ limit,
+ fields
+ );
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
return new PoolsPageView(pageCtx);
},
});
}
}
-module.exports = router => {
- router.enter(
- ['pools'],
- (ctx, next) => {
- ctx.controller = new PoolListController(ctx);
- });
+module.exports = (router) => {
+ router.enter(["pools"], (ctx, next) => {
+ ctx.controller = new PoolListController(ctx);
+ });
};
diff --git a/client/js/controllers/post_detail_controller.js b/client/js/controllers/post_detail_controller.js
index bd9c2e69..9bab1cea 100644
--- a/client/js/controllers/post_detail_controller.js
+++ b/client/js/controllers/post_detail_controller.js
@@ -1,28 +1,33 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const settings = require('../models/settings.js');
-const Post = require('../models/post.js');
-const PostList = require('../models/post_list.js');
-const PostDetailView = require('../views/post_detail_view.js');
-const BasePostController = require('./base_post_controller.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const settings = require("../models/settings.js");
+const Post = require("../models/post.js");
+const PostList = require("../models/post_list.js");
+const PostDetailView = require("../views/post_detail_view.js");
+const BasePostController = require("./base_post_controller.js");
+const EmptyView = require("../views/empty_view.js");
class PostDetailController extends BasePostController {
constructor(ctx, section) {
super(ctx);
- Post.get(ctx.parameters.id).then(post => {
- this._id = ctx.parameters.id;
- post.addEventListener('change', e => this._evtSaved(e, section));
- this._installView(post, section);
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ Post.get(ctx.parameters.id).then(
+ (post) => {
+ this._id = ctx.parameters.id;
+ post.addEventListener("change", (e) =>
+ this._evtSaved(e, section)
+ );
+ this._installView(post, section);
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
+ }
+ );
}
showSuccess(message) {
@@ -33,58 +38,68 @@ class PostDetailController extends BasePostController {
this._view = new PostDetailView({
post: post,
section: section,
- canMerge: api.hasPrivilege('posts:merge'),
+ canMerge: api.hasPrivilege("posts:merge"),
});
- this._view.addEventListener('select', e => this._evtSelect(e));
- this._view.addEventListener('merge', e => this._evtMerge(e));
+ this._view.addEventListener("select", (e) => this._evtSelect(e));
+ this._view.addEventListener("merge", (e) => this._evtMerge(e));
}
_evtSelect(e) {
this._view.clearMessages();
this._view.disableForm();
- Post.get(e.detail.postId).then(post => {
- this._view.selectPost(post);
- this._view.enableForm();
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ Post.get(e.detail.postId).then(
+ (post) => {
+ this._view.selectPost(post);
+ this._view.enableForm();
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
_evtSaved(e, section) {
misc.disableExitConfirmation();
if (this._id !== e.detail.post.id) {
router.replace(
- uri.formatClientLink('post', e.detail.post.id, section),
+ uri.formatClientLink("post", e.detail.post.id, section),
null,
- false);
+ false
+ );
}
}
_evtMerge(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.post.merge(e.detail.targetPost.id, e.detail.useOldContent)
- .then(() => {
- this._installView(e.detail.post, 'merge');
- this._view.showSuccess('Post merged.');
- router.replace(
- uri.formatClientLink(
- 'post', e.detail.targetPost.id, 'merge'),
- null,
- false);
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ e.detail.post
+ .merge(e.detail.targetPost.id, e.detail.useOldContent)
+ .then(
+ () => {
+ this._installView(e.detail.post, "merge");
+ this._view.showSuccess("Post merged.");
+ router.replace(
+ uri.formatClientLink(
+ "post",
+ e.detail.targetPost.id,
+ "merge"
+ ),
+ null,
+ false
+ );
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
}
-module.exports = router => {
- router.enter(
- ['post', ':id', 'merge'],
- (ctx, next) => {
- ctx.controller = new PostDetailController(ctx, 'merge');
- });
+module.exports = (router) => {
+ router.enter(["post", ":id", "merge"], (ctx, next) => {
+ ctx.controller = new PostDetailController(ctx, "merge");
+ });
};
diff --git a/client/js/controllers/post_list_controller.js b/client/js/controllers/post_list_controller.js
index 357386c8..941ea9de 100644
--- a/client/js/controllers/post_list_controller.js
+++ b/client/js/controllers/post_list_controller.js
@@ -1,48 +1,56 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const settings = require('../models/settings.js');
-const uri = require('../util/uri.js');
-const PostList = require('../models/post_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PageController = require('../controllers/page_controller.js');
-const PostsHeaderView = require('../views/posts_header_view.js');
-const PostsPageView = require('../views/posts_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const settings = require("../models/settings.js");
+const uri = require("../util/uri.js");
+const PostList = require("../models/post_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PageController = require("../controllers/page_controller.js");
+const PostsHeaderView = require("../views/posts_header_view.js");
+const PostsPageView = require("../views/posts_page_view.js");
+const EmptyView = require("../views/empty_view.js");
const fields = [
- 'id', 'thumbnailUrl', 'type', 'safety',
- 'score', 'favoriteCount', 'commentCount', 'tags', 'version'
+ "id",
+ "thumbnailUrl",
+ "type",
+ "safety",
+ "score",
+ "favoriteCount",
+ "commentCount",
+ "tags",
+ "version",
];
class PostListController {
constructor(ctx) {
this._pageController = new PageController();
- if (!api.hasPrivilege('posts:list')) {
+ if (!api.hasPrivilege("posts:list")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view posts.');
+ this._view.showError("You don't have privileges to view posts.");
return;
}
this._ctx = ctx;
- topNavigation.activate('posts');
- topNavigation.setTitle('Listing posts');
+ topNavigation.activate("posts");
+ topNavigation.setTitle("Listing posts");
this._headerView = new PostsHeaderView({
hostNode: this._pageController.view.pageHeaderHolderNode,
parameters: ctx.parameters,
enableSafety: api.safetyEnabled(),
- canBulkEditTags: api.hasPrivilege('posts:bulk-edit:tags'),
- canBulkEditSafety: api.hasPrivilege('posts:bulk-edit:safety'),
+ canBulkEditTags: api.hasPrivilege("posts:bulk-edit:tags"),
+ canBulkEditSafety: api.hasPrivilege("posts:bulk-edit:safety"),
bulkEdit: {
- tags: this._bulkEditTags
+ tags: this._bulkEditTags,
},
});
- this._headerView.addEventListener(
- 'navigate', e => this._evtNavigate(e));
+ this._headerView.addEventListener("navigate", (e) =>
+ this._evtNavigate(e)
+ );
this._syncPageController();
}
@@ -52,33 +60,35 @@ class PostListController {
}
get _bulkEditTags() {
- return (this._ctx.parameters.tag || '').split(/\s+/).filter(s => s);
+ return (this._ctx.parameters.tag || "").split(/\s+/).filter((s) => s);
}
_evtNavigate(e) {
router.showNoDispatch(
- uri.formatClientLink('posts', e.detail.parameters));
+ uri.formatClientLink("posts", e.detail.parameters)
+ );
Object.assign(this._ctx.parameters, e.detail.parameters);
this._syncPageController();
}
_evtTag(e) {
Promise.all(
- this._bulkEditTags.map(tag => e.detail.post.tags.addByName(tag)))
+ this._bulkEditTags.map((tag) => e.detail.post.tags.addByName(tag))
+ )
.then(e.detail.post.save())
- .catch(error => window.alert(error.message));
+ .catch((error) => window.alert(error.message));
}
_evtUntag(e) {
for (let tag of this._bulkEditTags) {
e.detail.post.tags.removeByName(tag);
}
- e.detail.post.save().catch(error => window.alert(error.message));
+ e.detail.post.save().catch((error) => window.alert(error.message));
}
_evtChangeSafety(e) {
e.detail.post.safety = e.detail.safety;
- e.detail.post.save().catch(error => window.alert(error.message));
+ e.detail.post.save().catch((error) => window.alert(error.message));
}
_syncPageController() {
@@ -86,39 +96,45 @@ class PostListController {
parameters: this._ctx.parameters,
defaultLimit: parseInt(settings.get().postsPerPage),
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, this._ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('posts', parameters);
+ const parameters = Object.assign({}, this._ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("posts", parameters);
},
requestPage: (offset, limit) => {
return PostList.search(
- this._ctx.parameters.query, offset, limit, fields);
+ this._ctx.parameters.query,
+ offset,
+ limit,
+ fields
+ );
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
Object.assign(pageCtx, {
- canViewPosts: api.hasPrivilege('posts:view'),
- canBulkEditTags: api.hasPrivilege('posts:bulk-edit:tags'),
- canBulkEditSafety:
- api.hasPrivilege('posts:bulk-edit:safety'),
+ canViewPosts: api.hasPrivilege("posts:view"),
+ canBulkEditTags: api.hasPrivilege("posts:bulk-edit:tags"),
+ canBulkEditSafety: api.hasPrivilege(
+ "posts:bulk-edit:safety"
+ ),
bulkEdit: {
tags: this._bulkEditTags,
},
});
const view = new PostsPageView(pageCtx);
- view.addEventListener('tag', e => this._evtTag(e));
- view.addEventListener('untag', e => this._evtUntag(e));
- view.addEventListener(
- 'changeSafety', e => this._evtChangeSafety(e));
+ view.addEventListener("tag", (e) => this._evtTag(e));
+ view.addEventListener("untag", (e) => this._evtUntag(e));
+ view.addEventListener("changeSafety", (e) =>
+ this._evtChangeSafety(e)
+ );
return view;
},
});
}
}
-module.exports = router => {
- router.enter(
- ['posts'],
- (ctx, next) => {
- ctx.controller = new PostListController(ctx);
- });
+module.exports = (router) => {
+ router.enter(["posts"], (ctx, next) => {
+ ctx.controller = new PostListController(ctx);
+ });
};
diff --git a/client/js/controllers/post_main_controller.js b/client/js/controllers/post_main_controller.js
index 924e918c..95cfdb52 100644
--- a/client/js/controllers/post_main_controller.js
+++ b/client/js/controllers/post_main_controller.js
@@ -1,16 +1,16 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const misc = require('../util/misc.js');
-const settings = require('../models/settings.js');
-const Comment = require('../models/comment.js');
-const Post = require('../models/post.js');
-const PostList = require('../models/post_list.js');
-const PostMainView = require('../views/post_main_view.js');
-const BasePostController = require('./base_post_controller.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const misc = require("../util/misc.js");
+const settings = require("../models/settings.js");
+const Comment = require("../models/comment.js");
+const Post = require("../models/post.js");
+const PostList = require("../models/post_list.js");
+const PostMainView = require("../views/post_main_view.js");
+const BasePostController = require("./base_post_controller.js");
+const EmptyView = require("../views/empty_view.js");
class PostMainController extends BasePostController {
constructor(ctx, editMode) {
@@ -21,74 +21,107 @@ class PostMainController extends BasePostController {
Post.get(ctx.parameters.id),
PostList.getAround(
ctx.parameters.id,
- parameters ? parameters.query : null),
- ]).then(responses => {
- const [post, aroundResponse] = responses;
+ parameters ? parameters.query : null
+ ),
+ ]).then(
+ (responses) => {
+ const [post, aroundResponse] = responses;
- // remove junk from query, but save it into history so that it can
- // be still accessed after history navigation / page refresh
- if (parameters.query) {
- ctx.state.parameters = parameters;
- const url = editMode ?
- uri.formatClientLink('post', ctx.parameters.id, 'edit') :
- uri.formatClientLink('post', ctx.parameters.id);
- router.replace(url, ctx.state, false);
+ // remove junk from query, but save it into history so that it can
+ // be still accessed after history navigation / page refresh
+ if (parameters.query) {
+ ctx.state.parameters = parameters;
+ const url = editMode
+ ? uri.formatClientLink(
+ "post",
+ ctx.parameters.id,
+ "edit"
+ )
+ : uri.formatClientLink("post", ctx.parameters.id);
+ router.replace(url, ctx.state, false);
+ }
+
+ this._post = post;
+ this._view = new PostMainView({
+ post: post,
+ editMode: editMode,
+ prevPostId: aroundResponse.prev
+ ? aroundResponse.prev.id
+ : null,
+ nextPostId: aroundResponse.next
+ ? aroundResponse.next.id
+ : null,
+ canEditPosts: api.hasPrivilege("posts:edit"),
+ canDeletePosts: api.hasPrivilege("posts:delete"),
+ canFeaturePosts: api.hasPrivilege("posts:feature"),
+ canListComments: api.hasPrivilege("comments:list"),
+ canCreateComments: api.hasPrivilege("comments:create"),
+ parameters: parameters,
+ });
+
+ if (this._view.sidebarControl) {
+ this._view.sidebarControl.addEventListener(
+ "favorite",
+ (e) => this._evtFavoritePost(e)
+ );
+ this._view.sidebarControl.addEventListener(
+ "unfavorite",
+ (e) => this._evtUnfavoritePost(e)
+ );
+ this._view.sidebarControl.addEventListener("score", (e) =>
+ this._evtScorePost(e)
+ );
+ this._view.sidebarControl.addEventListener(
+ "fitModeChange",
+ (e) => this._evtFitModeChange(e)
+ );
+ this._view.sidebarControl.addEventListener("change", (e) =>
+ this._evtPostChange(e)
+ );
+ this._view.sidebarControl.addEventListener("submit", (e) =>
+ this._evtUpdatePost(e)
+ );
+ this._view.sidebarControl.addEventListener(
+ "feature",
+ (e) => this._evtFeaturePost(e)
+ );
+ this._view.sidebarControl.addEventListener("delete", (e) =>
+ this._evtDeletePost(e)
+ );
+ this._view.sidebarControl.addEventListener("merge", (e) =>
+ this._evtMergePost(e)
+ );
+ }
+
+ if (this._view.commentControl) {
+ this._view.commentControl.addEventListener("change", (e) =>
+ this._evtCommentChange(e)
+ );
+ this._view.commentControl.addEventListener("submit", (e) =>
+ this._evtCreateComment(e)
+ );
+ }
+
+ if (this._view.commentListControl) {
+ this._view.commentListControl.addEventListener(
+ "submit",
+ (e) => this._evtUpdateComment(e)
+ );
+ this._view.commentListControl.addEventListener(
+ "score",
+ (e) => this._evtScoreComment(e)
+ );
+ this._view.commentListControl.addEventListener(
+ "delete",
+ (e) => this._evtDeleteComment(e)
+ );
+ }
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
}
-
- this._post = post;
- this._view = new PostMainView({
- post: post,
- editMode: editMode,
- prevPostId: aroundResponse.prev ? aroundResponse.prev.id : null,
- nextPostId: aroundResponse.next ? aroundResponse.next.id : null,
- canEditPosts: api.hasPrivilege('posts:edit'),
- canDeletePosts: api.hasPrivilege('posts:delete'),
- canFeaturePosts: api.hasPrivilege('posts:feature'),
- canListComments: api.hasPrivilege('comments:list'),
- canCreateComments: api.hasPrivilege('comments:create'),
- parameters: parameters,
- });
-
- if (this._view.sidebarControl) {
- this._view.sidebarControl.addEventListener(
- 'favorite', e => this._evtFavoritePost(e));
- this._view.sidebarControl.addEventListener(
- 'unfavorite', e => this._evtUnfavoritePost(e));
- this._view.sidebarControl.addEventListener(
- 'score', e => this._evtScorePost(e));
- this._view.sidebarControl.addEventListener(
- 'fitModeChange', e => this._evtFitModeChange(e));
- this._view.sidebarControl.addEventListener(
- 'change', e => this._evtPostChange(e));
- this._view.sidebarControl.addEventListener(
- 'submit', e => this._evtUpdatePost(e));
- this._view.sidebarControl.addEventListener(
- 'feature', e => this._evtFeaturePost(e));
- this._view.sidebarControl.addEventListener(
- 'delete', e => this._evtDeletePost(e));
- this._view.sidebarControl.addEventListener(
- 'merge', e => this._evtMergePost(e));
- }
-
- if (this._view.commentControl) {
- this._view.commentControl.addEventListener(
- 'change', e => this._evtCommentChange(e));
- this._view.commentControl.addEventListener(
- 'submit', e => this._evtCreateComment(e));
- }
-
- if (this._view.commentListControl) {
- this._view.commentListControl.addEventListener(
- 'submit', e => this._evtUpdateComment(e));
- this._view.commentListControl.addEventListener(
- 'score', e => this._evtScoreComment(e));
- this._view.commentListControl.addEventListener(
- 'delete', e => this._evtDeleteComment(e));
- }
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ );
}
_evtFitModeChange(e) {
@@ -100,32 +133,36 @@ class PostMainController extends BasePostController {
_evtFeaturePost(e) {
this._view.sidebarControl.disableForm();
this._view.sidebarControl.clearMessages();
- e.detail.post.feature()
- .then(() => {
- this._view.sidebarControl.showSuccess('Post featured.');
+ e.detail.post.feature().then(
+ () => {
+ this._view.sidebarControl.showSuccess("Post featured.");
this._view.sidebarControl.enableForm();
- }, error => {
+ },
+ (error) => {
this._view.sidebarControl.showError(error.message);
this._view.sidebarControl.enableForm();
- });
+ }
+ );
}
_evtMergePost(e) {
- router.show(uri.formatClientLink('post', e.detail.post.id, 'merge'));
+ router.show(uri.formatClientLink("post", e.detail.post.id, "merge"));
}
_evtDeletePost(e) {
this._view.sidebarControl.disableForm();
this._view.sidebarControl.clearMessages();
- e.detail.post.delete()
- .then(() => {
+ e.detail.post.delete().then(
+ () => {
misc.disableExitConfirmation();
- const ctx = router.show(uri.formatClientLink('posts'));
- ctx.controller.showSuccess('Post deleted.');
- }, error => {
+ const ctx = router.show(uri.formatClientLink("posts"));
+ ctx.controller.showSuccess("Post deleted.");
+ },
+ (error) => {
this._view.sidebarControl.showError(error.message);
this._view.sidebarControl.enableForm();
- });
+ }
+ );
}
_evtUpdatePost(e) {
@@ -150,15 +187,17 @@ class PostMainController extends BasePostController {
if (e.detail.source !== undefined) {
post.source = e.detail.source;
}
- post.save()
- .then(() => {
- this._view.sidebarControl.showSuccess('Post saved.');
+ post.save().then(
+ () => {
+ this._view.sidebarControl.showSuccess("Post saved.");
this._view.sidebarControl.enableForm();
misc.disableExitConfirmation();
- }, error => {
+ },
+ (error) => {
this._view.sidebarControl.showError(error.message);
this._view.sidebarControl.enableForm();
- });
+ }
+ );
}
_evtPostChange(e) {
@@ -173,79 +212,82 @@ class PostMainController extends BasePostController {
this._view.commentControl.disableForm();
const comment = Comment.create(this._post.id);
comment.text = e.detail.text;
- comment.save()
- .then(() => {
+ comment.save().then(
+ () => {
this._post.comments.add(comment);
this._view.commentControl.exitEditMode();
this._view.commentControl.enableForm();
misc.disableExitConfirmation();
- }, error => {
+ },
+ (error) => {
this._view.commentControl.showError(error.message);
this._view.commentControl.enableForm();
- });
+ }
+ );
}
_evtUpdateComment(e) {
// TODO: disable form
e.detail.comment.text = e.detail.text;
- e.detail.comment.save()
- .catch(error => {
- e.detail.target.showError(error.message);
- // TODO: enable form
- });
+ e.detail.comment.save().catch((error) => {
+ e.detail.target.showError(error.message);
+ // TODO: enable form
+ });
}
_evtScoreComment(e) {
- e.detail.comment.setScore(e.detail.score)
- .catch(error => window.alert(error.message));
+ e.detail.comment
+ .setScore(e.detail.score)
+ .catch((error) => window.alert(error.message));
}
_evtDeleteComment(e) {
- e.detail.comment.delete()
- .catch(error => window.alert(error.message));
+ e.detail.comment
+ .delete()
+ .catch((error) => window.alert(error.message));
}
_evtScorePost(e) {
- if (!api.hasPrivilege('posts:score')) {
+ if (!api.hasPrivilege("posts:score")) {
return;
}
- e.detail.post.setScore(e.detail.score)
- .catch(error => window.alert(error.message));
+ e.detail.post
+ .setScore(e.detail.score)
+ .catch((error) => window.alert(error.message));
}
_evtFavoritePost(e) {
- if (!api.hasPrivilege('posts:favorite')) {
+ if (!api.hasPrivilege("posts:favorite")) {
return;
}
- e.detail.post.addToFavorites()
- .catch(error => window.alert(error.message));
+ e.detail.post
+ .addToFavorites()
+ .catch((error) => window.alert(error.message));
}
_evtUnfavoritePost(e) {
- if (!api.hasPrivilege('posts:favorite')) {
+ if (!api.hasPrivilege("posts:favorite")) {
return;
}
- e.detail.post.removeFromFavorites()
- .catch(error => window.alert(error.message));
+ e.detail.post
+ .removeFromFavorites()
+ .catch((error) => window.alert(error.message));
}
}
-module.exports = router => {
- router.enter(['post', ':id', 'edit'],
- (ctx, next) => {
- // restore parameters from history state
- if (ctx.state.parameters) {
- Object.assign(ctx.parameters, ctx.state.parameters);
- }
- ctx.controller = new PostMainController(ctx, true);
- });
- router.enter(
- ['post', ':id'],
- (ctx, next) => {
- // restore parameters from history state
- if (ctx.state.parameters) {
- Object.assign(ctx.parameters, ctx.state.parameters);
- }
- ctx.controller = new PostMainController(ctx, false);
- });
+module.exports = (router) => {
+ router.enter(["post", ":id", "edit"], (ctx, next) => {
+ // restore parameters from history state
+ if (ctx.state.parameters) {
+ Object.assign(ctx.parameters, ctx.state.parameters);
+ }
+ ctx.controller = new PostMainController(ctx, true);
+ });
+ router.enter(["post", ":id"], (ctx, next) => {
+ // restore parameters from history state
+ if (ctx.state.parameters) {
+ Object.assign(ctx.parameters, ctx.state.parameters);
+ }
+ ctx.controller = new PostMainController(ctx, false);
+ });
};
diff --git a/client/js/controllers/post_upload_controller.js b/client/js/controllers/post_upload_controller.js
index f16aaf02..d317be59 100644
--- a/client/js/controllers/post_upload_controller.js
+++ b/client/js/controllers/post_upload_controller.js
@@ -1,40 +1,40 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const router = require('../router.js');
-const uri = require('../util/uri.js');
-const misc = require('../util/misc.js');
-const progress = require('../util/progress.js');
-const topNavigation = require('../models/top_navigation.js');
-const Post = require('../models/post.js');
-const Tag = require('../models/tag.js');
-const PostUploadView = require('../views/post_upload_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const router = require("../router.js");
+const uri = require("../util/uri.js");
+const misc = require("../util/misc.js");
+const progress = require("../util/progress.js");
+const topNavigation = require("../models/top_navigation.js");
+const Post = require("../models/post.js");
+const Tag = require("../models/tag.js");
+const PostUploadView = require("../views/post_upload_view.js");
+const EmptyView = require("../views/empty_view.js");
const genericErrorMessage =
- 'One of the posts needs your attention; ' +
+ "One of the posts needs your attention; " +
'click "resume upload" when you\'re ready.';
class PostUploadController {
constructor() {
this._lastCancellablePromise = null;
- if (!api.hasPrivilege('posts:create')) {
+ if (!api.hasPrivilege("posts:create")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to upload posts.');
+ this._view.showError("You don't have privileges to upload posts.");
return;
}
- topNavigation.activate('upload');
- topNavigation.setTitle('Upload');
+ topNavigation.activate("upload");
+ topNavigation.setTitle("Upload");
this._view = new PostUploadView({
- canUploadAnonymously: api.hasPrivilege('posts:create:anonymous'),
- canViewPosts: api.hasPrivilege('posts:view'),
+ canUploadAnonymously: api.hasPrivilege("posts:create:anonymous"),
+ canViewPosts: api.hasPrivilege("posts:view"),
enableSafety: api.safetyEnabled(),
});
- this._view.addEventListener('change', e => this._evtChange(e));
- this._view.addEventListener('submit', e => this._evtSubmit(e));
- this._view.addEventListener('cancel', e => this._evtCancel(e));
+ this._view.addEventListener("change", (e) => this._evtChange(e));
+ this._view.addEventListener("submit", (e) => this._evtSubmit(e));
+ this._view.addEventListener("cancel", (e) => this._evtCancel(e));
}
_evtChange(e) {
@@ -56,87 +56,109 @@ class PostUploadController {
this._view.disableForm();
this._view.clearMessages();
- e.detail.uploadables.reduce(
- (promise, uploadable) => promise.then(() => this._uploadSinglePost(
- uploadable, e.detail.skipDuplicates)),
- Promise.resolve())
- .then(() => {
- this._view.clearMessages();
- misc.disableExitConfirmation();
- const ctx = router.show(uri.formatClientLink('posts'));
- ctx.controller.showSuccess('Posts uploaded.');
- }, error => {
- if (error.uploadable) {
- if (error.similarPosts) {
- error.uploadable.lookalikes = error.similarPosts;
- this._view.updateUploadable(error.uploadable);
- this._view.showInfo(genericErrorMessage);
- this._view.showInfo(
- error.message, error.uploadable);
+ e.detail.uploadables
+ .reduce(
+ (promise, uploadable) =>
+ promise.then(() =>
+ this._uploadSinglePost(
+ uploadable,
+ e.detail.skipDuplicates
+ )
+ ),
+ Promise.resolve()
+ )
+ .then(
+ () => {
+ this._view.clearMessages();
+ misc.disableExitConfirmation();
+ const ctx = router.show(uri.formatClientLink("posts"));
+ ctx.controller.showSuccess("Posts uploaded.");
+ },
+ (error) => {
+ if (error.uploadable) {
+ if (error.similarPosts) {
+ error.uploadable.lookalikes = error.similarPosts;
+ this._view.updateUploadable(error.uploadable);
+ this._view.showInfo(genericErrorMessage);
+ this._view.showInfo(
+ error.message,
+ error.uploadable
+ );
+ } else {
+ this._view.showError(genericErrorMessage);
+ this._view.showError(
+ error.message,
+ error.uploadable
+ );
+ }
} else {
- this._view.showError(genericErrorMessage);
- this._view.showError(
- error.message, error.uploadable);
+ this._view.showError(error.message);
}
- } else {
- this._view.showError(error.message);
+ this._view.enableForm();
}
- this._view.enableForm();
- });
+ );
}
_uploadSinglePost(uploadable, skipDuplicates) {
progress.start();
let reverseSearchPromise = Promise.resolve();
if (!uploadable.lookalikesConfirmed) {
- reverseSearchPromise =
- Post.reverseSearch(uploadable.url || uploadable.file);
+ reverseSearchPromise = Post.reverseSearch(
+ uploadable.url || uploadable.file
+ );
}
this._lastCancellablePromise = reverseSearchPromise;
- return reverseSearchPromise.then(searchResult => {
- if (searchResult) {
- // notify about exact duplicate
- if (searchResult.exactPost) {
- if (skipDuplicates) {
- this._view.removeUploadable(uploadable);
- return Promise.resolve();
- } else {
- let error = new Error('Post already uploaded ' +
- `(@${searchResult.exactPost.id})`);
+ return reverseSearchPromise
+ .then((searchResult) => {
+ if (searchResult) {
+ // notify about exact duplicate
+ if (searchResult.exactPost) {
+ if (skipDuplicates) {
+ this._view.removeUploadable(uploadable);
+ return Promise.resolve();
+ } else {
+ let error = new Error(
+ "Post already uploaded " +
+ `(@${searchResult.exactPost.id})`
+ );
+ error.uploadable = uploadable;
+ return Promise.reject(error);
+ }
+ }
+
+ // notify about similar posts
+ if (searchResult.similarPosts.length) {
+ let error = new Error(
+ `Found ${searchResult.similarPosts.length} similar ` +
+ "posts.\nYou can resume or discard this upload."
+ );
error.uploadable = uploadable;
+ error.similarPosts = searchResult.similarPosts;
return Promise.reject(error);
}
}
- // notify about similar posts
- if (searchResult.similarPosts.length) {
- let error = new Error(
- `Found ${searchResult.similarPosts.length} similar ` +
- 'posts.\nYou can resume or discard this upload.');
- error.uploadable = uploadable;
- error.similarPosts = searchResult.similarPosts;
- return Promise.reject(error);
- }
- }
-
- // no duplicates, proceed with saving
- let post = this._uploadableToPost(uploadable);
- let savePromise = post.save(uploadable.anonymous)
- .then(() => {
+ // no duplicates, proceed with saving
+ let post = this._uploadableToPost(uploadable);
+ let savePromise = post.save(uploadable.anonymous).then(() => {
this._view.removeUploadable(uploadable);
return Promise.resolve();
});
- this._lastCancellablePromise = savePromise;
- return savePromise;
- }).then(result => {
- progress.done();
- return Promise.resolve(result);
- }, error => {
- error.uploadable = uploadable;
- progress.done();
- return Promise.reject(error);
- });
+ this._lastCancellablePromise = savePromise;
+ return savePromise;
+ })
+ .then(
+ (result) => {
+ progress.done();
+ return Promise.resolve(result);
+ },
+ (error) => {
+ error.uploadable = uploadable;
+ progress.done();
+ return Promise.reject(error);
+ }
+ );
}
_uploadableToPost(uploadable) {
@@ -159,8 +181,8 @@ class PostUploadController {
}
}
-module.exports = router => {
- router.enter(['upload'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["upload"], (ctx, next) => {
ctx.controller = new PostUploadController();
});
};
diff --git a/client/js/controllers/settings_controller.js b/client/js/controllers/settings_controller.js
index 7765143a..8cf2c1cd 100644
--- a/client/js/controllers/settings_controller.js
+++ b/client/js/controllers/settings_controller.js
@@ -1,28 +1,28 @@
-'use strict';
+"use strict";
-const settings = require('../models/settings.js');
-const topNavigation = require('../models/top_navigation.js');
-const SettingsView = require('../views/settings_view.js');
+const settings = require("../models/settings.js");
+const topNavigation = require("../models/top_navigation.js");
+const SettingsView = require("../views/settings_view.js");
class SettingsController {
constructor() {
- topNavigation.activate('settings');
- topNavigation.setTitle('Browsing settings');
+ topNavigation.activate("settings");
+ topNavigation.setTitle("Browsing settings");
this._view = new SettingsView({
settings: settings.get(),
});
- this._view.addEventListener('submit', e => this._evtSubmit(e));
+ this._view.addEventListener("submit", (e) => this._evtSubmit(e));
}
_evtSubmit(e) {
this._view.clearMessages();
settings.save(e.detail);
- this._view.showSuccess('Settings saved.');
+ this._view.showSuccess("Settings saved.");
}
}
-module.exports = router => {
- router.enter(['settings'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["settings"], (ctx, next) => {
ctx.controller = new SettingsController();
});
};
diff --git a/client/js/controllers/snapshots_controller.js b/client/js/controllers/snapshots_controller.js
index 0e9b773c..120b6cbc 100644
--- a/client/js/controllers/snapshots_controller.js
+++ b/client/js/controllers/snapshots_controller.js
@@ -1,41 +1,43 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const SnapshotList = require('../models/snapshot_list.js');
-const PageController = require('../controllers/page_controller.js');
-const topNavigation = require('../models/top_navigation.js');
-const SnapshotsPageView = require('../views/snapshots_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const SnapshotList = require("../models/snapshot_list.js");
+const PageController = require("../controllers/page_controller.js");
+const topNavigation = require("../models/top_navigation.js");
+const SnapshotsPageView = require("../views/snapshots_page_view.js");
+const EmptyView = require("../views/empty_view.js");
class SnapshotsController {
constructor(ctx) {
- if (!api.hasPrivilege('snapshots:list')) {
+ if (!api.hasPrivilege("snapshots:list")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view history.');
+ this._view.showError("You don't have privileges to view history.");
return;
}
- topNavigation.activate('');
- topNavigation.setTitle('History');
+ topNavigation.activate("");
+ topNavigation.setTitle("History");
this._pageController = new PageController();
this._pageController.run({
parameters: ctx.parameters,
defaultLimit: 25,
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('history', parameters);
+ const parameters = Object.assign({}, ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("history", parameters);
},
requestPage: (offset, limit) => {
- return SnapshotList.search('', offset, limit);
+ return SnapshotList.search("", offset, limit);
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
Object.assign(pageCtx, {
- canViewPosts: api.hasPrivilege('posts:view'),
- canViewUsers: api.hasPrivilege('users:view'),
- canViewTags: api.hasPrivilege('tags:view'),
+ canViewPosts: api.hasPrivilege("posts:view"),
+ canViewUsers: api.hasPrivilege("users:view"),
+ canViewTags: api.hasPrivilege("tags:view"),
});
return new SnapshotsPageView(pageCtx);
},
@@ -43,9 +45,8 @@ class SnapshotsController {
}
}
-module.exports = router => {
- router.enter(['history'],
- (ctx, next) => {
- ctx.controller = new SnapshotsController(ctx);
- });
+module.exports = (router) => {
+ router.enter(["history"], (ctx, next) => {
+ ctx.controller = new SnapshotsController(ctx);
+ });
};
diff --git a/client/js/controllers/tag_categories_controller.js b/client/js/controllers/tag_categories_controller.js
index 49600cf3..2470edbb 100644
--- a/client/js/controllers/tag_categories_controller.js
+++ b/client/js/controllers/tag_categories_controller.js
@@ -1,57 +1,67 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const tags = require('../tags.js');
-const TagCategoryList = require('../models/tag_category_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const TagCategoriesView = require('../views/tag_categories_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const tags = require("../tags.js");
+const TagCategoryList = require("../models/tag_category_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const TagCategoriesView = require("../views/tag_categories_view.js");
+const EmptyView = require("../views/empty_view.js");
class TagCategoriesController {
constructor() {
- if (!api.hasPrivilege('tagCategories:list')) {
+ if (!api.hasPrivilege("tagCategories:list")) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to view tag categories.');
+ "You don't have privileges to view tag categories."
+ );
return;
}
- topNavigation.activate('tags');
- topNavigation.setTitle('Listing tags');
- TagCategoryList.get().then(response => {
- this._tagCategories = response.results;
- this._view = new TagCategoriesView({
- tagCategories: this._tagCategories,
- canEditName: api.hasPrivilege('tagCategories:edit:name'),
- canEditColor: api.hasPrivilege('tagCategories:edit:color'),
- canDelete: api.hasPrivilege('tagCategories:delete'),
- canCreate: api.hasPrivilege('tagCategories:create'),
- canSetDefault: api.hasPrivilege('tagCategories:setDefault'),
- });
- this._view.addEventListener('submit', e => this._evtSubmit(e));
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ topNavigation.activate("tags");
+ topNavigation.setTitle("Listing tags");
+ TagCategoryList.get().then(
+ (response) => {
+ this._tagCategories = response.results;
+ this._view = new TagCategoriesView({
+ tagCategories: this._tagCategories,
+ canEditName: api.hasPrivilege("tagCategories:edit:name"),
+ canEditColor: api.hasPrivilege("tagCategories:edit:color"),
+ canDelete: api.hasPrivilege("tagCategories:delete"),
+ canCreate: api.hasPrivilege("tagCategories:create"),
+ canSetDefault: api.hasPrivilege(
+ "tagCategories:setDefault"
+ ),
+ });
+ this._view.addEventListener("submit", (e) =>
+ this._evtSubmit(e)
+ );
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
+ }
+ );
}
_evtSubmit(e) {
this._view.clearMessages();
this._view.disableForm();
- this._tagCategories.save()
- .then(() => {
+ this._tagCategories.save().then(
+ () => {
tags.refreshCategoryColorMap();
this._view.enableForm();
- this._view.showSuccess('Changes saved.');
- }, error => {
+ this._view.showSuccess("Changes saved.");
+ },
+ (error) => {
this._view.enableForm();
this._view.showError(error.message);
- });
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['tag-categories'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["tag-categories"], (ctx, next) => {
ctx.controller = new TagCategoriesController(ctx, next);
});
};
diff --git a/client/js/controllers/tag_controller.js b/client/js/controllers/tag_controller.js
index 71405dc8..0928b3ff 100644
--- a/client/js/controllers/tag_controller.js
+++ b/client/js/controllers/tag_controller.js
@@ -1,63 +1,80 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const Tag = require('../models/tag.js');
-const TagCategoryList = require('../models/tag_category_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const TagView = require('../views/tag_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const Tag = require("../models/tag.js");
+const TagCategoryList = require("../models/tag_category_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const TagView = require("../views/tag_view.js");
+const EmptyView = require("../views/empty_view.js");
class TagController {
constructor(ctx, section) {
- if (!api.hasPrivilege('tags:view')) {
+ if (!api.hasPrivilege("tags:view")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view tags.');
+ this._view.showError("You don't have privileges to view tags.");
return;
}
Promise.all([
TagCategoryList.get(),
Tag.get(ctx.parameters.name),
- ]).then(responses => {
- const [tagCategoriesResponse, tag] = responses;
+ ]).then(
+ (responses) => {
+ const [tagCategoriesResponse, tag] = responses;
- topNavigation.activate('tags');
- topNavigation.setTitle('Tag #' + tag.names[0]);
+ topNavigation.activate("tags");
+ topNavigation.setTitle("Tag #" + tag.names[0]);
- this._name = ctx.parameters.name;
- tag.addEventListener('change', e => this._evtSaved(e, section));
+ this._name = ctx.parameters.name;
+ tag.addEventListener("change", (e) =>
+ this._evtSaved(e, section)
+ );
- const categories = {};
- for (let category of tagCategoriesResponse.results) {
- categories[category.name] = category.name;
+ const categories = {};
+ for (let category of tagCategoriesResponse.results) {
+ categories[category.name] = category.name;
+ }
+
+ this._view = new TagView({
+ tag: tag,
+ section: section,
+ canEditAnything: api.hasPrivilege("tags:edit"),
+ canEditNames: api.hasPrivilege("tags:edit:names"),
+ canEditCategory: api.hasPrivilege("tags:edit:category"),
+ canEditImplications: api.hasPrivilege(
+ "tags:edit:implications"
+ ),
+ canEditSuggestions: api.hasPrivilege(
+ "tags:edit:suggestions"
+ ),
+ canEditDescription: api.hasPrivilege(
+ "tags:edit:description"
+ ),
+ canMerge: api.hasPrivilege("tags:merge"),
+ canDelete: api.hasPrivilege("tags:delete"),
+ categories: categories,
+ escapeColons: uri.escapeColons,
+ });
+
+ this._view.addEventListener("change", (e) =>
+ this._evtChange(e)
+ );
+ this._view.addEventListener("submit", (e) =>
+ this._evtUpdate(e)
+ );
+ this._view.addEventListener("merge", (e) => this._evtMerge(e));
+ this._view.addEventListener("delete", (e) =>
+ this._evtDelete(e)
+ );
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
}
-
- this._view = new TagView({
- tag: tag,
- section: section,
- canEditAnything: api.hasPrivilege('tags:edit'),
- canEditNames: api.hasPrivilege('tags:edit:names'),
- canEditCategory: api.hasPrivilege('tags:edit:category'),
- canEditImplications: api.hasPrivilege('tags:edit:implications'),
- canEditSuggestions: api.hasPrivilege('tags:edit:suggestions'),
- canEditDescription: api.hasPrivilege('tags:edit:description'),
- canMerge: api.hasPrivilege('tags:merge'),
- canDelete: api.hasPrivilege('tags:delete'),
- categories: categories,
- escapeColons: uri.escapeColons,
- });
-
- this._view.addEventListener('change', e => this._evtChange(e));
- this._view.addEventListener('submit', e => this._evtUpdate(e));
- this._view.addEventListener('merge', e => this._evtMerge(e));
- this._view.addEventListener('delete', e => this._evtDelete(e));
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ );
}
_evtChange(e) {
@@ -68,9 +85,10 @@ class TagController {
misc.disableExitConfirmation();
if (this._name !== e.detail.tag.names[0]) {
router.replace(
- uri.formatClientLink('tag', e.detail.tag.names[0], section),
+ uri.formatClientLink("tag", e.detail.tag.names[0], section),
null,
- false);
+ false
+ );
}
}
@@ -86,59 +104,69 @@ class TagController {
if (e.detail.description !== undefined) {
e.detail.tag.description = e.detail.description;
}
- e.detail.tag.save().then(() => {
- this._view.showSuccess('Tag saved.');
- this._view.enableForm();
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ e.detail.tag.save().then(
+ () => {
+ this._view.showSuccess("Tag saved.");
+ this._view.enableForm();
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
_evtMerge(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.tag
- .merge(e.detail.targetTagName, e.detail.addAlias)
- .then(() => {
- this._view.showSuccess('Tag merged.');
+ e.detail.tag.merge(e.detail.targetTagName, e.detail.addAlias).then(
+ () => {
+ this._view.showSuccess("Tag merged.");
this._view.enableForm();
router.replace(
uri.formatClientLink(
- 'tag', e.detail.targetTagName, 'merge'),
+ "tag",
+ e.detail.targetTagName,
+ "merge"
+ ),
null,
- false);
- }, error => {
+ false
+ );
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
_evtDelete(e) {
this._view.clearMessages();
this._view.disableForm();
- e.detail.tag.delete()
- .then(() => {
- const ctx = router.show(uri.formatClientLink('tags'));
- ctx.controller.showSuccess('Tag deleted.');
- }, error => {
+ e.detail.tag.delete().then(
+ () => {
+ const ctx = router.show(uri.formatClientLink("tags"));
+ ctx.controller.showSuccess("Tag deleted.");
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['tag', ':name', 'edit'], (ctx, next) => {
- ctx.controller = new TagController(ctx, 'edit');
+module.exports = (router) => {
+ router.enter(["tag", ":name", "edit"], (ctx, next) => {
+ ctx.controller = new TagController(ctx, "edit");
});
- router.enter(['tag', ':name', 'merge'], (ctx, next) => {
- ctx.controller = new TagController(ctx, 'merge');
+ router.enter(["tag", ":name", "merge"], (ctx, next) => {
+ ctx.controller = new TagController(ctx, "merge");
});
- router.enter(['tag', ':name', 'delete'], (ctx, next) => {
- ctx.controller = new TagController(ctx, 'delete');
+ router.enter(["tag", ":name", "delete"], (ctx, next) => {
+ ctx.controller = new TagController(ctx, "delete");
});
- router.enter(['tag', ':name'], (ctx, next) => {
- ctx.controller = new TagController(ctx, 'summary');
+ router.enter(["tag", ":name"], (ctx, next) => {
+ ctx.controller = new TagController(ctx, "summary");
});
};
diff --git a/client/js/controllers/tag_list_controller.js b/client/js/controllers/tag_list_controller.js
index 34ff8b24..010c5564 100644
--- a/client/js/controllers/tag_list_controller.js
+++ b/client/js/controllers/tag_list_controller.js
@@ -1,46 +1,47 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const TagList = require('../models/tag_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PageController = require('../controllers/page_controller.js');
-const TagsHeaderView = require('../views/tags_header_view.js');
-const TagsPageView = require('../views/tags_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const TagList = require("../models/tag_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PageController = require("../controllers/page_controller.js");
+const TagsHeaderView = require("../views/tags_header_view.js");
+const TagsPageView = require("../views/tags_page_view.js");
+const EmptyView = require("../views/empty_view.js");
const fields = [
- 'names',
- 'suggestions',
- 'implications',
- 'creationTime',
- 'usages',
- 'category'
+ "names",
+ "suggestions",
+ "implications",
+ "creationTime",
+ "usages",
+ "category",
];
class TagListController {
constructor(ctx) {
this._pageController = new PageController();
- if (!api.hasPrivilege('tags:list')) {
+ if (!api.hasPrivilege("tags:list")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view tags.');
+ this._view.showError("You don't have privileges to view tags.");
return;
}
this._ctx = ctx;
- topNavigation.activate('tags');
- topNavigation.setTitle('Listing tags');
+ topNavigation.activate("tags");
+ topNavigation.setTitle("Listing tags");
this._headerView = new TagsHeaderView({
hostNode: this._pageController.view.pageHeaderHolderNode,
parameters: ctx.parameters,
- canEditTagCategories: api.hasPrivilege('tagCategories:edit'),
+ canEditTagCategories: api.hasPrivilege("tagCategories:edit"),
});
- this._headerView.addEventListener(
- 'navigate', e => this._evtNavigate(e));
+ this._headerView.addEventListener("navigate", (e) =>
+ this._evtNavigate(e)
+ );
this._syncPageController();
}
@@ -55,7 +56,8 @@ class TagListController {
_evtNavigate(e) {
router.showNoDispatch(
- uri.formatClientLink('tags', e.detail.parameters));
+ uri.formatClientLink("tags", e.detail.parameters)
+ );
Object.assign(this._ctx.parameters, e.detail.parameters);
this._syncPageController();
}
@@ -65,25 +67,29 @@ class TagListController {
parameters: this._ctx.parameters,
defaultLimit: 50,
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, this._ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('tags', parameters);
+ const parameters = Object.assign({}, this._ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("tags", parameters);
},
requestPage: (offset, limit) => {
return TagList.search(
- this._ctx.parameters.query, offset, limit, fields);
+ this._ctx.parameters.query,
+ offset,
+ limit,
+ fields
+ );
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
return new TagsPageView(pageCtx);
},
});
}
}
-module.exports = router => {
- router.enter(
- ['tags'],
- (ctx, next) => {
- ctx.controller = new TagListController(ctx);
- });
+module.exports = (router) => {
+ router.enter(["tags"], (ctx, next) => {
+ ctx.controller = new TagListController(ctx);
+ });
};
diff --git a/client/js/controllers/top_navigation_controller.js b/client/js/controllers/top_navigation_controller.js
index 9cd70c18..d9bb276d 100644
--- a/client/js/controllers/top_navigation_controller.js
+++ b/client/js/controllers/top_navigation_controller.js
@@ -1,19 +1,20 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const topNavigation = require('../models/top_navigation.js');
-const TopNavigationView = require('../views/top_navigation_view.js');
+const api = require("../api.js");
+const topNavigation = require("../models/top_navigation.js");
+const TopNavigationView = require("../views/top_navigation_view.js");
class TopNavigationController {
constructor() {
api.fetchConfig().then(() => {
this._topNavigationView = new TopNavigationView();
- topNavigation.addEventListener(
- 'activate', e => this._evtActivate(e));
+ topNavigation.addEventListener("activate", (e) =>
+ this._evtActivate(e)
+ );
- api.addEventListener('login', e => this._evtAuthChange(e));
- api.addEventListener('logout', e => this._evtAuthChange(e));
+ api.addEventListener("login", (e) => this._evtAuthChange(e));
+ api.addEventListener("logout", (e) => this._evtAuthChange(e));
this._render();
});
@@ -28,37 +29,38 @@ class TopNavigationController {
}
_updateNavigationFromPrivileges() {
- topNavigation.get('account').url = 'user/' + api.userName;
- topNavigation.get('account').imageUrl =
- api.user ? api.user.avatarUrl : null;
+ topNavigation.get("account").url = "user/" + api.userName;
+ topNavigation.get("account").imageUrl = api.user
+ ? api.user.avatarUrl
+ : null;
topNavigation.showAll();
- if (!api.hasPrivilege('posts:list')) {
- topNavigation.hide('posts');
+ if (!api.hasPrivilege("posts:list")) {
+ topNavigation.hide("posts");
}
- if (!api.hasPrivilege('posts:create')) {
- topNavigation.hide('upload');
+ if (!api.hasPrivilege("posts:create")) {
+ topNavigation.hide("upload");
}
- if (!api.hasPrivilege('comments:list')) {
- topNavigation.hide('comments');
+ if (!api.hasPrivilege("comments:list")) {
+ topNavigation.hide("comments");
}
- if (!api.hasPrivilege('tags:list')) {
- topNavigation.hide('tags');
+ if (!api.hasPrivilege("tags:list")) {
+ topNavigation.hide("tags");
}
- if (!api.hasPrivilege('users:list')) {
- topNavigation.hide('users');
+ if (!api.hasPrivilege("users:list")) {
+ topNavigation.hide("users");
}
if (api.isLoggedIn()) {
- if (!api.hasPrivilege('users:create:any')) {
- topNavigation.hide('register');
+ if (!api.hasPrivilege("users:create:any")) {
+ topNavigation.hide("register");
}
- topNavigation.hide('login');
+ topNavigation.hide("login");
} else {
- if (!api.hasPrivilege('users:create:self')) {
- topNavigation.hide('register');
+ if (!api.hasPrivilege("users:create:self")) {
+ topNavigation.hide("register");
}
- topNavigation.hide('account');
- topNavigation.hide('logout');
+ topNavigation.hide("account");
+ topNavigation.hide("logout");
}
}
@@ -66,10 +68,11 @@ class TopNavigationController {
this._updateNavigationFromPrivileges();
this._topNavigationView.render({
items: topNavigation.getAll(),
- name: api.getName()
+ name: api.getName(),
});
this._topNavigationView.activate(
- topNavigation.activeItem ? topNavigation.activeItem.key : '');
+ topNavigation.activeItem ? topNavigation.activeItem.key : ""
+ );
}
}
diff --git a/client/js/controllers/user_controller.js b/client/js/controllers/user_controller.js
index e192d879..326736b5 100644
--- a/client/js/controllers/user_controller.js
+++ b/client/js/controllers/user_controller.js
@@ -1,23 +1,25 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const User = require('../models/user.js');
-const UserToken = require('../models/user_token.js');
-const topNavigation = require('../models/top_navigation.js');
-const UserView = require('../views/user_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const User = require("../models/user.js");
+const UserToken = require("../models/user_token.js");
+const topNavigation = require("../models/top_navigation.js");
+const UserView = require("../views/user_view.js");
+const EmptyView = require("../views/empty_view.js");
class UserController {
constructor(ctx, section) {
const userName = ctx.parameters.name;
- if (!api.hasPrivilege('users:view') &&
- !api.isLoggedIn({name: userName})) {
+ if (
+ !api.hasPrivilege("users:view") &&
+ !api.isLoggedIn({ name: userName })
+ ) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view users.');
+ this._view.showError("You don't have privileges to view users.");
return;
}
@@ -25,100 +27,129 @@ class UserController {
this._errorMessages = [];
let userTokenPromise = Promise.resolve([]);
- if (section === 'list-tokens') {
- userTokenPromise = UserToken.get(userName)
- .then(userTokens => {
- return userTokens.map(token => {
- token.isCurrentAuthToken = api.isCurrentAuthToken(token);
+ if (section === "list-tokens") {
+ userTokenPromise = UserToken.get(userName).then(
+ (userTokens) => {
+ return userTokens.map((token) => {
+ token.isCurrentAuthToken = api.isCurrentAuthToken(
+ token
+ );
return token;
});
- }, error => {
+ },
+ (error) => {
return [];
- });
+ }
+ );
}
- topNavigation.setTitle('User ' + userName);
- Promise.all([
- userTokenPromise,
- User.get(userName)
- ]).then(responses => {
- const [userTokens, user] = responses;
- const isLoggedIn = api.isLoggedIn(user);
- const infix = isLoggedIn ? 'self' : 'any';
+ topNavigation.setTitle("User " + userName);
+ Promise.all([userTokenPromise, User.get(userName)]).then(
+ (responses) => {
+ const [userTokens, user] = responses;
+ const isLoggedIn = api.isLoggedIn(user);
+ const infix = isLoggedIn ? "self" : "any";
- this._name = userName;
- user.addEventListener('change', e => this._evtSaved(e, section));
+ this._name = userName;
+ user.addEventListener("change", (e) =>
+ this._evtSaved(e, section)
+ );
- const myRankIndex = api.user ?
- api.allRanks.indexOf(api.user.rank) :
- 0;
- let ranks = {};
- for (let [rankIdx, rankIdentifier] of api.allRanks.entries()) {
- if (rankIdentifier === 'anonymous') {
- continue;
+ const myRankIndex = api.user
+ ? api.allRanks.indexOf(api.user.rank)
+ : 0;
+ let ranks = {};
+ for (let [rankIdx, rankIdentifier] of api.allRanks.entries()) {
+ if (rankIdentifier === "anonymous") {
+ continue;
+ }
+ if (rankIdx > myRankIndex) {
+ continue;
+ }
+ ranks[rankIdentifier] = api.rankNames.get(rankIdentifier);
}
- if (rankIdx > myRankIndex) {
- continue;
+
+ if (isLoggedIn) {
+ topNavigation.activate("account");
+ } else {
+ topNavigation.activate("users");
}
- ranks[rankIdentifier] = api.rankNames.get(rankIdentifier);
+
+ this._view = new UserView({
+ user: user,
+ section: section,
+ isLoggedIn: isLoggedIn,
+ canEditName: api.hasPrivilege(`users:edit:${infix}:name`),
+ canEditPassword: api.hasPrivilege(
+ `users:edit:${infix}:pass`
+ ),
+ canEditEmail: api.hasPrivilege(
+ `users:edit:${infix}:email`
+ ),
+ canEditRank: api.hasPrivilege(`users:edit:${infix}:rank`),
+ canEditAvatar: api.hasPrivilege(
+ `users:edit:${infix}:avatar`
+ ),
+ canEditAnything: api.hasPrivilege(`users:edit:${infix}`),
+ canListTokens: api.hasPrivilege(
+ `userTokens:list:${infix}`
+ ),
+ canCreateToken: api.hasPrivilege(
+ `userTokens:create:${infix}`
+ ),
+ canEditToken: api.hasPrivilege(`userTokens:edit:${infix}`),
+ canDeleteToken: api.hasPrivilege(
+ `userTokens:delete:${infix}`
+ ),
+ canDelete: api.hasPrivilege(`users:delete:${infix}`),
+ ranks: ranks,
+ tokens: userTokens,
+ });
+ this._view.addEventListener("change", (e) =>
+ this._evtChange(e)
+ );
+ this._view.addEventListener("submit", (e) =>
+ this._evtUpdate(e)
+ );
+ this._view.addEventListener("delete", (e) =>
+ this._evtDelete(e)
+ );
+ this._view.addEventListener("create-token", (e) =>
+ this._evtCreateToken(e)
+ );
+ this._view.addEventListener("delete-token", (e) =>
+ this._evtDeleteToken(e)
+ );
+ this._view.addEventListener("update-token", (e) =>
+ this._evtUpdateToken(e)
+ );
+
+ for (let message of this._successMessages) {
+ this.showSuccess(message);
+ }
+
+ for (let message of this._errorMessages) {
+ this.showError(message);
+ }
+ },
+ (error) => {
+ this._view = new EmptyView();
+ this._view.showError(error.message);
}
-
- if (isLoggedIn) {
- topNavigation.activate('account');
- } else {
- topNavigation.activate('users');
- }
-
- this._view = new UserView({
- user: user,
- section: section,
- isLoggedIn: isLoggedIn,
- canEditName: api.hasPrivilege(`users:edit:${infix}:name`),
- canEditPassword: api.hasPrivilege(`users:edit:${infix}:pass`),
- canEditEmail: api.hasPrivilege(`users:edit:${infix}:email`),
- canEditRank: api.hasPrivilege(`users:edit:${infix}:rank`),
- canEditAvatar: api.hasPrivilege(`users:edit:${infix}:avatar`),
- canEditAnything: api.hasPrivilege(`users:edit:${infix}`),
- canListTokens: api.hasPrivilege(`userTokens:list:${infix}`),
- canCreateToken: api.hasPrivilege(`userTokens:create:${infix}`),
- canEditToken: api.hasPrivilege(`userTokens:edit:${infix}`),
- canDeleteToken: api.hasPrivilege(`userTokens:delete:${infix}`),
- canDelete: api.hasPrivilege(`users:delete:${infix}`),
- ranks: ranks,
- tokens: userTokens,
- });
- this._view.addEventListener('change', e => this._evtChange(e));
- this._view.addEventListener('submit', e => this._evtUpdate(e));
- this._view.addEventListener('delete', e => this._evtDelete(e));
- this._view.addEventListener('create-token', e => this._evtCreateToken(e));
- this._view.addEventListener('delete-token', e => this._evtDeleteToken(e));
- this._view.addEventListener('update-token', e => this._evtUpdateToken(e));
-
- for (let message of this._successMessages) {
- this.showSuccess(message);
- }
-
- for (let message of this._errorMessages) {
- this.showError(message);
- }
-
- }, error => {
- this._view = new EmptyView();
- this._view.showError(error.message);
- });
+ );
}
showSuccess(message) {
- if (typeof this._view === 'undefined') {
- this._successMessages.push(message)
+ if (typeof this._view === "undefined") {
+ this._successMessages.push(message);
} else {
this._view.showSuccess(message);
}
}
showError(message) {
- if (typeof this._view === 'undefined') {
- this._errorMessages.push(message)
+ if (typeof this._view === "undefined") {
+ this._errorMessages.push(message);
} else {
this._view.showError(message);
}
@@ -132,9 +163,10 @@ class UserController {
misc.disableExitConfirmation();
if (this._name !== e.detail.user.name) {
router.replace(
- uri.formatClientLink('user', e.detail.user.name, section),
+ uri.formatClientLink("user", e.detail.user.name, section),
null,
- false);
+ false
+ );
}
}
@@ -142,7 +174,7 @@ class UserController {
this._view.clearMessages();
this._view.disableForm();
const isLoggedIn = api.isLoggedIn(e.detail.user);
- const infix = isLoggedIn ? 'self' : 'any';
+ const infix = isLoggedIn ? "self" : "any";
if (e.detail.name !== undefined) {
e.detail.user.name = e.detail.name;
@@ -165,72 +197,105 @@ class UserController {
}
}
- e.detail.user.save().then(() => {
- return isLoggedIn ?
- api.login(
- e.detail.name || api.userName,
- e.detail.password || api.userPassword,
- false) :
- Promise.resolve();
- }).then(() => {
- this._view.showSuccess('Settings updated.');
- this._view.enableForm();
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ e.detail.user
+ .save()
+ .then(() => {
+ return isLoggedIn
+ ? api.login(
+ e.detail.name || api.userName,
+ e.detail.password || api.userPassword,
+ false
+ )
+ : Promise.resolve();
+ })
+ .then(
+ () => {
+ this._view.showSuccess("Settings updated.");
+ this._view.enableForm();
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
_evtDelete(e) {
this._view.clearMessages();
this._view.disableForm();
const isLoggedIn = api.isLoggedIn(e.detail.user);
- e.detail.user.delete()
- .then(() => {
+ e.detail.user.delete().then(
+ () => {
if (isLoggedIn) {
api.forget();
api.logout();
}
- if (api.hasPrivilege('users:list')) {
- const ctx = router.show(uri.formatClientLink('users'));
- ctx.controller.showSuccess('Account deleted.');
+ if (api.hasPrivilege("users:list")) {
+ const ctx = router.show(uri.formatClientLink("users"));
+ ctx.controller.showSuccess("Account deleted.");
} else {
const ctx = router.show(uri.formatClientLink());
- ctx.controller.showSuccess('Account deleted.');
+ ctx.controller.showSuccess("Account deleted.");
}
- }, error => {
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
_evtCreateToken(e) {
this._view.clearMessages();
this._view.disableForm();
- UserToken.create(e.detail.user.name, e.detail.note, e.detail.expirationTime)
- .then(response => {
- const ctx = router.show(uri.formatClientLink('user', e.detail.user.name, 'list-tokens'));
- ctx.controller.showSuccess('Token ' + response.token + ' created.');
- }, error => {
+ UserToken.create(
+ e.detail.user.name,
+ e.detail.note,
+ e.detail.expirationTime
+ ).then(
+ (response) => {
+ const ctx = router.show(
+ uri.formatClientLink(
+ "user",
+ e.detail.user.name,
+ "list-tokens"
+ )
+ );
+ ctx.controller.showSuccess(
+ "Token " + response.token + " created."
+ );
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
_evtDeleteToken(e) {
this._view.clearMessages();
this._view.disableForm();
if (api.isCurrentAuthToken(e.detail.userToken)) {
- router.show(uri.formatClientLink('logout'));
+ router.show(uri.formatClientLink("logout"));
} else {
- e.detail.userToken.delete(e.detail.user.name)
- .then(() => {
- const ctx = router.show(uri.formatClientLink('user', e.detail.user.name, 'list-tokens'));
- ctx.controller.showSuccess('Token ' + e.detail.userToken.token + ' deleted.');
- }, error => {
+ e.detail.userToken.delete(e.detail.user.name).then(
+ () => {
+ const ctx = router.show(
+ uri.formatClientLink(
+ "user",
+ e.detail.user.name,
+ "list-tokens"
+ )
+ );
+ ctx.controller.showSuccess(
+ "Token " + e.detail.userToken.token + " deleted."
+ );
+ },
+ (error) => {
this._view.showError(error.message);
this._view.enableForm();
- });
+ }
+ );
}
}
@@ -242,27 +307,38 @@ class UserController {
e.detail.userToken.note = e.detail.note;
}
- e.detail.userToken.save(e.detail.user.name).then(response => {
- const ctx = router.show(uri.formatClientLink('user', e.detail.user.name, 'list-tokens'));
- ctx.controller.showSuccess('Token ' + response.token + ' updated.');
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ e.detail.userToken.save(e.detail.user.name).then(
+ (response) => {
+ const ctx = router.show(
+ uri.formatClientLink(
+ "user",
+ e.detail.user.name,
+ "list-tokens"
+ )
+ );
+ ctx.controller.showSuccess(
+ "Token " + response.token + " updated."
+ );
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['user', ':name'], (ctx, next) => {
- ctx.controller = new UserController(ctx, 'summary');
+module.exports = (router) => {
+ router.enter(["user", ":name"], (ctx, next) => {
+ ctx.controller = new UserController(ctx, "summary");
});
- router.enter(['user', ':name', 'edit'], (ctx, next) => {
- ctx.controller = new UserController(ctx, 'edit');
+ router.enter(["user", ":name", "edit"], (ctx, next) => {
+ ctx.controller = new UserController(ctx, "edit");
});
- router.enter(['user', ':name', 'list-tokens'], (ctx, next) => {
- ctx.controller = new UserController(ctx, 'list-tokens');
+ router.enter(["user", ":name", "list-tokens"], (ctx, next) => {
+ ctx.controller = new UserController(ctx, "list-tokens");
});
- router.enter(['user', ':name', 'delete'], (ctx, next) => {
- ctx.controller = new UserController(ctx, 'delete');
+ router.enter(["user", ":name", "delete"], (ctx, next) => {
+ ctx.controller = new UserController(ctx, "delete");
});
};
diff --git a/client/js/controllers/user_list_controller.js b/client/js/controllers/user_list_controller.js
index 9860de4e..d6c4fe5e 100644
--- a/client/js/controllers/user_list_controller.js
+++ b/client/js/controllers/user_list_controller.js
@@ -1,27 +1,27 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const router = require('../router.js');
-const uri = require('../util/uri.js');
-const UserList = require('../models/user_list.js');
-const topNavigation = require('../models/top_navigation.js');
-const PageController = require('../controllers/page_controller.js');
-const UsersHeaderView = require('../views/users_header_view.js');
-const UsersPageView = require('../views/users_page_view.js');
-const EmptyView = require('../views/empty_view.js');
+const api = require("../api.js");
+const router = require("../router.js");
+const uri = require("../util/uri.js");
+const UserList = require("../models/user_list.js");
+const topNavigation = require("../models/top_navigation.js");
+const PageController = require("../controllers/page_controller.js");
+const UsersHeaderView = require("../views/users_header_view.js");
+const UsersPageView = require("../views/users_page_view.js");
+const EmptyView = require("../views/empty_view.js");
class UserListController {
constructor(ctx) {
this._pageController = new PageController();
- if (!api.hasPrivilege('users:list')) {
+ if (!api.hasPrivilege("users:list")) {
this._view = new EmptyView();
- this._view.showError('You don\'t have privileges to view users.');
+ this._view.showError("You don't have privileges to view users.");
return;
}
- topNavigation.activate('users');
- topNavigation.setTitle('Listing users');
+ topNavigation.activate("users");
+ topNavigation.setTitle("Listing users");
this._ctx = ctx;
@@ -29,8 +29,9 @@ class UserListController {
hostNode: this._pageController.view.pageHeaderHolderNode,
parameters: ctx.parameters,
});
- this._headerView.addEventListener(
- 'navigate', e => this._evtNavigate(e));
+ this._headerView.addEventListener("navigate", (e) =>
+ this._evtNavigate(e)
+ );
this._syncPageController();
}
@@ -41,7 +42,8 @@ class UserListController {
_evtNavigate(e) {
router.showNoDispatch(
- uri.formatClientLink('users', e.detail.parameters));
+ uri.formatClientLink("users", e.detail.parameters)
+ );
Object.assign(this._ctx.parameters, e.detail.parameters);
this._syncPageController();
}
@@ -51,17 +53,22 @@ class UserListController {
parameters: this._ctx.parameters,
defaultLimit: 30,
getClientUrlForPage: (offset, limit) => {
- const parameters = Object.assign(
- {}, this._ctx.parameters, {offset: offset, limit: limit});
- return uri.formatClientLink('users', parameters);
+ const parameters = Object.assign({}, this._ctx.parameters, {
+ offset: offset,
+ limit: limit,
+ });
+ return uri.formatClientLink("users", parameters);
},
requestPage: (offset, limit) => {
return UserList.search(
- this._ctx.parameters.query, offset, limit);
+ this._ctx.parameters.query,
+ offset,
+ limit
+ );
},
- pageRenderer: pageCtx => {
+ pageRenderer: (pageCtx) => {
Object.assign(pageCtx, {
- canViewUsers: api.hasPrivilege('users:view'),
+ canViewUsers: api.hasPrivilege("users:view"),
});
return new UsersPageView(pageCtx);
},
@@ -69,10 +76,8 @@ class UserListController {
}
}
-module.exports = router => {
- router.enter(
- ['users'],
- (ctx, next) => {
- ctx.controller = new UserListController(ctx);
- });
+module.exports = (router) => {
+ router.enter(["users"], (ctx, next) => {
+ ctx.controller = new UserListController(ctx);
+ });
};
diff --git a/client/js/controllers/user_registration_controller.js b/client/js/controllers/user_registration_controller.js
index 78b94024..89cfd8cd 100644
--- a/client/js/controllers/user_registration_controller.js
+++ b/client/js/controllers/user_registration_controller.js
@@ -1,25 +1,25 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const User = require('../models/user.js');
-const topNavigation = require('../models/top_navigation.js');
-const RegistrationView = require('../views/registration_view.js');
-const EmptyView = require('../views/empty_view.js');
+const router = require("../router.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const User = require("../models/user.js");
+const topNavigation = require("../models/top_navigation.js");
+const RegistrationView = require("../views/registration_view.js");
+const EmptyView = require("../views/empty_view.js");
class UserRegistrationController {
constructor() {
- if (!api.hasPrivilege('users:create:self')) {
+ if (!api.hasPrivilege("users:create:self")) {
this._view = new EmptyView();
- this._view.showError('Registration is closed.');
+ this._view.showError("Registration is closed.");
return;
}
- topNavigation.activate('register');
- topNavigation.setTitle('Registration');
+ topNavigation.activate("register");
+ topNavigation.setTitle("Registration");
this._view = new RegistrationView();
- this._view.addEventListener('submit', e => this._evtRegister(e));
+ this._view.addEventListener("submit", (e) => this._evtRegister(e));
}
_evtRegister(e) {
@@ -30,30 +30,35 @@ class UserRegistrationController {
user.email = e.detail.email;
user.password = e.detail.password;
const isLoggedIn = api.isLoggedIn();
- user.save().then(() => {
- if (isLoggedIn) {
- return Promise.resolve();
- } else {
- api.forget();
- return api.login(e.detail.name, e.detail.password, false);
- }
- }).then(() => {
- if (isLoggedIn) {
- const ctx = router.show(uri.formatClientLink('users'));
- ctx.controller.showSuccess('User added!');
- } else {
- const ctx = router.show(uri.formatClientLink());
- ctx.controller.showSuccess('Welcome aboard!');
- }
- }, error => {
- this._view.showError(error.message);
- this._view.enableForm();
- });
+ user.save()
+ .then(() => {
+ if (isLoggedIn) {
+ return Promise.resolve();
+ } else {
+ api.forget();
+ return api.login(e.detail.name, e.detail.password, false);
+ }
+ })
+ .then(
+ () => {
+ if (isLoggedIn) {
+ const ctx = router.show(uri.formatClientLink("users"));
+ ctx.controller.showSuccess("User added!");
+ } else {
+ const ctx = router.show(uri.formatClientLink());
+ ctx.controller.showSuccess("Welcome aboard!");
+ }
+ },
+ (error) => {
+ this._view.showError(error.message);
+ this._view.enableForm();
+ }
+ );
}
}
-module.exports = router => {
- router.enter(['register'], (ctx, next) => {
+module.exports = (router) => {
+ router.enter(["register"], (ctx, next) => {
new UserRegistrationController();
});
};
diff --git a/client/js/controls/auto_complete_control.js b/client/js/controls/auto_complete_control.js
index a802209e..6e9bf3dd 100644
--- a/client/js/controls/auto_complete_control.js
+++ b/client/js/controls/auto_complete_control.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
const KEY_TAB = 9;
const KEY_RETURN = 13;
@@ -10,14 +10,14 @@ const KEY_UP = 38;
const KEY_DOWN = 40;
function _getSelectionStart(input) {
- if ('selectionStart' in input) {
+ if ("selectionStart" in input) {
return input.selectionStart;
}
if (document.selection) {
input.focus();
const sel = document.selection.createRange();
const selLen = document.selection.createRange().text.length;
- sel.moveStart('character', -input.value.length);
+ sel.moveStart("character", -input.value.length);
return sel.text.length - selLen;
}
return 0;
@@ -27,18 +27,22 @@ class AutoCompleteControl {
constructor(sourceInputNode, options) {
this._sourceInputNode = sourceInputNode;
this._options = {};
- Object.assign(this._options, {
- verticalShift: 2,
- maxResults: 15,
- getTextToFind: () => {
- const value = sourceInputNode.value;
- const start = _getSelectionStart(sourceInputNode);
- return value.substring(0, start).replace(/.*\s+/, '');
+ Object.assign(
+ this._options,
+ {
+ verticalShift: 2,
+ maxResults: 15,
+ getTextToFind: () => {
+ const value = sourceInputNode.value;
+ const start = _getSelectionStart(sourceInputNode);
+ return value.substring(0, start).replace(/.*\s+/, "");
+ },
+ confirm: null,
+ delete: null,
+ getMatches: null,
},
- confirm: null,
- delete: null,
- getMatches: null,
- }, options);
+ options
+ );
this._showTimeout = null;
this._results = [];
@@ -49,22 +53,22 @@ class AutoCompleteControl {
hide() {
window.clearTimeout(this._showTimeout);
- this._suggestionDiv.style.display = 'none';
+ this._suggestionDiv.style.display = "none";
this._isVisible = false;
}
replaceSelectedText(result, addSpace) {
const start = _getSelectionStart(this._sourceInputNode);
- let prefix = '';
+ let prefix = "";
let suffix = this._sourceInputNode.value.substring(start);
let middle = this._sourceInputNode.value.substring(0, start);
- const index = middle.lastIndexOf(' ');
+ const index = middle.lastIndexOf(" ");
if (index !== -1) {
prefix = this._sourceInputNode.value.substring(0, index + 1);
middle = this._sourceInputNode.value.substring(index + 1);
}
- this._sourceInputNode.value = (
- prefix + result.toString() + ' ' + suffix.trimLeft());
+ this._sourceInputNode.value =
+ prefix + result.toString() + " " + suffix.trimLeft();
if (!addSpace) {
this._sourceInputNode.value = this._sourceInputNode.value.trim();
}
@@ -86,7 +90,7 @@ class AutoCompleteControl {
}
_show() {
- this._suggestionDiv.style.display = 'block';
+ this._suggestionDiv.style.display = "block";
this._isVisible = true;
}
@@ -101,29 +105,32 @@ class AutoCompleteControl {
_install() {
if (!this._sourceInputNode) {
- throw new Error('Input element was not found');
+ throw new Error("Input element was not found");
}
- if (this._sourceInputNode.getAttribute('data-autocomplete')) {
+ if (this._sourceInputNode.getAttribute("data-autocomplete")) {
throw new Error(
- 'Autocompletion was already added for this element');
+ "Autocompletion was already added for this element"
+ );
}
- this._sourceInputNode.setAttribute('data-autocomplete', true);
- this._sourceInputNode.setAttribute('autocomplete', 'off');
+ this._sourceInputNode.setAttribute("data-autocomplete", true);
+ this._sourceInputNode.setAttribute("autocomplete", "off");
- this._sourceInputNode.addEventListener(
- 'keydown', e => this._evtKeyDown(e));
- this._sourceInputNode.addEventListener(
- 'blur', e => this._evtBlur(e));
+ this._sourceInputNode.addEventListener("keydown", (e) =>
+ this._evtKeyDown(e)
+ );
+ this._sourceInputNode.addEventListener("blur", (e) =>
+ this._evtBlur(e)
+ );
this._suggestionDiv = views.htmlToDom(
-            '<div class="autocomplete"><ul></ul></div>');
- this._suggestionList = this._suggestionDiv.querySelector('ul');
+            '<div class="autocomplete"><ul></ul></div>'
+ );
+ this._suggestionList = this._suggestionDiv.querySelector("ul");
document.body.appendChild(this._suggestionDiv);
- views.monitorNodeRemoval(
- this._sourceInputNode, () => {
- this._uninstall();
- });
+ views.monitorNodeRemoval(this._sourceInputNode, () => {
+ this._uninstall();
+ });
}
_uninstall() {
@@ -174,10 +181,9 @@ class AutoCompleteControl {
func();
} else {
window.clearTimeout(this._showTimeout);
- this._showTimeout = window.setTimeout(
- () => {
- this._showOrHide();
- }, 250);
+ this._showTimeout = window.setTimeout(() => {
+ this._showOrHide();
+ }, 250);
}
}
@@ -196,9 +202,11 @@ class AutoCompleteControl {
}
_selectPrevious() {
- this._select(this._activeResult === -1 ?
- this._results.length - 1 :
- this._activeResult - 1);
+ this._select(
+ this._activeResult === -1
+ ? this._results.length - 1
+ : this._activeResult - 1
+ );
}
_selectNext() {
@@ -206,15 +214,18 @@ class AutoCompleteControl {
}
_select(newActiveResult) {
- this._activeResult =
- newActiveResult.between(0, this._results.length - 1, true) ?
- newActiveResult :
- -1;
+ this._activeResult = newActiveResult.between(
+ 0,
+ this._results.length - 1,
+ true
+ )
+ ? newActiveResult
+ : -1;
this._refreshActiveResult();
}
_updateResults(textToFind) {
- this._options.getMatches(textToFind).then(matches => {
+ this._options.getMatches(textToFind).then((matches) => {
const oldResults = this._results.slice();
this._results = matches.slice(0, this._options.maxResults);
const oldResultsHash = JSON.stringify(oldResults);
@@ -237,34 +248,30 @@ class AutoCompleteControl {
}
for (let [resultIndex, resultItem] of this._results.entries()) {
let resultIndexWorkaround = resultIndex;
- const listItem = document.createElement('li');
- const link = document.createElement('a');
+ const listItem = document.createElement("li");
+ const link = document.createElement("a");
link.innerHTML = resultItem.caption;
- link.setAttribute('href', '');
- link.setAttribute('data-key', resultItem.value);
- link.addEventListener(
- 'mouseenter',
- e => {
- e.preventDefault();
- this._activeResult = resultIndexWorkaround;
- this._refreshActiveResult();
- });
- link.addEventListener(
- 'mousedown',
- e => {
- e.preventDefault();
- this._activeResult = resultIndexWorkaround;
- this._confirm(this._getActiveSuggestion());
- this.hide();
- });
+ link.setAttribute("href", "");
+ link.setAttribute("data-key", resultItem.value);
+ link.addEventListener("mouseenter", (e) => {
+ e.preventDefault();
+ this._activeResult = resultIndexWorkaround;
+ this._refreshActiveResult();
+ });
+ link.addEventListener("mousedown", (e) => {
+ e.preventDefault();
+ this._activeResult = resultIndexWorkaround;
+ this._confirm(this._getActiveSuggestion());
+ this.hide();
+ });
listItem.appendChild(link);
this._suggestionList.appendChild(listItem);
}
this._refreshActiveResult();
// display the suggestions offscreen to get the height
- this._suggestionDiv.style.left = '-9999px';
- this._suggestionDiv.style.top = '-9999px';
+ this._suggestionDiv.style.left = "-9999px";
+ this._suggestionDiv.style.top = "-9999px";
this._show();
const verticalShift = this._options.verticalShift;
const inputRect = this._sourceInputNode.getBoundingClientRect();
@@ -275,17 +282,23 @@ class AutoCompleteControl {
// choose where to view the suggestions: if there's more space above
// the input - draw the suggestions above it, otherwise below
const direction =
- inputRect.top + (inputRect.height / 2) < viewPortHeight / 2 ? 1 : -1;
+ inputRect.top + inputRect.height / 2 < viewPortHeight / 2 ? 1 : -1;
let x = inputRect.left - bodyRect.left;
- let y = direction === 1 ?
- inputRect.bottom - bodyRect.top - verticalShift :
- inputRect.top - bodyRect.top - listRect.height + verticalShift;
+ let y =
+ direction === 1
+ ? inputRect.bottom - bodyRect.top - verticalShift
+ : inputRect.top -
+ bodyRect.top -
+ listRect.height +
+ verticalShift;
// remove offscreen items until whole suggestion list can fit on the
// screen
- while ((y < 0 || y + listRect.height > viewPortHeight) &&
- this._suggestionList.childNodes.length) {
+ while (
+ (y < 0 || y + listRect.height > viewPortHeight) &&
+ this._suggestionList.childNodes.length
+ ) {
this._suggestionList.removeChild(this._suggestionList.lastChild);
const prevHeight = listRect.height;
listRect = this._suggestionDiv.getBoundingClientRect();
@@ -295,19 +308,19 @@ class AutoCompleteControl {
}
}
- this._suggestionDiv.style.left = x + 'px';
- this._suggestionDiv.style.top = y + 'px';
+ this._suggestionDiv.style.left = x + "px";
+ this._suggestionDiv.style.top = y + "px";
}
_refreshActiveResult() {
- let activeItem = this._suggestionList.querySelector('li.active');
+ let activeItem = this._suggestionList.querySelector("li.active");
if (activeItem) {
- activeItem.classList.remove('active');
+ activeItem.classList.remove("active");
}
if (this._activeResult >= 0) {
- const allItems = this._suggestionList.querySelectorAll('li');
+ const allItems = this._suggestionList.querySelectorAll("li");
activeItem = allItems[this._activeResult];
- activeItem.classList.add('active');
+ activeItem.classList.add("active");
}
}
}
diff --git a/client/js/controls/comment_control.js b/client/js/controls/comment_control.js
index c105bc07..cdcfd536 100644
--- a/client/js/controls/comment_control.js
+++ b/client/js/controls/comment_control.js
@@ -1,12 +1,12 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const events = require('../events.js');
-const views = require('../util/views.js');
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('comment');
-const scoreTemplate = views.getTemplate('score');
+const template = views.getTemplate("comment");
+const scoreTemplate = views.getTemplate("score");
class CommentControl extends events.EventTarget {
constructor(hostNode, comment, onlyEditing) {
@@ -16,104 +16,111 @@ class CommentControl extends events.EventTarget {
this._onlyEditing = onlyEditing;
if (comment) {
- comment.addEventListener(
- 'change', e => this._evtChange(e));
- comment.addEventListener(
- 'changeScore', e => this._evtChangeScore(e));
+ comment.addEventListener("change", (e) => this._evtChange(e));
+ comment.addEventListener("changeScore", (e) =>
+ this._evtChangeScore(e)
+ );
}
const isLoggedIn = comment && api.isLoggedIn(comment.user);
- const infix = isLoggedIn ? 'own' : 'any';
- views.replaceContent(this._hostNode, template({
- comment: comment,
- user: comment ? comment.user : api.user,
- canViewUsers: api.hasPrivilege('users:view'),
- canEditComment: api.hasPrivilege(`comments:edit:${infix}`),
- canDeleteComment: api.hasPrivilege(`comments:delete:${infix}`),
- onlyEditing: onlyEditing,
- }));
+ const infix = isLoggedIn ? "own" : "any";
+ views.replaceContent(
+ this._hostNode,
+ template({
+ comment: comment,
+ user: comment ? comment.user : api.user,
+ canViewUsers: api.hasPrivilege("users:view"),
+ canEditComment: api.hasPrivilege(`comments:edit:${infix}`),
+ canDeleteComment: api.hasPrivilege(`comments:delete:${infix}`),
+ onlyEditing: onlyEditing,
+ })
+ );
if (this._editButtonNodes) {
for (let node of this._editButtonNodes) {
- node.addEventListener('click', e => this._evtEditClick(e));
+ node.addEventListener("click", (e) => this._evtEditClick(e));
}
}
if (this._deleteButtonNode) {
- this._deleteButtonNode.addEventListener(
- 'click', e => this._evtDeleteClick(e));
+ this._deleteButtonNode.addEventListener("click", (e) =>
+ this._evtDeleteClick(e)
+ );
}
if (this._previewEditingButtonNode) {
- this._previewEditingButtonNode.addEventListener(
- 'click', e => this._evtPreviewEditingClick(e));
+ this._previewEditingButtonNode.addEventListener("click", (e) =>
+ this._evtPreviewEditingClick(e)
+ );
}
if (this._saveChangesButtonNode) {
- this._saveChangesButtonNode.addEventListener(
- 'click', e => this._evtSaveChangesClick(e));
+ this._saveChangesButtonNode.addEventListener("click", (e) =>
+ this._evtSaveChangesClick(e)
+ );
}
if (this._cancelEditingButtonNode) {
- this._cancelEditingButtonNode.addEventListener(
- 'click', e => this._evtCancelEditingClick(e));
+ this._cancelEditingButtonNode.addEventListener("click", (e) =>
+ this._evtCancelEditingClick(e)
+ );
}
this._installScore();
if (onlyEditing) {
- this._selectNav('edit');
- this._selectTab('edit');
+ this._selectNav("edit");
+ this._selectTab("edit");
} else {
- this._selectNav('readonly');
- this._selectTab('preview');
+ this._selectNav("readonly");
+ this._selectTab("preview");
}
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _scoreContainerNode() {
- return this._hostNode.querySelector('.score-container');
+ return this._hostNode.querySelector(".score-container");
}
get _editButtonNodes() {
- return this._hostNode.querySelectorAll('li.edit>a, a.edit');
+ return this._hostNode.querySelectorAll("li.edit>a, a.edit");
}
get _previewEditingButtonNode() {
- return this._hostNode.querySelector('li.preview>a');
+ return this._hostNode.querySelector("li.preview>a");
}
get _deleteButtonNode() {
- return this._hostNode.querySelector('.delete');
+ return this._hostNode.querySelector(".delete");
}
get _upvoteButtonNode() {
- return this._hostNode.querySelector('.upvote');
+ return this._hostNode.querySelector(".upvote");
}
get _downvoteButtonNode() {
- return this._hostNode.querySelector('.downvote');
+ return this._hostNode.querySelector(".downvote");
}
get _saveChangesButtonNode() {
- return this._hostNode.querySelector('.save-changes');
+ return this._hostNode.querySelector(".save-changes");
}
get _cancelEditingButtonNode() {
- return this._hostNode.querySelector('.cancel-editing');
+ return this._hostNode.querySelector(".cancel-editing");
}
get _textareaNode() {
- return this._hostNode.querySelector('.tab.edit textarea');
+ return this._hostNode.querySelector(".tab.edit textarea");
}
get _contentNode() {
- return this._hostNode.querySelector('.tab.preview .comment-content');
+ return this._hostNode.querySelector(".tab.preview .comment-content");
}
get _heightKeeperNode() {
- return this._hostNode.querySelector('.keep-height');
+ return this._hostNode.querySelector(".keep-height");
}
_installScore() {
@@ -122,32 +129,35 @@ class CommentControl extends events.EventTarget {
scoreTemplate({
score: this._comment ? this._comment.score : 0,
ownScore: this._comment ? this._comment.ownScore : 0,
- canScore: api.hasPrivilege('comments:score'),
- }));
+ canScore: api.hasPrivilege("comments:score"),
+ })
+ );
if (this._upvoteButtonNode) {
- this._upvoteButtonNode.addEventListener(
- 'click', e => this._evtScoreClick(e, 1));
+ this._upvoteButtonNode.addEventListener("click", (e) =>
+ this._evtScoreClick(e, 1)
+ );
}
if (this._downvoteButtonNode) {
- this._downvoteButtonNode.addEventListener(
- 'click', e => this._evtScoreClick(e, -1));
+ this._downvoteButtonNode.addEventListener("click", (e) =>
+ this._evtScoreClick(e, -1)
+ );
}
}
enterEditMode() {
- this._selectNav('edit');
- this._selectTab('edit');
+ this._selectNav("edit");
+ this._selectTab("edit");
}
exitEditMode() {
if (this._onlyEditing) {
- this._selectNav('edit');
- this._selectTab('edit');
- this._setText('');
+ this._selectNav("edit");
+ this._selectTab("edit");
+ this._setText("");
} else {
- this._selectNav('readonly');
- this._selectTab('preview');
+ this._selectNav("readonly");
+ this._selectTab("preview");
this._setText(this._comment.text);
}
this._forgetHeight();
@@ -173,27 +183,31 @@ class CommentControl extends events.EventTarget {
_evtScoreClick(e, score) {
e.preventDefault();
- if (!api.hasPrivilege('comments:score')) {
+ if (!api.hasPrivilege("comments:score")) {
return;
}
- this.dispatchEvent(new CustomEvent('score', {
- detail: {
- comment: this._comment,
- score: this._comment.ownScore === score ? 0 : score,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("score", {
+ detail: {
+ comment: this._comment,
+ score: this._comment.ownScore === score ? 0 : score,
+ },
+ })
+ );
}
_evtDeleteClick(e) {
e.preventDefault();
- if (!window.confirm('Are you sure you want to delete this comment?')) {
+ if (!window.confirm("Are you sure you want to delete this comment?")) {
return;
}
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- comment: this._comment,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ comment: this._comment,
+ },
+ })
+ );
}
_evtChange(e) {
@@ -206,21 +220,24 @@ class CommentControl extends events.EventTarget {
_evtPreviewEditingClick(e) {
e.preventDefault();
- this._contentNode.innerHTML =
- misc.formatMarkdown(this._textareaNode.value);
- this._selectTab('edit');
- this._selectTab('preview');
+ this._contentNode.innerHTML = misc.formatMarkdown(
+ this._textareaNode.value
+ );
+ this._selectTab("edit");
+ this._selectTab("preview");
}
_evtSaveChangesClick(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- target: this,
- comment: this._comment,
- text: this._textareaNode.value,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ target: this,
+ comment: this._comment,
+ text: this._textareaNode.value,
+ },
+ })
+ );
}
_evtCancelEditingClick(e) {
@@ -234,22 +251,22 @@ class CommentControl extends events.EventTarget {
}
_selectNav(modeName) {
- for (let node of this._hostNode.querySelectorAll('nav')) {
- node.classList.toggle('active', node.classList.contains(modeName));
+ for (let node of this._hostNode.querySelectorAll("nav")) {
+ node.classList.toggle("active", node.classList.contains(modeName));
}
}
_selectTab(tabName) {
this._ensureHeight();
- for (let node of this._hostNode.querySelectorAll('.tab, .tabs li')) {
- node.classList.toggle('active', node.classList.contains(tabName));
+ for (let node of this._hostNode.querySelectorAll(".tab, .tabs li")) {
+ node.classList.toggle("active", node.classList.contains(tabName));
}
}
_ensureHeight() {
this._heightKeeperNode.style.minHeight =
- this._heightKeeperNode.getBoundingClientRect().height + 'px';
+ this._heightKeeperNode.getBoundingClientRect().height + "px";
}
_forgetHeight() {
diff --git a/client/js/controls/comment_list_control.js b/client/js/controls/comment_list_control.js
index 4f57fd2a..7717ee5e 100644
--- a/client/js/controls/comment_list_control.js
+++ b/client/js/controls/comment_list_control.js
@@ -1,10 +1,10 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const CommentControl = require('../controls/comment_control.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const CommentControl = require("../controls/comment_control.js");
-const template = views.getTemplate('comment-list');
+const template = views.getTemplate("comment-list");
class CommentListControl extends events.EventTarget {
constructor(hostNode, comments, reversed) {
@@ -13,8 +13,8 @@ class CommentListControl extends events.EventTarget {
this._comments = comments;
this._commentIdToNode = {};
- comments.addEventListener('add', e => this._evtAdd(e));
- comments.addEventListener('remove', e => this._evtRemove(e));
+ comments.addEventListener("add", (e) => this._evtAdd(e));
+ comments.addEventListener("remove", (e) => this._evtRemove(e));
views.replaceContent(this._hostNode, template());
@@ -28,16 +28,19 @@ class CommentListControl extends events.EventTarget {
}
get _commentListNode() {
- return this._hostNode.querySelector('ul');
+ return this._hostNode.querySelector("ul");
}
_installCommentNode(comment) {
- const commentListItemNode = document.createElement('li');
+ const commentListItemNode = document.createElement("li");
const commentControl = new CommentControl(
- commentListItemNode, comment, false);
- events.proxyEvent(commentControl, this, 'submit');
- events.proxyEvent(commentControl, this, 'score');
- events.proxyEvent(commentControl, this, 'delete');
+ commentListItemNode,
+ comment,
+ false
+ );
+ events.proxyEvent(commentControl, this, "submit");
+ events.proxyEvent(commentControl, this, "score");
+ events.proxyEvent(commentControl, this, "delete");
this._commentIdToNode[comment.id] = commentListItemNode;
this._commentListNode.appendChild(commentListItemNode);
}
diff --git a/client/js/controls/expander_control.js b/client/js/controls/expander_control.js
index 28a4ffb8..11ad3ef5 100644
--- a/client/js/controls/expander_control.js
+++ b/client/js/controls/expander_control.js
@@ -1,26 +1,28 @@
-'use strict';
+"use strict";
-const ICON_CLASS_OPENED = 'fa-chevron-down';
-const ICON_CLASS_CLOSED = 'fa-chevron-up';
+const ICON_CLASS_OPENED = "fa-chevron-down";
+const ICON_CLASS_CLOSED = "fa-chevron-up";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('expander');
+const template = views.getTemplate("expander");
class ExpanderControl {
constructor(name, title, nodes) {
this._name = name;
- nodes = Array.from(nodes).filter(n => n);
+ nodes = Array.from(nodes).filter((n) => n);
if (!nodes.length) {
return;
}
- const expanderNode = template({title: title});
- const toggleLinkNode = expanderNode.querySelector('a');
- const toggleIconNode = expanderNode.querySelector('i');
- const expanderContentNode = expanderNode.querySelector('div');
- toggleLinkNode.addEventListener('click', e => this._evtToggleClick(e));
+ const expanderNode = template({ title: title });
+ const toggleLinkNode = expanderNode.querySelector("a");
+ const toggleIconNode = expanderNode.querySelector("i");
+ const expanderContentNode = expanderNode.querySelector("div");
+ toggleLinkNode.addEventListener("click", (e) =>
+ this._evtToggleClick(e)
+ );
nodes[0].parentNode.insertBefore(expanderNode, nodes[0]);
@@ -32,29 +34,30 @@ class ExpanderControl {
this._toggleIconNode = toggleIconNode;
expanderNode.classList.toggle(
- 'collapsed',
- this._allStates[this._name] === undefined ?
- false :
- !this._allStates[this._name]);
+ "collapsed",
+ this._allStates[this._name] === undefined
+ ? false
+ : !this._allStates[this._name]
+ );
this._syncIcon();
}
// eslint-disable-next-line accessor-pairs
set title(newTitle) {
if (this._expanderNode) {
- this._expanderNode
- .querySelector('header span')
- .textContent = newTitle;
+ this._expanderNode.querySelector(
+ "header span"
+ ).textContent = newTitle;
}
}
get _isOpened() {
- return !this._expanderNode.classList.contains('collapsed');
+ return !this._expanderNode.classList.contains("collapsed");
}
get _allStates() {
try {
- return JSON.parse(localStorage.getItem('expander')) || {};
+ return JSON.parse(localStorage.getItem("expander")) || {};
} catch (e) {
return {};
}
@@ -63,12 +66,12 @@ class ExpanderControl {
_save() {
const newStates = Object.assign({}, this._allStates);
newStates[this._name] = this._isOpened;
- localStorage.setItem('expander', JSON.stringify(newStates));
+ localStorage.setItem("expander", JSON.stringify(newStates));
}
_evtToggleClick(e) {
e.preventDefault();
- this._expanderNode.classList.toggle('collapsed');
+ this._expanderNode.classList.toggle("collapsed");
this._save();
this._syncIcon();
}
diff --git a/client/js/controls/file_dropper_control.js b/client/js/controls/file_dropper_control.js
index a6187209..2dfb4922 100644
--- a/client/js/controls/file_dropper_control.js
+++ b/client/js/controls/file_dropper_control.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('file-dropper');
+const template = views.getTemplate("file-dropper");
const KEY_RETURN = 13;
@@ -17,37 +17,42 @@ class FileDropperControl extends events.EventTarget {
allowMultiple: options.allowMultiple,
allowUrls: options.allowUrls,
lock: options.lock,
- id: 'file-' + Math.random().toString(36).substring(7),
+ id: "file-" + Math.random().toString(36).substring(7),
urlPlaceholder:
- options.urlPlaceholder || 'Alternatively, paste an URL here.',
+ options.urlPlaceholder || "Alternatively, paste an URL here.",
});
- this._dropperNode = source.querySelector('.file-dropper');
- this._urlInputNode = source.querySelector('input[type=text]');
- this._urlConfirmButtonNode = source.querySelector('button');
- this._fileInputNode = source.querySelector('input[type=file]');
- this._fileInputNode.style.display = 'none';
+ this._dropperNode = source.querySelector(".file-dropper");
+ this._urlInputNode = source.querySelector("input[type=text]");
+ this._urlConfirmButtonNode = source.querySelector("button");
+ this._fileInputNode = source.querySelector("input[type=file]");
+ this._fileInputNode.style.display = "none";
this._fileInputNode.multiple = options.allowMultiple || false;
this._counter = 0;
- this._dropperNode.addEventListener(
- 'dragenter', e => this._evtDragEnter(e));
- this._dropperNode.addEventListener(
- 'dragleave', e => this._evtDragLeave(e));
- this._dropperNode.addEventListener(
- 'dragover', e => this._evtDragOver(e));
- this._dropperNode.addEventListener(
- 'drop', e => this._evtDrop(e));
- this._fileInputNode.addEventListener(
- 'change', e => this._evtFileChange(e));
+ this._dropperNode.addEventListener("dragenter", (e) =>
+ this._evtDragEnter(e)
+ );
+ this._dropperNode.addEventListener("dragleave", (e) =>
+ this._evtDragLeave(e)
+ );
+ this._dropperNode.addEventListener("dragover", (e) =>
+ this._evtDragOver(e)
+ );
+ this._dropperNode.addEventListener("drop", (e) => this._evtDrop(e));
+ this._fileInputNode.addEventListener("change", (e) =>
+ this._evtFileChange(e)
+ );
if (this._urlInputNode) {
- this._urlInputNode.addEventListener(
- 'keydown', e => this._evtUrlInputKeyDown(e));
+ this._urlInputNode.addEventListener("keydown", (e) =>
+ this._evtUrlInputKeyDown(e)
+ );
}
if (this._urlConfirmButtonNode) {
- this._urlConfirmButtonNode.addEventListener(
- 'click', e => this._evtUrlConfirmButtonClick(e));
+ this._urlConfirmButtonNode.addEventListener("click", (e) =>
+ this._evtUrlConfirmButtonClick(e)
+ );
}
this._originalHtml = this._dropperNode.innerHTML;
@@ -56,24 +61,27 @@ class FileDropperControl extends events.EventTarget {
reset() {
this._dropperNode.innerHTML = this._originalHtml;
- this.dispatchEvent(new CustomEvent('reset'));
+ this.dispatchEvent(new CustomEvent("reset"));
}
_emitFiles(files) {
files = Array.from(files);
if (this._options.lock) {
- this._dropperNode.innerText =
- files.map(file => file.name).join(', ');
+ this._dropperNode.innerText = files
+ .map((file) => file.name)
+ .join(", ");
}
this.dispatchEvent(
- new CustomEvent('fileadd', {detail: {files: files}}));
+ new CustomEvent("fileadd", { detail: { files: files } })
+ );
}
_emitUrls(urls) {
- urls = Array.from(urls).map(url => url.trim());
+ urls = Array.from(urls).map((url) => url.trim());
if (this._options.lock) {
- this._dropperNode.innerText =
- urls.map(url => url.split(/\//).reverse()[0]).join(', ');
+ this._dropperNode.innerText = urls
+ .map((url) => url.split(/\//).reverse()[0])
+ .join(", ");
}
for (let url of urls) {
if (!url) {
@@ -84,18 +92,20 @@ class FileDropperControl extends events.EventTarget {
return;
}
}
- this.dispatchEvent(new CustomEvent('urladd', {detail: {urls: urls}}));
+ this.dispatchEvent(
+ new CustomEvent("urladd", { detail: { urls: urls } })
+ );
}
_evtDragEnter(e) {
- this._dropperNode.classList.add('active');
+ this._dropperNode.classList.add("active");
this._counter++;
}
_evtDragLeave(e) {
this._counter--;
if (this._counter === 0) {
- this._dropperNode.classList.remove('active');
+ this._dropperNode.classList.remove("active");
}
}
@@ -109,12 +119,12 @@ class FileDropperControl extends events.EventTarget {
_evtDrop(e) {
e.preventDefault();
- this._dropperNode.classList.remove('active');
+ this._dropperNode.classList.remove("active");
if (!e.dataTransfer.files.length) {
- window.alert('Only files are supported.');
+ window.alert("Only files are supported.");
}
if (!this._options.allowMultiple && e.dataTransfer.files.length > 1) {
- window.alert('Cannot select multiple files.');
+ window.alert("Cannot select multiple files.");
}
this._emitFiles(e.dataTransfer.files);
}
@@ -124,16 +134,16 @@ class FileDropperControl extends events.EventTarget {
return;
}
e.preventDefault();
- this._dropperNode.classList.remove('active');
+ this._dropperNode.classList.remove("active");
this._emitUrls(this._urlInputNode.value.split(/[\r\n]/));
- this._urlInputNode.value = '';
+ this._urlInputNode.value = "";
}
_evtUrlConfirmButtonClick(e) {
e.preventDefault();
- this._dropperNode.classList.remove('active');
+ this._dropperNode.classList.remove("active");
this._emitUrls(this._urlInputNode.value.split(/[\r\n]/));
- this._urlInputNode.value = '';
+ this._urlInputNode.value = "";
}
}
diff --git a/client/js/controls/pool_auto_complete_control.js b/client/js/controls/pool_auto_complete_control.js
index f7208335..97794a88 100644
--- a/client/js/controls/pool_auto_complete_control.js
+++ b/client/js/controls/pool_auto_complete_control.js
@@ -1,43 +1,54 @@
-'use strict';
+"use strict";
-const misc = require('../util/misc.js');
-const PoolList = require('../models/pool_list.js');
-const AutoCompleteControl = require('./auto_complete_control.js');
+const misc = require("../util/misc.js");
+const PoolList = require("../models/pool_list.js");
+const AutoCompleteControl = require("./auto_complete_control.js");
function _poolListToMatches(pools, options) {
- return [...pools].sort((pool1, pool2) => {
- return pool2.postCount - pool1.postCount;
- }).map(pool => {
- let cssName = misc.makeCssName(pool.category, 'pool');
- const caption = (
-            '<span class="' + cssName + '">'
- + misc.escapeHtml(pool.names[0] + ' (' + pool.postCount + ')')
-            + '</span>');
- return {
- caption: caption,
- value: pool,
- };
- });
+ return [...pools]
+ .sort((pool1, pool2) => {
+ return pool2.postCount - pool1.postCount;
+ })
+ .map((pool) => {
+ let cssName = misc.makeCssName(pool.category, "pool");
+ const caption =
+                '<span class="' + cssName + '">' +
+ misc.escapeHtml(pool.names[0] + " (" + pool.postCount + ")") +
+                "</span>";
+ return {
+ caption: caption,
+ value: pool,
+ };
+ });
}
class PoolAutoCompleteControl extends AutoCompleteControl {
constructor(input, options) {
const minLengthForPartialSearch = 3;
- options.getMatches = text => {
+ options.getMatches = (text) => {
const term = misc.escapeSearchTerm(text);
- const query = (
- text.length < minLengthForPartialSearch
- ? term + '*'
- : '*' + term + '*') + ' sort:post-count';
+ const query =
+ (text.length < minLengthForPartialSearch
+ ? term + "*"
+ : "*" + term + "*") + " sort:post-count";
return new Promise((resolve, reject) => {
- PoolList.search(
- query, 0, this._options.maxResults, ['id', 'names', 'category', 'postCount', 'version'])
- .then(
- response => resolve(
- _poolListToMatches(response.results, this._options)),
- reject);
+ PoolList.search(query, 0, this._options.maxResults, [
+ "id",
+ "names",
+ "category",
+ "postCount",
+ "version",
+ ]).then(
+ (response) =>
+ resolve(
+ _poolListToMatches(response.results, this._options)
+ ),
+ reject
+ );
});
};
diff --git a/client/js/controls/pool_input_control.js b/client/js/controls/pool_input_control.js
index af1c744c..c8995da8 100644
--- a/client/js/controls/pool_input_control.js
+++ b/client/js/controls/pool_input_control.js
@@ -1,24 +1,24 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const pools = require('../pools.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const Pool = require('../models/pool.js');
-const settings = require('../models/settings.js');
-const events = require('../events.js');
-const views = require('../util/views.js');
-const PoolAutoCompleteControl = require('./pool_auto_complete_control.js');
+const api = require("../api.js");
+const pools = require("../pools.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const Pool = require("../models/pool.js");
+const settings = require("../models/settings.js");
+const events = require("../events.js");
+const views = require("../util/views.js");
+const PoolAutoCompleteControl = require("./pool_auto_complete_control.js");
const KEY_SPACE = 32;
const KEY_RETURN = 13;
-const SOURCE_INIT = 'init';
-const SOURCE_IMPLICATION = 'implication';
-const SOURCE_USER_INPUT = 'user-input';
-const SOURCE_CLIPBOARD = 'clipboard';
+const SOURCE_INIT = "init";
+const SOURCE_IMPLICATION = "implication";
+const SOURCE_USER_INPUT = "user-input";
+const SOURCE_CLIPBOARD = "clipboard";
-const template = views.getTemplate('pool-input');
+const template = views.getTemplate("pool-input");
function _fadeOutListItemNodeStatus(listItemNode) {
if (listItemNode.classList.length) {
@@ -27,8 +27,7 @@ function _fadeOutListItemNodeStatus(listItemNode) {
}
listItemNode.fadeTimeout = window.setTimeout(() => {
while (listItemNode.classList.length) {
- listItemNode.classList.remove(
- listItemNode.classList.item(0));
+ listItemNode.classList.remove(listItemNode.classList.item(0));
}
listItemNode.fadeTimeout = null;
}, 2500);
@@ -45,29 +44,33 @@ class PoolInputControl extends events.EventTarget {
// dom
const editAreaNode = template();
this._editAreaNode = editAreaNode;
- this._poolInputNode = editAreaNode.querySelector('input');
- this._poolListNode = editAreaNode.querySelector('ul.compact-pools');
+ this._poolInputNode = editAreaNode.querySelector("input");
+ this._poolListNode = editAreaNode.querySelector("ul.compact-pools");
this._autoCompleteControl = new PoolAutoCompleteControl(
- this._poolInputNode, {
+ this._poolInputNode,
+ {
getTextToFind: () => {
return this._poolInputNode.value;
},
- confirm: pool => {
- this._poolInputNode.value = '';
+ confirm: (pool) => {
+ this._poolInputNode.value = "";
this.addPool(pool, SOURCE_USER_INPUT);
},
- delete: pool => {
- this._poolInputNode.value = '';
+ delete: (pool) => {
+ this._poolInputNode.value = "";
this.deletePool(pool);
},
- verticalShift: -2
- });
+ verticalShift: -2,
+ }
+ );
// show
- this._hostNode.style.display = 'none';
+ this._hostNode.style.display = "none";
this._hostNode.parentNode.insertBefore(
- this._editAreaNode, hostNode.nextSibling);
+ this._editAreaNode,
+ hostNode.nextSibling
+ );
// add existing pools
for (let pool of [...this.pools]) {
@@ -81,19 +84,21 @@ class PoolInputControl extends events.EventTarget {
return Promise.resolve();
}
- this.pools.add(pool, false)
+ this.pools.add(pool, false);
const listItemNode = this._createListItemNode(pool);
if (!pool.category) {
- listItemNode.classList.add('new');
+ listItemNode.classList.add("new");
}
this._poolListNode.prependChild(listItemNode);
_fadeOutListItemNodeStatus(listItemNode);
- this.dispatchEvent(new CustomEvent('add', {
- detail: {pool: pool, source: source},
- }));
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(
+ new CustomEvent("add", {
+ detail: { pool: pool, source: source },
+ })
+ );
+ this.dispatchEvent(new CustomEvent("change"));
return Promise.resolve();
}
@@ -107,52 +112,57 @@ class PoolInputControl extends events.EventTarget {
this._deleteListItemNode(pool);
- this.dispatchEvent(new CustomEvent('remove', {
- detail: {pool: pool},
- }));
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(
+ new CustomEvent("remove", {
+ detail: { pool: pool },
+ })
+ );
+ this.dispatchEvent(new CustomEvent("change"));
}
_createListItemNode(pool) {
- const className = pool.category ?
- misc.makeCssName(pool.category, 'pool') :
- null;
+ const className = pool.category
+ ? misc.makeCssName(pool.category, "pool")
+ : null;
- const poolLinkNode = document.createElement('a');
+ const poolLinkNode = document.createElement("a");
if (className) {
poolLinkNode.classList.add(className);
}
poolLinkNode.setAttribute(
- 'href', uri.formatClientLink('pool', pool.names[0]));
+ "href",
+ uri.formatClientLink("pool", pool.names[0])
+ );
- const poolIconNode = document.createElement('i');
- poolIconNode.classList.add('fa');
- poolIconNode.classList.add('fa-pool');
+ const poolIconNode = document.createElement("i");
+ poolIconNode.classList.add("fa");
+ poolIconNode.classList.add("fa-pool");
poolLinkNode.appendChild(poolIconNode);
- const searchLinkNode = document.createElement('a');
+ const searchLinkNode = document.createElement("a");
if (className) {
searchLinkNode.classList.add(className);
}
searchLinkNode.setAttribute(
- 'href', uri.formatClientLink(
- 'posts', {query: "pool:" + pool.id}));
- searchLinkNode.textContent = pool.names[0] + ' ';
+ "href",
+ uri.formatClientLink("posts", { query: "pool:" + pool.id })
+ );
+ searchLinkNode.textContent = pool.names[0] + " ";
- const usagesNode = document.createElement('span');
- usagesNode.classList.add('pool-usages');
- usagesNode.setAttribute('data-pseudo-content', pool.postCount);
+ const usagesNode = document.createElement("span");
+ usagesNode.classList.add("pool-usages");
+ usagesNode.setAttribute("data-pseudo-content", pool.postCount);
- const removalLinkNode = document.createElement('a');
- removalLinkNode.classList.add('remove-pool');
- removalLinkNode.setAttribute('href', '');
- removalLinkNode.setAttribute('data-pseudo-content', '×');
- removalLinkNode.addEventListener('click', e => {
+ const removalLinkNode = document.createElement("a");
+ removalLinkNode.classList.add("remove-pool");
+ removalLinkNode.setAttribute("href", "");
+ removalLinkNode.setAttribute("data-pseudo-content", "×");
+ removalLinkNode.addEventListener("click", (e) => {
e.preventDefault();
this.deletePool(pool);
});
- const listItemNode = document.createElement('li');
+ const listItemNode = document.createElement("li");
listItemNode.appendChild(removalLinkNode);
listItemNode.appendChild(poolLinkNode);
listItemNode.appendChild(searchLinkNode);
diff --git a/client/js/controls/post_content_control.js b/client/js/controls/post_content_control.js
index d1848b3a..55daca76 100644
--- a/client/js/controls/post_content_control.js
+++ b/client/js/controls/post_content_control.js
@@ -1,36 +1,38 @@
-'use strict';
+"use strict";
-const settings = require('../models/settings.js');
-const views = require('../util/views.js');
-const optimizedResize = require('../util/optimized_resize.js');
+const settings = require("../models/settings.js");
+const views = require("../util/views.js");
+const optimizedResize = require("../util/optimized_resize.js");
class PostContentControl {
constructor(hostNode, post, viewportSizeCalculator, fitFunctionOverride) {
this._post = post;
this._viewportSizeCalculator = viewportSizeCalculator;
this._hostNode = hostNode;
- this._template = views.getTemplate('post-content');
+ this._template = views.getTemplate("post-content");
let fitMode = settings.get().fitMode;
- if (typeof fitFunctionOverride !== 'undefined') {
+ if (typeof fitFunctionOverride !== "undefined") {
fitMode = fitFunctionOverride;
}
- this._currentFitFunction = {
- 'fit-both': this.fitBoth,
- 'fit-original': this.fitOriginal,
- 'fit-width': this.fitWidth,
- 'fit-height': this.fitHeight,
- }[fitMode] || this.fitBoth;
+ this._currentFitFunction =
+ {
+ "fit-both": this.fitBoth,
+ "fit-original": this.fitOriginal,
+ "fit-width": this.fitWidth,
+ "fit-height": this.fitHeight,
+ }[fitMode] || this.fitBoth;
this._install();
- this._post.addEventListener(
- 'changeContent', e => this._evtPostContentChange(e));
+ this._post.addEventListener("changeContent", (e) =>
+ this._evtPostContentChange(e)
+ );
}
disableOverlay() {
- this._hostNode.querySelector('.post-overlay').style.display = 'none';
+ this._hostNode.querySelector(".post-overlay").style.display = "none";
}
fitWidth() {
@@ -92,10 +94,11 @@ class PostContentControl {
_resize(width, height) {
const resizeListenerNodes = [this._postContentNode].concat(
- ...this._postContentNode.querySelectorAll('.resize-listener'));
+ ...this._postContentNode.querySelectorAll(".resize-listener")
+ );
for (let node of resizeListenerNodes) {
- node.style.width = width + 'px';
- node.style.height = height + 'px';
+ node.style.width = width + "px";
+ node.style.height = height + "px";
}
}
@@ -106,10 +109,9 @@ class PostContentControl {
_install() {
this._reinstall();
optimizedResize.add(() => this._refreshSize());
- views.monitorNodeRemoval(
- this._hostNode, () => {
- this._uninstall();
- });
+ views.monitorNodeRemoval(this._hostNode, () => {
+ this._uninstall();
+ });
}
_reinstall() {
@@ -118,7 +120,7 @@ class PostContentControl {
autoplay: settings.get().autoplayVideos,
});
if (settings.get().transparencyGrid) {
- newNode.classList.add('transparency-grid');
+ newNode.classList.add("transparency-grid");
}
if (this._postContentNode) {
this._hostNode.replaceChild(newNode, this._postContentNode);
diff --git a/client/js/controls/post_edit_sidebar_control.js b/client/js/controls/post_edit_sidebar_control.js
index 495dcdb2..c6b7c226 100644
--- a/client/js/controls/post_edit_sidebar_control.js
+++ b/client/js/controls/post_edit_sidebar_control.js
@@ -1,17 +1,17 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const events = require('../events.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const Note = require('../models/note.js');
-const Point = require('../models/point.js');
-const TagInputControl = require('./tag_input_control.js');
-const PoolInputControl = require('./pool_input_control.js');
-const ExpanderControl = require('../controls/expander_control.js');
-const FileDropperControl = require('../controls/file_dropper_control.js');
+const api = require("../api.js");
+const events = require("../events.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const Note = require("../models/note.js");
+const Point = require("../models/point.js");
+const TagInputControl = require("./tag_input_control.js");
+const PoolInputControl = require("./pool_input_control.js");
+const ExpanderControl = require("../controls/expander_control.js");
+const FileDropperControl = require("../controls/file_dropper_control.js");
-const template = views.getTemplate('post-edit-sidebar');
+const template = views.getTemplate("post-edit-sidebar");
class PostEditSidebarControl extends events.EventTarget {
constructor(hostNode, post, postContentControl, postNotesOverlayControl) {
@@ -24,182 +24,222 @@ class PostEditSidebarControl extends events.EventTarget {
this._postNotesOverlayControl.switchToPassiveEdit();
- views.replaceContent(this._hostNode, template({
- post: this._post,
- enableSafety: api.safetyEnabled(),
- hasClipboard: document.queryCommandSupported('copy'),
- canEditPostSafety: api.hasPrivilege('posts:edit:safety'),
- canEditPostSource: api.hasPrivilege('posts:edit:source'),
- canEditPostTags: api.hasPrivilege('posts:edit:tags'),
- canEditPostRelations: api.hasPrivilege('posts:edit:relations'),
- canEditPostNotes: api.hasPrivilege('posts:edit:notes') &&
- post.type !== 'video' &&
- post.type !== 'flash',
- canEditPostFlags: api.hasPrivilege('posts:edit:flags'),
- canEditPostContent: api.hasPrivilege('posts:edit:content'),
- canEditPostThumbnail: api.hasPrivilege('posts:edit:thumbnail'),
- canEditPoolPosts: api.hasPrivilege('pools:edit:posts'),
- canCreateAnonymousPosts: api.hasPrivilege('posts:create:anonymous'),
- canDeletePosts: api.hasPrivilege('posts:delete'),
- canFeaturePosts: api.hasPrivilege('posts:feature'),
- canMergePosts: api.hasPrivilege('posts:merge'),
- }));
+ views.replaceContent(
+ this._hostNode,
+ template({
+ post: this._post,
+ enableSafety: api.safetyEnabled(),
+ hasClipboard: document.queryCommandSupported("copy"),
+ canEditPostSafety: api.hasPrivilege("posts:edit:safety"),
+ canEditPostSource: api.hasPrivilege("posts:edit:source"),
+ canEditPostTags: api.hasPrivilege("posts:edit:tags"),
+ canEditPostRelations: api.hasPrivilege("posts:edit:relations"),
+ canEditPostNotes:
+ api.hasPrivilege("posts:edit:notes") &&
+ post.type !== "video" &&
+ post.type !== "flash",
+ canEditPostFlags: api.hasPrivilege("posts:edit:flags"),
+ canEditPostContent: api.hasPrivilege("posts:edit:content"),
+ canEditPostThumbnail: api.hasPrivilege("posts:edit:thumbnail"),
+ canEditPoolPosts: api.hasPrivilege("pools:edit:posts"),
+ canCreateAnonymousPosts: api.hasPrivilege(
+ "posts:create:anonymous"
+ ),
+ canDeletePosts: api.hasPrivilege("posts:delete"),
+ canFeaturePosts: api.hasPrivilege("posts:feature"),
+ canMergePosts: api.hasPrivilege("posts:merge"),
+ })
+ );
new ExpanderControl(
- 'post-info',
- 'Basic info',
- this._hostNode.querySelectorAll('.safety, .relations, .flags, .post-source'));
+ "post-info",
+ "Basic info",
+ this._hostNode.querySelectorAll(
+ ".safety, .relations, .flags, .post-source"
+ )
+ );
this._tagsExpander = new ExpanderControl(
- 'post-tags',
+ "post-tags",
`Tags (${this._post.tags.length})`,
- this._hostNode.querySelectorAll('.tags'));
+ this._hostNode.querySelectorAll(".tags")
+ );
this._notesExpander = new ExpanderControl(
- 'post-notes',
- 'Notes',
- this._hostNode.querySelectorAll('.notes'));
+ "post-notes",
+ "Notes",
+ this._hostNode.querySelectorAll(".notes")
+ );
this._poolsExpander = new ExpanderControl(
- 'post-pools',
+ "post-pools",
`Pools (${this._post.pools.length})`,
- this._hostNode.querySelectorAll('.pools'));
+ this._hostNode.querySelectorAll(".pools")
+ );
new ExpanderControl(
- 'post-content',
- 'Content',
- this._hostNode.querySelectorAll('.post-content, .post-thumbnail'));
+ "post-content",
+ "Content",
+ this._hostNode.querySelectorAll(".post-content, .post-thumbnail")
+ );
new ExpanderControl(
- 'post-management',
- 'Management',
- this._hostNode.querySelectorAll('.management'));
+ "post-management",
+ "Management",
+ this._hostNode.querySelectorAll(".management")
+ );
this._syncExpanderTitles();
if (this._formNode) {
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtSubmit(e)
+ );
}
if (this._tagInputNode) {
this._tagControl = new TagInputControl(
- this._tagInputNode, post.tags);
+ this._tagInputNode,
+ post.tags
+ );
}
if (this._poolInputNode) {
this._poolControl = new PoolInputControl(
- this._poolInputNode, post.pools);
+ this._poolInputNode,
+ post.pools
+ );
}
if (this._contentInputNode) {
this._contentFileDropper = new FileDropperControl(
- this._contentInputNode, {allowUrls: true,
+ this._contentInputNode,
+ {
+ allowUrls: true,
lock: true,
- urlPlaceholder: '...or paste an URL here.'});
- this._contentFileDropper.addEventListener('fileadd', e => {
+ urlPlaceholder: "...or paste an URL here.",
+ }
+ );
+ this._contentFileDropper.addEventListener("fileadd", (e) => {
this._newPostContent = e.detail.files[0];
});
- this._contentFileDropper.addEventListener('urladd', e => {
+ this._contentFileDropper.addEventListener("urladd", (e) => {
this._newPostContent = e.detail.urls[0];
});
}
if (this._thumbnailInputNode) {
this._thumbnailFileDropper = new FileDropperControl(
- this._thumbnailInputNode, {lock: true});
- this._thumbnailFileDropper.addEventListener('fileadd', e => {
+ this._thumbnailInputNode,
+ { lock: true }
+ );
+ this._thumbnailFileDropper.addEventListener("fileadd", (e) => {
this._newPostThumbnail = e.detail.files[0];
- this._thumbnailRemovalLinkNode.style.display = 'block';
+ this._thumbnailRemovalLinkNode.style.display = "block";
});
}
if (this._thumbnailRemovalLinkNode) {
- this._thumbnailRemovalLinkNode.addEventListener(
- 'click', e => this._evtRemoveThumbnailClick(e));
- this._thumbnailRemovalLinkNode.style.display =
- this._post.hasCustomThumbnail ? 'block' : 'none';
+ this._thumbnailRemovalLinkNode.addEventListener("click", (e) =>
+ this._evtRemoveThumbnailClick(e)
+ );
+ this._thumbnailRemovalLinkNode.style.display = this._post
+ .hasCustomThumbnail
+ ? "block"
+ : "none";
}
if (this._addNoteLinkNode) {
- this._addNoteLinkNode.addEventListener(
- 'click', e => this._evtAddNoteClick(e));
+ this._addNoteLinkNode.addEventListener("click", (e) =>
+ this._evtAddNoteClick(e)
+ );
}
if (this._copyNotesLinkNode) {
- this._copyNotesLinkNode.addEventListener(
- 'click', e => this._evtCopyNotesClick(e));
+ this._copyNotesLinkNode.addEventListener("click", (e) =>
+ this._evtCopyNotesClick(e)
+ );
}
if (this._pasteNotesLinkNode) {
- this._pasteNotesLinkNode.addEventListener(
- 'click', e => this._evtPasteNotesClick(e));
+ this._pasteNotesLinkNode.addEventListener("click", (e) =>
+ this._evtPasteNotesClick(e)
+ );
}
if (this._deleteNoteLinkNode) {
- this._deleteNoteLinkNode.addEventListener(
- 'click', e => this._evtDeleteNoteClick(e));
+ this._deleteNoteLinkNode.addEventListener("click", (e) =>
+ this._evtDeleteNoteClick(e)
+ );
}
if (this._featureLinkNode) {
- this._featureLinkNode.addEventListener(
- 'click', e => this._evtFeatureClick(e));
+ this._featureLinkNode.addEventListener("click", (e) =>
+ this._evtFeatureClick(e)
+ );
}
if (this._mergeLinkNode) {
- this._mergeLinkNode.addEventListener(
- 'click', e => this._evtMergeClick(e));
+ this._mergeLinkNode.addEventListener("click", (e) =>
+ this._evtMergeClick(e)
+ );
}
if (this._deleteLinkNode) {
- this._deleteLinkNode.addEventListener(
- 'click', e => this._evtDeleteClick(e));
+ this._deleteLinkNode.addEventListener("click", (e) =>
+ this._evtDeleteClick(e)
+ );
}
- this._postNotesOverlayControl.addEventListener(
- 'blur', e => this._evtNoteBlur(e));
+ this._postNotesOverlayControl.addEventListener("blur", (e) =>
+ this._evtNoteBlur(e)
+ );
- this._postNotesOverlayControl.addEventListener(
- 'focus', e => this._evtNoteFocus(e));
+ this._postNotesOverlayControl.addEventListener("focus", (e) =>
+ this._evtNoteFocus(e)
+ );
- this._post.addEventListener(
- 'changeContent', e => this._evtPostContentChange(e));
+ this._post.addEventListener("changeContent", (e) =>
+ this._evtPostContentChange(e)
+ );
- this._post.addEventListener(
- 'changeThumbnail', e => this._evtPostThumbnailChange(e));
+ this._post.addEventListener("changeThumbnail", (e) =>
+ this._evtPostThumbnailChange(e)
+ );
if (this._formNode) {
const inputNodes = this._formNode.querySelectorAll(
- 'input, textarea');
+ "input, textarea"
+ );
for (let node of inputNodes) {
- node.addEventListener(
- 'change',
- e => this.dispatchEvent(new CustomEvent('change')));
+ node.addEventListener("change", (e) =>
+ this.dispatchEvent(new CustomEvent("change"))
+ );
}
- this._postNotesOverlayControl.addEventListener(
- 'change',
- e => this.dispatchEvent(new CustomEvent('change')));
+ this._postNotesOverlayControl.addEventListener("change", (e) =>
+ this.dispatchEvent(new CustomEvent("change"))
+ );
}
- for (let eventType of ['add', 'remove']) {
- this._post.notes.addEventListener(eventType, e => {
+ for (let eventType of ["add", "remove"]) {
+ this._post.notes.addEventListener(eventType, (e) => {
this._syncExpanderTitles();
});
- this._post.pools.addEventListener(eventType, e => {
+ this._post.pools.addEventListener(eventType, (e) => {
this._syncExpanderTitles();
});
}
- this._tagControl.addEventListener(
- 'change', e => {
- this.dispatchEvent(new CustomEvent('change'));
- this._syncExpanderTitles();
- });
+ this._tagControl.addEventListener("change", (e) => {
+ this.dispatchEvent(new CustomEvent("change"));
+ this._syncExpanderTitles();
+ });
if (this._noteTextareaNode) {
- this._noteTextareaNode.addEventListener(
- 'change', e => this._evtNoteTextChangeRequest(e));
+ this._noteTextareaNode.addEventListener("change", (e) =>
+ this._evtNoteTextChangeRequest(e)
+ );
}
- this._poolControl.addEventListener(
- 'change', e => {
- this.dispatchEvent(new CustomEvent('change'));
- this._syncExpanderTitles();
- });
+ this._poolControl.addEventListener("change", (e) => {
+ this.dispatchEvent(new CustomEvent("change"));
+ this._syncExpanderTitles();
+ });
}
_syncExpanderTitles() {
@@ -220,37 +260,43 @@ class PostEditSidebarControl extends events.EventTarget {
e.preventDefault();
this._thumbnailFileDropper.reset();
this._newPostThumbnail = null;
- this._thumbnailRemovalLinkNode.style.display = 'none';
+ this._thumbnailRemovalLinkNode.style.display = "none";
}
_evtFeatureClick(e) {
e.preventDefault();
- if (confirm('Are you sure you want to feature this post?')) {
- this.dispatchEvent(new CustomEvent('feature', {
- detail: {
- post: this._post,
- },
- }));
+ if (confirm("Are you sure you want to feature this post?")) {
+ this.dispatchEvent(
+ new CustomEvent("feature", {
+ detail: {
+ post: this._post,
+ },
+ })
+ );
}
}
_evtMergeClick(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('merge', {
- detail: {
- post: this._post,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("merge", {
+ detail: {
+ post: this._post,
+ },
+ })
+ );
}
_evtDeleteClick(e) {
e.preventDefault();
- if (confirm('Are you sure you want to delete this post?')) {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- post: this._post,
- },
- }));
+ if (confirm("Are you sure you want to delete this post?")) {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ post: this._post,
+ },
+ })
+ );
}
}
@@ -262,60 +308,64 @@ class PostEditSidebarControl extends events.EventTarget {
_evtNoteFocus(e) {
this._editedNote = e.detail.note;
- this._addNoteLinkNode.classList.remove('inactive');
- this._deleteNoteLinkNode.classList.remove('inactive');
- this._noteTextareaNode.removeAttribute('disabled');
+ this._addNoteLinkNode.classList.remove("inactive");
+ this._deleteNoteLinkNode.classList.remove("inactive");
+ this._noteTextareaNode.removeAttribute("disabled");
this._noteTextareaNode.value = e.detail.note.text;
}
_evtNoteBlur(e) {
this._evtNoteTextChangeRequest(null);
- this._addNoteLinkNode.classList.remove('inactive');
- this._deleteNoteLinkNode.classList.add('inactive');
+ this._addNoteLinkNode.classList.remove("inactive");
+ this._deleteNoteLinkNode.classList.add("inactive");
this._noteTextareaNode.blur();
- this._noteTextareaNode.setAttribute('disabled', 'disabled');
- this._noteTextareaNode.value = '';
+ this._noteTextareaNode.setAttribute("disabled", "disabled");
+ this._noteTextareaNode.value = "";
}
_evtAddNoteClick(e) {
e.preventDefault();
- if (e.target.classList.contains('inactive')) {
+ if (e.target.classList.contains("inactive")) {
return;
}
- this._addNoteLinkNode.classList.add('inactive');
+ this._addNoteLinkNode.classList.add("inactive");
this._postNotesOverlayControl.switchToDrawing();
}
_evtCopyNotesClick(e) {
e.preventDefault();
- let textarea = document.createElement('textarea');
- textarea.style.position = 'fixed';
- textarea.style.opacity = '0';
- textarea.value = JSON.stringify([...this._post.notes].map(note => ({
- polygon: [...note.polygon].map(
- point => [point.x, point.y]),
- text: note.text,
- })));
+ let textarea = document.createElement("textarea");
+ textarea.style.position = "fixed";
+ textarea.style.opacity = "0";
+ textarea.value = JSON.stringify(
+ [...this._post.notes].map((note) => ({
+ polygon: [...note.polygon].map((point) => [point.x, point.y]),
+ text: note.text,
+ }))
+ );
document.body.appendChild(textarea);
textarea.select();
let success = false;
try {
- success = document.execCommand('copy');
+ success = document.execCommand("copy");
} catch (err) {
// continue regardless of error
}
textarea.blur();
document.body.removeChild(textarea);
- alert(success
- ? 'Notes copied to clipboard.'
- : 'Failed to copy the text to clipboard. Sorry.');
+ alert(
+ success
+ ? "Notes copied to clipboard."
+ : "Failed to copy the text to clipboard. Sorry."
+ );
}
_evtPasteNotesClick(e) {
e.preventDefault();
const text = window.prompt(
- 'Please enter the exported notes snapshot:');
+ "Please enter the exported notes snapshot:"
+ );
if (!text) {
return;
}
@@ -333,7 +383,7 @@ class PostEditSidebarControl extends events.EventTarget {
_evtDeleteNoteClick(e) {
e.preventDefault();
- if (e.target.classList.contains('inactive')) {
+ if (e.target.classList.contains("inactive")) {
return;
}
this._post.notes.remove(this._editedNote);
@@ -342,72 +392,78 @@ class PostEditSidebarControl extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- post: this._post,
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ post: this._post,
- safety: this._safetyButtonNodes.length ?
- Array.from(this._safetyButtonNodes)
- .filter(node => node.checked)[0]
- .value.toLowerCase() :
- undefined,
+ safety: this._safetyButtonNodes.length
+ ? Array.from(this._safetyButtonNodes)
+ .filter((node) => node.checked)[0]
+ .value.toLowerCase()
+ : undefined,
- flags: this._videoFlags,
+ flags: this._videoFlags,
- tags: this._tagInputNode ?
- misc.splitByWhitespace(this._tagInputNode.value) :
- undefined,
+ tags: this._tagInputNode
+ ? misc.splitByWhitespace(this._tagInputNode.value)
+ : undefined,
- pools: this._poolInputNode ?
- misc.splitByWhitespace(this._poolInputNode.value) :
- undefined,
+ pools: this._poolInputNode
+ ? misc.splitByWhitespace(this._poolInputNode.value)
+ : undefined,
- relations: this._relationsInputNode ?
- misc.splitByWhitespace(this._relationsInputNode.value)
- .map(x => parseInt(x)) :
- undefined,
+ relations: this._relationsInputNode
+ ? misc
+ .splitByWhitespace(
+ this._relationsInputNode.value
+ )
+ .map((x) => parseInt(x))
+ : undefined,
- content: this._newPostContent ?
- this._newPostContent :
- undefined,
+ content: this._newPostContent
+ ? this._newPostContent
+ : undefined,
- thumbnail: this._newPostThumbnail !== undefined ?
- this._newPostThumbnail :
- undefined,
+ thumbnail:
+ this._newPostThumbnail !== undefined
+ ? this._newPostThumbnail
+ : undefined,
- source: this._sourceInputNode ?
- this._sourceInputNode.value :
- undefined,
- },
- }));
+ source: this._sourceInputNode
+ ? this._sourceInputNode.value
+ : undefined,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _submitButtonNode() {
- return this._hostNode.querySelector('.submit');
+ return this._hostNode.querySelector(".submit");
}
get _safetyButtonNodes() {
- return this._formNode.querySelectorAll('.safety input');
+ return this._formNode.querySelectorAll(".safety input");
}
get _tagInputNode() {
- return this._formNode.querySelector('.tags input');
+ return this._formNode.querySelector(".tags input");
}
get _poolInputNode() {
- return this._formNode.querySelector('.pools input');
+ return this._formNode.querySelector(".pools input");
}
get _loopVideoInputNode() {
- return this._formNode.querySelector('.flags input[name=loop]');
+ return this._formNode.querySelector(".flags input[name=loop]");
}
get _soundVideoInputNode() {
- return this._formNode.querySelector('.flags input[name=sound]');
+ return this._formNode.querySelector(".flags input[name=sound]");
}
get _videoFlags() {
@@ -416,65 +472,68 @@ class PostEditSidebarControl extends events.EventTarget {
}
let ret = [];
if (this._loopVideoInputNode.checked) {
- ret.push('loop');
+ ret.push("loop");
}
if (this._soundVideoInputNode.checked) {
- ret.push('sound');
+ ret.push("sound");
}
return ret;
}
get _relationsInputNode() {
- return this._formNode.querySelector('.relations input');
+ return this._formNode.querySelector(".relations input");
}
get _contentInputNode() {
- return this._formNode.querySelector('.post-content .dropper-container');
+ return this._formNode.querySelector(
+ ".post-content .dropper-container"
+ );
}
get _thumbnailInputNode() {
return this._formNode.querySelector(
- '.post-thumbnail .dropper-container');
+ ".post-thumbnail .dropper-container"
+ );
}
get _thumbnailRemovalLinkNode() {
- return this._formNode.querySelector('.post-thumbnail a');
+ return this._formNode.querySelector(".post-thumbnail a");
}
get _sourceInputNode() {
- return this._formNode.querySelector('.post-source textarea');
+ return this._formNode.querySelector(".post-source textarea");
}
get _featureLinkNode() {
- return this._formNode.querySelector('.management .feature');
+ return this._formNode.querySelector(".management .feature");
}
get _mergeLinkNode() {
- return this._formNode.querySelector('.management .merge');
+ return this._formNode.querySelector(".management .merge");
}
get _deleteLinkNode() {
- return this._formNode.querySelector('.management .delete');
+ return this._formNode.querySelector(".management .delete");
}
get _addNoteLinkNode() {
- return this._formNode.querySelector('.notes .add');
+ return this._formNode.querySelector(".notes .add");
}
get _copyNotesLinkNode() {
- return this._formNode.querySelector('.notes .copy');
+ return this._formNode.querySelector(".notes .copy");
}
get _pasteNotesLinkNode() {
- return this._formNode.querySelector('.notes .paste');
+ return this._formNode.querySelector(".notes .paste");
}
get _deleteNoteLinkNode() {
- return this._formNode.querySelector('.notes .delete');
+ return this._formNode.querySelector(".notes .delete");
}
get _noteTextareaNode() {
- return this._formNode.querySelector('.notes textarea');
+ return this._formNode.querySelector(".notes textarea");
}
enableForm() {
diff --git a/client/js/controls/post_notes_overlay_control.js b/client/js/controls/post_notes_overlay_control.js
index 7bf69b28..030f7f28 100644
--- a/client/js/controls/post_notes_overlay_control.js
+++ b/client/js/controls/post_notes_overlay_control.js
@@ -1,13 +1,13 @@
-'use strict';
+"use strict";
-const keyboard = require('../util/keyboard.js');
-const views = require('../util/views.js');
-const events = require('../events.js');
-const misc = require('../util/misc.js');
-const Note = require('../models/note.js');
-const Point = require('../models/point.js');
+const keyboard = require("../util/keyboard.js");
+const views = require("../util/views.js");
+const events = require("../events.js");
+const misc = require("../util/misc.js");
+const Note = require("../models/note.js");
+const Point = require("../models/point.js");
-const svgNS = 'http://www.w3.org/2000/svg';
+const svgNS = "http://www.w3.org/2000/svg";
const snapThreshold = 10;
const circleSize = 10;
@@ -22,19 +22,19 @@ const KEY_RETURN = 13;
function _getDistance(point1, point2) {
return Math.sqrt(
- Math.pow(point1.x - point2.x, 2) +
- Math.pow(point1.y - point2.y, 2));
+ Math.pow(point1.x - point2.x, 2) + Math.pow(point1.y - point2.y, 2)
+ );
}
function _setNodeState(node, stateName) {
if (node === null) {
return;
}
- node.setAttribute('data-state', stateName);
+ node.setAttribute("data-state", stateName);
}
function _clearEditedNote(hostNode) {
- const node = hostNode.querySelector('[data-state=\'editing\']');
+ const node = hostNode.querySelector("[data-state='editing']");
_setNodeState(node, null);
return node !== null;
}
@@ -48,7 +48,7 @@ function _getNoteCentroid(note) {
const y0 = note.polygon.at(i).y;
const x1 = note.polygon.at((i + 1) % vertexCount).x;
const y1 = note.polygon.at((i + 1) % vertexCount).y;
- const a = (x0 * y1) - (x1 * y0);
+ const a = x0 * y1 - x1 * y0;
signedArea += a;
centroid.x += (x0 + x1) * a;
centroid.y += (y0 + y1) * a;
@@ -82,32 +82,30 @@ class State {
return false;
}
- evtCanvasKeyDown(e) {
- }
+ evtCanvasKeyDown(e) {}
- evtNoteMouseDown(e, hoveredNote) {
- }
+ evtNoteMouseDown(e, hoveredNote) {}
- evtCanvasMouseDown(e) {
- }
+ evtCanvasMouseDown(e) {}
- evtCanvasMouseMove(e) {
- }
+ evtCanvasMouseMove(e) {}
- evtCanvasMouseUp(e) {
- }
+ evtCanvasMouseUp(e) {}
_getScreenPoint(point) {
return new Point(
point.x * this._control.boundingBox.width,
- point.y * this._control.boundingBox.height);
+ point.y * this._control.boundingBox.height
+ );
}
_snapPoints(targetPoint, referencePoint) {
const targetScreenPoint = this._getScreenPoint(targetPoint);
const referenceScreenPoint = this._getScreenPoint(referencePoint);
- if (_getDistance(targetScreenPoint, referenceScreenPoint) <
- snapThreshold) {
+ if (
+ _getDistance(targetScreenPoint, referenceScreenPoint) <
+ snapThreshold
+ ) {
targetPoint.x = referencePoint.x;
targetPoint.y = referencePoint.y;
}
@@ -124,15 +122,16 @@ class State {
(e.clientX - this._control.boundingBox.left) /
this._control.boundingBox.width,
(e.clientY - this._control.boundingBox.top) /
- this._control.boundingBox.height);
+ this._control.boundingBox.height
+ );
}
}
class ReadOnlyState extends State {
constructor(control) {
- super(control, 'read-only');
+ super(control, "read-only");
if (_clearEditedNote(control._hostNode)) {
- this._control.dispatchEvent(new CustomEvent('blur'));
+ this._control.dispatchEvent(new CustomEvent("blur"));
}
keyboard.unpause();
}
@@ -144,9 +143,9 @@ class ReadOnlyState extends State {
class PassiveState extends State {
constructor(control) {
- super(control, 'passive');
+ super(control, "passive");
if (_clearEditedNote(control._hostNode)) {
- this._control.dispatchEvent(new CustomEvent('blur'));
+ this._control.dispatchEvent(new CustomEvent("blur"));
}
keyboard.unpause();
}
@@ -164,23 +163,24 @@ class ActiveState extends State {
constructor(control, note, stateName) {
super(control, stateName);
if (_clearEditedNote(control._hostNode)) {
- this._control.dispatchEvent(new CustomEvent('blur'));
+ this._control.dispatchEvent(new CustomEvent("blur"));
}
keyboard.pause();
if (note !== null) {
this._note = note;
this._control.dispatchEvent(
- new CustomEvent('focus', {
- detail: {note: note},
- }));
- _setNodeState(this._note.groupNode, 'editing');
+ new CustomEvent("focus", {
+ detail: { note: note },
+ })
+ );
+ _setNodeState(this._note.groupNode, "editing");
}
}
}
class SelectedState extends ActiveState {
constructor(control, note) {
- super(control, note, 'selected');
+ super(control, note, "selected");
this._clickTimeout = null;
this._control._hideNoteText();
}
@@ -211,27 +211,40 @@ class SelectedState extends ActiveState {
const mouseScreenPoint = this._getScreenPoint(mousePoint);
if (e.shiftKey) {
this._control._state = new ScalingNoteState(
- this._control, this._note, mousePoint);
+ this._control,
+ this._note,
+ mousePoint
+ );
return;
}
if (this._note !== hoveredNote) {
- this._control._state =
- new SelectedState(this._control, hoveredNote);
+ this._control._state = new SelectedState(
+ this._control,
+ hoveredNote
+ );
return;
}
this._clickTimeout = window.setTimeout(() => {
for (let polygonPoint of this._note.polygon) {
const distance = _getDistance(
mouseScreenPoint,
- this._getScreenPoint(polygonPoint));
+ this._getScreenPoint(polygonPoint)
+ );
if (distance < circleSize) {
this._control._state = new MovingPointState(
- this._control, this._note, polygonPoint, mousePoint);
+ this._control,
+ this._note,
+ polygonPoint,
+ mousePoint
+ );
return;
}
}
this._control._state = new MovingNoteState(
- this._control, this._note, mousePoint);
+ this._control,
+ this._note,
+ mousePoint
+ );
}, 100);
}
@@ -241,9 +254,12 @@ class SelectedState extends ActiveState {
for (let polygonPoint of this._note.polygon) {
const distance = _getDistance(
mouseScreenPoint,
- this._getScreenPoint(polygonPoint));
+ this._getScreenPoint(polygonPoint)
+ );
polygonPoint.edgeNode.classList.toggle(
- 'nearby', distance < circleSize);
+ "nearby",
+ distance < circleSize
+ );
}
}
@@ -252,16 +268,24 @@ class SelectedState extends ActiveState {
const mouseScreenPoint = this._getScreenPoint(mousePoint);
if (e.shiftKey) {
this._control._state = new ScalingNoteState(
- this._control, this._note, mousePoint);
+ this._control,
+ this._note,
+ mousePoint
+ );
return;
}
for (let polygonPoint of this._note.polygon) {
const distance = _getDistance(
mouseScreenPoint,
- this._getScreenPoint(polygonPoint));
+ this._getScreenPoint(polygonPoint)
+ );
if (distance < circleSize) {
this._control._state = new MovingPointState(
- this._control, this._note, polygonPoint, mousePoint);
+ this._control,
+ this._note,
+ polygonPoint,
+ mousePoint
+ );
return;
}
}
@@ -283,32 +307,37 @@ class SelectedState extends ActiveState {
const origin = _getNoteCentroid(this._note);
const originalSize = _getNoteSize(this._note);
const targetSize = new Point(
- originalSize.x + (x / this._control.boundingBox.width),
- originalSize.y + (y / this._control.boundingBox.height));
+ originalSize.x + x / this._control.boundingBox.width,
+ originalSize.y + y / this._control.boundingBox.height
+ );
const scale = new Point(
targetSize.x / originalSize.x,
- targetSize.y / originalSize.y);
+ targetSize.y / originalSize.y
+ );
for (let point of this._note.polygon) {
- point.x = origin.x + ((point.x - origin.x) * scale.x);
- point.y = origin.y + ((point.y - origin.y) * scale.y);
+ point.x = origin.x + (point.x - origin.x) * scale.x;
+ point.y = origin.y + (point.y - origin.y) * scale.y;
}
}
}
class MovingPointState extends ActiveState {
constructor(control, note, notePoint, mousePoint) {
- super(control, note, 'moving-point');
+ super(control, note, "moving-point");
this._notePoint = notePoint;
- this._originalNotePoint = {x: notePoint.x, y: notePoint.y};
+ this._originalNotePoint = { x: notePoint.x, y: notePoint.y };
this._originalPosition = mousePoint;
- _setNodeState(this._note.groupNode, 'editing');
+ _setNodeState(this._note.groupNode, "editing");
}
evtCanvasKeyDown(e) {
if (e.which === KEY_ESCAPE) {
this._notePoint.x = this._originalNotePoint.x;
this._notePoint.y = this._originalNotePoint.y;
- this._control._state = new SelectedState(this._control, this._note);
+ this._control._state = new SelectedState(
+ this._control,
+ this._note
+ );
}
}
@@ -326,9 +355,11 @@ class MovingPointState extends ActiveState {
class MovingNoteState extends ActiveState {
constructor(control, note, mousePoint) {
- super(control, note, 'moving-note');
- this._originalPolygon = [...note.polygon].map(
- point => ({x: point.x, y: point.y}));
+ super(control, note, "moving-note");
+ this._originalPolygon = [...note.polygon].map((point) => ({
+ x: point.x,
+ y: point.y,
+ }));
this._originalPosition = mousePoint;
}
@@ -338,7 +369,10 @@ class MovingNoteState extends ActiveState {
this._note.polygon.at(i).x = this._originalPolygon[i].x;
this._note.polygon.at(i).y = this._originalPolygon[i].y;
}
- this._control._state = new SelectedState(this._control, this._note);
+ this._control._state = new SelectedState(
+ this._control,
+ this._note
+ );
}
}
@@ -358,9 +392,11 @@ class MovingNoteState extends ActiveState {
class ScalingNoteState extends ActiveState {
constructor(control, note, mousePoint) {
- super(control, note, 'scaling-note');
- this._originalPolygon = [...note.polygon].map(
- point => ({x: point.x, y: point.y}));
+ super(control, note, "scaling-note");
+ this._originalPolygon = [...note.polygon].map((point) => ({
+ x: point.x,
+ y: point.y,
+ }));
this._originalMousePoint = mousePoint;
this._originalSize = _getNoteSize(note);
}
@@ -371,7 +407,10 @@ class ScalingNoteState extends ActiveState {
this._note.polygon.at(i).x = this._originalPolygon[i].x;
this._note.polygon.at(i).y = this._originalPolygon[i].y;
}
- this._control._state = new SelectedState(this._control, this._note);
+ this._control._state = new SelectedState(
+ this._control,
+ this._note
+ );
}
}
@@ -384,12 +423,16 @@ class ScalingNoteState extends ActiveState {
const originalPolygonPoint = this._originalPolygon[i];
polygonPoint.x =
originalMousePoint.x +
- ((originalPolygonPoint.x - originalMousePoint.x) *
- (1 + ((mousePoint.x - originalMousePoint.x) / originalSize.x)));
+ (originalPolygonPoint.x - originalMousePoint.x) *
+ (1 +
+ (mousePoint.x - originalMousePoint.x) /
+ originalSize.x);
polygonPoint.y =
originalMousePoint.y +
- ((originalPolygonPoint.y - originalMousePoint.y) *
- (1 + ((mousePoint.y - originalMousePoint.y) / originalSize.y)));
+ (originalPolygonPoint.y - originalMousePoint.y) *
+ (1 +
+ (mousePoint.y - originalMousePoint.y) /
+ originalSize.y);
}
}
@@ -400,7 +443,7 @@ class ScalingNoteState extends ActiveState {
class ReadyToDrawState extends ActiveState {
constructor(control) {
- super(control, null, 'ready-to-draw');
+ super(control, null, "ready-to-draw");
}
evtNoteMouseDown(e, hoveredNote) {
@@ -411,23 +454,27 @@ class ReadyToDrawState extends ActiveState {
const mousePoint = this._getPointFromEvent(e);
if (e.shiftKey) {
this._control._state = new DrawingRectangleState(
- this._control, mousePoint);
+ this._control,
+ mousePoint
+ );
} else {
this._control._state = new DrawingPolygonState(
- this._control, mousePoint);
+ this._control,
+ mousePoint
+ );
}
}
}
class DrawingRectangleState extends ActiveState {
constructor(control, mousePoint) {
- super(control, null, 'drawing-rectangle');
+ super(control, null, "drawing-rectangle");
this._note = this._createNote();
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
- _setNodeState(this._note.groupNode, 'drawing');
+ _setNodeState(this._note.groupNode, "drawing");
}
evtCanvasMouseUp(e) {
@@ -443,7 +490,10 @@ class DrawingRectangleState extends ActiveState {
this._control._state = new ReadyToDrawState(this._control);
} else {
this._control._post.notes.add(this._note);
- this._control._state = new SelectedState(this._control, this._note);
+ this._control._state = new SelectedState(
+ this._control,
+ this._note
+ );
}
}
@@ -458,11 +508,11 @@ class DrawingRectangleState extends ActiveState {
class DrawingPolygonState extends ActiveState {
constructor(control, mousePoint) {
- super(control, null, 'drawing-polygon');
+ super(control, null, "drawing-polygon");
this._note = this._createNote();
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
this._note.polygon.add(new Point(mousePoint.x, mousePoint.y));
- _setNodeState(this._note.groupNode, 'drawing');
+ _setNodeState(this._note.groupNode, "drawing");
}
evtCanvasKeyDown(e) {
@@ -502,11 +552,16 @@ class DrawingPolygonState extends ActiveState {
}
if (e.shiftKey && secondLastPoint) {
- const direction = (Math.round(
- Math.atan2(
- secondLastPoint.y - mousePoint.y,
- secondLastPoint.x - mousePoint.x) /
- (2 * Math.PI / 4)) + 4) % 4;
+ const direction =
+ (Math.round(
+ Math.atan2(
+ secondLastPoint.y - mousePoint.y,
+ secondLastPoint.x - mousePoint.x
+ ) /
+ ((2 * Math.PI) / 4)
+ ) +
+ 4) %
+ 4;
if (direction === 0 || direction === 2) {
lastPoint.x = mousePoint.x;
lastPoint.y = secondLastPoint.y;
@@ -533,7 +588,10 @@ class DrawingPolygonState extends ActiveState {
} else {
this._control._deleteDomNode(this._note);
this._control._post.notes.add(this._note);
- this._control._state = new SelectedState(this._control, this._note);
+ this._control._state = new SelectedState(
+ this._control,
+ this._note
+ );
}
}
}
@@ -544,48 +602,51 @@ class PostNotesOverlayControl extends events.EventTarget {
this._post = post;
this._hostNode = hostNode;
- this._svgNode = document.createElementNS(svgNS, 'svg');
- this._svgNode.classList.add('resize-listener');
- this._svgNode.classList.add('notes-overlay');
- this._svgNode.setAttribute('preserveAspectRatio', 'none');
- this._svgNode.setAttribute('viewBox', '0 0 1 1');
+ this._svgNode = document.createElementNS(svgNS, "svg");
+ this._svgNode.classList.add("resize-listener");
+ this._svgNode.classList.add("notes-overlay");
+ this._svgNode.setAttribute("preserveAspectRatio", "none");
+ this._svgNode.setAttribute("viewBox", "0 0 1 1");
for (let note of this._post.notes) {
this._createPolygonNode(note);
}
this._hostNode.appendChild(this._svgNode);
- this._post.addEventListener('change', e => this._evtPostChange(e));
- this._post.notes.addEventListener('remove', e => {
+ this._post.addEventListener("change", (e) => this._evtPostChange(e));
+ this._post.notes.addEventListener("remove", (e) => {
this._deleteDomNode(e.detail.note);
});
- this._post.notes.addEventListener('add', e => {
+ this._post.notes.addEventListener("add", (e) => {
this._createPolygonNode(e.detail.note);
});
- const keyHandler = e => this._evtCanvasKeyDown(e);
- document.addEventListener('keydown', keyHandler);
- this._svgNode.addEventListener(
- 'mousedown', e => this._evtCanvasMouseDown(e));
- this._svgNode.addEventListener(
- 'mouseup', e => this._evtCanvasMouseUp(e));
- this._svgNode.addEventListener(
- 'mousemove', e => this._evtCanvasMouseMove(e));
+ const keyHandler = (e) => this._evtCanvasKeyDown(e);
+ document.addEventListener("keydown", keyHandler);
+ this._svgNode.addEventListener("mousedown", (e) =>
+ this._evtCanvasMouseDown(e)
+ );
+ this._svgNode.addEventListener("mouseup", (e) =>
+ this._evtCanvasMouseUp(e)
+ );
+ this._svgNode.addEventListener("mousemove", (e) =>
+ this._evtCanvasMouseMove(e)
+ );
- const wrapperNode = document.createElement('div');
- wrapperNode.classList.add('wrapper');
- this._textNode = document.createElement('div');
- this._textNode.classList.add('note-text');
+ const wrapperNode = document.createElement("div");
+ wrapperNode.classList.add("wrapper");
+ this._textNode = document.createElement("div");
+ this._textNode.classList.add("note-text");
this._textNode.appendChild(wrapperNode);
- this._textNode.addEventListener(
- 'mouseleave', e => this._evtNoteMouseLeave(e));
+ this._textNode.addEventListener("mouseleave", (e) =>
+ this._evtNoteMouseLeave(e)
+ );
document.body.appendChild(this._textNode);
- views.monitorNodeRemoval(
- this._hostNode, () => {
- this._hostNode.removeChild(this._svgNode);
- document.removeEventListener('keydown', keyHandler);
- document.body.removeChild(this._textNode);
- this._state = new ReadOnlyState(this);
- });
+ views.monitorNodeRemoval(this._hostNode, () => {
+ this._hostNode.removeChild(this._svgNode);
+ document.removeEventListener("keydown", keyHandler);
+ document.body.removeChild(this._textNode);
+ this._state = new ReadOnlyState(this);
+ });
this._state = new ReadOnlyState(this);
}
@@ -613,7 +674,7 @@ class PostNotesOverlayControl extends events.EventTarget {
}
_evtCanvasKeyDown(e) {
- const illegalNodeNames = ['textarea', 'input', 'select'];
+ const illegalNodeNames = ["textarea", "input", "select"];
if (illegalNodeNames.includes(e.target.nodeName.toLowerCase())) {
return;
}
@@ -655,53 +716,58 @@ class PostNotesOverlayControl extends events.EventTarget {
_evtNoteMouseLeave(e) {
const newElement = e.relatedTarget;
- if (newElement === this._svgNode ||
- (!this._svgNode.contains(newElement) &&
+ if (
+ newElement === this._svgNode ||
+ (!this._svgNode.contains(newElement) &&
!this._textNode.contains(newElement) &&
- newElement !== this._textNode)) {
+ newElement !== this._textNode)
+ ) {
this._hideNoteText();
}
}
_showNoteText(note) {
- this._textNode.querySelector('.wrapper').innerHTML =
- misc.formatMarkdown(note.text);
- this._textNode.style.display = 'block';
+ this._textNode.querySelector(
+ ".wrapper"
+ ).innerHTML = misc.formatMarkdown(note.text);
+ this._textNode.style.display = "block";
const bodyRect = document.body.getBoundingClientRect();
const noteRect = this._textNode.getBoundingClientRect();
const svgRect = this.boundingBox;
const centroid = _getNoteCentroid(note);
- const x = (
+ const x =
-bodyRect.left +
svgRect.left +
- (svgRect.width * centroid.x) -
- (noteRect.width / 2));
- const y = (
+ svgRect.width * centroid.x -
+ noteRect.width / 2;
+ const y =
-bodyRect.top +
svgRect.top +
- (svgRect.height * centroid.y) -
- (noteRect.height / 2));
- this._textNode.style.left = x + 'px';
- this._textNode.style.top = y + 'px';
+ svgRect.height * centroid.y -
+ noteRect.height / 2;
+ this._textNode.style.left = x + "px";
+ this._textNode.style.top = y + "px";
}
_hideNoteText() {
- this._textNode.style.display = 'none';
+ this._textNode.style.display = "none";
}
_updatePolygonNotePoints(note) {
note.polygonNode.setAttribute(
- 'points',
- [...note.polygon].map(
- point => [point.x, point.y].join(',')).join(' '));
+ "points",
+ [...note.polygon]
+ .map((point) => [point.x, point.y].join(","))
+ .join(" ")
+ );
}
_createEdgeNode(point, groupNode) {
- const node = document.createElementNS(svgNS, 'ellipse');
- node.setAttribute('cx', point.x);
- node.setAttribute('cy', point.y);
- node.setAttribute('rx', circleSize / 2 / this.boundingBox.width);
- node.setAttribute('ry', circleSize / 2 / this.boundingBox.height);
+ const node = document.createElementNS(svgNS, "ellipse");
+ node.setAttribute("cx", point.x);
+ node.setAttribute("cy", point.y);
+ node.setAttribute("rx", circleSize / 2 / this.boundingBox.width);
+ node.setAttribute("ry", circleSize / 2 / this.boundingBox.height);
point.edgeNode = node;
groupNode.appendChild(node);
}
@@ -713,8 +779,8 @@ class PostNotesOverlayControl extends events.EventTarget {
_updateEdgeNode(point, note) {
this._updatePolygonNotePoints(note);
- point.edgeNode.setAttribute('cx', point.x);
- point.edgeNode.setAttribute('cy', point.y);
+ point.edgeNode.setAttribute("cx", point.x);
+ point.edgeNode.setAttribute("cy", point.y);
}
_deleteDomNode(note) {
@@ -722,17 +788,19 @@ class PostNotesOverlayControl extends events.EventTarget {
}
_createPolygonNode(note) {
- const groupNode = document.createElementNS(svgNS, 'g');
+ const groupNode = document.createElementNS(svgNS, "g");
note.groupNode = groupNode;
{
- const node = document.createElementNS(svgNS, 'polygon');
+ const node = document.createElementNS(svgNS, "polygon");
note.polygonNode = node;
- node.setAttribute('vector-effect', 'non-scaling-stroke');
- node.setAttribute('stroke-alignment', 'inside');
- node.addEventListener(
- 'mouseenter', e => this._evtNoteMouseEnter(e, note));
- node.addEventListener(
- 'mouseleave', e => this._evtNoteMouseLeave(e));
+ node.setAttribute("vector-effect", "non-scaling-stroke");
+ node.setAttribute("stroke-alignment", "inside");
+ node.addEventListener("mouseenter", (e) =>
+ this._evtNoteMouseEnter(e, note)
+ );
+ node.addEventListener("mouseleave", (e) =>
+ this._evtNoteMouseLeave(e)
+ );
this._updatePolygonNotePoints(note);
groupNode.appendChild(node);
}
@@ -740,17 +808,17 @@ class PostNotesOverlayControl extends events.EventTarget {
this._createEdgeNode(point, groupNode);
}
- note.polygon.addEventListener('change', e => {
+ note.polygon.addEventListener("change", (e) => {
this._updateEdgeNode(e.detail.point, note);
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(new CustomEvent("change"));
});
- note.polygon.addEventListener('remove', e => {
+ note.polygon.addEventListener("remove", (e) => {
this._deleteEdgeNode(e.detail.point, note);
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(new CustomEvent("change"));
});
- note.polygon.addEventListener('add', e => {
+ note.polygon.addEventListener("add", (e) => {
this._createEdgeNode(e.detail.point, groupNode);
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(new CustomEvent("change"));
});
this._svgNode.appendChild(groupNode);
diff --git a/client/js/controls/post_readonly_sidebar_control.js b/client/js/controls/post_readonly_sidebar_control.js
index 388c238c..580ef43f 100644
--- a/client/js/controls/post_readonly_sidebar_control.js
+++ b/client/js/controls/post_readonly_sidebar_control.js
@@ -1,14 +1,14 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const events = require('../events.js');
-const views = require('../util/views.js');
-const uri = require('../util/uri.js');
-const misc = require('../util/misc.js');
+const api = require("../api.js");
+const events = require("../events.js");
+const views = require("../util/views.js");
+const uri = require("../util/uri.js");
+const misc = require("../util/misc.js");
-const template = views.getTemplate('post-readonly-sidebar');
-const scoreTemplate = views.getTemplate('score');
-const favTemplate = views.getTemplate('fav');
+const template = views.getTemplate("post-readonly-sidebar");
+const scoreTemplate = views.getTemplate("score");
+const favTemplate = views.getTemplate("fav");
class PostReadonlySidebarControl extends events.EventTarget {
constructor(hostNode, post, postContentControl) {
@@ -17,19 +17,22 @@ class PostReadonlySidebarControl extends events.EventTarget {
this._post = post;
this._postContentControl = postContentControl;
- post.addEventListener('changeFavorite', e => this._evtChangeFav(e));
- post.addEventListener('changeScore', e => this._evtChangeScore(e));
+ post.addEventListener("changeFavorite", (e) => this._evtChangeFav(e));
+ post.addEventListener("changeScore", (e) => this._evtChangeScore(e));
- views.replaceContent(this._hostNode, template({
- post: this._post,
- enableSafety: api.safetyEnabled(),
- canListPosts: api.hasPrivilege('posts:list'),
- canEditPosts: api.hasPrivilege('posts:edit'),
- canViewTags: api.hasPrivilege('tags:view'),
- escapeColons: uri.escapeColons,
- extractRootDomain: uri.extractRootDomain,
- getPrettyTagName: misc.getPrettyTagName,
- }));
+ views.replaceContent(
+ this._hostNode,
+ template({
+ post: this._post,
+ enableSafety: api.safetyEnabled(),
+ canListPosts: api.hasPrivilege("posts:list"),
+ canEditPosts: api.hasPrivilege("posts:edit"),
+ canViewTags: api.hasPrivilege("tags:view"),
+ escapeColons: uri.escapeColons,
+ extractRootDomain: uri.extractRootDomain,
+ getPrettyTagName: misc.getPrettyTagName,
+ })
+ );
this._installFav();
this._installScore();
@@ -38,58 +41,62 @@ class PostReadonlySidebarControl extends events.EventTarget {
}
get _scoreContainerNode() {
- return this._hostNode.querySelector('.score-container');
+ return this._hostNode.querySelector(".score-container");
}
get _favContainerNode() {
- return this._hostNode.querySelector('.fav-container');
+ return this._hostNode.querySelector(".fav-container");
}
get _upvoteButtonNode() {
- return this._hostNode.querySelector('.upvote');
+ return this._hostNode.querySelector(".upvote");
}
get _downvoteButtonNode() {
- return this._hostNode.querySelector('.downvote');
+ return this._hostNode.querySelector(".downvote");
}
get _addFavButtonNode() {
- return this._hostNode.querySelector('.add-favorite');
+ return this._hostNode.querySelector(".add-favorite");
}
get _remFavButtonNode() {
- return this._hostNode.querySelector('.remove-favorite');
+ return this._hostNode.querySelector(".remove-favorite");
}
get _fitBothButtonNode() {
- return this._hostNode.querySelector('.fit-both');
+ return this._hostNode.querySelector(".fit-both");
}
get _fitOriginalButtonNode() {
- return this._hostNode.querySelector('.fit-original');
+ return this._hostNode.querySelector(".fit-original");
}
get _fitWidthButtonNode() {
- return this._hostNode.querySelector('.fit-width');
+ return this._hostNode.querySelector(".fit-width");
}
get _fitHeightButtonNode() {
- return this._hostNode.querySelector('.fit-height');
+ return this._hostNode.querySelector(".fit-height");
}
_installFitButtons() {
this._fitBothButtonNode.addEventListener(
- 'click', this._eventZoomProxy(
- () => this._postContentControl.fitBoth()));
+ "click",
+ this._eventZoomProxy(() => this._postContentControl.fitBoth())
+ );
this._fitOriginalButtonNode.addEventListener(
- 'click', this._eventZoomProxy(
- () => this._postContentControl.fitOriginal()));
+ "click",
+ this._eventZoomProxy(() => this._postContentControl.fitOriginal())
+ );
this._fitWidthButtonNode.addEventListener(
- 'click', this._eventZoomProxy(
- () => this._postContentControl.fitWidth()));
+ "click",
+ this._eventZoomProxy(() => this._postContentControl.fitWidth())
+ );
this._fitHeightButtonNode.addEventListener(
- 'click', this._eventZoomProxy(
- () => this._postContentControl.fitHeight()));
+ "click",
+ this._eventZoomProxy(() => this._postContentControl.fitHeight())
+ );
}
_installFav() {
@@ -98,16 +105,19 @@ class PostReadonlySidebarControl extends events.EventTarget {
favTemplate({
favoriteCount: this._post.favoriteCount,
ownFavorite: this._post.ownFavorite,
- canFavorite: api.hasPrivilege('posts:favorite'),
- }));
+ canFavorite: api.hasPrivilege("posts:favorite"),
+ })
+ );
if (this._addFavButtonNode) {
- this._addFavButtonNode.addEventListener(
- 'click', e => this._evtAddToFavoritesClick(e));
+ this._addFavButtonNode.addEventListener("click", (e) =>
+ this._evtAddToFavoritesClick(e)
+ );
}
if (this._remFavButtonNode) {
- this._remFavButtonNode.addEventListener(
- 'click', e => this._evtRemoveFromFavoritesClick(e));
+ this._remFavButtonNode.addEventListener("click", (e) =>
+ this._evtRemoveFromFavoritesClick(e)
+ );
}
}
@@ -117,77 +127,88 @@ class PostReadonlySidebarControl extends events.EventTarget {
scoreTemplate({
score: this._post.score,
ownScore: this._post.ownScore,
- canScore: api.hasPrivilege('posts:score'),
- }));
+ canScore: api.hasPrivilege("posts:score"),
+ })
+ );
if (this._upvoteButtonNode) {
- this._upvoteButtonNode.addEventListener(
- 'click', e => this._evtScoreClick(e, 1));
+ this._upvoteButtonNode.addEventListener("click", (e) =>
+ this._evtScoreClick(e, 1)
+ );
}
if (this._downvoteButtonNode) {
- this._downvoteButtonNode.addEventListener(
- 'click', e => this._evtScoreClick(e, -1));
+ this._downvoteButtonNode.addEventListener("click", (e) =>
+ this._evtScoreClick(e, -1)
+ );
}
}
_eventZoomProxy(func) {
- return e => {
+ return (e) => {
e.preventDefault();
e.target.blur();
func();
this._syncFitButton();
- this.dispatchEvent(new CustomEvent('fitModeChange', {
- detail: {
- mode: this._getFitMode(),
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("fitModeChange", {
+ detail: {
+ mode: this._getFitMode(),
+ },
+ })
+ );
};
}
_getFitMode() {
const funcToName = {};
- funcToName[this._postContentControl.fitBoth] = 'fit-both';
- funcToName[this._postContentControl.fitOriginal] = 'fit-original';
- funcToName[this._postContentControl.fitWidth] = 'fit-width';
- funcToName[this._postContentControl.fitHeight] = 'fit-height';
+ funcToName[this._postContentControl.fitBoth] = "fit-both";
+ funcToName[this._postContentControl.fitOriginal] = "fit-original";
+ funcToName[this._postContentControl.fitWidth] = "fit-width";
+ funcToName[this._postContentControl.fitHeight] = "fit-height";
return funcToName[this._postContentControl._currentFitFunction];
}
_syncFitButton() {
const className = this._getFitMode();
- const oldNode = this._hostNode.querySelector('.zoom a.active');
+ const oldNode = this._hostNode.querySelector(".zoom a.active");
const newNode = this._hostNode.querySelector(`.zoom a.${className}`);
if (oldNode) {
- oldNode.classList.remove('active');
+ oldNode.classList.remove("active");
}
- newNode.classList.add('active');
+ newNode.classList.add("active");
}
_evtAddToFavoritesClick(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('favorite', {
- detail: {
- post: this._post,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("favorite", {
+ detail: {
+ post: this._post,
+ },
+ })
+ );
}
_evtRemoveFromFavoritesClick(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('unfavorite', {
- detail: {
- post: this._post,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("unfavorite", {
+ detail: {
+ post: this._post,
+ },
+ })
+ );
}
_evtScoreClick(e, score) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('score', {
- detail: {
- post: this._post,
- score: this._post.ownScore === score ? 0 : score,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("score", {
+ detail: {
+ post: this._post,
+ score: this._post.ownScore === score ? 0 : score,
+ },
+ })
+ );
}
_evtChangeFav(e) {
diff --git a/client/js/controls/tag_auto_complete_control.js b/client/js/controls/tag_auto_complete_control.js
index f8d3f6c3..8ee6aa88 100644
--- a/client/js/controls/tag_auto_complete_control.js
+++ b/client/js/controls/tag_auto_complete_control.js
@@ -1,51 +1,63 @@
-'use strict';
+"use strict";
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const TagList = require('../models/tag_list.js');
-const AutoCompleteControl = require('./auto_complete_control.js');
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const TagList = require("../models/tag_list.js");
+const AutoCompleteControl = require("./auto_complete_control.js");
function _tagListToMatches(tags, options) {
- return [...tags].sort((tag1, tag2) => {
- return tag2.usages - tag1.usages;
- }).map(tag => {
- let cssName = misc.makeCssName(tag.category, 'tag');
- if (options.isTaggedWith(tag.names[0])) {
- cssName += ' disabled';
- }
-        const caption = (
-            '<span class="' + cssName + '">'
-            + misc.escapeHtml(tag.names[0] + ' (' + tag.postCount + ')')
-            + '</span>');
- return {
- caption: caption,
- value: tag,
- };
- });
+ return [...tags]
+ .sort((tag1, tag2) => {
+ return tag2.usages - tag1.usages;
+ })
+ .map((tag) => {
+ let cssName = misc.makeCssName(tag.category, "tag");
+ if (options.isTaggedWith(tag.names[0])) {
+ cssName += " disabled";
+ }
+            const caption =
+                '<span class="' +
+                cssName +
+                '">' +
+                misc.escapeHtml(tag.names[0] + " (" + tag.postCount + ")") +
+                "</span>";
+ return {
+ caption: caption,
+ value: tag,
+ };
+ });
}
class TagAutoCompleteControl extends AutoCompleteControl {
constructor(input, options) {
const minLengthForPartialSearch = 3;
- options = Object.assign({
- isTaggedWith: tag => false,
- }, options);
+ options = Object.assign(
+ {
+ isTaggedWith: (tag) => false,
+ },
+ options
+ );
- options.getMatches = text => {
+ options.getMatches = (text) => {
const term = misc.escapeSearchTerm(text);
- const query = (
- text.length < minLengthForPartialSearch
- ? term + '*'
- : '*' + term + '*') + ' sort:usages';
+ const query =
+ (text.length < minLengthForPartialSearch
+ ? term + "*"
+ : "*" + term + "*") + " sort:usages";
return new Promise((resolve, reject) => {
- TagList.search(
- query, 0, this._options.maxResults, ['names', 'category', 'usages'])
- .then(
- response => resolve(
- _tagListToMatches(response.results, this._options)),
- reject);
+ TagList.search(query, 0, this._options.maxResults, [
+ "names",
+ "category",
+ "usages",
+ ]).then(
+ (response) =>
+ resolve(
+ _tagListToMatches(response.results, this._options)
+ ),
+ reject
+ );
});
};
diff --git a/client/js/controls/tag_input_control.js b/client/js/controls/tag_input_control.js
index e0b13393..7d069f9a 100644
--- a/client/js/controls/tag_input_control.js
+++ b/client/js/controls/tag_input_control.js
@@ -1,25 +1,25 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const tags = require('../tags.js');
-const misc = require('../util/misc.js');
-const uri = require('../util/uri.js');
-const Tag = require('../models/tag.js');
-const settings = require('../models/settings.js');
-const events = require('../events.js');
-const views = require('../util/views.js');
-const TagAutoCompleteControl = require('./tag_auto_complete_control.js');
+const api = require("../api.js");
+const tags = require("../tags.js");
+const misc = require("../util/misc.js");
+const uri = require("../util/uri.js");
+const Tag = require("../models/tag.js");
+const settings = require("../models/settings.js");
+const events = require("../events.js");
+const views = require("../util/views.js");
+const TagAutoCompleteControl = require("./tag_auto_complete_control.js");
const KEY_SPACE = 32;
const KEY_RETURN = 13;
-const SOURCE_INIT = 'init';
-const SOURCE_IMPLICATION = 'implication';
-const SOURCE_USER_INPUT = 'user-input';
-const SOURCE_SUGGESTION = 'suggestions';
-const SOURCE_CLIPBOARD = 'clipboard';
+const SOURCE_INIT = "init";
+const SOURCE_IMPLICATION = "implication";
+const SOURCE_USER_INPUT = "user-input";
+const SOURCE_SUGGESTION = "suggestions";
+const SOURCE_CLIPBOARD = "clipboard";
-const template = views.getTemplate('tag-input');
+const template = views.getTemplate("tag-input");
function _fadeOutListItemNodeStatus(listItemNode) {
if (listItemNode.classList.length) {
@@ -28,8 +28,7 @@ function _fadeOutListItemNodeStatus(listItemNode) {
}
listItemNode.fadeTimeout = window.setTimeout(() => {
while (listItemNode.classList.length) {
- listItemNode.classList.remove(
- listItemNode.classList.item(0));
+ listItemNode.classList.remove(listItemNode.classList.item(0));
}
listItemNode.fadeTimeout = null;
}, 2500);
@@ -51,7 +50,9 @@ class SuggestionList {
}
set(suggestion, weight) {
- if (Object.prototype.hasOwnProperty.call(this._suggestions, suggestion)) {
+ if (
+ Object.prototype.hasOwnProperty.call(this._suggestions, suggestion)
+ ) {
weight = Math.max(weight, this._suggestions[suggestion]);
}
this._suggestions[suggestion] = weight;
@@ -74,8 +75,8 @@ class SuggestionList {
let nameDiff = a[0].localeCompare(b[0]);
return weightDiff === 0 ? nameDiff : weightDiff;
});
- return tuples.map(tuple => {
- return {tagName: tuple[0], weight: tuple[1]};
+ return tuples.map((tuple) => {
+ return { tagName: tuple[0], weight: tuple[1] };
});
}
}
@@ -91,45 +92,58 @@ class TagInputControl extends events.EventTarget {
// dom
const editAreaNode = template();
this._editAreaNode = editAreaNode;
- this._tagInputNode = editAreaNode.querySelector('input');
- this._suggestionsNode = editAreaNode.querySelector('.tag-suggestions');
- this._tagListNode = editAreaNode.querySelector('ul.compact-tags');
+ this._tagInputNode = editAreaNode.querySelector("input");
+ this._suggestionsNode = editAreaNode.querySelector(".tag-suggestions");
+ this._tagListNode = editAreaNode.querySelector("ul.compact-tags");
this._autoCompleteControl = new TagAutoCompleteControl(
- this._tagInputNode, {
+ this._tagInputNode,
+ {
getTextToFind: () => {
return this._tagInputNode.value;
},
- confirm: tag => {
- this._tagInputNode.value = '';
+ confirm: (tag) => {
+ this._tagInputNode.value = "";
// note: tags from autocomplete don't contain implications
// so they need to be looked up in API
this.addTagByName(tag.names[0], SOURCE_USER_INPUT);
},
- delete: tag => {
- this._tagInputNode.value = '';
+ delete: (tag) => {
+ this._tagInputNode.value = "";
this.deleteTag(tag);
},
verticalShift: -2,
- isTaggedWith: tagName => this.tags.isTaggedWith(tagName),
- });
+ isTaggedWith: (tagName) => this.tags.isTaggedWith(tagName),
+ }
+ );
// dom events
- this._tagInputNode.addEventListener(
- 'keydown', e => this._evtInputKeyDown(e));
- this._tagInputNode.addEventListener(
- 'paste', e => this._evtInputPaste(e));
- this._editAreaNode.querySelector('a.opacity').addEventListener(
- 'click', e => this._evtToggleSuggestionsPopupOpacityClick(e));
- this._editAreaNode.querySelector('a.close').addEventListener(
- 'click', e => this._evtCloseSuggestionsPopupClick(e));
- this._editAreaNode.querySelector('button').addEventListener(
- 'click', e => this._evtAddTagButtonClick(e));
+ this._tagInputNode.addEventListener("keydown", (e) =>
+ this._evtInputKeyDown(e)
+ );
+ this._tagInputNode.addEventListener("paste", (e) =>
+ this._evtInputPaste(e)
+ );
+ this._editAreaNode
+ .querySelector("a.opacity")
+ .addEventListener("click", (e) =>
+ this._evtToggleSuggestionsPopupOpacityClick(e)
+ );
+ this._editAreaNode
+ .querySelector("a.close")
+ .addEventListener("click", (e) =>
+ this._evtCloseSuggestionsPopupClick(e)
+ );
+ this._editAreaNode
+ .querySelector("button")
+ .addEventListener("click", (e) => this._evtAddTagButtonClick(e));
// show
- this._hostNode.style.display = 'none';
+ this._hostNode.style.display = "none";
this._hostNode.parentNode.insertBefore(
- this._editAreaNode, hostNode.nextSibling);
+ this._editAreaNode,
+ hostNode.nextSibling
+ );
// add existing tags
for (let tag of [...this.tags]) {
@@ -139,7 +153,10 @@ class TagInputControl extends events.EventTarget {
}
addTagByText(text, source) {
- for (let tagName of text.split(/\s+/).filter(word => word).reverse()) {
+ for (let tagName of text
+ .split(/\s+/)
+ .filter((word) => word)
+ .reverse()) {
this.addTagByName(tagName, source);
}
}
@@ -149,48 +166,60 @@ class TagInputControl extends events.EventTarget {
if (!name) {
return;
}
- return Tag.get(name).then(tag => {
- return this.addTag(tag, source);
- }, () => {
- const tag = new Tag();
- tag.names = [name];
- tag.category = null;
- return this.addTag(tag, source);
- });
+ return Tag.get(name).then(
+ (tag) => {
+ return this.addTag(tag, source);
+ },
+ () => {
+ const tag = new Tag();
+ tag.names = [name];
+ tag.category = null;
+ return this.addTag(tag, source);
+ }
+ );
}
addTag(tag, source) {
if (source !== SOURCE_INIT && this.tags.isTaggedWith(tag.names[0])) {
const listItemNode = this._getListItemNode(tag);
if (source !== SOURCE_IMPLICATION) {
- listItemNode.classList.add('duplicate');
+ listItemNode.classList.add("duplicate");
_fadeOutListItemNodeStatus(listItemNode);
}
return Promise.resolve();
}
- return this.tags.addByName(tag.names[0], false).then(() => {
- const listItemNode = this._createListItemNode(tag);
- if (!tag.category) {
- listItemNode.classList.add('new');
- }
- if (source === SOURCE_IMPLICATION) {
- listItemNode.classList.add('implication');
- }
- this._tagListNode.prependChild(listItemNode);
- _fadeOutListItemNodeStatus(listItemNode);
+ return this.tags
+ .addByName(tag.names[0], false)
+ .then(() => {
+ const listItemNode = this._createListItemNode(tag);
+ if (!tag.category) {
+ listItemNode.classList.add("new");
+ }
+ if (source === SOURCE_IMPLICATION) {
+ listItemNode.classList.add("implication");
+ }
+ this._tagListNode.prependChild(listItemNode);
+ _fadeOutListItemNodeStatus(listItemNode);
- return Promise.all(
- tag.implications.map(
- implication => this.addTagByName(
- implication.names[0], SOURCE_IMPLICATION)));
- }).then(() => {
- this.dispatchEvent(new CustomEvent('add', {
- detail: {tag: tag, source: source},
- }));
- this.dispatchEvent(new CustomEvent('change'));
- return Promise.resolve();
- });
+ return Promise.all(
+ tag.implications.map((implication) =>
+ this.addTagByName(
+ implication.names[0],
+ SOURCE_IMPLICATION
+ )
+ )
+ );
+ })
+ .then(() => {
+ this.dispatchEvent(
+ new CustomEvent("add", {
+ detail: { tag: tag, source: source },
+ })
+ );
+ this.dispatchEvent(new CustomEvent("change"));
+ return Promise.resolve();
+ });
}
deleteTag(tag) {
@@ -202,25 +231,27 @@ class TagInputControl extends events.EventTarget {
this._deleteListItemNode(tag);
- this.dispatchEvent(new CustomEvent('remove', {
- detail: {tag: tag},
- }));
- this.dispatchEvent(new CustomEvent('change'));
+ this.dispatchEvent(
+ new CustomEvent("remove", {
+ detail: { tag: tag },
+ })
+ );
+ this.dispatchEvent(new CustomEvent("change"));
}
_evtInputPaste(e) {
e.preventDefault();
- const pastedText = window.clipboardData ?
- window.clipboardData.getData('Text') :
- (e.originalEvent || e).clipboardData.getData('text/plain');
+ const pastedText = window.clipboardData
+ ? window.clipboardData.getData("Text")
+ : (e.originalEvent || e).clipboardData.getData("text/plain");
if (pastedText.length > 2000) {
- window.alert('Pasted text is too long.');
+ window.alert("Pasted text is too long.");
return;
}
this._hideAutoComplete();
this.addTagByText(pastedText, SOURCE_CLIPBOARD);
- this._tagInputNode.value = '';
+ this._tagInputNode.value = "";
}
_evtCloseSuggestionsPopupClick(e) {
@@ -231,7 +262,7 @@ class TagInputControl extends events.EventTarget {
_evtAddTagButtonClick(e) {
e.preventDefault();
this.addTagByName(this._tagInputNode.value, SOURCE_USER_INPUT);
- this._tagInputNode.value = '';
+ this._tagInputNode.value = "";
}
_evtToggleSuggestionsPopupOpacityClick(e) {
@@ -244,36 +275,41 @@ class TagInputControl extends events.EventTarget {
e.preventDefault();
this._hideAutoComplete();
this.addTagByText(this._tagInputNode.value, SOURCE_USER_INPUT);
- this._tagInputNode.value = '';
+ this._tagInputNode.value = "";
}
}
_createListItemNode(tag) {
- const className = tag.category ?
- misc.makeCssName(tag.category, 'tag') :
- null;
+ const className = tag.category
+ ? misc.makeCssName(tag.category, "tag")
+ : null;

-        const tagLinkNode = document.createElement('a');
+ const tagLinkNode = document.createElement("a");
if (className) {
tagLinkNode.classList.add(className);
}
tagLinkNode.setAttribute(
- 'href', uri.formatClientLink('tag', tag.names[0]));
+ "href",
+ uri.formatClientLink("tag", tag.names[0])
+ );

-        const tagIconNode = document.createElement('i');
- tagIconNode.classList.add('fa');
- tagIconNode.classList.add('fa-tag');
+ const tagIconNode = document.createElement("i");
+ tagIconNode.classList.add("fa");
+ tagIconNode.classList.add("fa-tag");
tagLinkNode.appendChild(tagIconNode);
- const searchLinkNode = document.createElement('a');
+ const searchLinkNode = document.createElement("a");
if (className) {
searchLinkNode.classList.add(className);
}
searchLinkNode.setAttribute(
- 'href', uri.formatClientLink(
- 'posts', {query: uri.escapeColons(tag.names[0])}));
- searchLinkNode.textContent = tag.names[0] + ' ';
- searchLinkNode.addEventListener('click', e => {
+ "href",
+ uri.formatClientLink("posts", {
+ query: uri.escapeColons(tag.names[0]),
+ })
+ );
+ searchLinkNode.textContent = tag.names[0] + " ";
+ searchLinkNode.addEventListener("click", (e) => {
e.preventDefault();
this._suggestions.clear();
if (tag.postCount > 0) {
@@ -284,20 +320,20 @@ class TagInputControl extends events.EventTarget {
}
});
- const usagesNode = document.createElement('span');
- usagesNode.classList.add('tag-usages');
- usagesNode.setAttribute('data-pseudo-content', tag.postCount);
+ const usagesNode = document.createElement("span");
+ usagesNode.classList.add("tag-usages");
+ usagesNode.setAttribute("data-pseudo-content", tag.postCount);

-        const removalLinkNode = document.createElement('a');
- removalLinkNode.classList.add('remove-tag');
- removalLinkNode.setAttribute('href', '');
- removalLinkNode.setAttribute('data-pseudo-content', '×');
- removalLinkNode.addEventListener('click', e => {
+ const removalLinkNode = document.createElement("a");
+ removalLinkNode.classList.add("remove-tag");
+ removalLinkNode.setAttribute("href", "");
+ removalLinkNode.setAttribute("data-pseudo-content", "×");
+ removalLinkNode.addEventListener("click", (e) => {
e.preventDefault();
this.deleteTag(tag);
});
- const listItemNode = document.createElement('li');
+ const listItemNode = document.createElement("li");
listItemNode.appendChild(removalLinkNode);
listItemNode.appendChild(tagLinkNode);
listItemNode.appendChild(searchLinkNode);
@@ -327,20 +363,25 @@ class TagInputControl extends events.EventTarget {
if (!browsingSettings.tagSuggestions) {
return;
}
- api.get(
- uri.formatApiLink('tag-siblings', tag.names[0]),
- {noProgress: true})
- .then(response => {
- return Promise.resolve(response.results);
- }, response => {
- return Promise.resolve([]);
- }).then(siblings => {
- const args = siblings.map(s => s.occurrences);
+ api.get(uri.formatApiLink("tag-siblings", tag.names[0]), {
+ noProgress: true,
+ })
+ .then(
+ (response) => {
+ return Promise.resolve(response.results);
+ },
+ (response) => {
+ return Promise.resolve([]);
+ }
+ )
+ .then((siblings) => {
+ const args = siblings.map((s) => s.occurrences);
let maxSiblingOccurrences = Math.max(1, ...args);
for (let sibling of siblings) {
this._suggestions.set(
sibling.tag.names[0],
- sibling.occurrences * 4.9 / maxSiblingOccurrences);
+ (sibling.occurrences * 4.9) / maxSiblingOccurrences
+ );
}
for (let suggestion of tag.suggestions || []) {
this._suggestions.set(suggestion, 5);
@@ -354,10 +395,10 @@ class TagInputControl extends events.EventTarget {
}
_refreshSuggestionsPopup() {
- if (!this._suggestionsNode.classList.contains('shown')) {
+ if (!this._suggestionsNode.classList.contains("shown")) {
return;
}
- const listNode = this._suggestionsNode.querySelector('ul');
+ const listNode = this._suggestionsNode.querySelector("ul");
listNode.scrollTop = 0;
while (listNode.firstChild) {
listNode.removeChild(listNode.firstChild);
@@ -369,35 +410,36 @@ class TagInputControl extends events.EventTarget {
continue;
}
- const addLinkNode = document.createElement('a');
+ const addLinkNode = document.createElement("a");
addLinkNode.textContent = tagName;
- addLinkNode.classList.add('add-tag');
- addLinkNode.setAttribute('href', '');
- Tag.get(tagName).then(tag => {
+ addLinkNode.classList.add("add-tag");
+ addLinkNode.setAttribute("href", "");
+ Tag.get(tagName).then((tag) => {
addLinkNode.classList.add(
- misc.makeCssName(tag.category, 'tag'));
+ misc.makeCssName(tag.category, "tag")
+ );
});
- addLinkNode.addEventListener('click', e => {
+ addLinkNode.addEventListener("click", (e) => {
e.preventDefault();
listNode.removeChild(listItemNode);
this.addTagByName(tagName, SOURCE_SUGGESTION);
});
- const weightNode = document.createElement('span');
- weightNode.classList.add('tag-weight');
- weightNode.setAttribute('data-pseudo-content', weight);
+ const weightNode = document.createElement("span");
+ weightNode.classList.add("tag-weight");
+ weightNode.setAttribute("data-pseudo-content", weight);

-            const removalLinkNode = document.createElement('a');
- removalLinkNode.classList.add('remove-tag');
- removalLinkNode.setAttribute('href', '');
- removalLinkNode.setAttribute('data-pseudo-content', '×');
- removalLinkNode.addEventListener('click', e => {
+ const removalLinkNode = document.createElement("a");
+ removalLinkNode.classList.add("remove-tag");
+ removalLinkNode.setAttribute("href", "");
+ removalLinkNode.setAttribute("data-pseudo-content", "×");
+ removalLinkNode.addEventListener("click", (e) => {
e.preventDefault();
listNode.removeChild(listItemNode);
this._suggestions.ban(tagName);
});
- const listItemNode = document.createElement('li');
+ const listItemNode = document.createElement("li");
listItemNode.appendChild(removalLinkNode);
listItemNode.appendChild(weightNode);
listItemNode.appendChild(addLinkNode);
@@ -407,19 +449,19 @@ class TagInputControl extends events.EventTarget {
_closeSuggestionsPopup() {
this._suggestions.clear();
- this._suggestionsNode.classList.remove('shown');
+ this._suggestionsNode.classList.remove("shown");
}
_removeSuggestionsPopupOpacity() {
- this._suggestionsNode.classList.remove('translucent');
+ this._suggestionsNode.classList.remove("translucent");
}
_toggleSuggestionsPopupOpacity() {
- this._suggestionsNode.classList.toggle('translucent');
+ this._suggestionsNode.classList.toggle("translucent");
}
_openSuggestionsPopup() {
- this._suggestionsNode.classList.add('shown');
+ this._suggestionsNode.classList.add("shown");
this._refreshSuggestionsPopup();
}
diff --git a/client/js/events.js b/client/js/events.js
index 945771a4..7d6ffa68 100644
--- a/client/js/events.js
+++ b/client/js/events.js
@@ -1,12 +1,12 @@
-'use strict';
+"use strict";
class EventTarget {
constructor() {
this.eventTarget = document.createDocumentFragment();
for (let method of [
- 'addEventListener',
- 'dispatchEvent',
- 'removeEventListener'
+ "addEventListener",
+ "dispatchEvent",
+ "removeEventListener",
]) {
this[method] = this.eventTarget[method].bind(this.eventTarget);
}
@@ -20,17 +20,19 @@ function proxyEvent(source, target, sourceEventType, targetEventType) {
if (!targetEventType) {
targetEventType = sourceEventType;
}
- source.addEventListener(sourceEventType, e => {
- target.dispatchEvent(new CustomEvent(targetEventType, {
- detail: e.detail,
- }));
+ source.addEventListener(sourceEventType, (e) => {
+ target.dispatchEvent(
+ new CustomEvent(targetEventType, {
+ detail: e.detail,
+ })
+ );
});
}
module.exports = {
- Success: 'success',
- Error: 'error',
- Info: 'info',
+ Success: "success",
+ Error: "error",
+ Info: "info",
proxyEvent: proxyEvent,
EventTarget: EventTarget,
diff --git a/client/js/main.js b/client/js/main.js
index 8406c79e..a6e6d6a4 100644
--- a/client/js/main.js
+++ b/client/js/main.js
@@ -1,82 +1,101 @@
-'use strict';
+"use strict";
-require('./util/polyfill.js');
-const misc = require('./util/misc.js');
-const views = require('./util/views.js');
-const router = require('./router.js');
+require("./util/polyfill.js");
+const misc = require("./util/misc.js");
+const views = require("./util/views.js");
+const router = require("./router.js");

-history.scrollRestoration = 'manual';
+history.scrollRestoration = "manual";

-router.exit(
- null,
- (ctx, next) => {
- ctx.state.scrollX = window.scrollX;
- ctx.state.scrollY = window.scrollY;
- router.replace(router.url, ctx.state);
- if (misc.confirmPageExit()) {
- next();
- }
- });
-
-const mousetrap = require('mousetrap');
-router.enter(
- null,
- (ctx, next) => {
- mousetrap.reset();
+router.exit(null, (ctx, next) => {
+ ctx.state.scrollX = window.scrollX;
+ ctx.state.scrollY = window.scrollY;
+ router.replace(router.url, ctx.state);
+ if (misc.confirmPageExit()) {
next();
- });
-
-const tags = require('./tags.js');
-const pools = require('./pools.js');
-const api = require('./api.js');
-
-api.fetchConfig().then(() => {
- // register controller routes
- let controllers = [];
- controllers.push(require('./controllers/home_controller.js'));
- controllers.push(require('./controllers/help_controller.js'));
- controllers.push(require('./controllers/auth_controller.js'));
- controllers.push(require('./controllers/password_reset_controller.js'));
- controllers.push(require('./controllers/comments_controller.js'));
- controllers.push(require('./controllers/snapshots_controller.js'));
- controllers.push(require('./controllers/post_detail_controller.js'));
- controllers.push(require('./controllers/post_main_controller.js'));
- controllers.push(require('./controllers/post_list_controller.js'));
- controllers.push(require('./controllers/post_upload_controller.js'));
- controllers.push(require('./controllers/tag_controller.js'));
- controllers.push(require('./controllers/tag_list_controller.js'));
- controllers.push(require('./controllers/tag_categories_controller.js'));
- controllers.push(require('./controllers/pool_create_controller.js'));
- controllers.push(require('./controllers/pool_controller.js'));
- controllers.push(require('./controllers/pool_list_controller.js'));
- controllers.push(require('./controllers/pool_categories_controller.js'));
- controllers.push(require('./controllers/settings_controller.js'));
- controllers.push(require('./controllers/user_controller.js'));
- controllers.push(require('./controllers/user_list_controller.js'));
- controllers.push(require('./controllers/user_registration_controller.js'));
-
- // 404 controller needs to be registered last
- controllers.push(require('./controllers/not_found_controller.js'));
-
- for (let controller of controllers) {
- controller(router);
}
-}, error => {
- window.alert('Could not fetch basic configuration from server');
-}).then(() => {
- api.loginFromCookies().then(() => {
- tags.refreshCategoryColorMap();
- pools.refreshCategoryColorMap();
- router.start();
- }, error => {
- if (window.location.href.indexOf('login') !== -1) {
- api.forget();
- router.start();
- } else {
- const ctx = router.start('/');
- ctx.controller.showError(
- 'An error happened while trying to log you in: ' +
- error.message);
- }
- });
});
+
+const mousetrap = require("mousetrap");
+router.enter(null, (ctx, next) => {
+ mousetrap.reset();
+ next();
+});
+
+const tags = require("./tags.js");
+const pools = require("./pools.js");
+const api = require("./api.js");
+
+api.fetchConfig()
+ .then(
+ () => {
+ // register controller routes
+ let controllers = [];
+ controllers.push(require("./controllers/home_controller.js"));
+ controllers.push(require("./controllers/help_controller.js"));
+ controllers.push(require("./controllers/auth_controller.js"));
+ controllers.push(
+ require("./controllers/password_reset_controller.js")
+ );
+ controllers.push(require("./controllers/comments_controller.js"));
+ controllers.push(require("./controllers/snapshots_controller.js"));
+ controllers.push(
+ require("./controllers/post_detail_controller.js")
+ );
+ controllers.push(require("./controllers/post_main_controller.js"));
+ controllers.push(require("./controllers/post_list_controller.js"));
+ controllers.push(
+ require("./controllers/post_upload_controller.js")
+ );
+ controllers.push(require("./controllers/tag_controller.js"));
+ controllers.push(require("./controllers/tag_list_controller.js"));
+ controllers.push(
+ require("./controllers/tag_categories_controller.js")
+ );
+ controllers.push(
+ require("./controllers/pool_create_controller.js")
+ );
+ controllers.push(require("./controllers/pool_controller.js"));
+ controllers.push(require("./controllers/pool_list_controller.js"));
+ controllers.push(
+ require("./controllers/pool_categories_controller.js")
+ );
+ controllers.push(require("./controllers/settings_controller.js"));
+ controllers.push(require("./controllers/user_controller.js"));
+ controllers.push(require("./controllers/user_list_controller.js"));
+ controllers.push(
+ require("./controllers/user_registration_controller.js")
+ );
+
+ // 404 controller needs to be registered last
+ controllers.push(require("./controllers/not_found_controller.js"));
+
+ for (let controller of controllers) {
+ controller(router);
+ }
+ },
+ (error) => {
+ window.alert("Could not fetch basic configuration from server");
+ }
+ )
+ .then(() => {
+ api.loginFromCookies().then(
+ () => {
+ tags.refreshCategoryColorMap();
+ pools.refreshCategoryColorMap();
+ router.start();
+ },
+ (error) => {
+ if (window.location.href.indexOf("login") !== -1) {
+ api.forget();
+ router.start();
+ } else {
+ const ctx = router.start("/");
+ ctx.controller.showError(
+ "An error happened while trying to log you in: " +
+ error.message
+ );
+ }
+ }
+ );
+ });
diff --git a/client/js/models/abstract_list.js b/client/js/models/abstract_list.js
index fb4dec8b..7cf518ce 100644
--- a/client/js/models/abstract_list.js
+++ b/client/js/models/abstract_list.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
+const events = require("../events.js");
class AbstractList extends events.EventTarget {
constructor() {
@@ -13,13 +13,15 @@ class AbstractList extends events.EventTarget {
for (let item of response) {
const addedItem = this._itemClass.fromResponse(item);
if (addedItem.addEventListener) {
- addedItem.addEventListener('delete', e => {
+ addedItem.addEventListener("delete", (e) => {
ret.remove(addedItem);
});
- addedItem.addEventListener('change', e => {
- ret.dispatchEvent(new CustomEvent('change', {
- detail: e.detail,
- }));
+ addedItem.addEventListener("change", (e) => {
+ ret.dispatchEvent(
+ new CustomEvent("change", {
+ detail: e.detail,
+ })
+ );
});
}
ret._list.push(addedItem);
@@ -29,28 +31,32 @@ class AbstractList extends events.EventTarget {
sync(plainList) {
this.clear();
- for (let item of (plainList || [])) {
+ for (let item of plainList || []) {
this.add(this.constructor._itemClass.fromResponse(item));
}
}
add(item) {
if (item.addEventListener) {
- item.addEventListener('delete', e => {
+ item.addEventListener("delete", (e) => {
this.remove(item);
});
- item.addEventListener('change', e => {
- this.dispatchEvent(new CustomEvent('change', {
- detail: e.detail,
- }));
+ item.addEventListener("change", (e) => {
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: e.detail,
+ })
+ );
});
}
this._list.push(item);
const detail = {};
detail[this.constructor._itemName] = item;
- this.dispatchEvent(new CustomEvent('add', {
- detail: detail,
- }));
+ this.dispatchEvent(
+ new CustomEvent("add", {
+ detail: detail,
+ })
+ );
}
clear() {
@@ -67,9 +73,11 @@ class AbstractList extends events.EventTarget {
this._list.splice(index, 1);
const detail = {};
detail[this.constructor._itemName] = itemToRemove;
- this.dispatchEvent(new CustomEvent('remove', {
- detail: detail,
- }));
+ this.dispatchEvent(
+ new CustomEvent("remove", {
+ detail: detail,
+ })
+ );
return;
}
}
diff --git a/client/js/models/comment.js b/client/js/models/comment.js
index e292ec46..4707a3a7 100644
--- a/client/js/models/comment.js
+++ b/client/js/models/comment.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
class Comment extends events.EventTarget {
constructor() {
@@ -31,7 +31,7 @@ class Comment extends events.EventTarget {
}
get text() {
- return this._text || '';
+ return this._text || "";
}
get user() {
@@ -63,47 +63,57 @@ class Comment extends events.EventTarget {
version: this._version,
text: this._text,
};
- let promise = this._id ?
- api.put(uri.formatApiLink('comment', this.id), detail) :
- api.post(uri.formatApiLink('comments'),
- Object.assign({postId: this._postId}, detail));
+ let promise = this._id
+ ? api.put(uri.formatApiLink("comment", this.id), detail)
+ : api.post(
+ uri.formatApiLink("comments"),
+ Object.assign({ postId: this._postId }, detail)
+ );

-        return promise.then(response => {
+ return promise.then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- comment: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ comment: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
delete() {
- return api.delete(
- uri.formatApiLink('comment', this.id),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- comment: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("comment", this.id), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ comment: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
setScore(score) {
- return api.put(
- uri.formatApiLink('comment', this.id, 'score'),
- {score: score})
- .then(response => {
+ return api
+ .put(uri.formatApiLink("comment", this.id, "score"), {
+ score: score,
+ })
+ .then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('changeScore', {
- detail: {
- comment: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("changeScore", {
+ detail: {
+ comment: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/comment_list.js b/client/js/models/comment_list.js
index a8e1150c..bae2d7a3 100644
--- a/client/js/models/comment_list.js
+++ b/client/js/models/comment_list.js
@@ -1,12 +1,11 @@
-'use strict';
+"use strict";
-const AbstractList = require('./abstract_list.js');
-const Comment = require('./comment.js');
+const AbstractList = require("./abstract_list.js");
+const Comment = require("./comment.js");
-class CommentList extends AbstractList {
-}
+class CommentList extends AbstractList {}
CommentList._itemClass = Comment;
-CommentList._itemName = 'comment';
+CommentList._itemName = "comment";
module.exports = CommentList;
diff --git a/client/js/models/info.js b/client/js/models/info.js
index 35ba867a..6b03b389 100644
--- a/client/js/models/info.js
+++ b/client/js/models/info.js
@@ -1,22 +1,20 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const Post = require('./post.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const Post = require("./post.js");
class Info {
static get() {
- return api.get(uri.formatApiLink('info'))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {
- featuredPost: response.featuredPost ?
- Post.fromResponse(response.featuredPost) :
- undefined
- }));
- });
+ return api.get(uri.formatApiLink("info")).then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ featuredPost: response.featuredPost
+ ? Post.fromResponse(response.featuredPost)
+ : undefined,
+ })
+ );
+ });
}
}
diff --git a/client/js/models/note.js b/client/js/models/note.js
index 1709d443..87c8b3bb 100644
--- a/client/js/models/note.js
+++ b/client/js/models/note.js
@@ -1,13 +1,13 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const Point = require('./point.js');
-const PointList = require('./point_list.js');
+const events = require("../events.js");
+const Point = require("./point.js");
+const PointList = require("./point_list.js");
class Note extends events.EventTarget {
constructor() {
super();
- this._text = '…';
+ this._text = "…";
this._polygon = new PointList();
}
diff --git a/client/js/models/note_list.js b/client/js/models/note_list.js
index b54d7f39..10db4354 100644
--- a/client/js/models/note_list.js
+++ b/client/js/models/note_list.js
@@ -1,12 +1,11 @@
-'use strict';
+"use strict";
-const AbstractList = require('./abstract_list.js');
-const Note = require('./note.js');
+const AbstractList = require("./abstract_list.js");
+const Note = require("./note.js");
-class NoteList extends AbstractList {
-}
+class NoteList extends AbstractList {}
NoteList._itemClass = Note;
-NoteList._itemName = 'note';
+NoteList._itemName = "note";
module.exports = NoteList;
diff --git a/client/js/models/point.js b/client/js/models/point.js
index bbb05ecb..70b49615 100644
--- a/client/js/models/point.js
+++ b/client/js/models/point.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
+const events = require("../events.js");
class Point extends events.EventTarget {
constructor(x, y) {
@@ -19,12 +19,16 @@ class Point extends events.EventTarget {
set x(value) {
this._x = value;
- this.dispatchEvent(new CustomEvent('change', {detail: {point: this}}));
+ this.dispatchEvent(
+ new CustomEvent("change", { detail: { point: this } })
+ );
}
set y(value) {
this._y = value;
- this.dispatchEvent(new CustomEvent('change', {detail: {point: this}}));
+ this.dispatchEvent(
+ new CustomEvent("change", { detail: { point: this } })
+ );
}
}
diff --git a/client/js/models/point_list.js b/client/js/models/point_list.js
index 166e6643..3ecd7d78 100644
--- a/client/js/models/point_list.js
+++ b/client/js/models/point_list.js
@@ -1,7 +1,7 @@
-'use strict';
+"use strict";
-const AbstractList = require('./abstract_list.js');
-const Point = require('./point.js');
+const AbstractList = require("./abstract_list.js");
+const Point = require("./point.js");
class PointList extends AbstractList {
get firstPoint() {
@@ -18,6 +18,6 @@ class PointList extends AbstractList {
}
PointList._itemClass = Point;
-PointList._itemName = 'point';
+PointList._itemName = "point";
module.exports = PointList;
diff --git a/client/js/models/pool.js b/client/js/models/pool.js
index 8a86e12d..51fa8a05 100644
--- a/client/js/models/pool.js
+++ b/client/js/models/pool.js
@@ -1,13 +1,13 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
-const misc = require('../util/misc.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
+const misc = require("../util/misc.js");
class Pool extends events.EventTarget {
constructor() {
- const PostList = require('./post_list.js');
+ const PostList = require("./post_list.js");
super();
this._orig = {};
@@ -70,14 +70,13 @@ class Pool extends events.EventTarget {
}
static get(id) {
- return api.get(uri.formatApiLink('pool', id))
- .then(response => {
- return Promise.resolve(Pool.fromResponse(response));
- });
+ return api.get(uri.formatApiLink("pool", id)).then((response) => {
+ return Promise.resolve(Pool.fromResponse(response));
+ });
}
save() {
- const detail = {version: this._version};
+ const detail = { version: this._version };
// send only changed fields to avoid user privilege violation
if (misc.arraysDiffer(this._names, this._orig._names, true)) {
@@ -90,62 +89,71 @@ class Pool extends events.EventTarget {
detail.description = this._description;
}
if (misc.arraysDiffer(this._posts, this._orig._posts)) {
- detail.posts = this._posts.map(post => post.id);
+ detail.posts = this._posts.map((post) => post.id);
}
- let promise = this._id ?
- api.put(uri.formatApiLink('pool', this._id), detail) :
- api.post(uri.formatApiLink('pools'), detail);
- return promise
- .then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
+ let promise = this._id
+ ? api.put(uri.formatApiLink("pool", this._id), detail)
+ : api.post(uri.formatApiLink("pools"), detail);
+ return promise.then((response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", {
detail: {
pool: this,
},
- }));
- return Promise.resolve();
- });
+ })
+ );
+ return Promise.resolve();
+ });
}
merge(targetId, addAlias) {
- return api.get(uri.formatApiLink('pool', targetId))
- .then(response => {
- return api.post(uri.formatApiLink('pool-merge'), {
+ return api
+ .get(uri.formatApiLink("pool", targetId))
+ .then((response) => {
+ return api.post(uri.formatApiLink("pool-merge"), {
removeVersion: this._version,
remove: this._id,
mergeToVersion: response.version,
mergeTo: targetId,
});
- }).then(response => {
+ })
+ .then((response) => {
if (!addAlias) {
return Promise.resolve(response);
}
- return api.put(uri.formatApiLink('pool', targetId), {
+ return api.put(uri.formatApiLink("pool", targetId), {
version: response.version,
names: response.names.concat(this._names),
});
- }).then(response => {
+ })
+ .then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- pool: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ pool: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
delete() {
- return api.delete(
- uri.formatApiLink('pool', this._id),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- pool: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("pool", this._id), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ pool: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/pool_category.js b/client/js/models/pool_category.js
index 1ce4b24d..8c7df46e 100644
--- a/client/js/models/pool_category.js
+++ b/client/js/models/pool_category.js
@@ -1,14 +1,14 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
class PoolCategory extends events.EventTarget {
constructor() {
super();
- this._name = '';
- this._color = '#000000';
+ this._name = "";
+ this._color = "#000000";
this._poolCount = 0;
this._isDefault = false;
this._origName = null;
@@ -50,7 +50,7 @@ class PoolCategory extends events.EventTarget {
}
save() {
- const detail = {version: this._version};
+ const detail = { version: this._version };
if (this.name !== this._origName) {
detail.name = this.name;
@@ -63,34 +63,39 @@ class PoolCategory extends events.EventTarget {
return Promise.resolve();
}
- let promise = this._origName ?
- api.put(
- uri.formatApiLink('pool-category', this._origName),
- detail) :
- api.post(uri.formatApiLink('pool-categories'), detail);
+ let promise = this._origName
+ ? api.put(
+ uri.formatApiLink("pool-category", this._origName),
+ detail
+ )
+ : api.post(uri.formatApiLink("pool-categories"), detail);

-        return promise
- .then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
+ return promise.then((response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", {
detail: {
poolCategory: this,
},
- }));
- return Promise.resolve();
- });
+ })
+ );
+ return Promise.resolve();
+ });
}
delete() {
- return api.delete(
- uri.formatApiLink('pool-category', this._origName),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- poolCategory: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("pool-category", this._origName), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ poolCategory: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/pool_category_list.js b/client/js/models/pool_category_list.js
index 2699620d..46b7838b 100644
--- a/client/js/models/pool_category_list.js
+++ b/client/js/models/pool_category_list.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const PoolCategory = require('./pool_category.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const PoolCategory = require("./pool_category.js");
class PoolCategoryList extends AbstractList {
constructor() {
@@ -11,7 +11,7 @@ class PoolCategoryList extends AbstractList {
this._defaultCategory = null;
this._origDefaultCategory = null;
this._deletedCategories = [];
- this.addEventListener('remove', e => this._evtCategoryDeleted(e));
+ this.addEventListener("remove", (e) => this._evtCategoryDeleted(e));
}
static fromResponse(response) {
@@ -27,12 +27,16 @@ class PoolCategoryList extends AbstractList {
}
static get() {
- return api.get(uri.formatApiLink('pool-categories'))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: PoolCategoryList.fromResponse(response.results)}));
+ return api
+ .get(uri.formatApiLink("pool-categories"))
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: PoolCategoryList.fromResponse(
+ response.results
+ ),
+ })
+ );
});
}
@@ -57,16 +61,18 @@ class PoolCategoryList extends AbstractList {
promises.push(
api.put(
uri.formatApiLink(
- 'pool-category',
+ "pool-category",
this._defaultCategory.name,
- 'default')));
+ "default"
+ )
+ )
+ );
}
- return Promise.all(promises)
- .then(response => {
- this._deletedCategories = [];
- return Promise.resolve();
- });
+ return Promise.all(promises).then((response) => {
+ this._deletedCategories = [];
+ return Promise.resolve();
+ });
}
_evtCategoryDeleted(e) {
@@ -77,6 +83,6 @@ class PoolCategoryList extends AbstractList {
}
PoolCategoryList._itemClass = PoolCategory;
-PoolCategoryList._itemName = 'poolCategory';
+PoolCategoryList._itemName = "poolCategory";
module.exports = PoolCategoryList;
diff --git a/client/js/models/pool_list.js b/client/js/models/pool_list.js
index 8a3b858a..a8839bb6 100644
--- a/client/js/models/pool_list.js
+++ b/client/js/models/pool_list.js
@@ -1,25 +1,27 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const Pool = require('./pool.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const Pool = require("./pool.js");
class PoolList extends AbstractList {
static search(text, offset, limit, fields) {
- return api.get(
- uri.formatApiLink(
- 'pools', {
+ return api
+ .get(
+ uri.formatApiLink("pools", {
query: text,
offset: offset,
limit: limit,
- fields: fields.join(','),
- }))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: PoolList.fromResponse(response.results)}));
+ fields: fields.join(","),
+ })
+ )
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: PoolList.fromResponse(response.results),
+ })
+ );
});
}
@@ -42,6 +44,6 @@ class PoolList extends AbstractList {
}
PoolList._itemClass = Pool;
-PoolList._itemName = 'pool';
+PoolList._itemName = "pool";
module.exports = PoolList;
diff --git a/client/js/models/post.js b/client/js/models/post.js
index a11d3cb8..e90d0b26 100644
--- a/client/js/models/post.js
+++ b/client/js/models/post.js
@@ -1,15 +1,15 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const tags = require('../tags.js');
-const events = require('../events.js');
-const TagList = require('./tag_list.js');
-const NoteList = require('./note_list.js');
-const CommentList = require('./comment_list.js');
-const PoolList = require('./pool_list.js');
-const Pool = require('./pool.js');
-const misc = require('../util/misc.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const tags = require("../tags.js");
+const events = require("../events.js");
+const TagList = require("./tag_list.js");
+const NoteList = require("./note_list.js");
+const CommentList = require("./comment_list.js");
+const PoolList = require("./pool_list.js");
+const Pool = require("./pool.js");
+const misc = require("../util/misc.js");
class Post extends events.EventTarget {
constructor() {
@@ -67,7 +67,7 @@ class Post extends events.EventTarget {
}
get sourceSplit() {
- return this._source.split('\n');
+ return this._source.split("\n");
}
get canvasWidth() {
@@ -83,11 +83,11 @@ class Post extends events.EventTarget {
}
get newContent() {
- throw 'Invalid operation';
+ throw "Invalid operation";
}
get newThumbnail() {
- throw 'Invalid operation';
+ throw "Invalid operation";
}
get flags() {
@@ -99,7 +99,7 @@ class Post extends events.EventTarget {
}
get tagNames() {
- return this._tags.map(tag => tag.names[0]);
+ return this._tags.map((tag) => tag.names[0]);
}
get notes() {
@@ -174,32 +174,31 @@ class Post extends events.EventTarget {
static reverseSearch(content) {
let apiPromise = api.post(
- uri.formatApiLink('posts', 'reverse-search'),
+ uri.formatApiLink("posts", "reverse-search"),
{},
- {content: content});
- let returnedPromise = apiPromise
- .then(response => {
- if (response.exactPost) {
- response.exactPost = Post.fromResponse(response.exactPost);
- }
- for (let item of response.similarPosts) {
- item.post = Post.fromResponse(item.post);
- }
- return Promise.resolve(response);
- });
+ { content: content }
+ );
+ let returnedPromise = apiPromise.then((response) => {
+ if (response.exactPost) {
+ response.exactPost = Post.fromResponse(response.exactPost);
+ }
+ for (let item of response.similarPosts) {
+ item.post = Post.fromResponse(item.post);
+ }
+ return Promise.resolve(response);
+ });
returnedPromise.abort = () => apiPromise.abort();
return returnedPromise;
}
static get(id) {
- return api.get(uri.formatApiLink('post', id))
- .then(response => {
- return Promise.resolve(Post.fromResponse(response));
- });
+ return api.get(uri.formatApiLink("post", id)).then((response) => {
+ return Promise.resolve(Post.fromResponse(response));
+ });
}
_savePoolPosts() {
- const difference = (a, b) => a.filter(post => !b.hasPoolId(post.id));
+ const difference = (a, b) => a.filter((post) => !b.hasPoolId(post.id));
// find the pools where the post was added or removed
const added = difference(this.pools, this._orig._pools);
@@ -209,7 +208,7 @@ class Post extends events.EventTarget {
// update each pool's list of posts
for (let pool of added) {
- let op = Pool.get(pool.id).then(response => {
+ let op = Pool.get(pool.id).then((response) => {
if (!response.posts.hasPostId(this._id)) {
response.posts.addById(this._id);
return response.save();
@@ -221,7 +220,7 @@ class Post extends events.EventTarget {
}
for (let pool of removed) {
- let op = Pool.get(pool.id).then(response => {
+ let op = Pool.get(pool.id).then((response) => {
if (response.posts.hasPostId(this._id)) {
response.posts.removeById(this._id);
return response.save();
@@ -237,7 +236,7 @@ class Post extends events.EventTarget {
save(anonymous) {
const files = {};
- const detail = {version: this._version};
+ const detail = { version: this._version };
// send only changed fields to avoid user privilege violation
if (anonymous === true) {
@@ -250,14 +249,14 @@ class Post extends events.EventTarget {
detail.flags = this._flags;
}
if (misc.arraysDiffer(this._tags, this._orig._tags)) {
- detail.tags = this._tags.map(tag => tag.names[0]);
+ detail.tags = this._tags.map((tag) => tag.names[0]);
}
if (misc.arraysDiffer(this._relations, this._orig._relations)) {
detail.relations = this._relations;
}
if (misc.arraysDiffer(this._notes, this._orig._notes)) {
- detail.notes = this._notes.map(note => ({
- polygon: note.polygon.map(point => [point.x, point.y]),
+ detail.notes = this._notes.map((note) => ({
+ polygon: note.polygon.map((point) => [point.x, point.y]),
text: note.text,
}));
}
@@ -271,154 +270,187 @@ class Post extends events.EventTarget {
detail.source = this._source;
}
- let apiPromise = this._id ?
- api.put(uri.formatApiLink('post', this.id), detail, files) :
- api.post(uri.formatApiLink('posts'), detail, files);
+ let apiPromise = this._id
+ ? api.put(uri.formatApiLink("post", this.id), detail, files)
+ : api.post(uri.formatApiLink("posts"), detail, files);

-        return apiPromise.then(response => {
- if (misc.arraysDiffer(this._pools, this._orig._pools)) {
- return this._savePoolPosts()
- .then(() => Promise.resolve(response));
- }
- return Promise.resolve(response);
- }).then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(
- new CustomEvent('change', {detail: {post: this}}));
- if (this._newContent) {
- this.dispatchEvent(
- new CustomEvent('changeContent', {detail: {post: this}}));
- }
- if (this._newThumbnail) {
- this.dispatchEvent(
- new CustomEvent('changeThumbnail', {detail: {post: this}}));
- }
+ return apiPromise
+ .then((response) => {
+ if (misc.arraysDiffer(this._pools, this._orig._pools)) {
+ return this._savePoolPosts().then(() =>
+ Promise.resolve(response)
+ );
+ }
+ return Promise.resolve(response);
+ })
+ .then(
+ (response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", { detail: { post: this } })
+ );
+ if (this._newContent) {
+ this.dispatchEvent(
+ new CustomEvent("changeContent", {
+ detail: { post: this },
+ })
+ );
+ }
+ if (this._newThumbnail) {
+ this.dispatchEvent(
+ new CustomEvent("changeThumbnail", {
+ detail: { post: this },
+ })
+ );
+ }

-            return Promise.resolve();
- }, error => {
- if (error.response &&
- error.response.name === 'PostAlreadyUploadedError') {
- error.message =
- `Post already uploaded (@${error.response.otherPostId})`;
- }
- return Promise.reject(error);
- });
+ return Promise.resolve();
+ },
+ (error) => {
+ if (
+ error.response &&
+ error.response.name === "PostAlreadyUploadedError"
+ ) {
+ error.message = `Post already uploaded (@${error.response.otherPostId})`;
+ }
+ return Promise.reject(error);
+ }
+ );
}
feature() {
- return api.post(
- uri.formatApiLink('featured-post'),
- {id: this._id})
- .then(response => {
+ return api
+ .post(uri.formatApiLink("featured-post"), { id: this._id })
+ .then((response) => {
return Promise.resolve();
});
}
delete() {
- return api.delete(
- uri.formatApiLink('post', this.id),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- post: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("post", this.id), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ post: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
merge(targetId, useOldContent) {
- return api.get(uri.formatApiLink('post', targetId))
- .then(response => {
- return api.post(uri.formatApiLink('post-merge'), {
+ return api
+ .get(uri.formatApiLink("post", targetId))
+ .then((response) => {
+ return api.post(uri.formatApiLink("post-merge"), {
removeVersion: this._version,
remove: this._id,
mergeToVersion: response.version,
mergeTo: targetId,
replaceContent: useOldContent,
});
- }).then(response => {
+ })
+ .then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- post: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ post: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
setScore(score) {
- return api.put(
- uri.formatApiLink('post', this.id, 'score'),
- {score: score})
- .then(response => {
+ return api
+ .put(uri.formatApiLink("post", this.id, "score"), { score: score })
+ .then((response) => {
const prevFavorite = this._ownFavorite;
this._updateFromResponse(response);
if (this._ownFavorite !== prevFavorite) {
- this.dispatchEvent(new CustomEvent('changeFavorite', {
+ this.dispatchEvent(
+ new CustomEvent("changeFavorite", {
+ detail: {
+ post: this,
+ },
+ })
+ );
+ }
+ this.dispatchEvent(
+ new CustomEvent("changeScore", {
detail: {
post: this,
},
- }));
- }
- this.dispatchEvent(new CustomEvent('changeScore', {
- detail: {
- post: this,
- },
- }));
+ })
+ );
return Promise.resolve();
});
}
addToFavorites() {
- return api.post(uri.formatApiLink('post', this.id, 'favorite'))
- .then(response => {
+ return api
+ .post(uri.formatApiLink("post", this.id, "favorite"))
+ .then((response) => {
const prevScore = this._ownScore;
this._updateFromResponse(response);
if (this._ownScore !== prevScore) {
- this.dispatchEvent(new CustomEvent('changeScore', {
+ this.dispatchEvent(
+ new CustomEvent("changeScore", {
+ detail: {
+ post: this,
+ },
+ })
+ );
+ }
+ this.dispatchEvent(
+ new CustomEvent("changeFavorite", {
detail: {
post: this,
},
- }));
- }
- this.dispatchEvent(new CustomEvent('changeFavorite', {
- detail: {
- post: this,
- },
- }));
+ })
+ );
return Promise.resolve();
});
}
removeFromFavorites() {
- return api.delete(uri.formatApiLink('post', this.id, 'favorite'))
- .then(response => {
+ return api
+ .delete(uri.formatApiLink("post", this.id, "favorite"))
+ .then((response) => {
const prevScore = this._ownScore;
this._updateFromResponse(response);
if (this._ownScore !== prevScore) {
- this.dispatchEvent(new CustomEvent('changeScore', {
+ this.dispatchEvent(
+ new CustomEvent("changeScore", {
+ detail: {
+ post: this,
+ },
+ })
+ );
+ }
+ this.dispatchEvent(
+ new CustomEvent("changeFavorite", {
detail: {
post: this,
},
- }));
- }
- this.dispatchEvent(new CustomEvent('changeFavorite', {
- detail: {
- post: this,
- },
- }));
+ })
+ );
return Promise.resolve();
});
}
mutateContentUrl() {
this._contentUrl =
- this._orig._contentUrl +
- '?bypass-cache=' +
- Math.round(Math.random() * 1000);
+ this._orig._contentUrl +
+ "?bypass-cache=" +
+ Math.round(Math.random() * 1000);
}
_updateFromResponse(response) {
@@ -431,15 +463,18 @@ class Post extends events.EventTarget {
_user: response.user,
_safety: response.safety,
_contentUrl: response.contentUrl,
- _fullContentUrl: new URL(response.contentUrl, document.getElementsByTagName('base')[0].href).href,
+ _fullContentUrl: new URL(
+ response.contentUrl,
+ document.getElementsByTagName("base")[0].href
+ ).href,
_thumbnailUrl: response.thumbnailUrl,
_source: response.source,
_canvasWidth: response.canvasWidth,
_canvasHeight: response.canvasHeight,
_fileSize: response.fileSize,
- _flags: [...response.flags || []],
- _relations: [...response.relations || []],
+ _flags: [...(response.flags || [])],
+ _relations: [...(response.relations || [])],
_score: response.score,
_commentCount: response.commentCount,
diff --git a/client/js/models/post_list.js b/client/js/models/post_list.js
index cd94e406..8c2c9d4e 100644
--- a/client/js/models/post_list.js
+++ b/client/js/models/post_list.js
@@ -1,35 +1,37 @@
-'use strict';
+"use strict";
-const settings = require('../models/settings.js');
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const Post = require('./post.js');
+const settings = require("../models/settings.js");
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const Post = require("./post.js");
class PostList extends AbstractList {
static getAround(id, searchQuery) {
return api.get(
- uri.formatApiLink(
- 'post', id, 'around', {
- query: PostList._decorateSearchQuery(searchQuery || ''),
- fields: 'id',
- }));
+ uri.formatApiLink("post", id, "around", {
+ query: PostList._decorateSearchQuery(searchQuery || ""),
+ fields: "id",
+ })
+ );
}
static search(text, offset, limit, fields) {
- return api.get(
- uri.formatApiLink(
- 'posts', {
- query: PostList._decorateSearchQuery(text || ''),
+ return api
+ .get(
+ uri.formatApiLink("posts", {
+ query: PostList._decorateSearchQuery(text || ""),
offset: offset,
limit: limit,
- fields: fields.join(','),
- }))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: PostList.fromResponse(response.results)}));
+ fields: fields.join(","),
+ })
+ )
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: PostList.fromResponse(response.results),
+ })
+ );
});
}
@@ -43,7 +45,7 @@ class PostList extends AbstractList {
}
}
if (disabledSafety.length) {
- text = `-rating:${disabledSafety.join(',')} ${text}`;
+ text = `-rating:${disabledSafety.join(",")} ${text}`;
}
}
return text.trim();
@@ -63,7 +65,7 @@ class PostList extends AbstractList {
return;
}
- let post = Post.fromResponse({id: id});
+ let post = Post.fromResponse({ id: id });
this.add(post);
}
@@ -77,6 +79,6 @@ class PostList extends AbstractList {
}
PostList._itemClass = Post;
-PostList._itemName = 'post';
+PostList._itemName = "post";
module.exports = PostList;
diff --git a/client/js/models/settings.js b/client/js/models/settings.js
index 774aa0bf..bd6ac5b5 100644
--- a/client/js/models/settings.js
+++ b/client/js/models/settings.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
+const events = require("../events.js");
const defaultSettings = {
listPosts: {
@@ -12,7 +12,7 @@ const defaultSettings = {
endlessScroll: false,
keyboardShortcuts: true,
transparencyGrid: true,
- fitMode: 'fit-both',
+ fitMode: "fit-both",
tagSuggestions: true,
autoplayVideos: false,
postsPerPage: 42,
@@ -28,7 +28,7 @@ class Settings extends events.EventTarget {
_getFromLocalStorage() {
let ret = Object.assign({}, defaultSettings);
try {
- Object.assign(ret, JSON.parse(localStorage.getItem('settings')));
+ Object.assign(ret, JSON.parse(localStorage.getItem("settings")));
} catch (e) {
// continue regardless of error
}
@@ -37,14 +37,16 @@ class Settings extends events.EventTarget {
save(newSettings, silent) {
newSettings = Object.assign(this.cache, newSettings);
- localStorage.setItem('settings', JSON.stringify(newSettings));
+ localStorage.setItem("settings", JSON.stringify(newSettings));
this.cache = this._getFromLocalStorage();
if (silent !== true) {
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- settings: this.cache,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ settings: this.cache,
+ },
+ })
+ );
}
}
diff --git a/client/js/models/snapshot.js b/client/js/models/snapshot.js
index cf5a3083..5f8e2ae1 100644
--- a/client/js/models/snapshot.js
+++ b/client/js/models/snapshot.js
@@ -1,7 +1,7 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const events = require("../events.js");
class Snapshot extends events.EventTarget {
constructor() {
diff --git a/client/js/models/snapshot_list.js b/client/js/models/snapshot_list.js
index 475b89a3..9ea1bdd6 100644
--- a/client/js/models/snapshot_list.js
+++ b/client/js/models/snapshot_list.js
@@ -1,24 +1,31 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const Snapshot = require('./snapshot.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const Snapshot = require("./snapshot.js");
class SnapshotList extends AbstractList {
static search(text, offset, limit) {
- return api.get(uri.formatApiLink(
- 'snapshots', {query: text, offset: offset, limit: limit}))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: SnapshotList.fromResponse(response.results)}));
+ return api
+ .get(
+ uri.formatApiLink("snapshots", {
+ query: text,
+ offset: offset,
+ limit: limit,
+ })
+ )
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: SnapshotList.fromResponse(response.results),
+ })
+ );
});
}
}
SnapshotList._itemClass = Snapshot;
-SnapshotList._itemName = 'snapshot';
+SnapshotList._itemName = "snapshot";
module.exports = SnapshotList;
diff --git a/client/js/models/tag.js b/client/js/models/tag.js
index b99c63f1..a5632c8a 100644
--- a/client/js/models/tag.js
+++ b/client/js/models/tag.js
@@ -1,13 +1,13 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
-const misc = require('../util/misc.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
+const misc = require("../util/misc.js");
class Tag extends events.EventTarget {
constructor() {
- const TagList = require('./tag_list.js');
+ const TagList = require("./tag_list.js");
super();
this._orig = {};
@@ -71,14 +71,13 @@ class Tag extends events.EventTarget {
}
static get(name) {
- return api.get(uri.formatApiLink('tag', name))
- .then(response => {
- return Promise.resolve(Tag.fromResponse(response));
- });
+ return api.get(uri.formatApiLink("tag", name)).then((response) => {
+ return Promise.resolve(Tag.fromResponse(response));
+ });
}
save() {
- const detail = {version: this._version};
+ const detail = { version: this._version };
// send only changed fields to avoid user privilege violation
if (misc.arraysDiffer(this._names, this._orig._names, true)) {
@@ -92,66 +91,77 @@ class Tag extends events.EventTarget {
}
if (misc.arraysDiffer(this._implications, this._orig._implications)) {
detail.implications = this._implications.map(
- relation => relation.names[0]);
+ (relation) => relation.names[0]
+ );
}
if (misc.arraysDiffer(this._suggestions, this._orig._suggestions)) {
detail.suggestions = this._suggestions.map(
- relation => relation.names[0]);
+ (relation) => relation.names[0]
+ );
}
- let promise = this._origName ?
- api.put(uri.formatApiLink('tag', this._origName), detail) :
- api.post(uri.formatApiLink('tags'), detail);
- return promise
- .then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
+ let promise = this._origName
+ ? api.put(uri.formatApiLink("tag", this._origName), detail)
+ : api.post(uri.formatApiLink("tags"), detail);
+ return promise.then((response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", {
detail: {
tag: this,
},
- }));
- return Promise.resolve();
- });
+ })
+ );
+ return Promise.resolve();
+ });
}
merge(targetName, addAlias) {
- return api.get(uri.formatApiLink('tag', targetName))
- .then(response => {
- return api.post(uri.formatApiLink('tag-merge'), {
+ return api
+ .get(uri.formatApiLink("tag", targetName))
+ .then((response) => {
+ return api.post(uri.formatApiLink("tag-merge"), {
removeVersion: this._version,
remove: this._origName,
mergeToVersion: response.version,
mergeTo: targetName,
});
- }).then(response => {
+ })
+ .then((response) => {
if (!addAlias) {
return Promise.resolve(response);
}
- return api.put(uri.formatApiLink('tag', targetName), {
+ return api.put(uri.formatApiLink("tag", targetName), {
version: response.version,
names: response.names.concat(this._names),
});
- }).then(response => {
+ })
+ .then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- tag: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ tag: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
delete() {
- return api.delete(
- uri.formatApiLink('tag', this._origName),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- tag: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("tag", this._origName), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ tag: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/tag_category.js b/client/js/models/tag_category.js
index 42e54d8d..a8d0e64c 100644
--- a/client/js/models/tag_category.js
+++ b/client/js/models/tag_category.js
@@ -1,14 +1,14 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
class TagCategory extends events.EventTarget {
constructor() {
super();
- this._name = '';
- this._color = '#000000';
+ this._name = "";
+ this._color = "#000000";
this._tagCount = 0;
this._isDefault = false;
this._origName = null;
@@ -50,7 +50,7 @@ class TagCategory extends events.EventTarget {
}
save() {
- const detail = {version: this._version};
+ const detail = { version: this._version };
if (this.name !== this._origName) {
detail.name = this.name;
@@ -63,34 +63,39 @@ class TagCategory extends events.EventTarget {
return Promise.resolve();
}
- let promise = this._origName ?
- api.put(
- uri.formatApiLink('tag-category', this._origName),
- detail) :
- api.post(uri.formatApiLink('tag-categories'), detail);
+ let promise = this._origName
+ ? api.put(
+ uri.formatApiLink("tag-category", this._origName),
+ detail
+ )
+ : api.post(uri.formatApiLink("tag-categories"), detail);

-        return promise
- .then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
+ return promise.then((response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", {
detail: {
tagCategory: this,
},
- }));
- return Promise.resolve();
- });
+ })
+ );
+ return Promise.resolve();
+ });
}
delete() {
- return api.delete(
- uri.formatApiLink('tag-category', this._origName),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- tagCategory: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("tag-category", this._origName), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ tagCategory: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/tag_category_list.js b/client/js/models/tag_category_list.js
index 6c1182fb..2fc15228 100644
--- a/client/js/models/tag_category_list.js
+++ b/client/js/models/tag_category_list.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const TagCategory = require('./tag_category.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const TagCategory = require("./tag_category.js");
class TagCategoryList extends AbstractList {
constructor() {
@@ -11,7 +11,7 @@ class TagCategoryList extends AbstractList {
this._defaultCategory = null;
this._origDefaultCategory = null;
this._deletedCategories = [];
- this.addEventListener('remove', e => this._evtCategoryDeleted(e));
+ this.addEventListener("remove", (e) => this._evtCategoryDeleted(e));
}
static fromResponse(response) {
@@ -27,12 +27,16 @@ class TagCategoryList extends AbstractList {
}
static get() {
- return api.get(uri.formatApiLink('tag-categories'))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: TagCategoryList.fromResponse(response.results)}));
+ return api
+ .get(uri.formatApiLink("tag-categories"))
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: TagCategoryList.fromResponse(
+ response.results
+ ),
+ })
+ );
});
}
@@ -57,16 +61,18 @@ class TagCategoryList extends AbstractList {
promises.push(
api.put(
uri.formatApiLink(
- 'tag-category',
+ "tag-category",
this._defaultCategory.name,
- 'default')));
+ "default"
+ )
+ )
+ );
}
- return Promise.all(promises)
- .then(response => {
- this._deletedCategories = [];
- return Promise.resolve();
- });
+ return Promise.all(promises).then((response) => {
+ this._deletedCategories = [];
+ return Promise.resolve();
+ });
}
_evtCategoryDeleted(e) {
@@ -77,6 +83,6 @@ class TagCategoryList extends AbstractList {
}
TagCategoryList._itemClass = TagCategory;
-TagCategoryList._itemName = 'tagCategory';
+TagCategoryList._itemName = "tagCategory";
module.exports = TagCategoryList;
diff --git a/client/js/models/tag_list.js b/client/js/models/tag_list.js
index 31a34fa3..7e6b6438 100644
--- a/client/js/models/tag_list.js
+++ b/client/js/models/tag_list.js
@@ -1,25 +1,27 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const Tag = require('./tag.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const Tag = require("./tag.js");
class TagList extends AbstractList {
static search(text, offset, limit, fields) {
- return api.get(
- uri.formatApiLink(
- 'tags', {
+ return api
+ .get(
+ uri.formatApiLink("tags", {
query: text,
offset: offset,
limit: limit,
- fields: fields.join(','),
- }))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: TagList.fromResponse(response.results)}));
+ fields: fields.join(","),
+ })
+ )
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: TagList.fromResponse(response.results),
+ })
+ );
});
}
@@ -45,10 +47,12 @@ class TagList extends AbstractList {
this.add(tag);
if (addImplications !== false) {
- return Tag.get(tagName).then(actualTag => {
+ return Tag.get(tagName).then((actualTag) => {
return Promise.all(
- actualTag.implications.map(
- relation => this.addByName(relation.names[0], true)));
+ actualTag.implications.map((relation) =>
+ this.addByName(relation.names[0], true)
+ )
+ );
});
}
@@ -67,6 +71,6 @@ class TagList extends AbstractList {
}
TagList._itemClass = Tag;
-TagList._itemName = 'tag';
+TagList._itemName = "tag";
module.exports = TagList;
diff --git a/client/js/models/top_navigation.js b/client/js/models/top_navigation.js
index 91b8976b..a469a034 100644
--- a/client/js/models/top_navigation.js
+++ b/client/js/models/top_navigation.js
@@ -1,7 +1,7 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
+const events = require("../events.js");
+const api = require("../api.js");
class TopNavigationItem {
constructor(accessKey, title, url, available, imageUrl) {
@@ -44,18 +44,20 @@ class TopNavigation extends events.EventTarget {
activate(key) {
this.activeItem = null;
- this.dispatchEvent(new CustomEvent('activate', {
- detail: {
- key: key,
- item: key ? this.get(key) : null,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("activate", {
+ detail: {
+ key: key,
+ item: key ? this.get(key) : null,
+ },
+ })
+ );
}
setTitle(title) {
api.fetchConfig().then(() => {
document.oldTitle = null;
- document.title = api.getName() + (title ? (' – ' + title) : '');
+ document.title = api.getName() + (title ? " – " + title : "");
});
}
@@ -76,24 +78,22 @@ class TopNavigation extends events.EventTarget {
function _makeTopNavigation() {
const ret = new TopNavigation();
- ret.add('home', new TopNavigationItem('H', 'Home', ''));
- ret.add('posts', new TopNavigationItem('P', 'Posts', 'posts'));
- ret.add('upload', new TopNavigationItem('U', 'Upload', 'upload'));
- ret.add('comments', new TopNavigationItem('C', 'Comments', 'comments'));
- ret.add('tags', new TopNavigationItem('T', 'Tags', 'tags'));
- ret.add('pools', new TopNavigationItem('O', 'Pools', 'pools'));
- ret.add('users', new TopNavigationItem('S', 'Users', 'users'));
- ret.add('account', new TopNavigationItem('A', 'Account', 'user/{me}'));
- ret.add('register', new TopNavigationItem('R', 'Register', 'register'));
- ret.add('login', new TopNavigationItem('L', 'Log in', 'login'));
- ret.add('logout', new TopNavigationItem('O', 'Logout', 'logout'));
- ret.add('help', new TopNavigationItem('E', 'Help', 'help'));
+ ret.add("home", new TopNavigationItem("H", "Home", ""));
+ ret.add("posts", new TopNavigationItem("P", "Posts", "posts"));
+ ret.add("upload", new TopNavigationItem("U", "Upload", "upload"));
+ ret.add("comments", new TopNavigationItem("C", "Comments", "comments"));
+ ret.add("tags", new TopNavigationItem("T", "Tags", "tags"));
+ ret.add("pools", new TopNavigationItem("O", "Pools", "pools"));
+ ret.add("users", new TopNavigationItem("S", "Users", "users"));
+ ret.add("account", new TopNavigationItem("A", "Account", "user/{me}"));
+ ret.add("register", new TopNavigationItem("R", "Register", "register"));
+ ret.add("login", new TopNavigationItem("L", "Log in", "login"));
+ ret.add("logout", new TopNavigationItem("O", "Logout", "logout"));
+ ret.add("help", new TopNavigationItem("E", "Help", "help"));
ret.add(
- 'settings',
- new TopNavigationItem(
- null,
- ' ',
- 'settings'));
+ "settings",
+ new TopNavigationItem(null, " ", "settings")
+ );
return ret;
}
diff --git a/client/js/models/user.js b/client/js/models/user.js
index 40fbb14d..28dc3efe 100644
--- a/client/js/models/user.js
+++ b/client/js/models/user.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
class User extends events.EventTarget {
constructor() {
@@ -64,11 +64,11 @@ class User extends events.EventTarget {
}
get avatarContent() {
- throw 'Invalid operation';
+ throw "Invalid operation";
}
get password() {
- throw 'Invalid operation';
+ throw "Invalid operation";
}
set name(value) {
@@ -102,15 +102,14 @@ class User extends events.EventTarget {
}
static get(name) {
- return api.get(uri.formatApiLink('user', name))
- .then(response => {
- return Promise.resolve(User.fromResponse(response));
- });
+ return api.get(uri.formatApiLink("user", name)).then((response) => {
+ return Promise.resolve(User.fromResponse(response));
+ });
}
save() {
const files = [];
- const detail = {version: this._version};
+ const detail = { version: this._version };
const transient = this._orig._name;
if (this._name !== this._orig._name) {
@@ -133,33 +132,40 @@ class User extends events.EventTarget {
detail.password = this._password;
}
- let promise = this._orig._name ?
- api.put(
- uri.formatApiLink('user', this._orig._name), detail, files) :
- api.post(uri.formatApiLink('users'), detail, files);
+ let promise = this._orig._name
+ ? api.put(
+ uri.formatApiLink("user", this._orig._name),
+ detail,
+ files
+ )
+ : api.post(uri.formatApiLink("users"), detail, files);
- return promise
- .then(response => {
- this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
+ return promise.then((response) => {
+ this._updateFromResponse(response);
+ this.dispatchEvent(
+ new CustomEvent("change", {
detail: {
user: this,
},
- }));
- return Promise.resolve();
- });
+ })
+ );
+ return Promise.resolve();
+ });
}
delete() {
- return api.delete(
- uri.formatApiLink('user', this._orig._name),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- user: this,
- },
- }));
+ return api
+ .delete(uri.formatApiLink("user", this._orig._name), {
+ version: this._version,
+ })
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ user: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/models/user_list.js b/client/js/models/user_list.js
index d44622c8..c537f8f9 100644
--- a/client/js/models/user_list.js
+++ b/client/js/models/user_list.js
@@ -1,25 +1,31 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const AbstractList = require('./abstract_list.js');
-const User = require('./user.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const AbstractList = require("./abstract_list.js");
+const User = require("./user.js");
class UserList extends AbstractList {
static search(text, offset, limit) {
- return api.get(
- uri.formatApiLink(
- 'users', {query: text, offset: offset, limit: limit}))
- .then(response => {
- return Promise.resolve(Object.assign(
- {},
- response,
- {results: UserList.fromResponse(response.results)}));
+ return api
+ .get(
+ uri.formatApiLink("users", {
+ query: text,
+ offset: offset,
+ limit: limit,
+ })
+ )
+ .then((response) => {
+ return Promise.resolve(
+ Object.assign({}, response, {
+ results: UserList.fromResponse(response.results),
+ })
+ );
});
}
}
UserList._itemClass = User;
-UserList._itemName = 'user';
+UserList._itemName = "user";
module.exports = UserList;
diff --git a/client/js/models/user_token.js b/client/js/models/user_token.js
index e49f5aa0..c9d28a2a 100644
--- a/client/js/models/user_token.js
+++ b/client/js/models/user_token.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const uri = require('../util/uri.js');
-const events = require('../events.js');
+const api = require("../api.js");
+const uri = require("../util/uri.js");
+const events = require("../events.js");
class UserToken extends events.EventTarget {
constructor() {
@@ -48,12 +48,12 @@ class UserToken extends events.EventTarget {
}
static fromResponse(response) {
- if (typeof response.results !== 'undefined') {
+ if (typeof response.results !== "undefined") {
let tokenList = [];
for (let responseToken of response.results) {
const token = new UserToken();
token._updateFromResponse(responseToken);
- tokenList.push(token)
+ tokenList.push(token);
}
return tokenList;
} else {
@@ -64,15 +64,16 @@ class UserToken extends events.EventTarget {
}
static get(userName) {
- return api.get(uri.formatApiLink('user-tokens', userName))
- .then(response => {
+ return api
+ .get(uri.formatApiLink("user-tokens", userName))
+ .then((response) => {
return Promise.resolve(UserToken.fromResponse(response));
});
}
static create(userName, note, expirationTime) {
let userTokenRequest = {
- enabled: true
+ enabled: true,
};
if (note) {
userTokenRequest.note = note;
@@ -80,43 +81,54 @@ class UserToken extends events.EventTarget {
if (expirationTime) {
userTokenRequest.expirationTime = expirationTime;
}
- return api.post(uri.formatApiLink('user-token', userName), userTokenRequest)
- .then(response => {
- return Promise.resolve(UserToken.fromResponse(response))
+ return api
+ .post(uri.formatApiLink("user-token", userName), userTokenRequest)
+ .then((response) => {
+ return Promise.resolve(UserToken.fromResponse(response));
});
}
save(userName) {
- const detail = {version: this._version};
+ const detail = { version: this._version };
if (this._note !== this._orig._note) {
detail.note = this._note;
}
- return api.put(
- uri.formatApiLink('user-token', userName, this._orig._token),
- detail)
- .then(response => {
+ return api
+ .put(
+ uri.formatApiLink("user-token", userName, this._orig._token),
+ detail
+ )
+ .then((response) => {
this._updateFromResponse(response);
- this.dispatchEvent(new CustomEvent('change', {
- detail: {
- userToken: this,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("change", {
+ detail: {
+ userToken: this,
+ },
+ })
+ );
return Promise.resolve(this);
});
}
delete(userName) {
- return api.delete(
- uri.formatApiLink('user-token', userName, this._orig._token),
- {version: this._version})
- .then(response => {
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- userToken: this,
- },
- }));
+ return api
+ .delete(
+ uri.formatApiLink("user-token", userName, this._orig._token),
+ {
+ version: this._version,
+ }
+ )
+ .then((response) => {
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ userToken: this,
+ },
+ })
+ );
return Promise.resolve();
});
}
diff --git a/client/js/pools.js b/client/js/pools.js
index 8484f0b5..aa58d5ed 100644
--- a/client/js/pools.js
+++ b/client/js/pools.js
@@ -1,22 +1,23 @@
-'use strict';
+"use strict";
-const misc = require('./util/misc.js');
-const PoolCategoryList = require('./models/pool_category_list.js');
+const misc = require("./util/misc.js");
+const PoolCategoryList = require("./models/pool_category_list.js");
let _stylesheet = null;
function refreshCategoryColorMap() {
- return PoolCategoryList.get().then(response => {
+ return PoolCategoryList.get().then((response) => {
if (_stylesheet) {
document.head.removeChild(_stylesheet);
}
- _stylesheet = document.createElement('style');
+ _stylesheet = document.createElement("style");
document.head.appendChild(_stylesheet);
for (let category of response.results) {
- const ruleName = misc.makeCssName(category.name, 'pool');
+ const ruleName = misc.makeCssName(category.name, "pool");
_stylesheet.sheet.insertRule(
`.${ruleName} { color: ${category.color} }`,
- _stylesheet.sheet.cssRules.length);
+ _stylesheet.sheet.cssRules.length
+ );
}
});
}
diff --git a/client/js/router.js b/client/js/router.js
index b5b48552..460a6415 100644
--- a/client/js/router.js
+++ b/client/js/router.js
@@ -1,4 +1,4 @@
-'use strict';
+"use strict";
// modified page.js by visionmedia
// - changed regexes to components
@@ -10,13 +10,17 @@
// - rename .save() to .replaceState()
// - offer .url
-const clickEvent = document.ontouchstart ? 'touchstart' : 'click';
-const uri = require('./util/uri.js');
+const clickEvent = document.ontouchstart ? "touchstart" : "click";
+const uri = require("./util/uri.js");
let location = window.history.location || window.location;
function _getOrigin() {
- return location.protocol + '//' + location.hostname
- + (location.port ? (':' + location.port) : '');
+ return (
+ location.protocol +
+ "//" +
+ location.hostname +
+ (location.port ? ":" + location.port : "")
+ );
}
function _isSameOrigin(href) {
@@ -24,15 +28,16 @@ function _isSameOrigin(href) {
}
function _getBaseHref() {
- const bases = document.getElementsByTagName('base');
- return bases.length > 0 ?
- bases[0].href.replace(_getOrigin(), '').replace(/\/+$/, '') : '';
+ const bases = document.getElementsByTagName("base");
+ return bases.length > 0
+ ? bases[0].href.replace(_getOrigin(), "").replace(/\/+$/, "")
+ : "";
}
class Context {
constructor(path, state) {
const base = _getBaseHref();
- path = path.indexOf('/') !== 0 ? '/' + path : path;
+ path = path.indexOf("/") !== 0 ? "/" + path : path;
path = path.indexOf(base) !== 0 ? base + path : path;
this.canonicalPath = path;
@@ -55,7 +60,7 @@ class Context {
class Route {
constructor(path) {
- this.method = 'GET';
+ this.method = "GET";
this.path = path;
this.parameterNames = [];
@@ -64,16 +69,17 @@ class Route {
} else {
let parts = [];
for (let component of this.path) {
- if (component[0] === ':') {
- parts.push('([^/]+)');
+ if (component[0] === ":") {
+ parts.push("([^/]+)");
this.parameterNames.push(component.substr(1));
- } else { // assert [a-z]+
+ } else {
+ // assert [a-z]+
parts.push(component);
}
}
- let regexString = '^/' + parts.join('/');
- regexString += '(?:/*|/((?:(?:[a-z]+=[^/]+);)*(?:[a-z]+=[^/]+)))$';
- this.parameterNames.push('variable');
+ let regexString = "^/" + parts.join("/");
+ regexString += "(?:/*|/((?:(?:[a-z]+=[^/]+);)*(?:[a-z]+=[^/]+)))$";
+ this.parameterNames.push("variable");
this.regex = new RegExp(regexString);
}
}
@@ -88,7 +94,7 @@ class Route {
}
match(path, parameters) {
- const qsIndex = path.indexOf('?');
+ const qsIndex = path.indexOf("?");
const pathname = ~qsIndex ? path.slice(0, qsIndex) : path;
const match = this.regex.exec(pathname);
@@ -104,8 +110,8 @@ class Route {
continue;
}
- if (name === 'variable') {
- for (let word of (value || '').split(/;/)) {
+ if (name === "variable") {
+ for (let word of (value || "").split(/;/)) {
const [key, subvalue] = word.split(/=/, 2);
parameters[key] = uri.unescapeParam(subvalue);
}
@@ -148,7 +154,7 @@ class Router {
this._running = true;
this._onPopState = _onPopState(this);
this._onClick = _onClick(this);
- window.addEventListener('popstate', this._onPopState, false);
+ window.addEventListener("popstate", this._onPopState, false);
document.addEventListener(clickEvent, this._onClick, false);
const url = location.pathname + location.search + location.hash;
return this.replace(url, history.state, true);
@@ -160,7 +166,7 @@ class Router {
}
this._running = false;
document.removeEventListener(clickEvent, this._onClick, false);
- window.removeEventListener('popstate', this._onPopState, false);
+ window.removeEventListener("popstate", this._onPopState, false);
}
showNoDispatch(path, state) {
@@ -199,11 +205,11 @@ class Router {
middle();
next();
};
- const callChain = (this.ctx ? this._exits : [])
- .concat(
- [swap],
- this._callbacks,
- [this._unhandled, (ctx, next) => {}]);
+ const callChain = (this.ctx ? this._exits : []).concat(
+ [swap],
+ this._callbacks,
+ [this._unhandled, (ctx, next) => {}]
+ );
let i = 0;
let fn = () => {
@@ -226,20 +232,18 @@ class Router {
}
}
-const _onPopState = router => {
+const _onPopState = (router) => {
let loaded = false;
- if (document.readyState === 'complete') {
+ if (document.readyState === "complete") {
loaded = true;
} else {
- window.addEventListener(
- 'load',
- () => {
- setTimeout(() => {
- loaded = true;
- }, 0);
- });
+ window.addEventListener("load", () => {
+ setTimeout(() => {
+ loaded = true;
+ }, 0);
+ });
}
- return e => {
+ return (e) => {
if (!loaded) {
return;
}
@@ -247,16 +251,13 @@ const _onPopState = router => {
const path = e.state.path;
router.replace(path, e.state, true);
} else {
- router.show(
- location.pathname + location.hash,
- undefined,
- false);
+ router.show(location.pathname + location.hash, undefined, false);
}
};
};
-const _onClick = router => {
- return e => {
+const _onClick = (router) => {
+ return (e) => {
if (1 !== _which(e)) {
return;
}
@@ -268,23 +269,25 @@ const _onClick = router => {
}
let el = e.path ? e.path[0] : e.target;
- while (el && el.nodeName !== 'A') {
+ while (el && el.nodeName !== "A") {
el = el.parentNode;
}
- if (!el || el.nodeName !== 'A') {
+ if (!el || el.nodeName !== "A") {
return;
}
- if (el.hasAttribute('download') ||
- el.getAttribute('rel') === 'external') {
+ if (
+ el.hasAttribute("download") ||
+ el.getAttribute("rel") === "external"
+ ) {
return;
}
- const link = el.getAttribute('href');
- if (el.pathname === location.pathname && (el.hash || '#' === link)) {
+ const link = el.getAttribute("href");
+ if (el.pathname === location.pathname && (el.hash || "#" === link)) {
return;
}
- if (link && link.indexOf('mailto:') > -1) {
+ if (link && link.indexOf("mailto:") > -1) {
return;
}
if (el.target) {
@@ -295,7 +298,7 @@ const _onClick = router => {
}
const base = _getBaseHref();
- const orig = el.pathname + el.search + (el.hash || '');
+ const orig = el.pathname + el.search + (el.hash || "");
const path = !orig.indexOf(base) ? orig.slice(base.length) : orig;
if (base && orig === path) {
diff --git a/client/js/tags.js b/client/js/tags.js
index c037f6f6..ecf9b662 100644
--- a/client/js/tags.js
+++ b/client/js/tags.js
@@ -1,22 +1,23 @@
-'use strict';
+"use strict";
-const misc = require('./util/misc.js');
-const TagCategoryList = require('./models/tag_category_list.js');
+const misc = require("./util/misc.js");
+const TagCategoryList = require("./models/tag_category_list.js");
let _stylesheet = null;
function refreshCategoryColorMap() {
- return TagCategoryList.get().then(response => {
+ return TagCategoryList.get().then((response) => {
if (_stylesheet) {
document.head.removeChild(_stylesheet);
}
- _stylesheet = document.createElement('style');
+ _stylesheet = document.createElement("style");
document.head.appendChild(_stylesheet);
for (let category of response.results) {
- const ruleName = misc.makeCssName(category.name, 'tag');
+ const ruleName = misc.makeCssName(category.name, "tag");
_stylesheet.sheet.insertRule(
`.${ruleName} { color: ${category.color} }`,
- _stylesheet.sheet.cssRules.length);
+ _stylesheet.sheet.cssRules.length
+ );
}
});
}
diff --git a/client/js/templates.js b/client/js/templates.js
index f378cbcf..88bd5688 100644
--- a/client/js/templates.js
+++ b/client/js/templates.js
@@ -1,3 +1,3 @@
-'use strict';
+"use strict";
-module.exports = require('./.templates.autogen.js');
+module.exports = require("./.templates.autogen.js");
diff --git a/client/js/util/keyboard.js b/client/js/util/keyboard.js
index 8ee6ee97..3fe32633 100644
--- a/client/js/util/keyboard.js
+++ b/client/js/util/keyboard.js
@@ -1,12 +1,12 @@
-'use strict';
+"use strict";
-const mousetrap = require('mousetrap');
-const settings = require('../models/settings.js');
+const mousetrap = require("mousetrap");
+const settings = require("../models/settings.js");
let paused = false;
const _originalStopCallback = mousetrap.prototype.stopCallback;
// eslint-disable-next-line func-names
-mousetrap.prototype.stopCallback = function(...args) {
+mousetrap.prototype.stopCallback = function (...args) {
var self = this;
if (paused) {
return true;
diff --git a/client/js/util/markdown.js b/client/js/util/markdown.js
index 210280e8..792c9383 100644
--- a/client/js/util/markdown.js
+++ b/client/js/util/markdown.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const marked = require('marked');
+const marked = require("marked");
class BaseMarkdownWrapper {
preprocess(text) {
@@ -20,42 +20,44 @@ class SjisWrapper extends BaseMarkdownWrapper {
preprocess(text) {
return text.replace(
- /\[sjis\]((?:[^\[]|\[(?!\/?sjis\]))+)\[\/sjis\]/ig,
+ /\[sjis\]((?:[^\[]|\[(?!\/?sjis\]))+)\[\/sjis\]/gi,
(match, capture) => {
- var ret = '%%%SJIS' + this.buf.length;
+ var ret = "%%%SJIS" + this.buf.length;
this.buf.push(capture);
return ret;
- });
+ }
+ );
}
postprocess(text) {
return text.replace(
             /(?:<p>)?%%%SJIS(\d+)(?:<\/p>)?/,
(match, capture) => {
-                return '<p class="sjis">' + this.buf[capture] + '</p>';
-            });
+                return '<p class="sjis">' + this.buf[capture] + "</p>";
+            }
+        );
}
}
// fix \ before ~ being stripped away
class TildeWrapper extends BaseMarkdownWrapper {
preprocess(text) {
- return text.replace(/\\~/g, '%%%T');
+ return text.replace(/\\~/g, "%%%T");
}
postprocess(text) {
- return text.replace(/%%%T/g, '\\~');
+ return text.replace(/%%%T/g, "\\~");
}
}
// prevent ^#... from being treated as headers, due to tag permalinks
class TagPermalinkFixWrapper extends BaseMarkdownWrapper {
preprocess(text) {
- return text.replace(/^#/g, '%%%#');
+ return text.replace(/^#/g, "%%%#");
}
postprocess(text) {
- return text.replace(/%%%#/g, '#');
+ return text.replace(/%%%#/g, "#");
}
}
@@ -63,19 +65,23 @@ class TagPermalinkFixWrapper extends BaseMarkdownWrapper {
class EntityPermalinkWrapper extends BaseMarkdownWrapper {
preprocess(text) {
// URL-based permalinks
+ text = text.replace(new RegExp("\\b/post/(\\d+)/?\\b", "g"), "@$1");
text = text.replace(
- new RegExp('\\b/post/(\\d+)/?\\b', 'g'), '@$1');
+ new RegExp("\\b/tag/([a-zA-Z0-9_-]+?)/?", "g"),
+ "#$1"
+ );
text = text.replace(
- new RegExp('\\b/tag/([a-zA-Z0-9_-]+?)/?', 'g'), '#$1');
- text = text.replace(
- new RegExp('\\b/user/([a-zA-Z0-9_-]+?)/?', 'g'), '+$1');
+ new RegExp("\\b/user/([a-zA-Z0-9_-]+?)/?", "g"),
+ "+$1"
+ );
text = text.replace(
/(^|^\(|(?:[^\]])\(|[\s<>\[\]\)])([+#@][a-zA-Z0-9_-]+)/g,
- '$1[$2]($2)');
- text = text.replace(/\]\(@(\d+)\)/g, '](/post/$1)');
- text = text.replace(/\]\(\+([a-zA-Z0-9_-]+)\)/g, '](/user/$1)');
- text = text.replace(/\]\(#([a-zA-Z0-9_-]+)\)/g, '](/posts/query=$1)');
+ "$1[$2]($2)"
+ );
+ text = text.replace(/\]\(@(\d+)\)/g, "](/post/$1)");
+ text = text.replace(/\]\(\+([a-zA-Z0-9_-]+)\)/g, "](/user/$1)");
+ text = text.replace(/\]\(#([a-zA-Z0-9_-]+)\)/g, "](/posts/query=$1)");
return text;
}
}
@@ -83,51 +89,58 @@ class EntityPermalinkWrapper extends BaseMarkdownWrapper {
class SearchPermalinkWrapper extends BaseMarkdownWrapper {
postprocess(text) {
return text.replace(
- /\[search\]((?:[^\[]|\[(?!\/?search\]))+)\[\/search\]/ig,
-            '<a href="/posts/query=$1">$1</a>');
+ /\[search\]((?:[^\[]|\[(?!\/?search\]))+)\[\/search\]/gi,
+            '<a href="/posts/query=$1">$1</a>'
+ );
}
}
class SpoilersWrapper extends BaseMarkdownWrapper {
postprocess(text) {
return text.replace(
- /\[spoiler\]((?:[^\[]|\[(?!\/?spoiler\]))+)\[\/spoiler\]/ig,
-            '<span class="spoiler">$1</span>');
+ /\[spoiler\]((?:[^\[]|\[(?!\/?spoiler\]))+)\[\/spoiler\]/gi,
+            '<span class="spoiler">$1</span>'
+ );
}
}
class SmallWrapper extends BaseMarkdownWrapper {
postprocess(text) {
return text.replace(
- /\[small\]((?:[^\[]|\[(?!\/?small\]))+)\[\/small\]/ig,
-            '<small>$1</small>');
+ /\[small\]((?:[^\[]|\[(?!\/?small\]))+)\[\/small\]/gi,
+ "$1 "
+ );
}
}
class StrikeThroughWrapper extends BaseMarkdownWrapper {
postprocess(text) {
-        text = text.replace(/(^|[^\\])(~~|~)([^~]+)\2/g, '$1<del>$3</del>');
- return text.replace(/\\~/g, '~');
+        text = text.replace(/(^|[^\\])(~~|~)([^~]+)\2/g, "$1<del>$3</del>");
+ return text.replace(/\\~/g, "~");
}
}
function createRenderer() {
function sanitize(str) {
- return str.replace(/&<"/g, m => {
- if (m === '&') {
-                return '&amp;';
+ return str.replace(/&<"/g, (m) => {
+ if (m === "&") {
+ return "&";
}
- if (m === '<') {
-                return '&lt;';
+ if (m === "<") {
+ return "<";
}
-            return '&quot;';
+            return "&quot;";
});
}
const renderer = new marked.Renderer();
renderer.image = (href, title, alt) => {
- let [_, url, width, height] =
- (/^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/).exec(href);
+ let [
+ _,
+ url,
+ width,
+ height,
+ ] = /^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/.exec(href);
diff --git a/client/js/util/misc.js b/client/js/util/misc.js
--- a/client/js/util/misc.js
+++ b/client/js/util/misc.js
- const decimalPlaces = number < 20 && suffix !== 'B' ? 1 : 0;
+ const decimalPlaces = number < 20 && suffix !== "B" ? 1 : 0;
return number.toFixed(decimalPlaces) + suffix;
- });
+ }
+ );
}
function formatRelativeTime(timeString) {
if (!timeString) {
- return 'never';
+ return "never";
}
const then = Date.parse(timeString);
@@ -63,17 +64,17 @@ function formatRelativeTime(timeString) {
const future = now < then;
const descriptions = [
- [60, 'a few seconds', null],
- [60 * 2, 'a minute', null],
- [60 * 60, '% minutes', 60],
- [60 * 60 * 2, 'an hour', null],
- [60 * 60 * 24, '% hours', 60 * 60],
- [60 * 60 * 24 * 2, 'a day', null],
- [60 * 60 * 24 * 30.42, '% days', 60 * 60 * 24],
- [60 * 60 * 24 * 30.42 * 2, 'a month', null],
- [60 * 60 * 24 * 30.42 * 12, '% months', 60 * 60 * 24 * 30.42],
- [60 * 60 * 24 * 30.42 * 12 * 2, 'a year', null],
- [8640000000000000 /* max*/, '% years', 60 * 60 * 24 * 30.42 * 12],
+ [60, "a few seconds", null],
+ [60 * 2, "a minute", null],
+ [60 * 60, "% minutes", 60],
+ [60 * 60 * 2, "an hour", null],
+ [60 * 60 * 24, "% hours", 60 * 60],
+ [60 * 60 * 24 * 2, "a day", null],
+ [60 * 60 * 24 * 30.42, "% days", 60 * 60 * 24],
+ [60 * 60 * 24 * 30.42 * 2, "a month", null],
+ [60 * 60 * 24 * 30.42 * 12, "% months", 60 * 60 * 24 * 30.42],
+ [60 * 60 * 24 * 30.42 * 12 * 2, "a year", null],
+ [8640000000000000 /* max*/, "% years", 60 * 60 * 24 * 30.42 * 12],
];
let text = null;
@@ -87,10 +88,10 @@ function formatRelativeTime(timeString) {
}
}
- if (text === 'a day') {
- return future ? 'tomorrow' : 'yesterday';
+ if (text === "a day") {
+ return future ? "tomorrow" : "yesterday";
}
- return future ? 'in ' + text : text + ' ago';
+ return future ? "in " + text : text + " ago";
}
function formatMarkdown(text) {
@@ -102,7 +103,7 @@ function formatInlineMarkdown(text) {
}
function splitByWhitespace(str) {
- return str.split(/\s+/).filter(s => s);
+ return str.split(/\s+/).filter((s) => s);
}
function unindent(callSite, ...args) {
@@ -110,28 +111,30 @@ function unindent(callSite, ...args) {
let size = -1;
return str.replace(/\n(\s+)/g, (m, m1) => {
if (size < 0) {
- size = m1.replace(/\t/g, ' ').length;
+ size = m1.replace(/\t/g, " ").length;
}
- return '\n' + m1.slice(Math.min(m1.length, size));
+ return "\n" + m1.slice(Math.min(m1.length, size));
});
}
- if (typeof callSite === 'string') {
+ if (typeof callSite === "string") {
return format(callSite);
}
- if (typeof callSite === 'function') {
+ if (typeof callSite === "function") {
return (...args) => format(callSite(...args));
}
let output = callSite
.slice(0, args.length + 1)
- .map((text, i) => (i === 0 ? '' : args[i - 1]) + text)
- .join('');
+ .map((text, i) => (i === 0 ? "" : args[i - 1]) + text)
+ .join("");
return format(output);
}
function enableExitConfirmation() {
- window.onbeforeunload = e => {
- return 'Are you sure you want to leave? ' +
- 'Data you have entered may not be saved.';
+ window.onbeforeunload = (e) => {
+ return (
+ "Are you sure you want to leave? " +
+ "Data you have entered may not be saved."
+ );
};
}
@@ -150,16 +153,17 @@ function confirmPageExit() {
}
function makeCssName(text, suffix) {
- return suffix + '-' + text.replace(/[^a-z0-9]/g, '_');
+ return suffix + "-" + text.replace(/[^a-z0-9]/g, "_");
}
function escapeHtml(unsafe) {
-    return unsafe.toString()
-        .replace(/&/g, '&amp;')
-        .replace(/</g, '&lt;')
-        .replace(/>/g, '&gt;')
-        .replace(/"/g, '&quot;')
-        .replace(/'/g, '&#039;');
+    return unsafe
+        .toString()
+        .replace(/&/g, "&amp;")
+        .replace(/</g, "&lt;")
+        .replace(/>/g, "&gt;")
+        .replace(/"/g, "&quot;")
+        .replace(/'/g, "&#039;");
}
function arraysDiffer(source1, source2, orderImportant) {
@@ -177,25 +181,27 @@ function arraysDiffer(source1, source2, orderImportant) {
return false;
}
return (
- source1.filter(value => !source2.includes(value)).length > 0 ||
- source2.filter(value => !source1.includes(value)).length > 0);
+ source1.filter((value) => !source2.includes(value)).length > 0 ||
+ source2.filter((value) => !source1.includes(value)).length > 0
+ );
}
function escapeSearchTerm(text) {
- return text.replace(/([a-z_-]):/g, '$1\\:');
+ return text.replace(/([a-z_-]):/g, "$1\\:");
}
function dataURItoBlob(dataURI) {
- const chunks = dataURI.split(',');
- const byteString = chunks[0].indexOf('base64') >= 0 ?
- window.atob(chunks[1]) :
- unescape(chunks[1]);
- const mimeString = chunks[0].split(':')[1].split(';')[0];
+ const chunks = dataURI.split(",");
+ const byteString =
+ chunks[0].indexOf("base64") >= 0
+ ? window.atob(chunks[1])
+ : unescape(chunks[1]);
+ const mimeString = chunks[0].split(":")[1].split(";")[0];
const data = new Uint8Array(byteString.length);
for (let i = 0; i < byteString.length; i++) {
data[i] = byteString.charCodeAt(i);
}
- return new Blob([data], {type: mimeString});
+ return new Blob([data], { type: mimeString });
}
function getPrettyTagName(tag) {
diff --git a/client/js/util/optimized_resize.js b/client/js/util/optimized_resize.js
index 545c4589..d45df4a6 100644
--- a/client/js/util/optimized_resize.js
+++ b/client/js/util/optimized_resize.js
@@ -1,4 +1,4 @@
-'use strict';
+"use strict";
let callbacks = [];
let running = false;
@@ -15,7 +15,7 @@ function resize() {
}
function runCallbacks() {
- callbacks.forEach(callback => {
+ callbacks.forEach((callback) => {
callback();
});
running = false;
@@ -26,8 +26,8 @@ function add(callback) {
}
function remove(callback) {
- callbacks = callbacks.filter(c => c !== callback);
+ callbacks = callbacks.filter((c) => c !== callback);
}
-window.addEventListener('resize', resize);
-module.exports = {add: add, remove: remove};
+window.addEventListener("resize", resize);
+module.exports = { add: add, remove: remove };
diff --git a/client/js/util/polyfill.js b/client/js/util/polyfill.js
index 09f58354..ec809ff3 100644
--- a/client/js/util/polyfill.js
+++ b/client/js/util/polyfill.js
@@ -1,11 +1,11 @@
/* eslint-disable func-names, no-extend-native */
-'use strict';
+"use strict";
// fix iterating over NodeList in Chrome and Opera
NodeList.prototype[Symbol.iterator] = Array.prototype[Symbol.iterator];
-NodeList.prototype.querySelector = function(...args) {
+NodeList.prototype.querySelector = function (...args) {
for (let node of this) {
if (node.nodeType === 3) {
continue;
@@ -18,7 +18,7 @@ NodeList.prototype.querySelector = function(...args) {
return null;
};
-NodeList.prototype.querySelectorAll = function(...args) {
+NodeList.prototype.querySelectorAll = function (...args) {
let result = [];
for (let node of this) {
if (node.nodeType === 3) {
@@ -32,7 +32,7 @@ NodeList.prototype.querySelectorAll = function(...args) {
};
// non standard
-Node.prototype.prependChild = function(child) {
+Node.prototype.prependChild = function (child) {
if (this.firstChild) {
this.insertBefore(child, this.firstChild);
} else {
@@ -41,29 +41,25 @@ Node.prototype.prependChild = function(child) {
};
// non standard
-Promise.prototype.always = function(onResolveOrReject) {
- return this.then(
- onResolveOrReject,
- reason => {
- onResolveOrReject(reason);
- throw reason;
- });
+Promise.prototype.always = function (onResolveOrReject) {
+ return this.then(onResolveOrReject, (reason) => {
+ onResolveOrReject(reason);
+ throw reason;
+ });
};
// non standard
-Number.prototype.between = function(a, b, inclusive) {
+Number.prototype.between = function (a, b, inclusive) {
const min = Math.min(a, b);
const max = Math.max(a, b);
- return inclusive ?
- this >= min && this <= max :
- this > min && this < max;
+ return inclusive ? this >= min && this <= max : this > min && this < max;
};
// non standard
Promise.prototype.abort = () => {};
// non standard
-Date.prototype.addDays = function(days) {
+Date.prototype.addDays = function (days) {
let dat = new Date(this.valueOf());
dat.setDate(dat.getDate() + days);
return dat;
diff --git a/client/js/util/progress.js b/client/js/util/progress.js
index 98df1797..d6d12cb5 100644
--- a/client/js/util/progress.js
+++ b/client/js/util/progress.js
@@ -1,6 +1,6 @@
-'use strict';
+"use strict";
-const nprogress = require('nprogress');
+const nprogress = require("nprogress");
let nesting = 0;
diff --git a/client/js/util/search.js b/client/js/util/search.js
index 8d5fca45..c540c2f1 100644
--- a/client/js/util/search.js
+++ b/client/js/util/search.js
@@ -1,14 +1,16 @@
-'use strict';
+"use strict";
-const misc = require('./misc.js');
-const keyboard = require('../util/keyboard.js');
-const views = require('./views.js');
+const misc = require("./misc.js");
+const keyboard = require("../util/keyboard.js");
+const views = require("./views.js");
function searchInputNodeFocusHelper(inputNode) {
- keyboard.bind('q', () => {
+ keyboard.bind("q", () => {
inputNode.focus();
inputNode.setSelectionRange(
- inputNode.value.length, inputNode.value.length);
+ inputNode.value.length,
+ inputNode.value.length
+ );
});
}
diff --git a/client/js/util/touch.js b/client/js/util/touch.js
index 53b0978f..64bd00ac 100644
--- a/client/js/util/touch.js
+++ b/client/js/util/touch.js
@@ -1,11 +1,11 @@
-'use strict';
+"use strict";
const direction = {
NONE: null,
- LEFT: 'left',
- RIGHT: 'right',
- DOWN: 'down',
- UP: 'up'
+ LEFT: "left",
+ RIGHT: "right",
+ DOWN: "down",
+ UP: "up",
};
function handleTouchStart(handler, evt) {
@@ -37,20 +37,20 @@ function handleTouchMove(handler, evt) {
function handleTouchEnd(handler) {
switch (handler._direction) {
- case direction.NONE:
- return;
- case direction.LEFT:
- handler._swipeLeftTask();
- break;
- case direction.RIGHT:
- handler._swipeRightTask();
- break;
- case direction.DOWN:
- handler._swipeDownTask();
- break;
- case direction.UP:
- handler._swipeUpTask();
- // no default
+ case direction.NONE:
+ return;
+ case direction.LEFT:
+ handler._swipeLeftTask();
+ break;
+ case direction.RIGHT:
+ handler._swipeRightTask();
+ break;
+ case direction.DOWN:
+ handler._swipeDownTask();
+ break;
+ case direction.UP:
+ handler._swipeUpTask();
+ // no default
}
handler._xStart = null;
@@ -58,11 +58,13 @@ function handleTouchEnd(handler) {
}
class Touch {
- constructor(target,
+ constructor(
+ target,
swipeLeft = () => {},
swipeRight = () => {},
swipeUp = () => {},
- swipeDown = () => {}) {
+ swipeDown = () => {}
+ ) {
this._target = target;
this._swipeLeftTask = swipeLeft;
@@ -74,18 +76,15 @@ class Touch {
this._yStart = null;
this._direction = direction.NONE;
- this._target.addEventListener('touchstart',
- evt => {
- handleTouchStart(this, evt);
- });
- this._target.addEventListener('touchmove',
- evt => {
- handleTouchMove(this, evt);
- });
- this._target.addEventListener('touchend',
- () => {
- handleTouchEnd(this);
- });
+ this._target.addEventListener("touchstart", (evt) => {
+ handleTouchStart(this, evt);
+ });
+ this._target.addEventListener("touchmove", (evt) => {
+ handleTouchMove(this, evt);
+ });
+ this._target.addEventListener("touchend", () => {
+ handleTouchEnd(this);
+ });
}
}
diff --git a/client/js/util/uri.js b/client/js/util/uri.js
index 868545f8..16fa4f8d 100644
--- a/client/js/util/uri.js
+++ b/client/js/util/uri.js
@@ -1,4 +1,4 @@
-'use strict';
+"use strict";
function formatApiLink(...values) {
let parts = [];
@@ -9,18 +9,19 @@ function formatApiLink(...values) {
for (let key of Object.keys(value)) {
if (value[key]) {
variableParts.push(
- key + '=' + encodeURIComponent(value[key].toString()));
+ key + "=" + encodeURIComponent(value[key].toString())
+ );
}
}
if (variableParts.length) {
- parts.push('?' + variableParts.join('&'));
+ parts.push("?" + variableParts.join("&"));
}
break;
} else {
parts.push(encodeURIComponent(value.toString()));
}
}
- return '/' + parts.join('/');
+ return "/" + parts.join("/");
}
function escapeParam(text) {
@@ -40,48 +41,52 @@ function formatClientLink(...values) {
for (let key of Object.keys(value)) {
if (value[key]) {
variableParts.push(
- key + '=' + escapeParam(value[key].toString()));
+ key + "=" + escapeParam(value[key].toString())
+ );
}
}
if (variableParts.length) {
- parts.push(variableParts.join(';'));
+ parts.push(variableParts.join(";"));
}
break;
} else {
parts.push(escapeParam(value.toString()));
}
}
- return parts.join('/');
+ return parts.join("/");
}
function extractHostname(url) {
// https://stackoverflow.com/a/23945027
return url
- .split('/')[url.indexOf("//") > -1 ? 2 : 0]
- .split(':')[0]
- .split('?')[0];
+ .split("/")
+ [url.indexOf("//") > -1 ? 2 : 0].split(":")[0]
+ .split("?")[0];
}
function extractRootDomain(url) {
// https://stackoverflow.com/a/23945027
let domain = extractHostname(url);
- let splitArr = domain.split('.');
+ let splitArr = domain.split(".");
let arrLen = splitArr.length;
// if there is a subdomain
if (arrLen > 2) {
- domain = splitArr[arrLen - 2] + '.' + splitArr[arrLen - 1];
+ domain = splitArr[arrLen - 2] + "." + splitArr[arrLen - 1];
// check to see if it's using a Country Code Top Level Domain (ccTLD) (i.e. ".me.uk")
- if (splitArr[arrLen - 2].length === 2 && splitArr[arrLen - 1].length === 2) {
+ if (
+ splitArr[arrLen - 2].length === 2 &&
+ splitArr[arrLen - 1].length === 2
+ ) {
// this is using a ccTLD
- domain = splitArr[arrLen - 3] + '.' + domain;
+ domain = splitArr[arrLen - 3] + "." + domain;
}
}
return domain;
}
function escapeColons(text) {
- return text.replace(new RegExp(':', 'g'), '\\:');
+ return text.replace(new RegExp(":", "g"), "\\:");
}
module.exports = {
diff --git a/client/js/util/views.js b/client/js/util/views.js
index afdda76b..f69b34b2 100644
--- a/client/js/util/views.js
+++ b/client/js/util/views.js
@@ -1,27 +1,27 @@
-'use strict';
+"use strict";
-require('../util/polyfill.js');
-const api = require('../api.js');
-const templates = require('../templates.js');
+require("../util/polyfill.js");
+const api = require("../api.js");
+const templates = require("../templates.js");
const domParser = new DOMParser();
-const misc = require('./misc.js');
-const uri = require('./uri.js');
+const misc = require("./misc.js");
+const uri = require("./uri.js");
function _imbueId(options) {
if (!options.id) {
- options.id = 'gen-' + Math.random().toString(36).substring(7);
+ options.id = "gen-" + Math.random().toString(36).substring(7);
}
}
function _makeLabel(options, attrs) {
if (!options.text) {
- return '';
+ return "";
}
if (!attrs) {
attrs = {};
}
attrs.for = options.id;
- return makeElement('label', attrs, options.text);
+ return makeElement("label", attrs, options.text);
}
function makeFileSize(fileSize) {
@@ -34,251 +34,282 @@ function makeMarkdown(text) {
function makeRelativeTime(time) {
return makeElement(
- 'time', {datetime: time, title: time}, misc.formatRelativeTime(time));
+ "time",
+ { datetime: time, title: time },
+ misc.formatRelativeTime(time)
+ );
}
function makeThumbnail(url) {
return makeElement(
- 'span',
- url ?
- {class: 'thumbnail', style: `background-image: url(\'${url}\')`} :
- {class: 'thumbnail empty'},
- makeElement('img', {alt: 'thumbnail', src: url}));
+ "span",
+ url
+ ? {
+ class: "thumbnail",
+ style: `background-image: url(\'${url}\')`,
+ }
+ : { class: "thumbnail empty" },
+ makeElement("img", { alt: "thumbnail", src: url })
+ );
}
function makeRadio(options) {
_imbueId(options);
return makeElement(
- 'label',
- {for: options.id},
- makeElement(
- 'input',
- {
- id: options.id,
- name: options.name,
- value: options.value,
- type: 'radio',
- checked: options.selectedValue === options.value,
- disabled: options.readonly,
- required: options.required,
- }),
- makeElement('span', {class: 'radio'}, options.text));
+ "label",
+ { for: options.id },
+ makeElement("input", {
+ id: options.id,
+ name: options.name,
+ value: options.value,
+ type: "radio",
+ checked: options.selectedValue === options.value,
+ disabled: options.readonly,
+ required: options.required,
+ }),
+ makeElement("span", { class: "radio" }, options.text)
+ );
}
function makeCheckbox(options) {
_imbueId(options);
return makeElement(
- 'label',
- {for: options.id},
- makeElement(
- 'input',
- {
- id: options.id,
- name: options.name,
- value: options.value,
- type: 'checkbox',
- checked: options.checked !== undefined ?
- options.checked : false,
- disabled: options.readonly,
- required: options.required,
- }),
- makeElement('span', {class: 'checkbox'}, options.text));
+ "label",
+ { for: options.id },
+ makeElement("input", {
+ id: options.id,
+ name: options.name,
+ value: options.value,
+ type: "checkbox",
+ checked: options.checked !== undefined ? options.checked : false,
+ disabled: options.readonly,
+ required: options.required,
+ }),
+ makeElement("span", { class: "checkbox" }, options.text)
+ );
}
function makeSelect(options) {
- return _makeLabel(options) +
+ return (
+ _makeLabel(options) +
makeElement(
- 'select',
+ "select",
{
id: options.id,
name: options.name,
disabled: options.readonly,
},
- ...Object.keys(options.keyValues).map(key => makeElement(
- 'option',
- {value: key, selected: key === options.selectedKey},
- options.keyValues[key])));
+ ...Object.keys(options.keyValues).map((key) =>
+ makeElement(
+ "option",
+ { value: key, selected: key === options.selectedKey },
+ options.keyValues[key]
+ )
+ )
+ )
+ );
}
function makeInput(options) {
- options.value = options.value || '';
- return _makeLabel(options) + makeElement('input', options);
+ options.value = options.value || "";
+ return _makeLabel(options) + makeElement("input", options);
}
function makeButton(options) {
- options.type = 'button';
+ options.type = "button";
return makeInput(options);
}
function makeTextInput(options) {
- options.type = 'text';
+ options.type = "text";
return makeInput(options);
}
function makeTextarea(options) {
- const value = options.value || '';
+ const value = options.value || "";
delete options.value;
- return _makeLabel(options) + makeElement('textarea', options, value);
+ return _makeLabel(options) + makeElement("textarea", options, value);
}
function makePasswordInput(options) {
- options.type = 'password';
+ options.type = "password";
return makeInput(options);
}
function makeEmailInput(options) {
- options.type = 'email';
+ options.type = "email";
return makeInput(options);
}
function makeColorInput(options) {
- const textInput = makeElement(
- 'input', {
- type: 'text',
- value: options.value || '',
- required: options.required,
- class: 'color',
- });
- const backgroundPreviewNode = makeElement(
- 'div',
- {
- class: 'preview background-preview',
- style:
- `border-color: ${options.value};
+ const textInput = makeElement("input", {
+ type: "text",
+ value: options.value || "",
+ required: options.required,
+ class: "color",
+ });
+ const backgroundPreviewNode = makeElement("div", {
+ class: "preview background-preview",
+ style: `border-color: ${options.value};
background-color: ${options.value}`,
- });
- const textPreviewNode = makeElement(
- 'div',
- {
- class: 'preview text-preview',
- style:
- `border-color: ${options.value};
+ });
+ const textPreviewNode = makeElement("div", {
+ class: "preview text-preview",
+ style: `border-color: ${options.value};
color: ${options.value}`,
- });
+ });
return makeElement(
- 'label', {class: 'color'}, textInput, backgroundPreviewNode, textPreviewNode);
+ "label",
+ { class: "color" },
+ textInput,
+ backgroundPreviewNode,
+ textPreviewNode
+ );
}
function makeNumericInput(options) {
- options.type = 'number';
+ options.type = "number";
return makeInput(options);
}
function makeDateInput(options) {
- options.type = 'date';
- return makeInput(options)
+ options.type = "date";
+ return makeInput(options);
}
function getPostUrl(id, parameters) {
return uri.formatClientLink(
- 'post', id, parameters ? {query: parameters.query} : {});
+ "post",
+ id,
+ parameters ? { query: parameters.query } : {}
+ );
}
function getPostEditUrl(id, parameters) {
return uri.formatClientLink(
- 'post', id, 'edit', parameters ? {query: parameters.query} : {});
+ "post",
+ id,
+ "edit",
+ parameters ? { query: parameters.query } : {}
+ );
}
function makePostLink(id, includeHash) {
let text = id;
if (includeHash) {
- text = '@' + id;
+ text = "@" + id;
}
- return api.hasPrivilege('posts:view') ?
- makeElement(
- 'a',
- {href: uri.formatClientLink('post', id)},
- misc.escapeHtml(text)) :
- misc.escapeHtml(text);
+ return api.hasPrivilege("posts:view")
+ ? makeElement(
+ "a",
+ { href: uri.formatClientLink("post", id) },
+ misc.escapeHtml(text)
+ )
+ : misc.escapeHtml(text);
}
function makeTagLink(name, includeHash, includeCount, tag) {
- const category = tag ? tag.category : 'unknown';
+ const category = tag ? tag.category : "unknown";
let text = misc.getPrettyTagName(name);
if (includeHash === true) {
- text = '#' + text;
+ text = "#" + text;
}
if (includeCount === true) {
- text += ' (' + (tag ? tag.postCount : 0) + ')';
+ text += " (" + (tag ? tag.postCount : 0) + ")";
}
- return api.hasPrivilege('tags:view') ?
- makeElement(
- 'a',
- {
- href: uri.formatClientLink('tag', name),
- class: misc.makeCssName(category, 'tag'),
- },
- misc.escapeHtml(text)) :
- makeElement(
- 'span',
- {class: misc.makeCssName(category, 'tag')},
- misc.escapeHtml(text));
+ return api.hasPrivilege("tags:view")
+ ? makeElement(
+ "a",
+ {
+ href: uri.formatClientLink("tag", name),
+ class: misc.makeCssName(category, "tag"),
+ },
+ misc.escapeHtml(text)
+ )
+ : makeElement(
+ "span",
+ { class: misc.makeCssName(category, "tag") },
+ misc.escapeHtml(text)
+ );
}
function makePoolLink(id, includeHash, includeCount, pool, name) {
- const category = pool ? pool.category : 'unknown';
+ const category = pool ? pool.category : "unknown";
let text = name ? name : pool.names[0];
if (includeHash === true) {
- text = '#' + text;
+ text = "#" + text;
}
if (includeCount === true) {
- text += ' (' + (pool ? pool.postCount : 0) + ')';
+ text += " (" + (pool ? pool.postCount : 0) + ")";
}
- return api.hasPrivilege('pools:view') ?
- makeElement(
- 'a',
- {
- href: uri.formatClientLink('pool', id),
- class: misc.makeCssName(category, 'pool'),
- },
- misc.escapeHtml(text)) :
- makeElement(
- 'span',
- {class: misc.makeCssName(category, 'pool')},
- misc.escapeHtml(text));
+ return api.hasPrivilege("pools:view")
+ ? makeElement(
+ "a",
+ {
+ href: uri.formatClientLink("pool", id),
+ class: misc.makeCssName(category, "pool"),
+ },
+ misc.escapeHtml(text)
+ )
+ : makeElement(
+ "span",
+ { class: misc.makeCssName(category, "pool") },
+ misc.escapeHtml(text)
+ );
}
function makeUserLink(user) {
let text = makeThumbnail(user ? user.avatarUrl : null);
- text += user && user.name ? misc.escapeHtml(user.name) : 'Anonymous';
- const link = user && api.hasPrivilege('users:view') ?
- makeElement(
- 'a', {href: uri.formatClientLink('user', user.name)}, text) :
- text;
- return makeElement('span', {class: 'user'}, link);
+ text += user && user.name ? misc.escapeHtml(user.name) : "Anonymous";
+ const link =
+ user && api.hasPrivilege("users:view")
+ ? makeElement(
+ "a",
+ { href: uri.formatClientLink("user", user.name) },
+ text
+ )
+ : text;
+ return makeElement("span", { class: "user" }, link);
}
function makeFlexboxAlign(options) {
return [...misc.range(20)]
-        .map(() => '<li class="flexbox-dummy"></li>').join('');
+        .map(() => '<li class="flexbox-dummy"></li>')
+ .join("");
}
function makeAccessKey(html, key) {
-    const regex = new RegExp('(' + key + ')', 'i');
+    const regex = new RegExp("(" + key + ")", "i");
+ const regex = new RegExp("(" + key + ")", "i");
html = html.replace(
- regex, '$1 ');
+ regex,
+ '$1 '
+ );
return html;
}
function _serializeElement(name, attributes) {
return [name]
- .concat(Object.keys(attributes).map(key => {
- if (attributes[key] === true) {
- return key;
- } else if (attributes[key] === false ||
- attributes[key] === undefined) {
- return '';
- }
- const attribute = misc.escapeHtml(attributes[key] || '');
- return `${key}="${attribute}"`;
- }))
- .join(' ');
+ .concat(
+ Object.keys(attributes).map((key) => {
+ if (attributes[key] === true) {
+ return key;
+ } else if (
+ attributes[key] === false ||
+ attributes[key] === undefined
+ ) {
+ return "";
+ }
+ const attribute = misc.escapeHtml(attributes[key] || "");
+ return `${key}="${attribute}"`;
+ })
+ )
+ .join(" ");
}
function makeElement(name, attrs, ...content) {
- return content.length !== undefined ?
-        `<${_serializeElement(name, attrs)}>${content.join('')}</${name}>` :
- `<${_serializeElement(name, attrs)}/>`;
+ return content.length !== undefined
+        ? `<${_serializeElement(name, attrs)}>${content.join("")}</${name}>`
+ : `<${_serializeElement(name, attrs)}/>`;
}
function emptyContent(target) {
@@ -302,25 +333,25 @@ function replaceContent(target, source) {
function showMessage(target, message, className) {
if (!message) {
- message = 'Unknown message';
+ message = "Unknown message";
}
- const messagesHolderNode = target.querySelector('.messages');
+ const messagesHolderNode = target.querySelector(".messages");
if (!messagesHolderNode) {
return false;
}
- const textNode = document.createElement('div');
-    textNode.innerHTML = message.replace(/\n/g, '<br/>');
- textNode.classList.add('message');
+ const textNode = document.createElement("div");
+    textNode.innerHTML = message.replace(/\n/g, "<br/>");
+ textNode.classList.add("message");
textNode.classList.add(className);
- const wrapperNode = document.createElement('div');
- wrapperNode.classList.add('message-wrapper');
+ const wrapperNode = document.createElement("div");
+ wrapperNode.classList.add("message-wrapper");
wrapperNode.appendChild(textNode);
messagesHolderNode.appendChild(wrapperNode);
return true;
}
function appendExclamationMark() {
- if (!document.title.startsWith('!')) {
+ if (!document.title.startsWith("!")) {
document.oldTitle = document.title;
document.title = `! ${document.title}`;
}
@@ -328,15 +359,15 @@ function appendExclamationMark() {
function showError(target, message) {
appendExclamationMark();
- return showMessage(target, misc.formatInlineMarkdown(message), 'error');
+ return showMessage(target, misc.formatInlineMarkdown(message), "error");
}
function showSuccess(target, message) {
- return showMessage(target, misc.formatInlineMarkdown(message), 'success');
+ return showMessage(target, misc.formatInlineMarkdown(message), "success");
}
function showInfo(target, message) {
- return showMessage(target, misc.formatInlineMarkdown(message), 'info');
+ return showMessage(target, misc.formatInlineMarkdown(message), "info");
}
function clearMessages(target) {
@@ -344,7 +375,7 @@ function clearMessages(target) {
document.title = document.oldTitle;
document.oldTitle = null;
}
- for (let messagesHolderNode of target.querySelectorAll('.messages')) {
+ for (let messagesHolderNode of target.querySelectorAll(".messages")) {
emptyContent(messagesHolderNode);
}
}
@@ -352,15 +383,15 @@ function clearMessages(target) {
function htmlToDom(html) {
// code taken from jQuery + Krasimir Tsonev's blog
const wrapMap = {
-        _: [1, '<div>', '</div>'],
-        option: [1, '<select multiple=\'multiple\'>', '</select>'],
-        legend: [1, '<fieldset>', '</fieldset>'],
-        area: [1, '<map>', '</map>'],
-        param: [1, '<object>', '</object>'],
-        thead: [1, '<table>', '</table>'],
-        tr: [2, '<table><tbody>', '</tbody></table>'],
-        td: [3, '<table><tbody><tr>', '</tr></tbody></table>'],
-        col: [2, '<table><tbody></tbody><colgroup>', '</colgroup></table>'],
+        _: [1, "<div>", "</div>"],
+        option: [1, "<select multiple='multiple'>", "</select>"],
+        legend: [1, "<fieldset>", "</fieldset>"],
+        area: [1, "<map>", "</map>"],
+        param: [1, "<object>", "</object>"],
+        thead: [1, "<table>", "</table>"],
+        tr: [2, "<table><tbody>", "</tbody></table>"],
+        td: [3, "<table><tbody><tr>", "</tr></tbody></table>"],
+        col: [2, "<table><tbody></tbody><colgroup>", "</colgroup></table>"],
};
wrapMap.optgroup = wrapMap.option;
wrapMap.tbody = wrapMap.thead;
@@ -369,8 +400,8 @@ function htmlToDom(html) {
wrapMap.caption = wrapMap.thead;
wrapMap.th = wrapMap.td;
- let element = document.createElement('div');
- const match = (/<\s*(\w+)[^>]*?>/g).exec(html);
+ let element = document.createElement("div");
+ const match = /<\s*(\w+)[^>]*?>/g.exec(html);
if (match) {
const tag = match[1];
@@ -382,9 +413,9 @@ function htmlToDom(html) {
} else {
element.innerHTML = html;
}
- return element.childNodes.length > 1 ?
- element.childNodes :
- element.firstChild;
+ return element.childNodes.length > 1
+ ? element.childNodes
+ : element.firstChild;
}
function getTemplate(templatePath) {
@@ -392,7 +423,7 @@ function getTemplate(templatePath) {
throw `Missing template: ${templatePath}`;
}
const templateFactory = templates[templatePath];
- return ctx => {
+ return (ctx) => {
if (!ctx) {
ctx = {};
}
@@ -423,7 +454,7 @@ function getTemplate(templatePath) {
makeElement: makeElement,
makeCssName: misc.makeCssName,
makeNumericInput: makeNumericInput,
- formatClientLink: uri.formatClientLink
+ formatClientLink: uri.formatClientLink,
});
return htmlToDom(templateFactory(ctx));
};
@@ -432,49 +463,51 @@ function getTemplate(templatePath) {
function decorateValidator(form) {
// postpone showing form fields validity until user actually tries
// to submit it (seeing red/green form w/o doing anything breaks POLA)
- let submitButton = form.querySelector('.buttons input');
+ let submitButton = form.querySelector(".buttons input");
if (!submitButton) {
- submitButton = form.querySelector('input[type=submit]');
+ submitButton = form.querySelector("input[type=submit]");
}
if (submitButton) {
- submitButton.addEventListener('click', e => {
- form.classList.add('show-validation');
+ submitButton.addEventListener("click", (e) => {
+ form.classList.add("show-validation");
});
}
- form.addEventListener('submit', e => {
- form.classList.remove('show-validation');
+ form.addEventListener("submit", (e) => {
+ form.classList.remove("show-validation");
});
}
function disableForm(form) {
- for (let input of form.querySelectorAll('input')) {
+ for (let input of form.querySelectorAll("input")) {
input.disabled = true;
}
}
function enableForm(form) {
- for (let input of form.querySelectorAll('input')) {
+ for (let input of form.querySelectorAll("input")) {
input.disabled = false;
}
}
function syncScrollPosition() {
- window.requestAnimationFrame(
- () => {
- if (history.state && Object.prototype.hasOwnProperty.call(history.state, 'scrollX')) {
- window.scrollTo(history.state.scrollX, history.state.scrollY);
- } else {
- window.scrollTo(0, 0);
- }
- });
+ window.requestAnimationFrame(() => {
+ if (
+ history.state &&
+ Object.prototype.hasOwnProperty.call(history.state, "scrollX")
+ ) {
+ window.scrollTo(history.state.scrollX, history.state.scrollY);
+ } else {
+ window.scrollTo(0, 0);
+ }
+ });
}
function slideDown(element) {
const duration = 500;
return new Promise((resolve, reject) => {
const height = element.getBoundingClientRect().height;
- element.style.maxHeight = '0';
- element.style.overflow = 'hidden';
+ element.style.maxHeight = "0";
+ element.style.overflow = "hidden";
window.setTimeout(() => {
element.style.transition = `all ${duration}ms ease`;
element.style.maxHeight = `${height}px`;
@@ -489,7 +522,7 @@ function slideUp(element) {
const duration = 500;
return new Promise((resolve, reject) => {
const height = element.getBoundingClientRect().height;
- element.style.overflow = 'hidden';
+ element.style.overflow = "hidden";
element.style.maxHeight = `${height}px`;
element.style.transition = `all ${duration}ms ease`;
window.setTimeout(() => {
@@ -502,26 +535,27 @@ function slideUp(element) {
}
function monitorNodeRemoval(monitoredNode, callback) {
- const mutationObserver = new MutationObserver(
- mutations => {
- for (let mutation of mutations) {
- for (let node of mutation.removedNodes) {
- if (node.contains(monitoredNode)) {
- mutationObserver.disconnect();
- callback();
- return;
- }
+ const mutationObserver = new MutationObserver((mutations) => {
+ for (let mutation of mutations) {
+ for (let node of mutation.removedNodes) {
+ if (node.contains(monitoredNode)) {
+ mutationObserver.disconnect();
+ callback();
+ return;
}
}
- });
- mutationObserver.observe(
- document.body, {childList: true, subtree: true});
+ }
+ });
+ mutationObserver.observe(document.body, {
+ childList: true,
+ subtree: true,
+ });
}
-document.addEventListener('input', e => {
- if (e.target.classList.contains('color')) {
- let bkNode = e.target.parentNode.querySelector('.background-preview');
- let textNode = e.target.parentNode.querySelector('.text-preview');
+document.addEventListener("input", (e) => {
+ if (e.target.classList.contains("color")) {
+ let bkNode = e.target.parentNode.querySelector(".background-preview");
+ let textNode = e.target.parentNode.querySelector(".text-preview");
bkNode.style.backgroundColor = e.target.value;
bkNode.style.borderColor = e.target.value;
textNode.style.color = e.target.value;
@@ -530,8 +564,8 @@ document.addEventListener('input', e => {
});
// prevent opening buttons in new tabs
-document.addEventListener('click', e => {
- if (e.target.getAttribute('href') === '' && e.which === 2) {
+document.addEventListener("click", (e) => {
+ if (e.target.getAttribute("href") === "" && e.which === 2) {
e.preventDefault();
}
});
diff --git a/client/js/views/comments_page_view.js b/client/js/views/comments_page_view.js
index d5bb294c..5648b3a8 100644
--- a/client/js/views/comments_page_view.js
+++ b/client/js/views/comments_page_view.js
@@ -1,10 +1,10 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const CommentListControl = require('../controls/comment_list_control.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const CommentListControl = require("../controls/comment_list_control.js");
-const template = views.getTemplate('comments-page');
+const template = views.getTemplate("comments-page");
class CommentsPageView extends events.EventTarget {
constructor(ctx) {
@@ -16,12 +16,14 @@ class CommentsPageView extends events.EventTarget {
for (let post of ctx.response.results) {
const commentListControl = new CommentListControl(
sourceNode.querySelector(
- `.comments-container[data-for="${post.id}"]`),
+ `.comments-container[data-for="${post.id}"]`
+ ),
post.comments,
- true);
- events.proxyEvent(commentListControl, this, 'submit');
- events.proxyEvent(commentListControl, this, 'score');
- events.proxyEvent(commentListControl, this, 'delete');
+ true
+ );
+ events.proxyEvent(commentListControl, this, "submit");
+ events.proxyEvent(commentListControl, this, "score");
+ events.proxyEvent(commentListControl, this, "delete");
}
views.replaceContent(this._hostNode, sourceNode);
diff --git a/client/js/views/empty_view.js b/client/js/views/empty_view.js
index 21843d73..59d336da 100644
--- a/client/js/views/empty_view.js
+++ b/client/js/views/empty_view.js
@@ -1,15 +1,16 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
const template = () => {
return views.htmlToDom(
- '');
+ ''
+ );
};
class EmptyView {
constructor() {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, template());
views.syncScrollPosition();
}
diff --git a/client/js/views/endless_page_view.js b/client/js/views/endless_page_view.js
index f30b8fb5..f94c3718 100644
--- a/client/js/views/endless_page_view.js
+++ b/client/js/views/endless_page_view.js
@@ -1,10 +1,10 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const views = require('../util/views.js');
+const router = require("../router.js");
+const views = require("../util/views.js");
-const holderTemplate = views.getTemplate('endless-pager');
-const pageTemplate = views.getTemplate('endless-pager-page');
+const holderTemplate = views.getTemplate("endless-pager");
+const pageTemplate = views.getTemplate("endless-pager-page");
function isScrolledIntoView(element) {
let top = 0;
@@ -12,14 +12,12 @@ function isScrolledIntoView(element) {
top += element.offsetTop || 0;
element = element.offsetParent;
} while (element);
- return (
- (top >= window.scrollY) &&
- (top <= window.scrollY + window.innerHeight));
+ return top >= window.scrollY && top <= window.scrollY + window.innerHeight;
}
class EndlessPageView {
constructor(ctx) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, holderTemplate());
}
@@ -40,12 +38,13 @@ class EndlessPageView {
this.defaultLimit = parseInt(ctx.parameters.limit || ctx.defaultLimit);
const initialOffset = parseInt(ctx.parameters.offset || 0);
- this._loadPage(ctx, initialOffset, this.defaultLimit, true)
- .then(pageNode => {
+ this._loadPage(ctx, initialOffset, this.defaultLimit, true).then(
+ (pageNode) => {
if (initialOffset !== 0) {
pageNode.scrollIntoView();
}
- });
+ }
+ );
this._timeout = window.setInterval(() => {
window.requestAnimationFrame(() => {
@@ -58,19 +57,19 @@ class EndlessPageView {
}
get pageHeaderHolderNode() {
- return this._hostNode.querySelector('.page-header-holder');
+ return this._hostNode.querySelector(".page-header-holder");
}
get topPageGuardNode() {
- return this._hostNode.querySelector('.page-guard.top');
+ return this._hostNode.querySelector(".page-guard.top");
}
get bottomPageGuardNode() {
- return this._hostNode.querySelector('.page-guard.bottom');
+ return this._hostNode.querySelector(".page-guard.bottom");
}
get _pagesHolderNode() {
- return this._hostNode.querySelector('.pages-holder');
+ return this._hostNode.querySelector(".pages-holder");
}
_destroy() {
@@ -82,9 +81,10 @@ class EndlessPageView {
let topPageNode = null;
let element = document.elementFromPoint(
window.innerWidth / 2,
- window.innerHeight / 2);
+ window.innerHeight / 2
+ );
while (element.parentNode !== null) {
- if (element.classList.contains('page')) {
+ if (element.classList.contains("page")) {
topPageNode = element;
break;
}
@@ -93,15 +93,17 @@ class EndlessPageView {
if (!topPageNode) {
return;
}
- let topOffset = parseInt(topPageNode.getAttribute('data-offset'));
- let topLimit = parseInt(topPageNode.getAttribute('data-limit'));
+ let topOffset = parseInt(topPageNode.getAttribute("data-offset"));
+ let topLimit = parseInt(topPageNode.getAttribute("data-limit"));
if (topOffset !== this.currentOffset) {
router.replace(
ctx.getClientUrlForPage(
topOffset,
- topLimit === ctx.defaultLimit ? null : topLimit),
+ topLimit === ctx.defaultLimit ? null : topLimit
+ ),
ctx.state,
- false);
+ false
+ );
this.currentOffset = topOffset;
}
}
@@ -115,43 +117,47 @@ class EndlessPageView {
return;
}
- if (this.minOffsetShown > 0 &&
- isScrolledIntoView(this.topPageGuardNode)) {
+ if (
+ this.minOffsetShown > 0 &&
+ isScrolledIntoView(this.topPageGuardNode)
+ ) {
this._loadPage(
ctx,
this.minOffsetShown - this.defaultLimit,
this.defaultLimit,
- false);
+ false
+ );
}
- if (this.maxOffsetShown < this.totalRecords &&
- isScrolledIntoView(this.bottomPageGuardNode)) {
- this._loadPage(
- ctx,
- this.maxOffsetShown,
- this.defaultLimit,
- true);
+ if (
+ this.maxOffsetShown < this.totalRecords &&
+ isScrolledIntoView(this.bottomPageGuardNode)
+ ) {
+ this._loadPage(ctx, this.maxOffsetShown, this.defaultLimit, true);
}
}
_loadPage(ctx, offset, limit, append) {
this._runningRequests++;
return new Promise((resolve, reject) => {
- ctx.requestPage(offset, limit).then(response => {
- if (!this._active) {
+ ctx.requestPage(offset, limit).then(
+ (response) => {
+ if (!this._active) {
+ this._runningRequests--;
+ return Promise.reject();
+ }
+ window.requestAnimationFrame(() => {
+ let pageNode = this._renderPage(ctx, append, response);
+ this._runningRequests--;
+ resolve(pageNode);
+ });
+ },
+ (error) => {
+ this.showError(error.message);
this._runningRequests--;
- return Promise.reject();
+ reject();
}
- window.requestAnimationFrame(() => {
- let pageNode = this._renderPage(ctx, append, response);
- this._runningRequests--;
- resolve(pageNode);
- });
- }, error => {
- this.showError(error.message);
- this._runningRequests--;
- reject();
- });
+ );
});
}
@@ -162,30 +168,35 @@ class EndlessPageView {
pageNode = pageTemplate({
totalPages: Math.ceil(response.total / response.limit),
page: Math.ceil(
- (response.offset + response.limit) / response.limit),
+ (response.offset + response.limit) / response.limit
+ ),
});
- pageNode.setAttribute('data-offset', response.offset);
- pageNode.setAttribute('data-limit', response.limit);
+ pageNode.setAttribute("data-offset", response.offset);
+ pageNode.setAttribute("data-limit", response.limit);
ctx.pageRenderer({
parameters: ctx.parameters,
response: response,
- hostNode: pageNode.querySelector('.page-content-holder'),
+ hostNode: pageNode.querySelector(".page-content-holder"),
});
this.totalRecords = response.total;
- if (response.offset < this.minOffsetShown ||
- this.minOffsetShown === null) {
+ if (
+ response.offset < this.minOffsetShown ||
+ this.minOffsetShown === null
+ ) {
this.minOffsetShown = response.offset;
}
- if (response.offset + response.results.length
- > this.maxOffsetShown ||
- this.maxOffsetShown === null) {
+ if (
+ response.offset + response.results.length >
+ this.maxOffsetShown ||
+ this.maxOffsetShown === null
+ ) {
this.maxOffsetShown =
response.offset + response.results.length;
}
- response.results.addEventListener('remove', e => {
+ response.results.addEventListener("remove", (e) => {
this.maxOffsetShown--;
this.totalRecords--;
});
@@ -200,10 +211,11 @@ class EndlessPageView {
window.scroll(
window.scrollX,
- window.scrollY + pageNode.offsetHeight);
+ window.scrollY + pageNode.offsetHeight
+ );
}
} else if (!response.results.length) {
- this.showInfo('No data to show');
+ this.showInfo("No data to show");
}
this._initialPageLoad = false;
diff --git a/client/js/views/help_view.js b/client/js/views/help_view.js
index 1ab016d7..a88b0169 100644
--- a/client/js/views/help_view.js
+++ b/client/js/views/help_view.js
@@ -1,73 +1,81 @@
-'use strict';
+"use strict";
-const api = require('../api.js');
-const views = require('../util/views.js');
+const api = require("../api.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('help');
+const template = views.getTemplate("help");
const sectionTemplates = {
- 'about': views.getTemplate('help-about'),
- 'keyboard': views.getTemplate('help-keyboard'),
- 'search': views.getTemplate('help-search'),
- 'comments': views.getTemplate('help-comments'),
- 'tos': views.getTemplate('help-tos'),
+ about: views.getTemplate("help-about"),
+ keyboard: views.getTemplate("help-keyboard"),
+ search: views.getTemplate("help-search"),
+ comments: views.getTemplate("help-comments"),
+ tos: views.getTemplate("help-tos"),
};
const subsectionTemplates = {
- 'search': {
- 'default': views.getTemplate('help-search-general'),
- 'posts': views.getTemplate('help-search-posts'),
- 'users': views.getTemplate('help-search-users'),
- 'tags': views.getTemplate('help-search-tags'),
- 'pools': views.getTemplate('help-search-pools'),
+ search: {
+ default: views.getTemplate("help-search-general"),
+ posts: views.getTemplate("help-search-posts"),
+ users: views.getTemplate("help-search-users"),
+ tags: views.getTemplate("help-search-tags"),
+ pools: views.getTemplate("help-search-pools"),
},
};
class HelpView {
constructor(section, subsection) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
const sourceNode = template();
const ctx = {
name: api.getName(),
};
- section = section || 'about';
+ section = section || "about";
if (section in sectionTemplates) {
views.replaceContent(
- sourceNode.querySelector('.content'),
- sectionTemplates[section](ctx));
+ sourceNode.querySelector(".content"),
+ sectionTemplates[section](ctx)
+ );
}
- subsection = subsection || 'default';
- if (section in subsectionTemplates &&
- subsection in subsectionTemplates[section]) {
+ subsection = subsection || "default";
+ if (
+ section in subsectionTemplates &&
+ subsection in subsectionTemplates[section]
+ ) {
views.replaceContent(
- sourceNode.querySelector('.subcontent'),
- subsectionTemplates[section][subsection](ctx));
+ sourceNode.querySelector(".subcontent"),
+ subsectionTemplates[section][subsection](ctx)
+ );
}
views.replaceContent(this._hostNode, sourceNode);
- for (let itemNode of
- sourceNode.querySelectorAll('.primary [data-name]')) {
+ for (let itemNode of sourceNode.querySelectorAll(
+ ".primary [data-name]"
+ )) {
itemNode.classList.toggle(
- 'active',
- itemNode.getAttribute('data-name') === section);
- if (itemNode.getAttribute('data-name') === section) {
+ "active",
+ itemNode.getAttribute("data-name") === section
+ );
+ if (itemNode.getAttribute("data-name") === section) {
itemNode.parentNode.scrollLeft =
itemNode.getBoundingClientRect().left -
- itemNode.parentNode.getBoundingClientRect().left
+ itemNode.parentNode.getBoundingClientRect().left;
}
}
- for (let itemNode of
- sourceNode.querySelectorAll('.secondary [data-name]')) {
+ for (let itemNode of sourceNode.querySelectorAll(
+ ".secondary [data-name]"
+ )) {
itemNode.classList.toggle(
- 'active',
- itemNode.getAttribute('data-name') === subsection);
- if (itemNode.getAttribute('data-name') === subsection) {
+ "active",
+ itemNode.getAttribute("data-name") === subsection
+ );
+ if (itemNode.getAttribute("data-name") === subsection) {
itemNode.parentNode.scrollLeft =
itemNode.getBoundingClientRect().left -
- itemNode.parentNode.getBoundingClientRect().left
+ itemNode.parentNode.getBoundingClientRect().left;
}
}
diff --git a/client/js/views/home_view.js b/client/js/views/home_view.js
index 35d4742f..c91363b2 100644
--- a/client/js/views/home_view.js
+++ b/client/js/views/home_view.js
@@ -1,22 +1,20 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const uri = require('../util/uri.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const PostContentControl = require('../controls/post_content_control.js');
-const PostNotesOverlayControl
- = require('../controls/post_notes_overlay_control.js');
-const TagAutoCompleteControl =
- require('../controls/tag_auto_complete_control.js');
+const router = require("../router.js");
+const uri = require("../util/uri.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const PostContentControl = require("../controls/post_content_control.js");
+const PostNotesOverlayControl = require("../controls/post_notes_overlay_control.js");
+const TagAutoCompleteControl = require("../controls/tag_auto_complete_control.js");
-const template = views.getTemplate('home');
-const footerTemplate = views.getTemplate('home-footer');
-const featuredPostTemplate = views.getTemplate('home-featured-post');
+const template = views.getTemplate("home");
+const footerTemplate = views.getTemplate("home-footer");
+const featuredPostTemplate = views.getTemplate("home-featured-post");
class HomeView {
constructor(ctx) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
this._ctx = ctx;
const sourceNode = template(ctx);
@@ -27,11 +25,16 @@ class HomeView {
this._autoCompleteControl = new TagAutoCompleteControl(
this._searchInputNode,
{
- confirm: tag => this._autoCompleteControl.replaceSelectedText(
- misc.escapeSearchTerm(tag.names[0]), true),
- });
- this._formNode.addEventListener(
- 'submit', e => this._evtFormSubmit(e));
+ confirm: (tag) =>
+ this._autoCompleteControl.replaceSelectedText(
+ misc.escapeSearchTerm(tag.names[0]),
+ true
+ ),
+ }
+ );
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtFormSubmit(e)
+ );
}
}
@@ -46,59 +49,67 @@ class HomeView {
setStats(stats) {
views.replaceContent(
this._footerContainerNode,
- footerTemplate(Object.assign({}, stats, this._ctx)));
+ footerTemplate(Object.assign({}, stats, this._ctx))
+ );
}
setFeaturedPost(postInfo) {
views.replaceContent(
- this._postInfoContainerNode, featuredPostTemplate(postInfo));
+ this._postInfoContainerNode,
+ featuredPostTemplate(postInfo)
+ );
if (this._postContainerNode && postInfo.featuredPost) {
this._postContentControl = new PostContentControl(
this._postContainerNode,
postInfo.featuredPost,
() => {
- return [
- window.innerWidth * 0.8,
- window.innerHeight * 0.7,
- ];
+ return [window.innerWidth * 0.8, window.innerHeight * 0.7];
},
- 'fit-both');
+ "fit-both"
+ );
this._postNotesOverlay = new PostNotesOverlayControl(
- this._postContainerNode.querySelector('.post-overlay'),
- postInfo.featuredPost);
+ this._postContainerNode.querySelector(".post-overlay"),
+ postInfo.featuredPost
+ );
- if (postInfo.featuredPost.type === 'video'
- || postInfo.featuredPost.type === 'flash') {
+ if (
+ postInfo.featuredPost.type === "video" ||
+ postInfo.featuredPost.type === "flash"
+ ) {
this._postContentControl.disableOverlay();
}
}
}
get _footerContainerNode() {
- return this._hostNode.querySelector('.footer-container');
+ return this._hostNode.querySelector(".footer-container");
}
get _postInfoContainerNode() {
- return this._hostNode.querySelector('.post-info-container');
+ return this._hostNode.querySelector(".post-info-container");
}
get _postContainerNode() {
- return this._hostNode.querySelector('.post-container');
+ return this._hostNode.querySelector(".post-container");
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _searchInputNode() {
- return this._formNode.querySelector('input[name=search-text]');
+ return this._formNode.querySelector("input[name=search-text]");
}
_evtFormSubmit(e) {
e.preventDefault();
this._searchInputNode.blur();
- router.show(uri.formatClientLink('posts', {query: this._searchInputNode.value}));
+ router.show(
+ uri.formatClientLink("posts", {
+ query: this._searchInputNode.value,
+ })
+ );
}
}
diff --git a/client/js/views/login_view.js b/client/js/views/login_view.js
index 2c05332c..64d49f97 100644
--- a/client/js/views/login_view.js
+++ b/client/js/views/login_view.js
@@ -1,52 +1,63 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('login');
+const template = views.getTemplate("login");
class LoginView extends events.EventTarget {
constructor() {
super();
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
- views.replaceContent(this._hostNode, template({
- userNamePattern: api.getUserNameRegex(),
- passwordPattern: api.getPasswordRegex(),
- canSendMails: api.canSendMails(),
- }));
+ views.replaceContent(
+ this._hostNode,
+ template({
+ userNamePattern: api.getUserNameRegex(),
+ passwordPattern: api.getPasswordRegex(),
+ canSendMails: api.canSendMails(),
+ })
+ );
views.syncScrollPosition();
views.decorateValidator(this._formNode);
- this._userNameInputNode.setAttribute('pattern', api.getUserNameRegex());
- this._passwordInputNode.setAttribute('pattern', api.getPasswordRegex());
- this._formNode.addEventListener('submit', e => {
+ this._userNameInputNode.setAttribute(
+ "pattern",
+ api.getUserNameRegex()
+ );
+ this._passwordInputNode.setAttribute(
+ "pattern",
+ api.getPasswordRegex()
+ );
+ this._formNode.addEventListener("submit", (e) => {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- name: this._userNameInputNode.value,
- password: this._passwordInputNode.value,
- remember: this._rememberInputNode.checked,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ name: this._userNameInputNode.value,
+ password: this._passwordInputNode.value,
+ remember: this._rememberInputNode.checked,
+ },
+ })
+ );
});
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _userNameInputNode() {
- return this._formNode.querySelector('[name=name]');
+ return this._formNode.querySelector("[name=name]");
}
get _passwordInputNode() {
- return this._formNode.querySelector('[name=password]');
+ return this._formNode.querySelector("[name=password]");
}
get _rememberInputNode() {
- return this._formNode.querySelector('[name=remember-user]');
+ return this._formNode.querySelector("[name=remember-user]");
}
disableForm() {
diff --git a/client/js/views/manual_page_view.js b/client/js/views/manual_page_view.js
index 6798e9bc..390994df 100644
--- a/client/js/views/manual_page_view.js
+++ b/client/js/views/manual_page_view.js
@@ -1,11 +1,11 @@
-'use strict';
+"use strict";
-const router = require('../router.js');
-const keyboard = require('../util/keyboard.js');
-const views = require('../util/views.js');
+const router = require("../router.js");
+const keyboard = require("../util/keyboard.js");
+const views = require("../util/views.js");
-const holderTemplate = views.getTemplate('manual-pager');
-const navTemplate = views.getTemplate('manual-pager-nav');
+const holderTemplate = views.getTemplate("manual-pager");
+const navTemplate = views.getTemplate("manual-pager-nav");
function _removeConsecutiveDuplicates(a) {
return a.filter((item, pos, ary) => {
@@ -22,9 +22,7 @@ function _getVisiblePageNumbers(currentPage, totalPages) {
for (let i = totalPages - threshold; i <= totalPages; i++) {
pagesVisible.push(i);
}
- for (let i = currentPage - threshold;
- i <= currentPage + threshold;
- i++) {
+ for (let i = currentPage - threshold; i <= currentPage + threshold; i++) {
pagesVisible.push(i);
}
pagesVisible = pagesVisible.filter((item, pos, ary) => {
@@ -38,18 +36,22 @@ function _getVisiblePageNumbers(currentPage, totalPages) {
}
function _getPages(
- currentPage, pageNumbers, limit, defaultLimit, removedItems) {
+ currentPage,
+ pageNumbers,
+ limit,
+ defaultLimit,
+ removedItems
+) {
const pages = new Map();
let prevPage = 0;
for (let page of pageNumbers) {
if (page !== prevPage + 1) {
- pages.set(page - 1, {ellipsis: true});
+ pages.set(page - 1, { ellipsis: true });
}
pages.set(page, {
number: page,
offset:
- ((page - 1) * limit) -
- (page > currentPage ? removedItems : 0),
+ (page - 1) * limit - (page > currentPage ? removedItems : 0),
limit: limit === defaultLimit ? null : limit,
active: currentPage === page,
});
@@ -60,7 +62,7 @@ function _getPages(
class ManualPageView {
constructor(ctx) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, holderTemplate());
}
@@ -70,52 +72,65 @@ class ManualPageView {
this.clearMessages();
views.emptyContent(this._pageNavNode);
- ctx.requestPage(offset, limit).then(response => {
- ctx.pageRenderer({
- parameters: ctx.parameters,
- response: response,
- hostNode: this._pageContentHolderNode,
- });
+ ctx.requestPage(offset, limit).then(
+ (response) => {
+ ctx.pageRenderer({
+ parameters: ctx.parameters,
+ response: response,
+ hostNode: this._pageContentHolderNode,
+ });
- keyboard.bind(['a', 'left'], () => {
- this._navigateToPrevNextPage('prev');
- });
- keyboard.bind(['d', 'right'], () => {
- this._navigateToPrevNextPage('next');
- });
+ keyboard.bind(["a", "left"], () => {
+ this._navigateToPrevNextPage("prev");
+ });
+ keyboard.bind(["d", "right"], () => {
+ this._navigateToPrevNextPage("next");
+ });
- let removedItems = 0;
- if (response.total) {
- this._refreshNav(
- offset, limit, response.total, removedItems, ctx);
+ let removedItems = 0;
+ if (response.total) {
+ this._refreshNav(
+ offset,
+ limit,
+ response.total,
+ removedItems,
+ ctx
+ );
+ }
+
+ if (!response.results.length) {
+ this.showInfo("No data to show");
+ }
+
+ response.results.addEventListener("remove", (e) => {
+ removedItems++;
+ this._refreshNav(
+ offset,
+ limit,
+ response.total,
+ removedItems,
+ ctx
+ );
+ });
+
+ views.syncScrollPosition();
+ },
+ (response) => {
+ this.showError(response.message);
}
-
- if (!response.results.length) {
- this.showInfo('No data to show');
- }
-
- response.results.addEventListener('remove', e => {
- removedItems++;
- this._refreshNav(
- offset, limit, response.total, removedItems, ctx);
- });
-
- views.syncScrollPosition();
- }, response => {
- this.showError(response.message);
- });
+ );
}
get pageHeaderHolderNode() {
- return this._hostNode.querySelector('.page-header-holder');
+ return this._hostNode.querySelector(".page-header-holder");
}
get _pageContentHolderNode() {
- return this._hostNode.querySelector('.page-content-holder');
+ return this._hostNode.querySelector(".page-content-holder");
}
get _pageNavNode() {
- return this._hostNode.querySelector('.page-nav');
+ return this._hostNode.querySelector(".page-nav");
}
clearMessages() {
@@ -135,11 +150,11 @@ class ManualPageView {
}
_navigateToPrevNextPage(className) {
- const linkNode = this._hostNode.querySelector('a.' + className);
- if (linkNode.classList.contains('disabled')) {
+ const linkNode = this._hostNode.querySelector("a." + className);
+ if (linkNode.classList.contains("disabled")) {
return;
}
- router.show(linkNode.getAttribute('href'));
+ router.show(linkNode.getAttribute("href"));
}
_refreshNav(offset, limit, total, removedItems, ctx) {
@@ -147,7 +162,12 @@ class ManualPageView {
const totalPages = Math.ceil((total - removedItems) / limit);
const pageNumbers = _getVisiblePageNumbers(currentPage, totalPages);
const pages = _getPages(
- currentPage, pageNumbers, limit, ctx.defaultLimit, removedItems);
+ currentPage,
+ pageNumbers,
+ limit,
+ ctx.defaultLimit,
+ removedItems
+ );
views.replaceContent(
this._pageNavNode,
@@ -158,7 +178,8 @@ class ManualPageView {
currentPage: currentPage,
totalPages: totalPages,
pages: pages,
- }));
+ })
+ );
}
}
diff --git a/client/js/views/not_found_view.js b/client/js/views/not_found_view.js
index 487613b5..c930b09f 100644
--- a/client/js/views/not_found_view.js
+++ b/client/js/views/not_found_view.js
@@ -1,14 +1,14 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('not-found');
+const template = views.getTemplate("not-found");
class NotFoundView {
constructor(path) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
- const sourceNode = template({path: path});
+ const sourceNode = template({ path: path });
views.replaceContent(this._hostNode, sourceNode);
views.syncScrollPosition();
}
diff --git a/client/js/views/password_reset_view.js b/client/js/views/password_reset_view.js
index 685fe5a0..82a7d50c 100644
--- a/client/js/views/password_reset_view.js
+++ b/client/js/views/password_reset_view.js
@@ -1,30 +1,35 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('password-reset');
+const template = views.getTemplate("password-reset");
class PasswordResetView extends events.EventTarget {
constructor() {
super();
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
- views.replaceContent(this._hostNode, template({
- canSendMails: api.canSendMails(),
- contactEmail: api.getContactEmail(),
- }));
+ views.replaceContent(
+ this._hostNode,
+ template({
+ canSendMails: api.canSendMails(),
+ contactEmail: api.getContactEmail(),
+ })
+ );
views.syncScrollPosition();
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => {
+ this._formNode.addEventListener("submit", (e) => {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- userNameOrEmail: this._userNameOrEmailFieldNode.value,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ userNameOrEmail: this._userNameOrEmailFieldNode.value,
+ },
+ })
+ );
});
}
@@ -49,11 +54,11 @@ class PasswordResetView extends events.EventTarget {
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _userNameOrEmailFieldNode() {
- return this._formNode.querySelector('[name=user-name]');
+ return this._formNode.querySelector("[name=user-name]");
}
}
diff --git a/client/js/views/pool_categories_view.js b/client/js/views/pool_categories_view.js
index 19283581..ac381d52 100644
--- a/client/js/views/pool_categories_view.js
+++ b/client/js/views/pool_categories_view.js
@@ -1,17 +1,17 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const PoolCategory = require('../models/pool_category.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const PoolCategory = require("../models/pool_category.js");
-const template = views.getTemplate('pool-categories');
-const rowTemplate = views.getTemplate('pool-category-row');
+const template = views.getTemplate("pool-categories");
+const rowTemplate = views.getTemplate("pool-category-row");
class PoolCategoriesView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, template(ctx));
views.syncScrollPosition();
@@ -31,18 +31,22 @@ class PoolCategoriesView extends events.EventTarget {
}
if (this._addLinkNode) {
- this._addLinkNode.addEventListener(
- 'click', e => this._evtAddButtonClick(e));
+ this._addLinkNode.addEventListener("click", (e) =>
+ this._evtAddButtonClick(e)
+ );
}
- ctx.poolCategories.addEventListener(
- 'add', e => this._evtPoolCategoryAdded(e));
+ ctx.poolCategories.addEventListener("add", (e) =>
+ this._evtPoolCategoryAdded(e)
+ );
- ctx.poolCategories.addEventListener(
- 'remove', e => this._evtPoolCategoryDeleted(e));
+ ctx.poolCategories.addEventListener("remove", (e) =>
+ this._evtPoolCategoryDeleted(e)
+ );
- this._formNode.addEventListener(
- 'submit', e => this._evtSaveButtonClick(e, ctx));
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtSaveButtonClick(e, ctx)
+ );
}
enableForm() {
@@ -66,44 +70,48 @@ class PoolCategoriesView extends events.EventTarget {
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _tableBodyNode() {
- return this._hostNode.querySelector('tbody');
+ return this._hostNode.querySelector("tbody");
}
get _addLinkNode() {
- return this._hostNode.querySelector('a.add');
+ return this._hostNode.querySelector("a.add");
}
_addPoolCategoryRowNode(poolCategory) {
const rowNode = rowTemplate(
- Object.assign(
- {}, this._ctx, {poolCategory: poolCategory}));
+ Object.assign({}, this._ctx, { poolCategory: poolCategory })
+ );
- const nameInput = rowNode.querySelector('.name input');
+ const nameInput = rowNode.querySelector(".name input");
if (nameInput) {
- nameInput.addEventListener(
- 'change', e => this._evtNameChange(e, rowNode));
+ nameInput.addEventListener("change", (e) =>
+ this._evtNameChange(e, rowNode)
+ );
}
- const colorInput = rowNode.querySelector('.color input');
+ const colorInput = rowNode.querySelector(".color input");
if (colorInput) {
- colorInput.addEventListener(
- 'change', e => this._evtColorChange(e, rowNode));
+ colorInput.addEventListener("change", (e) =>
+ this._evtColorChange(e, rowNode)
+ );
}
- const removeLinkNode = rowNode.querySelector('.remove a');
+ const removeLinkNode = rowNode.querySelector(".remove a");
if (removeLinkNode) {
- removeLinkNode.addEventListener(
- 'click', e => this._evtDeleteButtonClick(e, rowNode));
+ removeLinkNode.addEventListener("click", (e) =>
+ this._evtDeleteButtonClick(e, rowNode)
+ );
}
- const defaultLinkNode = rowNode.querySelector('.set-default a');
+ const defaultLinkNode = rowNode.querySelector(".set-default a");
if (defaultLinkNode) {
- defaultLinkNode.addEventListener(
- 'click', e => this._evtSetDefaultButtonClick(e, rowNode));
+ defaultLinkNode.addEventListener("click", (e) =>
+ this._evtSetDefaultButtonClick(e, rowNode)
+ );
}
this._tableBodyNode.appendChild(rowNode);
@@ -141,7 +149,7 @@ class PoolCategoriesView extends events.EventTarget {
_evtDeleteButtonClick(e, rowNode, link) {
e.preventDefault();
- if (e.target.classList.contains('inactive')) {
+ if (e.target.classList.contains("inactive")) {
return;
}
this._ctx.poolCategories.remove(rowNode._poolCategory);
@@ -150,16 +158,16 @@ class PoolCategoriesView extends events.EventTarget {
_evtSetDefaultButtonClick(e, rowNode) {
e.preventDefault();
this._ctx.poolCategories.defaultCategory = rowNode._poolCategory;
- const oldRowNode = rowNode.parentNode.querySelector('tr.default');
+ const oldRowNode = rowNode.parentNode.querySelector("tr.default");
if (oldRowNode) {
- oldRowNode.classList.remove('default');
+ oldRowNode.classList.remove("default");
}
- rowNode.classList.add('default');
+ rowNode.classList.add("default");
}
_evtSaveButtonClick(e, ctx) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit'));
+ this.dispatchEvent(new CustomEvent("submit"));
}
}
diff --git a/client/js/views/pool_create_view.js b/client/js/views/pool_create_view.js
index f22cf716..fc75f452 100644
--- a/client/js/views/pool_create_view.js
+++ b/client/js/views/pool_create_view.js
@@ -1,41 +1,43 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const Pool = require('../models/pool.js')
+const events = require("../events.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const Pool = require("../models/pool.js");
-const template = views.getTemplate('pool-create');
+const template = views.getTemplate("pool-create");
class PoolCreateView extends events.EventTarget {
constructor(ctx) {
super();
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, template(ctx));
views.decorateValidator(this._formNode);
if (this._namesFieldNode) {
- this._namesFieldNode.addEventListener(
- 'input', e => this._evtNameInput(e));
+ this._namesFieldNode.addEventListener("input", (e) =>
+ this._evtNameInput(e)
+ );
}
if (this._postsFieldNode) {
- this._postsFieldNode.addEventListener(
- 'input', e => this._evtPostsInput(e));
+ this._postsFieldNode.addEventListener("input", (e) =>
+ this._evtPostsInput(e)
+ );
}
for (let node of this._formNode.querySelectorAll(
- 'input, select, textarea, posts')) {
- node.addEventListener(
- 'change', e => {
- this.dispatchEvent(new CustomEvent('change'));
- });
+ "input, select, textarea, posts"
+ )) {
+ node.addEventListener("change", (e) => {
+ this.dispatchEvent(new CustomEvent("change"));
+ });
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -64,19 +66,21 @@ class PoolCreateView extends events.EventTarget {
if (!list.length) {
this._namesFieldNode.setCustomValidity(
- 'Pools must have at least one name.');
+ "Pools must have at least one name."
+ );
return;
}
for (let item of list) {
if (!regex.test(item)) {
this._namesFieldNode.setCustomValidity(
- `Pool name "${item}" contains invalid symbols.`);
+ `Pool name "${item}" contains invalid symbols.`
+ );
return;
}
}
- this._namesFieldNode.setCustomValidity('');
+ this._namesFieldNode.setCustomValidity("");
}
_evtPostsInput(e) {
@@ -86,46 +90,50 @@ class PoolCreateView extends events.EventTarget {
for (let item of list) {
if (!regex.test(item)) {
this._postsFieldNode.setCustomValidity(
- `Pool ID "${item}" is not an integer.`);
+ `Pool ID "${item}" is not an integer.`
+ );
return;
}
}
- this._postsFieldNode.setCustomValidity('');
+ this._postsFieldNode.setCustomValidity("");
}
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- names: misc.splitByWhitespace(this._namesFieldNode.value),
- category: this._categoryFieldNode.value,
- description: this._descriptionFieldNode.value,
- posts: misc.splitByWhitespace(this._postsFieldNode.value)
- .map(i => parseInt(i))
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ names: misc.splitByWhitespace(this._namesFieldNode.value),
+ category: this._categoryFieldNode.value,
+ description: this._descriptionFieldNode.value,
+ posts: misc
+ .splitByWhitespace(this._postsFieldNode.value)
+ .map((i) => parseInt(i)),
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _namesFieldNode() {
- return this._formNode.querySelector('.names input');
+ return this._formNode.querySelector(".names input");
}
get _categoryFieldNode() {
- return this._formNode.querySelector('.category select');
+ return this._formNode.querySelector(".category select");
}
get _descriptionFieldNode() {
- return this._formNode.querySelector('.description textarea');
+ return this._formNode.querySelector(".description textarea");
}
get _postsFieldNode() {
- return this._formNode.querySelector('.posts input');
+ return this._formNode.querySelector(".posts input");
}
}
diff --git a/client/js/views/pool_delete_view.js b/client/js/views/pool_delete_view.js
index d3707dbe..fa92a492 100644
--- a/client/js/views/pool_delete_view.js
+++ b/client/js/views/pool_delete_view.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('pool-delete');
+const template = views.getTemplate("pool-delete");
class PoolDeleteView extends events.EventTarget {
constructor(ctx) {
@@ -13,7 +13,7 @@ class PoolDeleteView extends events.EventTarget {
this._pool = ctx.pool;
views.replaceContent(this._hostNode, template(ctx));
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -38,15 +38,17 @@ class PoolDeleteView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- pool: this._pool,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ pool: this._pool,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
}
diff --git a/client/js/views/pool_edit_view.js b/client/js/views/pool_edit_view.js
index 9c118b3c..b30ab9b7 100644
--- a/client/js/views/pool_edit_view.js
+++ b/client/js/views/pool_edit_view.js
@@ -1,12 +1,12 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const Post = require('../models/post.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const Post = require("../models/post.js");
-const template = views.getTemplate('pool-edit');
+const template = views.getTemplate("pool-edit");
class PoolEditView extends events.EventTarget {
constructor(ctx) {
@@ -19,24 +19,26 @@ class PoolEditView extends events.EventTarget {
views.decorateValidator(this._formNode);
if (this._namesFieldNode) {
- this._namesFieldNode.addEventListener(
- 'input', e => this._evtNameInput(e));
+ this._namesFieldNode.addEventListener("input", (e) =>
+ this._evtNameInput(e)
+ );
}
if (this._postsFieldNode) {
- this._postsFieldNode.addEventListener(
- 'input', e => this._evtPostsInput(e));
+ this._postsFieldNode.addEventListener("input", (e) =>
+ this._evtPostsInput(e)
+ );
}
for (let node of this._formNode.querySelectorAll(
- 'input, select, textarea, posts')) {
- node.addEventListener(
- 'change', e => {
- this.dispatchEvent(new CustomEvent('change'));
- });
+ "input, select, textarea, posts"
+ )) {
+ node.addEventListener("change", (e) => {
+ this.dispatchEvent(new CustomEvent("change"));
+ });
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -65,19 +67,21 @@ class PoolEditView extends events.EventTarget {
if (!list.length) {
this._namesFieldNode.setCustomValidity(
- 'Pools must have at least one name.');
+ "Pools must have at least one name."
+ );
return;
}
for (let item of list) {
if (!regex.test(item)) {
this._namesFieldNode.setCustomValidity(
- `Pool name "${item}" contains invalid symbols.`);
+ `Pool name "${item}" contains invalid symbols.`
+ );
return;
}
}
- this._namesFieldNode.setCustomValidity('');
+ this._namesFieldNode.setCustomValidity("");
}
_evtPostsInput(e) {
@@ -87,57 +91,60 @@ class PoolEditView extends events.EventTarget {
for (let item of list) {
if (!regex.test(item)) {
this._postsFieldNode.setCustomValidity(
- `Pool ID "${item}" is not an integer.`);
+ `Pool ID "${item}" is not an integer.`
+ );
return;
}
}
- this._postsFieldNode.setCustomValidity('');
+ this._postsFieldNode.setCustomValidity("");
}
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- pool: this._pool,
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ pool: this._pool,
- names: this._namesFieldNode ?
- misc.splitByWhitespace(this._namesFieldNode.value) :
- undefined,
+ names: this._namesFieldNode
+ ? misc.splitByWhitespace(this._namesFieldNode.value)
+ : undefined,
- category: this._categoryFieldNode ?
- this._categoryFieldNode.value :
- undefined,
+ category: this._categoryFieldNode
+ ? this._categoryFieldNode.value
+ : undefined,
- description: this._descriptionFieldNode ?
- this._descriptionFieldNode.value :
- undefined,
+ description: this._descriptionFieldNode
+ ? this._descriptionFieldNode.value
+ : undefined,
- posts: this._postsFieldNode ?
- misc.splitByWhitespace(this._postsFieldNode.value) :
- undefined,
- },
- }));
+ posts: this._postsFieldNode
+ ? misc.splitByWhitespace(this._postsFieldNode.value)
+ : undefined,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _namesFieldNode() {
- return this._formNode.querySelector('.names input');
+ return this._formNode.querySelector(".names input");
}
get _categoryFieldNode() {
- return this._formNode.querySelector('.category select');
+ return this._formNode.querySelector(".category select");
}
get _descriptionFieldNode() {
- return this._formNode.querySelector('.description textarea');
+ return this._formNode.querySelector(".description textarea");
}
get _postsFieldNode() {
- return this._formNode.querySelector('.posts input');
+ return this._formNode.querySelector(".posts input");
}
}
diff --git a/client/js/views/pool_merge_view.js b/client/js/views/pool_merge_view.js
index 09c4e8d0..f0ca15a3 100644
--- a/client/js/views/pool_merge_view.js
+++ b/client/js/views/pool_merge_view.js
@@ -1,12 +1,11 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
-const PoolAutoCompleteControl =
- require('../controls/pool_auto_complete_control.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
+const PoolAutoCompleteControl = require("../controls/pool_auto_complete_control.js");
-const template = views.getTemplate('pool-merge');
+const template = views.getTemplate("pool-merge");
class PoolMergeView extends events.EventTarget {
constructor(ctx) {
@@ -23,15 +22,18 @@ class PoolMergeView extends events.EventTarget {
this._autoCompleteControl = new PoolAutoCompleteControl(
this._targetPoolFieldNode,
{
- confirm: pool => {
+ confirm: (pool) => {
this._targetPoolId = pool.id;
this._autoCompleteControl.replaceSelectedText(
- pool.names[0], false);
- }
- });
+ pool.names[0],
+ false
+ );
+ },
+ }
+ );
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -56,24 +58,26 @@ class PoolMergeView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- pool: this._pool,
- targetPoolId: this._targetPoolId
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ pool: this._pool,
+ targetPoolId: this._targetPoolId,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _targetPoolFieldNode() {
- return this._formNode.querySelector('input[name=target-pool]');
+ return this._formNode.querySelector("input[name=target-pool]");
}
get _addAliasCheckboxNode() {
- return this._formNode.querySelector('input[name=alias]');
+ return this._formNode.querySelector("input[name=alias]");
}
}
diff --git a/client/js/views/pool_summary_view.js b/client/js/views/pool_summary_view.js
index a5808cc0..37ce1c28 100644
--- a/client/js/views/pool_summary_view.js
+++ b/client/js/views/pool_summary_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('pool-summary');
+const template = views.getTemplate("pool-summary");
class PoolSummaryView {
constructor(ctx) {
diff --git a/client/js/views/pool_view.js b/client/js/views/pool_view.js
index f296e906..12b77040 100644
--- a/client/js/views/pool_view.js
+++ b/client/js/views/pool_view.js
@@ -1,26 +1,26 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const misc = require('../util/misc.js');
-const PoolSummaryView = require('./pool_summary_view.js');
-const PoolEditView = require('./pool_edit_view.js');
-const PoolMergeView = require('./pool_merge_view.js');
-const PoolDeleteView = require('./pool_delete_view.js');
-const EmptyView = require('../views/empty_view.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const misc = require("../util/misc.js");
+const PoolSummaryView = require("./pool_summary_view.js");
+const PoolEditView = require("./pool_edit_view.js");
+const PoolMergeView = require("./pool_merge_view.js");
+const PoolDeleteView = require("./pool_delete_view.js");
+const EmptyView = require("../views/empty_view.js");
-const template = views.getTemplate('pool');
+const template = views.getTemplate("pool");
class PoolView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- ctx.pool.addEventListener('change', e => this._evtChange(e));
- ctx.section = ctx.section || 'summary';
+ ctx.pool.addEventListener("change", (e) => this._evtChange(e));
+ ctx.section = ctx.section || "summary";
ctx.getPrettyPoolName = misc.getPrettyPoolName;
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
this._install();
}
@@ -28,52 +28,54 @@ class PoolView extends events.EventTarget {
const ctx = this._ctx;
views.replaceContent(this._hostNode, template(ctx));
- for (let item of this._hostNode.querySelectorAll('[data-name]')) {
+ for (let item of this._hostNode.querySelectorAll("[data-name]")) {
item.classList.toggle(
- 'active', item.getAttribute('data-name') === ctx.section);
- if (item.getAttribute('data-name') === ctx.section) {
+ "active",
+ item.getAttribute("data-name") === ctx.section
+ );
+ if (item.getAttribute("data-name") === ctx.section) {
item.parentNode.scrollLeft =
item.getBoundingClientRect().left -
- item.parentNode.getBoundingClientRect().left
+ item.parentNode.getBoundingClientRect().left;
}
}
- ctx.hostNode = this._hostNode.querySelector('.pool-content-holder');
- if (ctx.section === 'edit') {
+ ctx.hostNode = this._hostNode.querySelector(".pool-content-holder");
+ if (ctx.section === "edit") {
if (!this._ctx.canEditAnything) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to edit pools.');
+ "You don't have privileges to edit pools."
+ );
} else {
this._view = new PoolEditView(ctx);
- events.proxyEvent(this._view, this, 'submit');
+ events.proxyEvent(this._view, this, "submit");
}
-
- } else if (ctx.section === 'merge') {
+ } else if (ctx.section === "merge") {
if (!this._ctx.canMerge) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to merge pools.');
+ "You don't have privileges to merge pools."
+ );
} else {
this._view = new PoolMergeView(ctx);
- events.proxyEvent(this._view, this, 'submit', 'merge');
+ events.proxyEvent(this._view, this, "submit", "merge");
}
-
- } else if (ctx.section === 'delete') {
+ } else if (ctx.section === "delete") {
if (!this._ctx.canDelete) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to delete pools.');
+ "You don't have privileges to delete pools."
+ );
} else {
this._view = new PoolDeleteView(ctx);
- events.proxyEvent(this._view, this, 'submit', 'delete');
+ events.proxyEvent(this._view, this, "submit", "delete");
}
-
} else {
this._view = new PoolSummaryView(ctx);
}
- events.proxyEvent(this._view, this, 'change');
+ events.proxyEvent(this._view, this, "change");
views.syncScrollPosition();
}
diff --git a/client/js/views/pools_header_view.js b/client/js/views/pools_header_view.js
index 65bbe1b1..cfc6c8bf 100644
--- a/client/js/views/pools_header_view.js
+++ b/client/js/views/pools_header_view.js
@@ -1,13 +1,12 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const misc = require('../util/misc.js');
-const search = require('../util/search.js');
-const views = require('../util/views.js');
-const PoolAutoCompleteControl =
- require('../controls/pool_auto_complete_control.js');
+const events = require("../events.js");
+const misc = require("../util/misc.js");
+const search = require("../util/search.js");
+const views = require("../util/views.js");
+const PoolAutoCompleteControl = require("../controls/pool_auto_complete_control.js");
-const template = views.getTemplate('pools-header');
+const template = views.getTemplate("pools-header");
class PoolsHeaderView extends events.EventTarget {
constructor(ctx) {
@@ -20,31 +19,41 @@ class PoolsHeaderView extends events.EventTarget {
this._autoCompleteControl = new PoolAutoCompleteControl(
this._queryInputNode,
{
- confirm: pool => this._autoCompleteControl.replaceSelectedText(
- misc.escapeSearchTerm(pool.names[0]), true),
- });
+ confirm: (pool) =>
+ this._autoCompleteControl.replaceSelectedText(
+ misc.escapeSearchTerm(pool.names[0]),
+ true
+ ),
+ }
+ );
}
search.searchInputNodeFocusHelper(this._queryInputNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _queryInputNode() {
- return this._hostNode.querySelector('[name=search-text]');
+ return this._hostNode.querySelector("[name=search-text]");
}
_evtSubmit(e) {
e.preventDefault();
this._queryInputNode.blur();
- this.dispatchEvent(new CustomEvent('navigate', {detail: {parameters: {
- query: this._queryInputNode.value,
- page: 1,
- }}}));
+ this.dispatchEvent(
+ new CustomEvent("navigate", {
+ detail: {
+ parameters: {
+ query: this._queryInputNode.value,
+ page: 1,
+ },
+ },
+ })
+ );
}
}
diff --git a/client/js/views/pools_page_view.js b/client/js/views/pools_page_view.js
index 234d4467..6230ef69 100644
--- a/client/js/views/pools_page_view.js
+++ b/client/js/views/pools_page_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('pools-page');
+const template = views.getTemplate("pools-page");
class PoolsPageView {
constructor(ctx) {
diff --git a/client/js/views/post_detail_view.js b/client/js/views/post_detail_view.js
index 14786d3f..587c41fa 100644
--- a/client/js/views/post_detail_view.js
+++ b/client/js/views/post_detail_view.js
@@ -1,21 +1,21 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const PostMergeView = require('./post_merge_view.js');
-const EmptyView = require('../views/empty_view.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const PostMergeView = require("./post_merge_view.js");
+const EmptyView = require("../views/empty_view.js");
-const template = views.getTemplate('post-detail');
+const template = views.getTemplate("post-detail");
class PostDetailView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- ctx.post.addEventListener('change', e => this._evtChange(e));
- ctx.section = ctx.section || 'summary';
+ ctx.post.addEventListener("change", (e) => this._evtChange(e));
+ ctx.section = ctx.section || "summary";
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
this._install();
}
@@ -23,28 +23,30 @@ class PostDetailView extends events.EventTarget {
const ctx = this._ctx;
views.replaceContent(this._hostNode, template(ctx));
- for (let item of this._hostNode.querySelectorAll('[data-name]')) {
+ for (let item of this._hostNode.querySelectorAll("[data-name]")) {
item.classList.toggle(
- 'active', item.getAttribute('data-name') === ctx.section);
- if (item.getAttribute('data-name') === ctx.section) {
+ "active",
+ item.getAttribute("data-name") === ctx.section
+ );
+ if (item.getAttribute("data-name") === ctx.section) {
item.parentNode.scrollLeft =
item.getBoundingClientRect().left -
- item.parentNode.getBoundingClientRect().left
+ item.parentNode.getBoundingClientRect().left;
}
}
- ctx.hostNode = this._hostNode.querySelector('.post-content-holder');
- if (ctx.section === 'merge') {
+ ctx.hostNode = this._hostNode.querySelector(".post-content-holder");
+ if (ctx.section === "merge") {
if (!this._ctx.canMerge) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to merge posts.');
+ "You don't have privileges to merge posts."
+ );
} else {
this._view = new PostMergeView(ctx);
- events.proxyEvent(this._view, this, 'select');
- events.proxyEvent(this._view, this, 'submit', 'merge');
+ events.proxyEvent(this._view, this, "select");
+ events.proxyEvent(this._view, this, "submit", "merge");
}
-
} else {
// this._view = new PostSummaryView(ctx);
}
diff --git a/client/js/views/post_main_view.js b/client/js/views/post_main_view.js
index 6a7472c5..73bb324f 100644
--- a/client/js/views/post_main_view.js
+++ b/client/js/views/post_main_view.js
@@ -1,35 +1,33 @@
-'use strict';
+"use strict";
-const iosCorrectedInnerHeight = require('ios-inner-height');
-const router = require('../router.js');
-const views = require('../util/views.js');
-const uri = require('../util/uri.js');
-const keyboard = require('../util/keyboard.js');
-const Touch = require('../util/touch.js');
-const PostContentControl = require('../controls/post_content_control.js');
-const PostNotesOverlayControl =
- require('../controls/post_notes_overlay_control.js');
-const PostReadonlySidebarControl =
- require('../controls/post_readonly_sidebar_control.js');
-const PostEditSidebarControl =
- require('../controls/post_edit_sidebar_control.js');
-const CommentControl = require('../controls/comment_control.js');
-const CommentListControl = require('../controls/comment_list_control.js');
+const iosCorrectedInnerHeight = require("ios-inner-height");
+const router = require("../router.js");
+const views = require("../util/views.js");
+const uri = require("../util/uri.js");
+const keyboard = require("../util/keyboard.js");
+const Touch = require("../util/touch.js");
+const PostContentControl = require("../controls/post_content_control.js");
+const PostNotesOverlayControl = require("../controls/post_notes_overlay_control.js");
+const PostReadonlySidebarControl = require("../controls/post_readonly_sidebar_control.js");
+const PostEditSidebarControl = require("../controls/post_edit_sidebar_control.js");
+const CommentControl = require("../controls/comment_control.js");
+const CommentListControl = require("../controls/comment_list_control.js");
-const template = views.getTemplate('post-main');
+const template = views.getTemplate("post-main");
class PostMainView {
constructor(ctx) {
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
const sourceNode = template(ctx);
- const postContainerNode = sourceNode.querySelector('.post-container');
- const sidebarNode = sourceNode.querySelector('.sidebar');
+ const postContainerNode = sourceNode.querySelector(".post-container");
+ const sidebarNode = sourceNode.querySelector(".sidebar");
views.replaceContent(this._hostNode, sourceNode);
views.syncScrollPosition();
- const topNavigationNode =
- document.body.querySelector('#top-navigation');
+ const topNavigationNode = document.body.querySelector(
+ "#top-navigation"
+ );
this._postContentControl = new PostContentControl(
postContainerNode,
@@ -43,15 +41,17 @@ class PostMainView {
margin,
iosCorrectedInnerHeight() -
topNavigationNode.getBoundingClientRect().height -
- (margin * 2),
+ margin * 2,
];
- });
+ }
+ );
this._postNotesOverlayControl = new PostNotesOverlayControl(
- postContainerNode.querySelector('.post-overlay'),
- ctx.post);
+ postContainerNode.querySelector(".post-overlay"),
+ ctx.post
+ );
- if (ctx.post.type === 'video' || ctx.post.type === 'flash') {
+ if (ctx.post.type === "video" || ctx.post.type === "flash") {
this._postContentControl.disableOverlay();
}
@@ -71,16 +71,16 @@ class PostMainView {
}
};
- keyboard.bind('e', () => {
+ keyboard.bind("e", () => {
if (ctx.editMode) {
- router.show(uri.formatClientLink('post', ctx.post.id));
+ router.show(uri.formatClientLink("post", ctx.post.id));
} else {
- router.show(uri.formatClientLink('post', ctx.post.id, 'edit'));
+ router.show(uri.formatClientLink("post", ctx.post.id, "edit"));
}
});
- keyboard.bind(['a', 'left'], showPreviousImage);
- keyboard.bind(['d', 'right'], showNextImage);
- keyboard.bind('del', e => {
+ keyboard.bind(["a", "left"], showPreviousImage);
+ keyboard.bind(["d", "right"], showNextImage);
+ keyboard.bind("del", (e) => {
if (ctx.editMode) {
this.sidebarControl._evtDeleteClick(e);
}
@@ -90,53 +90,65 @@ class PostMainView {
postContainerNode,
() => {
if (!ctx.editMode) {
- showPreviousImage()
+ showPreviousImage();
}
},
() => {
if (!ctx.editMode) {
- showNextImage()
+ showNextImage();
}
}
- )
+ );
}
_installSidebar(ctx) {
const sidebarContainerNode = document.querySelector(
- '#content-holder .sidebar-container');
+ "#content-holder .sidebar-container"
+ );
if (ctx.editMode) {
this.sidebarControl = new PostEditSidebarControl(
sidebarContainerNode,
ctx.post,
this._postContentControl,
- this._postNotesOverlayControl);
+ this._postNotesOverlayControl
+ );
} else {
this.sidebarControl = new PostReadonlySidebarControl(
- sidebarContainerNode, ctx.post, this._postContentControl);
+ sidebarContainerNode,
+ ctx.post,
+ this._postContentControl
+ );
}
}
_installCommentForm() {
const commentFormContainer = document.querySelector(
- '#content-holder .comment-form-container');
+ "#content-holder .comment-form-container"
+ );
if (!commentFormContainer) {
return;
}
this.commentControl = new CommentControl(
- commentFormContainer, null, true);
+ commentFormContainer,
+ null,
+ true
+ );
}
_installComments(comments) {
const commentsContainerNode = document.querySelector(
- '#content-holder .comments-container');
+ "#content-holder .comments-container"
+ );
if (!commentsContainerNode) {
return;
}
this.commentListControl = new CommentListControl(
- commentsContainerNode, comments);
+ commentsContainerNode,
+ comments
+ );
}
}
diff --git a/client/js/views/post_merge_view.js b/client/js/views/post_merge_view.js
index 952e3a33..20924d37 100644
--- a/client/js/views/post_merge_view.js
+++ b/client/js/views/post_merge_view.js
@@ -1,11 +1,11 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
const KEY_RETURN = 13;
-const template = views.getTemplate('post-merge');
-const sideTemplate = views.getTemplate('post-merge-side');
+const template = views.getTemplate("post-merge");
+const sideTemplate = views.getTemplate("post-merge-side");
class PostMergeView extends events.EventTarget {
constructor(ctx) {
@@ -23,7 +23,7 @@ class PostMergeView extends events.EventTarget {
this._refreshLeftSide();
this._refreshRightSide();
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -52,47 +52,61 @@ class PostMergeView extends events.EventTarget {
}
_refreshLeftSide() {
- this._refreshSide(this._leftPost, this._leftSideNode, 'left', false);
+ this._refreshSide(this._leftPost, this._leftSideNode, "left", false);
}
_refreshRightSide() {
- this._refreshSide(this._rightPost, this._rightSideNode, 'right', true);
+ this._refreshSide(this._rightPost, this._rightSideNode, "right", true);
}
_refreshSide(post, sideNode, sideName, isEditable) {
views.replaceContent(
sideNode,
- sideTemplate(Object.assign({}, this._ctx, {post: post,
- name: sideName,
- editable: isEditable})));
+ sideTemplate(
+ Object.assign({}, this._ctx, {
+ post: post,
+ name: sideName,
+ editable: isEditable,
+ })
+ )
+ );
- let postIdNode = sideNode.querySelector('input[type=text]');
- let searchButtonNode = sideNode.querySelector('input[type=button]');
+ let postIdNode = sideNode.querySelector("input[type=text]");
+ let searchButtonNode = sideNode.querySelector("input[type=button]");
if (isEditable) {
- postIdNode.addEventListener(
- 'keydown', e => this._evtPostSearchFieldKeyDown(e));
- searchButtonNode.addEventListener(
- 'click', e => this._evtPostSearchButtonClick(e, postIdNode));
+ postIdNode.addEventListener("keydown", (e) =>
+ this._evtPostSearchFieldKeyDown(e)
+ );
+ searchButtonNode.addEventListener("click", (e) =>
+ this._evtPostSearchButtonClick(e, postIdNode)
+ );
}
}
_evtSubmit(e) {
e.preventDefault();
const checkedTargetPost = this._formNode.querySelector(
- '.target-post :checked').value;
+ ".target-post :checked"
+ ).value;
const checkedTargetPostContent = this._formNode.querySelector(
- '.target-post-content :checked').value;
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- post: checkedTargetPost === 'left' ?
- this._rightPost :
- this._leftPost,
- targetPost: checkedTargetPost === 'left' ?
- this._leftPost :
- this._rightPost,
- useOldContent: checkedTargetPostContent !== checkedTargetPost,
- },
- }));
+ ".target-post-content :checked"
+ ).value;
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ post:
+ checkedTargetPost === "left"
+ ? this._rightPost
+ : this._leftPost,
+ targetPost:
+ checkedTargetPost === "left"
+ ? this._leftPost
+ : this._rightPost,
+ useOldContent:
+ checkedTargetPostContent !== checkedTargetPost,
+ },
+ })
+ );
}
_evtPostSearchFieldKeyDown(e) {
@@ -102,33 +116,37 @@ class PostMergeView extends events.EventTarget {
}
e.target.blur();
e.preventDefault();
- this.dispatchEvent(new CustomEvent('select', {
- detail: {
- postId: e.target.value,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("select", {
+ detail: {
+ postId: e.target.value,
+ },
+ })
+ );
}
_evtPostSearchButtonClick(e, textNode) {
e.target.blur();
e.preventDefault();
- this.dispatchEvent(new CustomEvent('select', {
- detail: {
- postId: textNode.value,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("select", {
+ detail: {
+ postId: textNode.value,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _leftSideNode() {
- return this._hostNode.querySelector('.left-post-container');
+ return this._hostNode.querySelector(".left-post-container");
}
get _rightSideNode() {
- return this._hostNode.querySelector('.right-post-container');
+ return this._hostNode.querySelector(".right-post-container");
}
}
diff --git a/client/js/views/post_upload_view.js b/client/js/views/post_upload_view.js
index f1d465fc..7daf4fb0 100644
--- a/client/js/views/post_upload_view.js
+++ b/client/js/views/post_upload_view.js
@@ -1,22 +1,24 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const FileDropperControl = require('../controls/file_dropper_control.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const FileDropperControl = require("../controls/file_dropper_control.js");
-const template = views.getTemplate('post-upload');
-const rowTemplate = views.getTemplate('post-upload-row');
+const template = views.getTemplate("post-upload");
+const rowTemplate = views.getTemplate("post-upload-row");
function _mimeTypeToPostType(mimeType) {
- return {
- 'application/x-shockwave-flash': 'flash',
- 'image/gif': 'image',
- 'image/jpeg': 'image',
- 'image/png': 'image',
- 'image/webp': 'image',
- 'video/mp4': 'video',
- 'video/webm': 'video',
- }[mimeType] || 'unknown';
+ return (
+ {
+ "application/x-shockwave-flash": "flash",
+ "image/gif": "image",
+ "image/jpeg": "image",
+ "image/png": "image",
+ "image/webp": "image",
+ "video/mp4": "video",
+ "video/webm": "video",
+ }[mimeType] || "unknown"
+ );
}
class Uploadable extends events.EventTarget {
@@ -24,18 +26,17 @@ class Uploadable extends events.EventTarget {
super();
this.lookalikes = [];
this.lookalikesConfirmed = false;
- this.safety = 'safe';
+ this.safety = "safe";
this.flags = [];
this.tags = [];
this.relations = [];
this.anonymous = false;
}
- destroy() {
- }
+ destroy() {}
get mimeType() {
- return 'application/octet-stream';
+ return "application/octet-stream";
}
get type() {
@@ -43,11 +44,11 @@ class Uploadable extends events.EventTarget {
}
get key() {
- throw new Error('Not implemented');
+ throw new Error("Not implemented");
}
get name() {
- throw new Error('Not implemented');
+ throw new Error("Not implemented");
}
}
@@ -62,10 +63,11 @@ class File extends Uploadable {
} else {
let reader = new FileReader();
reader.readAsDataURL(file);
- reader.addEventListener('load', e => {
+ reader.addEventListener("load", (e) => {
this._previewUrl = e.target.result;
this.dispatchEvent(
- new CustomEvent('finish', {detail: {uploadable: this}}));
+ new CustomEvent("finish", { detail: { uploadable: this } })
+ );
});
}
}
@@ -97,25 +99,25 @@ class Url extends Uploadable {
constructor(url) {
super();
this.url = url;
- this.dispatchEvent(new CustomEvent('finish'));
+ this.dispatchEvent(new CustomEvent("finish"));
}
get mimeType() {
let mime = {
- 'swf': 'application/x-shockwave-flash',
- 'jpg': 'image/jpeg',
- 'png': 'image/png',
- 'gif': 'image/gif',
- 'webp': 'image/webp',
- 'mp4': 'video/mp4',
- 'webm': 'video/webm',
+ swf: "application/x-shockwave-flash",
+ jpg: "image/jpeg",
+ png: "image/png",
+ gif: "image/gif",
+ webp: "image/webp",
+ mp4: "video/mp4",
+ webm: "video/webm",
};
for (let extension of Object.keys(mime)) {
- if (this.url.toLowerCase().indexOf('.' + extension) !== -1) {
+ if (this.url.toLowerCase().indexOf("." + extension) !== -1) {
return mime[extension];
}
}
- return 'unknown';
+ return "unknown";
}
get previewUrl() {
@@ -135,7 +137,7 @@ class PostUploadView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, template());
views.syncScrollPosition();
@@ -143,40 +145,46 @@ class PostUploadView extends events.EventTarget {
this._cancelButtonNode.disabled = true;
this._uploadables = [];
- this._uploadables.find = u => {
- return this._uploadables.findIndex(u2 => u.key === u2.key);
+ this._uploadables.find = (u) => {
+ return this._uploadables.findIndex((u2) => u.key === u2.key);
};
this._contentFileDropper = new FileDropperControl(
this._contentInputNode,
{
extraText:
- 'Allowed extensions: .jpg, .png, .gif, .webm, .mp4, .swf',
+ "Allowed extensions: .jpg, .png, .gif, .webm, .mp4, .swf",
allowUrls: true,
allowMultiple: true,
lock: false,
- });
- this._contentFileDropper.addEventListener(
- 'fileadd', e => this._evtFilesAdded(e));
- this._contentFileDropper.addEventListener(
- 'urladd', e => this._evtUrlsAdded(e));
+ }
+ );
+ this._contentFileDropper.addEventListener("fileadd", (e) =>
+ this._evtFilesAdded(e)
+ );
+ this._contentFileDropper.addEventListener("urladd", (e) =>
+ this._evtUrlsAdded(e)
+ );
- this._cancelButtonNode.addEventListener(
- 'click', e => this._evtCancelButtonClick(e));
- this._formNode.addEventListener('submit', e => this._evtFormSubmit(e));
- this._formNode.classList.add('inactive');
+ this._cancelButtonNode.addEventListener("click", (e) =>
+ this._evtCancelButtonClick(e)
+ );
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtFormSubmit(e)
+ );
+ this._formNode.classList.add("inactive");
}
enableForm() {
views.enableForm(this._formNode);
this._cancelButtonNode.disabled = true;
- this._formNode.classList.remove('uploading');
+ this._formNode.classList.remove("uploading");
}
disableForm() {
views.disableForm(this._formNode);
this._cancelButtonNode.disabled = false;
- this._formNode.classList.add('uploading');
+ this._formNode.classList.add("uploading");
}
clearMessages() {
@@ -201,7 +209,7 @@ class PostUploadView extends events.EventTarget {
}
addUploadables(uploadables) {
- this._formNode.classList.remove('inactive');
+ this._formNode.classList.remove("inactive");
let duplicatesFound = 0;
for (let uploadable of uploadables) {
if (this._uploadables.find(uploadable) !== -1) {
@@ -209,20 +217,22 @@ class PostUploadView extends events.EventTarget {
continue;
}
this._uploadables.push(uploadable);
- this._emit('change');
+ this._emit("change");
this._renderRowNode(uploadable);
- uploadable.addEventListener(
- 'finish', e => this._updateThumbnailNode(e.detail.uploadable));
+ uploadable.addEventListener("finish", (e) =>
+ this._updateThumbnailNode(e.detail.uploadable)
+ );
}
if (duplicatesFound) {
let message = null;
if (duplicatesFound < uploadables.length) {
- message = 'Some of the files were already added ' +
- 'and have been skipped.';
+ message =
+ "Some of the files were already added " +
+ "and have been skipped.";
} else if (duplicatesFound === 1) {
- message = 'This file was already added.';
+ message = "This file was already added.";
} else {
- message = 'These files were already added.';
+ message = "These files were already added.";
}
alert(message);
}
@@ -235,10 +245,10 @@ class PostUploadView extends events.EventTarget {
uploadable.destroy();
uploadable.rowNode.parentNode.removeChild(uploadable.rowNode);
this._uploadables.splice(this._uploadables.find(uploadable), 1);
- this._emit('change');
+ this._emit("change");
if (!this._uploadables.length) {
- this._formNode.classList.add('inactive');
- this._submitButtonNode.value = 'Upload all';
+ this._formNode.classList.add("inactive");
+ this._submitButtonNode.value = "Upload all";
}
}
@@ -248,16 +258,16 @@ class PostUploadView extends events.EventTarget {
}
_evtFilesAdded(e) {
- this.addUploadables(e.detail.files.map(file => new File(file)));
+ this.addUploadables(e.detail.files.map((file) => new File(file)));
}
_evtUrlsAdded(e) {
- this.addUploadables(e.detail.urls.map(url => new Url(url)));
+ this.addUploadables(e.detail.urls.map((url) => new Url(url)));
}
_evtCancelButtonClick(e) {
e.preventDefault();
- this._emit('cancel');
+ this._emit("cancel");
}
_evtFormSubmit(e) {
@@ -265,19 +275,21 @@ class PostUploadView extends events.EventTarget {
for (let uploadable of this._uploadables) {
this._updateUploadableFromDom(uploadable);
}
- this._submitButtonNode.value = 'Resume upload';
- this._emit('submit');
+ this._submitButtonNode.value = "Resume upload";
+ this._emit("submit");
}
_updateUploadableFromDom(uploadable) {
const rowNode = uploadable.rowNode;
- const safetyNode = rowNode.querySelector('.safety input:checked');
+ const safetyNode = rowNode.querySelector(".safety input:checked");
if (safetyNode) {
uploadable.safety = safetyNode.value;
}
- const anonymousNode = rowNode.querySelector('.anonymous input:checked');
+ const anonymousNode = rowNode.querySelector(
+ ".anonymous input:checked"
+ );
if (anonymousNode) {
uploadable.anonymous = true;
}
@@ -286,11 +298,14 @@ class PostUploadView extends events.EventTarget {
uploadable.relations = [];
for (let [i, lookalike] of uploadable.lookalikes.entries()) {
let lookalikeNode = rowNode.querySelector(
- `.lookalikes li:nth-child(${i + 1})`);
- if (lookalikeNode.querySelector('[name=copy-tags]').checked) {
- uploadable.tags = uploadable.tags.concat(lookalike.post.tagNames);
+ `.lookalikes li:nth-child(${i + 1})`
+ );
+ if (lookalikeNode.querySelector("[name=copy-tags]").checked) {
+ uploadable.tags = uploadable.tags.concat(
+ lookalike.post.tagNames
+ );
}
- if (lookalikeNode.querySelector('[name=add-relation]').checked) {
+ if (lookalikeNode.querySelector("[name=add-relation]").checked) {
uploadable.relations.push(lookalike.post.id);
}
}
@@ -317,78 +332,97 @@ class PostUploadView extends events.EventTarget {
this._uploadables[index + delta] = uploadable1;
if (delta === 1) {
this._listNode.insertBefore(
- uploadable2.rowNode, uploadable1.rowNode);
+ uploadable2.rowNode,
+ uploadable1.rowNode
+ );
} else {
this._listNode.insertBefore(
- uploadable1.rowNode, uploadable2.rowNode);
+ uploadable1.rowNode,
+ uploadable2.rowNode
+ );
}
}
}
_emit(eventType) {
this.dispatchEvent(
- new CustomEvent(
- eventType,
- {detail: {
+ new CustomEvent(eventType, {
+ detail: {
uploadables: this._uploadables,
skipDuplicates: this._skipDuplicatesCheckboxNode.checked,
- }}));
+ },
+ })
+ );
}
_renderRowNode(uploadable) {
- const rowNode = rowTemplate(Object.assign(
- {}, this._ctx, {uploadable: uploadable}));
+ const rowNode = rowTemplate(
+ Object.assign({}, this._ctx, { uploadable: uploadable })
+ );
if (uploadable.rowNode) {
uploadable.rowNode.parentNode.replaceChild(
- rowNode, uploadable.rowNode);
+ rowNode,
+ uploadable.rowNode
+ );
} else {
this._listNode.appendChild(rowNode);
}
uploadable.rowNode = rowNode;
- rowNode.querySelector('a.remove').addEventListener('click',
- e => this._evtRemoveClick(e, uploadable));
- rowNode.querySelector('a.move-up').addEventListener('click',
- e => this._evtMoveClick(e, uploadable, -1));
- rowNode.querySelector('a.move-down').addEventListener('click',
- e => this._evtMoveClick(e, uploadable, 1));
+ rowNode
+ .querySelector("a.remove")
+ .addEventListener("click", (e) =>
+ this._evtRemoveClick(e, uploadable)
+ );
+ rowNode
+ .querySelector("a.move-up")
+ .addEventListener("click", (e) =>
+ this._evtMoveClick(e, uploadable, -1)
+ );
+ rowNode
+ .querySelector("a.move-down")
+ .addEventListener("click", (e) =>
+ this._evtMoveClick(e, uploadable, 1)
+ );
}
_updateThumbnailNode(uploadable) {
- const rowNode = rowTemplate(Object.assign(
- {}, this._ctx, {uploadable: uploadable}));
+ const rowNode = rowTemplate(
+ Object.assign({}, this._ctx, { uploadable: uploadable })
+ );
views.replaceContent(
- uploadable.rowNode.querySelector('.thumbnail'),
- rowNode.querySelector('.thumbnail').childNodes);
+ uploadable.rowNode.querySelector(".thumbnail"),
+ rowNode.querySelector(".thumbnail").childNodes
+ );
}
get _uploading() {
- return this._formNode.classList.contains('uploading');
+ return this._formNode.classList.contains("uploading");
}
get _listNode() {
- return this._hostNode.querySelector('.uploadables-container');
+ return this._hostNode.querySelector(".uploadables-container");
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _skipDuplicatesCheckboxNode() {
- return this._hostNode.querySelector('form [name=skip-duplicates]');
+ return this._hostNode.querySelector("form [name=skip-duplicates]");
}
get _submitButtonNode() {
- return this._hostNode.querySelector('form [type=submit]');
+ return this._hostNode.querySelector("form [type=submit]");
}
get _cancelButtonNode() {
- return this._hostNode.querySelector('form .cancel');
+ return this._hostNode.querySelector("form .cancel");
}
get _contentInputNode() {
- return this._formNode.querySelector('.dropper-container');
+ return this._formNode.querySelector(".dropper-container");
}
}
diff --git a/client/js/views/posts_header_view.js b/client/js/views/posts_header_view.js
index 0797fffe..81546a7e 100644
--- a/client/js/views/posts_header_view.js
+++ b/client/js/views/posts_header_view.js
@@ -1,53 +1,56 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const settings = require('../models/settings.js');
-const keyboard = require('../util/keyboard.js');
-const misc = require('../util/misc.js');
-const search = require('../util/search.js');
-const views = require('../util/views.js');
-const TagAutoCompleteControl =
- require('../controls/tag_auto_complete_control.js');
+const events = require("../events.js");
+const settings = require("../models/settings.js");
+const keyboard = require("../util/keyboard.js");
+const misc = require("../util/misc.js");
+const search = require("../util/search.js");
+const views = require("../util/views.js");
+const TagAutoCompleteControl = require("../controls/tag_auto_complete_control.js");
-const template = views.getTemplate('posts-header');
+const template = views.getTemplate("posts-header");
class BulkEditor extends events.EventTarget {
constructor(hostNode) {
super();
this._hostNode = hostNode;
- this._openLinkNode.addEventListener(
- 'click', e => this._evtOpenLinkClick(e));
- this._closeLinkNode.addEventListener(
- 'click', e => this._evtCloseLinkClick(e));
+ this._openLinkNode.addEventListener("click", (e) =>
+ this._evtOpenLinkClick(e)
+ );
+ this._closeLinkNode.addEventListener("click", (e) =>
+ this._evtCloseLinkClick(e)
+ );
}
get opened() {
- return this._hostNode.classList.contains('opened') &&
- !this._hostNode.classList.contains('hidden');
+ return (
+ this._hostNode.classList.contains("opened") &&
+ !this._hostNode.classList.contains("hidden")
+ );
}
get _openLinkNode() {
- return this._hostNode.querySelector('.open');
+ return this._hostNode.querySelector(".open");
}
get _closeLinkNode() {
- return this._hostNode.querySelector('.close');
+ return this._hostNode.querySelector(".close");
}
toggleOpen(state) {
- this._hostNode.classList.toggle('opened', state);
+ this._hostNode.classList.toggle("opened", state);
}
toggleHide(state) {
- this._hostNode.classList.toggle('hidden', state);
+ this._hostNode.classList.toggle("hidden", state);
}
_evtOpenLinkClick(e) {
- throw new Error('Not implemented');
+ throw new Error("Not implemented");
}
_evtCloseLinkClick(e) {
- throw new Error('Not implemented');
+ throw new Error("Not implemented");
}
}
@@ -55,13 +58,13 @@ class BulkSafetyEditor extends BulkEditor {
_evtOpenLinkClick(e) {
e.preventDefault();
this.toggleOpen(true);
- this.dispatchEvent(new CustomEvent('open', {detail: {}}));
+ this.dispatchEvent(new CustomEvent("open", { detail: {} }));
}
_evtCloseLinkClick(e) {
e.preventDefault();
this.toggleOpen(false);
- this.dispatchEvent(new CustomEvent('close', {detail: {}}));
+ this.dispatchEvent(new CustomEvent("close", { detail: {} }));
}
}
@@ -71,10 +74,16 @@ class BulkTagEditor extends BulkEditor {
this._autoCompleteControl = new TagAutoCompleteControl(
this._inputNode,
{
- confirm: tag => this._autoCompleteControl.replaceSelectedText(
- tag.names[0], false),
- });
- this._hostNode.addEventListener('submit', e => this._evtFormSubmit(e));
+ confirm: (tag) =>
+ this._autoCompleteControl.replaceSelectedText(
+ tag.names[0],
+ false
+ ),
+ }
+ );
+ this._hostNode.addEventListener("submit", (e) =>
+ this._evtFormSubmit(e)
+ );
}
get value() {
@@ -82,7 +91,7 @@ class BulkTagEditor extends BulkEditor {
}
get _inputNode() {
- return this._hostNode.querySelector('input[name=tag]');
+ return this._hostNode.querySelector("input[name=tag]");
}
focus() {
@@ -96,22 +105,22 @@ class BulkTagEditor extends BulkEditor {
_evtFormSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {detail: {}}));
+ this.dispatchEvent(new CustomEvent("submit", { detail: {} }));
}
_evtOpenLinkClick(e) {
e.preventDefault();
this.toggleOpen(true);
this.focus();
- this.dispatchEvent(new CustomEvent('open', {detail: {}}));
+ this.dispatchEvent(new CustomEvent("open", { detail: {} }));
}
_evtCloseLinkClick(e) {
e.preventDefault();
- this._inputNode.value = '';
+ this._inputNode.value = "";
this.toggleOpen(false);
this.blur();
- this.dispatchEvent(new CustomEvent('close', {detail: {}}));
+ this.dispatchEvent(new CustomEvent("close", { detail: {} }));
}
}
@@ -127,18 +136,25 @@ class PostsHeaderView extends events.EventTarget {
this._autoCompleteControl = new TagAutoCompleteControl(
this._queryInputNode,
{
- confirm: tag => this._autoCompleteControl.replaceSelectedText(
- misc.escapeSearchTerm(tag.names[0]), true),
- });
+ confirm: (tag) =>
+ this._autoCompleteControl.replaceSelectedText(
+ misc.escapeSearchTerm(tag.names[0]),
+ true
+ ),
+ }
+ );
- keyboard.bind('p', () => this._focusFirstPostNode());
+ keyboard.bind("p", () => this._focusFirstPostNode());
search.searchInputNodeFocusHelper(this._queryInputNode);
for (let safetyButtonNode of this._safetyButtonNodes) {
- safetyButtonNode.addEventListener(
- 'click', e => this._evtSafetyButtonClick(e));
+ safetyButtonNode.addEventListener("click", (e) =>
+ this._evtSafetyButtonClick(e)
+ );
}
- this._formNode.addEventListener('submit', e => this._evtFormSubmit(e));
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtFormSubmit(e)
+ );
this._bulkEditors = [];
if (this._bulkEditTagsNode) {
@@ -148,19 +164,20 @@ class PostsHeaderView extends events.EventTarget {
if (this._bulkEditSafetyNode) {
this._bulkSafetyEditor = new BulkSafetyEditor(
- this._bulkEditSafetyNode);
+ this._bulkEditSafetyNode
+ );
this._bulkEditors.push(this._bulkSafetyEditor);
}
for (let editor of this._bulkEditors) {
- editor.addEventListener('submit', e => {
+ editor.addEventListener("submit", (e) => {
this._navigate();
});
- editor.addEventListener('open', e => {
+ editor.addEventListener("open", (e) => {
this._hideBulkEditorsExcept(editor);
this._navigate();
});
- editor.addEventListener('close', e => {
+ editor.addEventListener("close", (e) => {
this._closeAndShowAllBulkEditors();
this._navigate();
});
@@ -174,23 +191,23 @@ class PostsHeaderView extends events.EventTarget {
}
get _formNode() {
- return this._hostNode.querySelector('form.search');
+ return this._hostNode.querySelector("form.search");
}
get _safetyButtonNodes() {
- return this._hostNode.querySelectorAll('form .safety');
+ return this._hostNode.querySelectorAll("form .safety");
}
get _queryInputNode() {
- return this._hostNode.querySelector('form [name=search-text]');
+ return this._hostNode.querySelector("form [name=search-text]");
}
get _bulkEditTagsNode() {
- return this._hostNode.querySelector('.bulk-edit-tags');
+ return this._hostNode.querySelector(".bulk-edit-tags");
}
get _bulkEditSafetyNode() {
- return this._hostNode.querySelector('.bulk-edit-safety');
+ return this._hostNode.querySelector(".bulk-edit-safety");
}
_openBulkEditor(editor) {
@@ -216,20 +233,23 @@ class PostsHeaderView extends events.EventTarget {
_evtSafetyButtonClick(e, url) {
e.preventDefault();
- e.target.classList.toggle('disabled');
- const safety = e.target.getAttribute('data-safety');
+ e.target.classList.toggle("disabled");
+ const safety = e.target.getAttribute("data-safety");
let browsingSettings = settings.get();
- browsingSettings.listPosts[safety] =
- !browsingSettings.listPosts[safety];
+ browsingSettings.listPosts[safety] = !browsingSettings.listPosts[
+ safety
+ ];
settings.save(browsingSettings, true);
this.dispatchEvent(
- new CustomEvent(
- 'navigate', {
- detail: {
- parameters: Object.assign(
- {}, this._ctx.parameters, {tag: null, offset: 0}),
- },
- }));
+ new CustomEvent("navigate", {
+ detail: {
+ parameters: Object.assign({}, this._ctx.parameters, {
+ tag: null,
+ offset: 0,
+ }),
+ },
+ })
+ );
}
_evtFormSubmit(e) {
@@ -239,27 +259,33 @@ class PostsHeaderView extends events.EventTarget {
_navigate() {
this._autoCompleteControl.hide();
- let parameters = {query: this._queryInputNode.value};
+ let parameters = { query: this._queryInputNode.value };
// convert falsy values to an empty string "" so that we can correctly compare with the current query
- const prevQuery = this._ctx.parameters.query ? this._ctx.parameters.query : "";
- parameters.offset = parameters.query === prevQuery ? this._ctx.parameters.offset : 0;
+ const prevQuery = this._ctx.parameters.query
+ ? this._ctx.parameters.query
+ : "";
+ parameters.offset =
+ parameters.query === prevQuery ? this._ctx.parameters.offset : 0;
if (this._bulkTagEditor && this._bulkTagEditor.opened) {
parameters.tag = this._bulkTagEditor.value;
this._bulkTagEditor.blur();
} else {
parameters.tag = null;
}
- parameters.safety = (
- this._bulkSafetyEditor &&
- this._bulkSafetyEditor.opened ? '1' : null);
+ parameters.safety =
+ this._bulkSafetyEditor && this._bulkSafetyEditor.opened
+ ? "1"
+ : null;
this.dispatchEvent(
- new CustomEvent('navigate', {detail: {parameters: parameters}}));
+ new CustomEvent("navigate", { detail: { parameters: parameters } })
+ );
}
_focusFirstPostNode() {
- const firstPostNode =
- document.body.querySelector('.post-list li:first-child a');
+ const firstPostNode = document.body.querySelector(
+ ".post-list li:first-child a"
+ );
if (firstPostNode) {
firstPostNode.focus();
}
diff --git a/client/js/views/posts_page_view.js b/client/js/views/posts_page_view.js
index 84521e35..ba07a63a 100644
--- a/client/js/views/posts_page_view.js
+++ b/client/js/views/posts_page_view.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('posts-page');
+const template = views.getTemplate("posts-page");
class PostsPageView extends events.EventTarget {
constructor(ctx) {
@@ -15,26 +15,28 @@ class PostsPageView extends events.EventTarget {
this._postIdToPost = {};
for (let post of ctx.response.results) {
this._postIdToPost[post.id] = post;
- post.addEventListener('change', e => this._evtPostChange(e));
+ post.addEventListener("change", (e) => this._evtPostChange(e));
}
this._postIdToListItemNode = {};
for (let listItemNode of this._listItemNodes) {
- const postId = listItemNode.getAttribute('data-post-id');
+ const postId = listItemNode.getAttribute("data-post-id");
const post = this._postIdToPost[postId];
this._postIdToListItemNode[postId] = listItemNode;
const tagFlipperNode = this._getTagFlipperNode(listItemNode);
if (tagFlipperNode) {
- tagFlipperNode.addEventListener(
- 'click', e => this._evtBulkEditTagsClick(e, post));
+ tagFlipperNode.addEventListener("click", (e) =>
+ this._evtBulkEditTagsClick(e, post)
+ );
}
const safetyFlipperNode = this._getSafetyFlipperNode(listItemNode);
if (safetyFlipperNode) {
- for (let linkNode of safetyFlipperNode.querySelectorAll('a')) {
- linkNode.addEventListener(
- 'click', e => this._evtBulkEditSafetyClick(e, post));
+ for (let linkNode of safetyFlipperNode.querySelectorAll("a")) {
+ linkNode.addEventListener("click", (e) =>
+ this._evtBulkEditSafetyClick(e, post)
+ );
}
}
}
@@ -43,21 +45,21 @@ class PostsPageView extends events.EventTarget {
}
get _listItemNodes() {
- return this._hostNode.querySelectorAll('li');
+ return this._hostNode.querySelectorAll("li");
}
_getTagFlipperNode(listItemNode) {
- return listItemNode.querySelector('.tag-flipper');
+ return listItemNode.querySelector(".tag-flipper");
}
_getSafetyFlipperNode(listItemNode) {
- return listItemNode.querySelector('.safety-flipper');
+ return listItemNode.querySelector(".safety-flipper");
}
_evtPostChange(e) {
const listItemNode = this._postIdToListItemNode[e.detail.post.id];
- for (let node of listItemNode.querySelectorAll('[data-disabled]')) {
- node.removeAttribute('data-disabled');
+ for (let node of listItemNode.querySelectorAll("[data-disabled]")) {
+ node.removeAttribute("data-disabled");
}
this._syncBulkEditorsHighlights();
}
@@ -65,35 +67,41 @@ class PostsPageView extends events.EventTarget {
_evtBulkEditTagsClick(e, post) {
e.preventDefault();
const linkNode = e.target;
- if (linkNode.getAttribute('data-disabled')) {
+ if (linkNode.getAttribute("data-disabled")) {
return;
}
- linkNode.setAttribute('data-disabled', true);
+ linkNode.setAttribute("data-disabled", true);
this.dispatchEvent(
new CustomEvent(
- linkNode.classList.contains('tagged') ? 'untag' : 'tag',
- {detail: {post: post}}));
+ linkNode.classList.contains("tagged") ? "untag" : "tag",
+ {
+ detail: { post: post },
+ }
+ )
+ );
}
_evtBulkEditSafetyClick(e, post) {
e.preventDefault();
const linkNode = e.target;
- if (linkNode.getAttribute('data-disabled')) {
+ if (linkNode.getAttribute("data-disabled")) {
return;
}
- const newSafety = linkNode.getAttribute('data-safety');
+ const newSafety = linkNode.getAttribute("data-safety");
if (post.safety === newSafety) {
return;
}
- linkNode.setAttribute('data-disabled', true);
+ linkNode.setAttribute("data-disabled", true);
this.dispatchEvent(
- new CustomEvent(
- 'changeSafety', {detail: {post: post, safety: newSafety}}));
+ new CustomEvent("changeSafety", {
+ detail: { post: post, safety: newSafety },
+ })
+ );
}
_syncBulkEditorsHighlights() {
for (let listItemNode of this._listItemNodes) {
- const postId = listItemNode.getAttribute('data-post-id');
+ const postId = listItemNode.getAttribute("data-post-id");
const post = this._postIdToPost[postId];
const tagFlipperNode = this._getTagFlipperNode(listItemNode);
@@ -102,14 +110,17 @@ class PostsPageView extends events.EventTarget {
for (let tag of this._ctx.bulkEdit.tags) {
tagged &= post.tags.isTaggedWith(tag);
}
- tagFlipperNode.classList.toggle('tagged', tagged);
+ tagFlipperNode.classList.toggle("tagged", tagged);
}
const safetyFlipperNode = this._getSafetyFlipperNode(listItemNode);
if (safetyFlipperNode) {
- for (let linkNode of safetyFlipperNode.querySelectorAll('a')) {
- const safety = linkNode.getAttribute('data-safety');
- linkNode.classList.toggle('active', post.safety === safety);
+ for (let linkNode of safetyFlipperNode.querySelectorAll("a")) {
+ const safety = linkNode.getAttribute("data-safety");
+ linkNode.classList.toggle(
+ "active",
+ post.safety === safety
+ );
}
}
}
diff --git a/client/js/views/registration_view.js b/client/js/views/registration_view.js
index 1db5fcb9..0a08de23 100644
--- a/client/js/views/registration_view.js
+++ b/client/js/views/registration_view.js
@@ -1,22 +1,25 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('user-registration');
+const template = views.getTemplate("user-registration");
class RegistrationView extends events.EventTarget {
constructor() {
super();
- this._hostNode = document.getElementById('content-holder');
- views.replaceContent(this._hostNode, template({
- userNamePattern: api.getUserNameRegex(),
- passwordPattern: api.getPasswordRegex(),
- }));
+ this._hostNode = document.getElementById("content-holder");
+ views.replaceContent(
+ this._hostNode,
+ template({
+ userNamePattern: api.getUserNameRegex(),
+ passwordPattern: api.getPasswordRegex(),
+ })
+ );
views.syncScrollPosition();
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -37,29 +40,31 @@ class RegistrationView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- name: this._userNameFieldNode.value,
- password: this._passwordFieldNode.value,
- email: this._emailFieldNode.value,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ name: this._userNameFieldNode.value,
+ password: this._passwordFieldNode.value,
+ email: this._emailFieldNode.value,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _userNameFieldNode() {
- return this._formNode.querySelector('[name=name]');
+ return this._formNode.querySelector("[name=name]");
}
get _passwordFieldNode() {
- return this._formNode.querySelector('[name=password]');
+ return this._formNode.querySelector("[name=password]");
}
get _emailFieldNode() {
- return this._formNode.querySelector('[name=email]');
+ return this._formNode.querySelector("[name=email]");
}
}
diff --git a/client/js/views/settings_view.js b/client/js/views/settings_view.js
index 5ab7316a..184028ec 100644
--- a/client/js/views/settings_view.js
+++ b/client/js/views/settings_view.js
@@ -1,21 +1,23 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('settings');
+const template = views.getTemplate("settings");
class SettingsView extends events.EventTarget {
constructor(ctx) {
super();
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(
- this._hostNode, template({browsingSettings: ctx.settings}));
+ this._hostNode,
+ template({ browsingSettings: ctx.settings })
+ );
views.syncScrollPosition();
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -28,26 +30,32 @@ class SettingsView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- upscaleSmallPosts: this._find('upscale-small-posts').checked,
- endlessScroll: this._find('endless-scroll').checked,
- keyboardShortcuts: this._find('keyboard-shortcuts').checked,
- transparencyGrid: this._find('transparency-grid').checked,
- tagSuggestions: this._find('tag-suggestions').checked,
- autoplayVideos: this._find('autoplay-videos').checked,
- postsPerPage: this._find('posts-per-page').value,
- tagUnderscoresAsSpaces: this._find('tag-underscores-as-spaces').checked,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ upscaleSmallPosts: this._find("upscale-small-posts")
+ .checked,
+ endlessScroll: this._find("endless-scroll").checked,
+ keyboardShortcuts: this._find("keyboard-shortcuts")
+ .checked,
+ transparencyGrid: this._find("transparency-grid").checked,
+ tagSuggestions: this._find("tag-suggestions").checked,
+ autoplayVideos: this._find("autoplay-videos").checked,
+ postsPerPage: this._find("posts-per-page").value,
+ tagUnderscoresAsSpaces: this._find(
+ "tag-underscores-as-spaces"
+ ).checked,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
_find(nodeName) {
- return this._formNode.querySelector('[name=' + nodeName + ']');
+ return this._formNode.querySelector("[name=" + nodeName + "]");
}
}
diff --git a/client/js/views/snapshots_page_view.js b/client/js/views/snapshots_page_view.js
index e7ea264e..a665ea0d 100644
--- a/client/js/views/snapshots_page_view.js
+++ b/client/js/views/snapshots_page_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('snapshots-page');
+const template = views.getTemplate("snapshots-page");
function _extend(target, source) {
target.push.apply(target, source);
@@ -10,18 +10,18 @@ function _extend(target, source) {
function _formatBasicChange(diff, text) {
const lines = [];
- if (diff.type === 'list change') {
+ if (diff.type === "list change") {
const addedItems = diff.added;
const removedItems = diff.removed;
if (addedItems && addedItems.length) {
- lines.push(`Added ${text} (${addedItems.join(', ')})`);
+ lines.push(`Added ${text} (${addedItems.join(", ")})`);
}
if (removedItems && removedItems.length) {
- lines.push(`Removed ${text} (${removedItems.join(', ')})`);
+ lines.push(`Removed ${text} (${removedItems.join(", ")})`);
}
- } else if (diff.type === 'primitive change') {
- const oldValue = diff['old-value'];
- const newValue = diff['new-value'];
+ } else if (diff.type === "primitive change") {
+ const oldValue = diff["old-value"];
+ const newValue = diff["new-value"];
lines.push(`Changed ${text} (${oldValue} → ${newValue})`);
} else {
lines.push(`Changed ${text}`);
@@ -30,13 +30,13 @@ function _formatBasicChange(diff, text) {
}
function _makeResourceLink(type, id) {
- if (type === 'post') {
+ if (type === "post") {
return views.makePostLink(id, true);
- } else if (type === 'tag') {
+ } else if (type === "tag") {
return views.makeTagLink(id, true);
- } else if (type === 'tag_category') {
+ } else if (type === "tag_category") {
return 'category "' + id + '"';
- } else if (type === 'pool') {
+ } else if (type === "pool") {
return views.makePoolLink(id, true);
}
}
@@ -50,96 +50,102 @@ function _makeItemCreation(type, data) {
let text = key[0].toUpperCase() + key.substr(1).toLowerCase();
if (Array.isArray(data[key])) {
if (data[key].length) {
- lines.push(`${text}: ${data[key].join(', ')}`);
+ lines.push(`${text}: ${data[key].join(", ")}`);
}
} else {
lines.push(`${text}: ${data[key]}`);
}
}
- return lines.join(' ');
+ return lines.join(" ");
}
function _makeItemModification(type, data) {
const lines = [];
const diff = data.value;
- if (type === 'tag_category') {
+ if (type === "tag_category") {
if (diff.name) {
- _extend(lines, _formatBasicChange(diff.name, 'name'));
+ _extend(lines, _formatBasicChange(diff.name, "name"));
}
if (diff.color) {
- _extend(lines, _formatBasicChange(diff.color, 'color'));
+ _extend(lines, _formatBasicChange(diff.color, "color"));
}
if (diff.default) {
- _extend(lines, ['Made into default category']);
+ _extend(lines, ["Made into default category"]);
}
-
- } else if (type === 'tag') {
+ } else if (type === "tag") {
if (diff.names) {
- _extend(lines, _formatBasicChange(diff.names, 'names'));
+ _extend(lines, _formatBasicChange(diff.names, "names"));
}
if (diff.category) {
- _extend(
- lines, _formatBasicChange(diff.category, 'category'));
+ _extend(lines, _formatBasicChange(diff.category, "category"));
}
if (diff.suggestions) {
_extend(
- lines, _formatBasicChange(diff.suggestions, 'suggestions'));
+ lines,
+ _formatBasicChange(diff.suggestions, "suggestions")
+ );
}
if (diff.implications) {
_extend(
- lines, _formatBasicChange(diff.implications, 'implications'));
+ lines,
+ _formatBasicChange(diff.implications, "implications")
+ );
}
-
- } else if (type === 'post') {
+ } else if (type === "post") {
if (diff.checksum) {
- _extend(lines, ['Changed content']);
+ _extend(lines, ["Changed content"]);
}
if (diff.featured) {
- _extend(lines, ['Featured on front page']);
+ _extend(lines, ["Featured on front page"]);
}
if (diff.source) {
- _extend(lines, _formatBasicChange(diff.source, 'source'));
+ _extend(lines, _formatBasicChange(diff.source, "source"));
}
if (diff.safety) {
- _extend(lines, _formatBasicChange(diff.safety, 'safety'));
+ _extend(lines, _formatBasicChange(diff.safety, "safety"));
}
if (diff.tags) {
- _extend(lines, _formatBasicChange(diff.tags, 'tags'));
+ _extend(lines, _formatBasicChange(diff.tags, "tags"));
}
if (diff.relations) {
- _extend(lines, _formatBasicChange(diff.relations, 'relations'));
+ _extend(lines, _formatBasicChange(diff.relations, "relations"));
}
if (diff.notes) {
- _extend(lines, ['Changed notes']);
+ _extend(lines, ["Changed notes"]);
}
if (diff.flags) {
- _extend(lines, ['Changed flags']);
+ _extend(lines, ["Changed flags"]);
}
-
- } else if (type === 'pool') {
+ } else if (type === "pool") {
if (diff.names) {
- _extend(lines, _formatBasicChange(diff.names, 'names'));
+ _extend(lines, _formatBasicChange(diff.names, "names"));
}
if (diff.category) {
- _extend(
- lines, _formatBasicChange(diff.category, 'category'));
+ _extend(lines, _formatBasicChange(diff.category, "category"));
}
if (diff.posts) {
- _extend(
- lines, _formatBasicChange(diff.posts, 'posts'));
+ _extend(lines, _formatBasicChange(diff.posts, "posts"));
}
}
- return lines.join(' ');
+ return lines.join(" ");
}
class SnapshotsPageView {
constructor(ctx) {
- views.replaceContent(ctx.hostNode, template(Object.assign({
- makeResourceLink: _makeResourceLink,
- makeItemCreation: _makeItemCreation,
- makeItemModification: _makeItemModification,
- }, ctx)));
+ views.replaceContent(
+ ctx.hostNode,
+ template(
+ Object.assign(
+ {
+ makeResourceLink: _makeResourceLink,
+ makeItemCreation: _makeItemCreation,
+ makeItemModification: _makeItemModification,
+ },
+ ctx
+ )
+ )
+ );
}
}
diff --git a/client/js/views/tag_categories_view.js b/client/js/views/tag_categories_view.js
index 7e1000d0..1f1a4dac 100644
--- a/client/js/views/tag_categories_view.js
+++ b/client/js/views/tag_categories_view.js
@@ -1,17 +1,17 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const TagCategory = require('../models/tag_category.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const TagCategory = require("../models/tag_category.js");
-const template = views.getTemplate('tag-categories');
-const rowTemplate = views.getTemplate('tag-category-row');
+const template = views.getTemplate("tag-categories");
+const rowTemplate = views.getTemplate("tag-category-row");
class TagCategoriesView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
views.replaceContent(this._hostNode, template(ctx));
views.syncScrollPosition();
@@ -31,18 +31,22 @@ class TagCategoriesView extends events.EventTarget {
}
if (this._addLinkNode) {
- this._addLinkNode.addEventListener(
- 'click', e => this._evtAddButtonClick(e));
+ this._addLinkNode.addEventListener("click", (e) =>
+ this._evtAddButtonClick(e)
+ );
}
- ctx.tagCategories.addEventListener(
- 'add', e => this._evtTagCategoryAdded(e));
+ ctx.tagCategories.addEventListener("add", (e) =>
+ this._evtTagCategoryAdded(e)
+ );
- ctx.tagCategories.addEventListener(
- 'remove', e => this._evtTagCategoryDeleted(e));
+ ctx.tagCategories.addEventListener("remove", (e) =>
+ this._evtTagCategoryDeleted(e)
+ );
- this._formNode.addEventListener(
- 'submit', e => this._evtSaveButtonClick(e, ctx));
+ this._formNode.addEventListener("submit", (e) =>
+ this._evtSaveButtonClick(e, ctx)
+ );
}
enableForm() {
@@ -66,44 +70,48 @@ class TagCategoriesView extends events.EventTarget {
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _tableBodyNode() {
- return this._hostNode.querySelector('tbody');
+ return this._hostNode.querySelector("tbody");
}
get _addLinkNode() {
- return this._hostNode.querySelector('a.add');
+ return this._hostNode.querySelector("a.add");
}
_addTagCategoryRowNode(tagCategory) {
const rowNode = rowTemplate(
- Object.assign(
- {}, this._ctx, {tagCategory: tagCategory}));
+ Object.assign({}, this._ctx, { tagCategory: tagCategory })
+ );
- const nameInput = rowNode.querySelector('.name input');
+ const nameInput = rowNode.querySelector(".name input");
if (nameInput) {
- nameInput.addEventListener(
- 'change', e => this._evtNameChange(e, rowNode));
+ nameInput.addEventListener("change", (e) =>
+ this._evtNameChange(e, rowNode)
+ );
}
- const colorInput = rowNode.querySelector('.color input');
+ const colorInput = rowNode.querySelector(".color input");
if (colorInput) {
- colorInput.addEventListener(
- 'change', e => this._evtColorChange(e, rowNode));
+ colorInput.addEventListener("change", (e) =>
+ this._evtColorChange(e, rowNode)
+ );
}
- const removeLinkNode = rowNode.querySelector('.remove a');
+ const removeLinkNode = rowNode.querySelector(".remove a");
if (removeLinkNode) {
- removeLinkNode.addEventListener(
- 'click', e => this._evtDeleteButtonClick(e, rowNode));
+ removeLinkNode.addEventListener("click", (e) =>
+ this._evtDeleteButtonClick(e, rowNode)
+ );
}
- const defaultLinkNode = rowNode.querySelector('.set-default a');
+ const defaultLinkNode = rowNode.querySelector(".set-default a");
if (defaultLinkNode) {
- defaultLinkNode.addEventListener(
- 'click', e => this._evtSetDefaultButtonClick(e, rowNode));
+ defaultLinkNode.addEventListener("click", (e) =>
+ this._evtSetDefaultButtonClick(e, rowNode)
+ );
}
this._tableBodyNode.appendChild(rowNode);
@@ -141,7 +149,7 @@ class TagCategoriesView extends events.EventTarget {
_evtDeleteButtonClick(e, rowNode, link) {
e.preventDefault();
- if (e.target.classList.contains('inactive')) {
+ if (e.target.classList.contains("inactive")) {
return;
}
this._ctx.tagCategories.remove(rowNode._tagCategory);
@@ -150,16 +158,16 @@ class TagCategoriesView extends events.EventTarget {
_evtSetDefaultButtonClick(e, rowNode) {
e.preventDefault();
this._ctx.tagCategories.defaultCategory = rowNode._tagCategory;
- const oldRowNode = rowNode.parentNode.querySelector('tr.default');
+ const oldRowNode = rowNode.parentNode.querySelector("tr.default");
if (oldRowNode) {
- oldRowNode.classList.remove('default');
+ oldRowNode.classList.remove("default");
}
- rowNode.classList.add('default');
+ rowNode.classList.add("default");
}
_evtSaveButtonClick(e, ctx) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit'));
+ this.dispatchEvent(new CustomEvent("submit"));
}
}
diff --git a/client/js/views/tag_delete_view.js b/client/js/views/tag_delete_view.js
index 4d27f150..4246ec3e 100644
--- a/client/js/views/tag_delete_view.js
+++ b/client/js/views/tag_delete_view.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('tag-delete');
+const template = views.getTemplate("tag-delete");
class TagDeleteView extends events.EventTarget {
constructor(ctx) {
@@ -13,7 +13,7 @@ class TagDeleteView extends events.EventTarget {
this._tag = ctx.tag;
views.replaceContent(this._hostNode, template(ctx));
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -38,15 +38,17 @@ class TagDeleteView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- tag: this._tag,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ tag: this._tag,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
}
diff --git a/client/js/views/tag_edit_view.js b/client/js/views/tag_edit_view.js
index bb4a382b..58c1bc45 100644
--- a/client/js/views/tag_edit_view.js
+++ b/client/js/views/tag_edit_view.js
@@ -1,12 +1,12 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const misc = require('../util/misc.js');
-const views = require('../util/views.js');
-const TagInputControl = require('../controls/tag_input_control.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const misc = require("../util/misc.js");
+const views = require("../util/views.js");
+const TagInputControl = require("../controls/tag_input_control.js");
-const template = views.getTemplate('tag-edit');
+const template = views.getTemplate("tag-edit");
class TagEditView extends events.EventTarget {
constructor(ctx) {
@@ -19,28 +19,33 @@ class TagEditView extends events.EventTarget {
views.decorateValidator(this._formNode);
if (this._namesFieldNode) {
- this._namesFieldNode.addEventListener(
- 'input', e => this._evtNameInput(e));
+ this._namesFieldNode.addEventListener("input", (e) =>
+ this._evtNameInput(e)
+ );
}
if (this._implicationsFieldNode) {
new TagInputControl(
- this._implicationsFieldNode, this._tag.implications);
+ this._implicationsFieldNode,
+ this._tag.implications
+ );
}
if (this._suggestionsFieldNode) {
new TagInputControl(
- this._suggestionsFieldNode, this._tag.suggestions);
+ this._suggestionsFieldNode,
+ this._tag.suggestions
+ );
}
for (let node of this._formNode.querySelectorAll(
- 'input, select, textarea')) {
- node.addEventListener(
- 'change', e => {
- this.dispatchEvent(new CustomEvent('change'));
- });
+ "input, select, textarea"
+ )) {
+ node.addEventListener("change", (e) => {
+ this.dispatchEvent(new CustomEvent("change"));
+ });
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -69,72 +74,80 @@ class TagEditView extends events.EventTarget {
if (!list.length) {
this._namesFieldNode.setCustomValidity(
- 'Tags must have at least one name.');
+ "Tags must have at least one name."
+ );
return;
}
for (let item of list) {
if (!regex.test(item)) {
this._namesFieldNode.setCustomValidity(
- `Tag name "${item}" contains invalid symbols.`);
+ `Tag name "${item}" contains invalid symbols.`
+ );
return;
}
}
- this._namesFieldNode.setCustomValidity('');
+ this._namesFieldNode.setCustomValidity("");
}
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- tag: this._tag,
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ tag: this._tag,
- names: this._namesFieldNode ?
- misc.splitByWhitespace(this._namesFieldNode.value) :
- undefined,
+ names: this._namesFieldNode
+ ? misc.splitByWhitespace(this._namesFieldNode.value)
+ : undefined,
- category: this._categoryFieldNode ?
- this._categoryFieldNode.value :
- undefined,
+ category: this._categoryFieldNode
+ ? this._categoryFieldNode.value
+ : undefined,
- implications: this._implicationsFieldNode ?
- misc.splitByWhitespace(this._implicationsFieldNode.value) :
- undefined,
+ implications: this._implicationsFieldNode
+ ? misc.splitByWhitespace(
+ this._implicationsFieldNode.value
+ )
+ : undefined,
- suggestions: this._suggestionsFieldNode ?
- misc.splitByWhitespace(this._suggestionsFieldNode.value) :
- undefined,
+ suggestions: this._suggestionsFieldNode
+ ? misc.splitByWhitespace(
+ this._suggestionsFieldNode.value
+ )
+ : undefined,
- description: this._descriptionFieldNode ?
- this._descriptionFieldNode.value :
- undefined,
- },
- }));
+ description: this._descriptionFieldNode
+ ? this._descriptionFieldNode.value
+ : undefined,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _namesFieldNode() {
- return this._formNode.querySelector('.names input');
+ return this._formNode.querySelector(".names input");
}
get _categoryFieldNode() {
- return this._formNode.querySelector('.category select');
+ return this._formNode.querySelector(".category select");
}
get _implicationsFieldNode() {
- return this._formNode.querySelector('.implications input');
+ return this._formNode.querySelector(".implications input");
}
get _suggestionsFieldNode() {
- return this._formNode.querySelector('.suggestions input');
+ return this._formNode.querySelector(".suggestions input");
}
get _descriptionFieldNode() {
- return this._formNode.querySelector('.description textarea');
+ return this._formNode.querySelector(".description textarea");
}
}
diff --git a/client/js/views/tag_merge_view.js b/client/js/views/tag_merge_view.js
index e29183a6..f823ca44 100644
--- a/client/js/views/tag_merge_view.js
+++ b/client/js/views/tag_merge_view.js
@@ -1,12 +1,11 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
-const TagAutoCompleteControl =
- require('../controls/tag_auto_complete_control.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
+const TagAutoCompleteControl = require("../controls/tag_auto_complete_control.js");
-const template = views.getTemplate('tag-merge');
+const template = views.getTemplate("tag-merge");
class TagMergeView extends events.EventTarget {
constructor(ctx) {
@@ -22,12 +21,16 @@ class TagMergeView extends events.EventTarget {
this._autoCompleteControl = new TagAutoCompleteControl(
this._targetTagFieldNode,
{
- confirm: tag => this._autoCompleteControl.replaceSelectedText(
- tag.names[0], false),
- });
+ confirm: (tag) =>
+ this._autoCompleteControl.replaceSelectedText(
+ tag.names[0],
+ false
+ ),
+ }
+ );
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -52,25 +55,27 @@ class TagMergeView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- tag: this._tag,
- targetTagName: this._targetTagFieldNode.value,
- addAlias: this._addAliasCheckboxNode.checked,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ tag: this._tag,
+ targetTagName: this._targetTagFieldNode.value,
+ addAlias: this._addAliasCheckboxNode.checked,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _targetTagFieldNode() {
- return this._formNode.querySelector('input[name=target-tag]');
+ return this._formNode.querySelector("input[name=target-tag]");
}
get _addAliasCheckboxNode() {
- return this._formNode.querySelector('input[name=alias]');
+ return this._formNode.querySelector("input[name=alias]");
}
}
diff --git a/client/js/views/tag_summary_view.js b/client/js/views/tag_summary_view.js
index 019e72b0..11c61917 100644
--- a/client/js/views/tag_summary_view.js
+++ b/client/js/views/tag_summary_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('tag-summary');
+const template = views.getTemplate("tag-summary");
class TagSummaryView {
constructor(ctx) {
diff --git a/client/js/views/tag_view.js b/client/js/views/tag_view.js
index 2aaa9996..e6c37767 100644
--- a/client/js/views/tag_view.js
+++ b/client/js/views/tag_view.js
@@ -1,26 +1,26 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const misc = require('../util/misc.js');
-const TagSummaryView = require('./tag_summary_view.js');
-const TagEditView = require('./tag_edit_view.js');
-const TagMergeView = require('./tag_merge_view.js');
-const TagDeleteView = require('./tag_delete_view.js');
-const EmptyView = require('../views/empty_view.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const misc = require("../util/misc.js");
+const TagSummaryView = require("./tag_summary_view.js");
+const TagEditView = require("./tag_edit_view.js");
+const TagMergeView = require("./tag_merge_view.js");
+const TagDeleteView = require("./tag_delete_view.js");
+const EmptyView = require("../views/empty_view.js");
-const template = views.getTemplate('tag');
+const template = views.getTemplate("tag");
class TagView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- ctx.tag.addEventListener('change', e => this._evtChange(e));
- ctx.section = ctx.section || 'summary';
+ ctx.tag.addEventListener("change", (e) => this._evtChange(e));
+ ctx.section = ctx.section || "summary";
ctx.getPrettyTagName = misc.getPrettyTagName;
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
this._install();
}
@@ -28,52 +28,54 @@ class TagView extends events.EventTarget {
const ctx = this._ctx;
views.replaceContent(this._hostNode, template(ctx));
- for (let item of this._hostNode.querySelectorAll('[data-name]')) {
+ for (let item of this._hostNode.querySelectorAll("[data-name]")) {
item.classList.toggle(
- 'active', item.getAttribute('data-name') === ctx.section);
- if (item.getAttribute('data-name') === ctx.section) {
+ "active",
+ item.getAttribute("data-name") === ctx.section
+ );
+ if (item.getAttribute("data-name") === ctx.section) {
item.parentNode.scrollLeft =
item.getBoundingClientRect().left -
- item.parentNode.getBoundingClientRect().left
+ item.parentNode.getBoundingClientRect().left;
}
}
- ctx.hostNode = this._hostNode.querySelector('.tag-content-holder');
- if (ctx.section === 'edit') {
+ ctx.hostNode = this._hostNode.querySelector(".tag-content-holder");
+ if (ctx.section === "edit") {
if (!this._ctx.canEditAnything) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to edit tags.');
+ "You don't have privileges to edit tags."
+ );
} else {
this._view = new TagEditView(ctx);
- events.proxyEvent(this._view, this, 'submit');
+ events.proxyEvent(this._view, this, "submit");
}
-
- } else if (ctx.section === 'merge') {
+ } else if (ctx.section === "merge") {
if (!this._ctx.canMerge) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to merge tags.');
+ "You don't have privileges to merge tags."
+ );
} else {
this._view = new TagMergeView(ctx);
- events.proxyEvent(this._view, this, 'submit', 'merge');
+ events.proxyEvent(this._view, this, "submit", "merge");
}
-
- } else if (ctx.section === 'delete') {
+ } else if (ctx.section === "delete") {
if (!this._ctx.canDelete) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to delete tags.');
+ "You don't have privileges to delete tags."
+ );
} else {
this._view = new TagDeleteView(ctx);
- events.proxyEvent(this._view, this, 'submit', 'delete');
+ events.proxyEvent(this._view, this, "submit", "delete");
}
-
} else {
this._view = new TagSummaryView(ctx);
}
- events.proxyEvent(this._view, this, 'change');
+ events.proxyEvent(this._view, this, "change");
views.syncScrollPosition();
}
diff --git a/client/js/views/tags_header_view.js b/client/js/views/tags_header_view.js
index 425c8114..6dd76267 100644
--- a/client/js/views/tags_header_view.js
+++ b/client/js/views/tags_header_view.js
@@ -1,13 +1,12 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const misc = require('../util/misc.js');
-const search = require('../util/search.js');
-const views = require('../util/views.js');
-const TagAutoCompleteControl =
- require('../controls/tag_auto_complete_control.js');
+const events = require("../events.js");
+const misc = require("../util/misc.js");
+const search = require("../util/search.js");
+const views = require("../util/views.js");
+const TagAutoCompleteControl = require("../controls/tag_auto_complete_control.js");
-const template = views.getTemplate('tags-header');
+const template = views.getTemplate("tags-header");
class TagsHeaderView extends events.EventTarget {
constructor(ctx) {
@@ -20,31 +19,41 @@ class TagsHeaderView extends events.EventTarget {
this._autoCompleteControl = new TagAutoCompleteControl(
this._queryInputNode,
{
- confirm: tag => this._autoCompleteControl.replaceSelectedText(
- misc.escapeSearchTerm(tag.names[0]), true),
- });
+ confirm: (tag) =>
+ this._autoCompleteControl.replaceSelectedText(
+ misc.escapeSearchTerm(tag.names[0]),
+ true
+ ),
+ }
+ );
}
search.searchInputNodeFocusHelper(this._queryInputNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _queryInputNode() {
- return this._hostNode.querySelector('[name=search-text]');
+ return this._hostNode.querySelector("[name=search-text]");
}
_evtSubmit(e) {
e.preventDefault();
this._queryInputNode.blur();
- this.dispatchEvent(new CustomEvent('navigate', {detail: {parameters: {
- query: this._queryInputNode.value,
- page: 1,
- }}}));
+ this.dispatchEvent(
+ new CustomEvent("navigate", {
+ detail: {
+ parameters: {
+ query: this._queryInputNode.value,
+ page: 1,
+ },
+ },
+ })
+ );
}
}
diff --git a/client/js/views/tags_page_view.js b/client/js/views/tags_page_view.js
index cd335a4f..e2b28201 100644
--- a/client/js/views/tags_page_view.js
+++ b/client/js/views/tags_page_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('tags-page');
+const template = views.getTemplate("tags-page");
class TagsPageView {
constructor(ctx) {
diff --git a/client/js/views/top_navigation_view.js b/client/js/views/top_navigation_view.js
index 2f991870..efe75a91 100644
--- a/client/js/views/top_navigation_view.js
+++ b/client/js/views/top_navigation_view.js
@@ -1,24 +1,24 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('top-navigation');
+const template = views.getTemplate("top-navigation");
class TopNavigationView {
constructor() {
- this._hostNode = document.getElementById('top-navigation-holder');
+ this._hostNode = document.getElementById("top-navigation-holder");
}
get _mobileNavigationToggleNode() {
- return this._hostNode.querySelector('#mobile-navigation-toggle');
+ return this._hostNode.querySelector("#mobile-navigation-toggle");
}
get _navigationListNode() {
- return this._hostNode.querySelector('nav > ul');
+ return this._hostNode.querySelector("nav > ul");
}
get _navigationLinkNodes() {
- return this._navigationListNode.querySelectorAll('li > a');
+ return this._navigationListNode.querySelectorAll("li > a");
}
render(ctx) {
@@ -28,28 +28,32 @@ class TopNavigationView {
}
activate(key) {
- for (let itemNode of this._hostNode.querySelectorAll('[data-name]')) {
+ for (let itemNode of this._hostNode.querySelectorAll("[data-name]")) {
itemNode.classList.toggle(
- 'active', itemNode.getAttribute('data-name') === key);
+ "active",
+ itemNode.getAttribute("data-name") === key
+ );
}
}
_bindMobileNavigationEvents() {
- this._mobileNavigationToggleNode.addEventListener(
- 'click', e => this._mobileNavigationToggleClick(e));
+ this._mobileNavigationToggleNode.addEventListener("click", (e) =>
+ this._mobileNavigationToggleClick(e)
+ );
for (let navigationLinkNode of this._navigationLinkNodes) {
- navigationLinkNode.addEventListener(
- 'click', e => this._navigationLinkClick(e));
+ navigationLinkNode.addEventListener("click", (e) =>
+ this._navigationLinkClick(e)
+ );
}
}
_mobileNavigationToggleClick(e) {
- this._navigationListNode.classList.toggle('opened');
+ this._navigationListNode.classList.toggle("opened");
}
_navigationLinkClick(e) {
- this._navigationListNode.classList.remove('opened');
+ this._navigationListNode.classList.remove("opened");
}
}
diff --git a/client/js/views/user_delete_view.js b/client/js/views/user_delete_view.js
index bdaf9e66..37de52ff 100644
--- a/client/js/views/user_delete_view.js
+++ b/client/js/views/user_delete_view.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('user-delete');
+const template = views.getTemplate("user-delete");
class UserDeleteView extends events.EventTarget {
constructor(ctx) {
@@ -14,7 +14,7 @@ class UserDeleteView extends events.EventTarget {
views.replaceContent(this._hostNode, template(ctx));
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -39,16 +39,17 @@ class UserDeleteView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- user: this._user,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ user: this._user,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
-
+ return this._hostNode.querySelector("form");
}
}
diff --git a/client/js/views/user_edit_view.js b/client/js/views/user_edit_view.js
index 317d8e1f..4886726a 100644
--- a/client/js/views/user_edit_view.js
+++ b/client/js/views/user_edit_view.js
@@ -1,18 +1,18 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const api = require('../api.js');
-const views = require('../util/views.js');
-const FileDropperControl = require('../controls/file_dropper_control.js');
+const events = require("../events.js");
+const api = require("../api.js");
+const views = require("../util/views.js");
+const FileDropperControl = require("../controls/file_dropper_control.js");
-const template = views.getTemplate('user-edit');
+const template = views.getTemplate("user-edit");
class UserEditView extends events.EventTarget {
constructor(ctx) {
super();
- ctx.userNamePattern = api.getUserNameRegex() + (/|^$/).source;
- ctx.passwordPattern = api.getPasswordRegex() + (/|^$/).source;
+ ctx.userNamePattern = api.getUserNameRegex() + /|^$/.source;
+ ctx.passwordPattern = api.getPasswordRegex() + /|^$/.source;
this._user = ctx.user;
this._hostNode = ctx.hostNode;
@@ -22,24 +22,26 @@ class UserEditView extends events.EventTarget {
this._avatarContent = null;
if (this._avatarContentInputNode) {
this._avatarFileDropper = new FileDropperControl(
- this._avatarContentInputNode, {lock: true});
- this._avatarFileDropper.addEventListener('fileadd', e => {
+ this._avatarContentInputNode,
+ { lock: true }
+ );
+ this._avatarFileDropper.addEventListener("fileadd", (e) => {
this._hostNode.querySelector(
- '[name=avatar-style][value=manual]').checked = true;
+ "[name=avatar-style][value=manual]"
+ ).checked = true;
this._avatarContent = e.detail.files[0];
});
}
- for (let node of this._formNode.querySelectorAll('input, select')) {
- node.addEventListener(
- 'change', e => {
- if (!e.target.classList.contains('anticomplete')) {
- this.dispatchEvent(new CustomEvent('change'));
- }
- });
+ for (let node of this._formNode.querySelectorAll("input, select")) {
+ node.addEventListener("change", (e) => {
+ if (!e.target.classList.contains("anticomplete")) {
+ this.dispatchEvent(new CustomEvent("change"));
+ }
+ });
}
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
clearMessages() {
@@ -64,61 +66,63 @@ class UserEditView extends events.EventTarget {
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- user: this._user,
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ user: this._user,
- name: this._userNameInputNode ?
- this._userNameInputNode.value :
- undefined,
+ name: this._userNameInputNode
+ ? this._userNameInputNode.value
+ : undefined,
- email: this._emailInputNode ?
- this._emailInputNode.value :
- undefined,
+ email: this._emailInputNode
+ ? this._emailInputNode.value
+ : undefined,
- rank: this._rankInputNode ?
- this._rankInputNode.value :
- undefined,
+ rank: this._rankInputNode
+ ? this._rankInputNode.value
+ : undefined,
- avatarStyle: this._avatarStyleInputNode ?
- this._avatarStyleInputNode.value :
- undefined,
+ avatarStyle: this._avatarStyleInputNode
+ ? this._avatarStyleInputNode.value
+ : undefined,
- password: this._passwordInputNode ?
- this._passwordInputNode.value :
- undefined,
+ password: this._passwordInputNode
+ ? this._passwordInputNode.value
+ : undefined,
- avatarContent: this._avatarContent,
- },
- }));
+ avatarContent: this._avatarContent,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _rankInputNode() {
- return this._formNode.querySelector('[name=rank]');
+ return this._formNode.querySelector("[name=rank]");
}
get _emailInputNode() {
- return this._formNode.querySelector('[name=email]');
+ return this._formNode.querySelector("[name=email]");
}
get _userNameInputNode() {
- return this._formNode.querySelector('[name=name]');
+ return this._formNode.querySelector("[name=name]");
}
get _passwordInputNode() {
- return this._formNode.querySelector('[name=password]');
+ return this._formNode.querySelector("[name=password]");
}
get _avatarContentInputNode() {
- return this._formNode.querySelector('#avatar-content');
+ return this._formNode.querySelector("#avatar-content");
}
get _avatarStyleInputNode() {
- return this._formNode.querySelector('[name=avatar-style]:checked');
+ return this._formNode.querySelector("[name=avatar-style]:checked");
}
}
diff --git a/client/js/views/user_summary_view.js b/client/js/views/user_summary_view.js
index d8463f36..eab827f2 100644
--- a/client/js/views/user_summary_view.js
+++ b/client/js/views/user_summary_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('user-summary');
+const template = views.getTemplate("user-summary");
class UserSummaryView {
constructor(ctx) {
diff --git a/client/js/views/user_tokens_view.js b/client/js/views/user_tokens_view.js
index 8e283bf8..68707f08 100644
--- a/client/js/views/user_tokens_view.js
+++ b/client/js/views/user_tokens_view.js
@@ -1,9 +1,9 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('user-tokens');
+const template = views.getTemplate("user-tokens");
class UserTokenView extends events.EventTarget {
constructor(ctx) {
@@ -16,7 +16,7 @@ class UserTokenView extends events.EventTarget {
views.replaceContent(this._hostNode, template(ctx));
views.decorateValidator(this._formNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
this._decorateTokenForms();
this._decorateTokenNoteChangeLinks();
@@ -26,8 +26,9 @@ class UserTokenView extends events.EventTarget {
this._tokenFormNodes = [];
for (let i = 0; i < this._tokens.length; i++) {
let formNode = this._hostNode.querySelector(
- '.token[data-token-id=\"' + i + '\"]');
- formNode.addEventListener('submit', e => this._evtDelete(e));
+ '.token[data-token-id="' + i + '"]'
+ );
+ formNode.addEventListener("submit", (e) => this._evtDelete(e));
this._tokenFormNodes.push(formNode);
}
}
@@ -35,9 +36,11 @@ class UserTokenView extends events.EventTarget {
_decorateTokenNoteChangeLinks() {
for (let i = 0; i < this._tokens.length; i++) {
let linkNode = this._hostNode.querySelector(
- '.token-change-note[data-token-id=\"' + i + '\"]');
- linkNode.addEventListener(
- 'click', e => this._evtChangeNoteClick(e));
+ '.token-change-note[data-token-id="' + i + '"]'
+ );
+ linkNode.addEventListener("click", (e) =>
+ this._evtChangeNoteClick(e)
+ );
}
}
@@ -69,65 +72,75 @@ class UserTokenView extends events.EventTarget {
_evtDelete(e) {
e.preventDefault();
- const userToken = this._tokens[parseInt(
- e.target.getAttribute('data-token-id'))];
- this.dispatchEvent(new CustomEvent('delete', {
- detail: {
- user: this._user,
- userToken: userToken,
- },
- }));
+ const userToken = this._tokens[
+ parseInt(e.target.getAttribute("data-token-id"))
+ ];
+ this.dispatchEvent(
+ new CustomEvent("delete", {
+ detail: {
+ user: this._user,
+ userToken: userToken,
+ },
+ })
+ );
}
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('submit', {
- detail: {
- user: this._user,
+ this.dispatchEvent(
+ new CustomEvent("submit", {
+ detail: {
+ user: this._user,
- note: this._userTokenNoteInputNode ?
- this._userTokenNoteInputNode.value :
- undefined,
+ note: this._userTokenNoteInputNode
+ ? this._userTokenNoteInputNode.value
+ : undefined,
- expirationTime:
- (this._userTokenExpirationTimeInputNode
- && this._userTokenExpirationTimeInputNode.value) ?
- new Date(this._userTokenExpirationTimeInputNode.value)
- .toISOString() :
- undefined,
- },
- }));
+ expirationTime:
+ this._userTokenExpirationTimeInputNode &&
+ this._userTokenExpirationTimeInputNode.value
+ ? new Date(
+ this._userTokenExpirationTimeInputNode.value
+ ).toISOString()
+ : undefined,
+ },
+ })
+ );
}
_evtChangeNoteClick(e) {
e.preventDefault();
const userToken = this._tokens[
- parseInt(e.target.getAttribute('data-token-id'))];
+ parseInt(e.target.getAttribute("data-token-id"))
+ ];
const text = window.prompt(
- 'Please enter the new name:',
- userToken.note !== null ? userToken.note : undefined);
+ "Please enter the new name:",
+ userToken.note !== null ? userToken.note : undefined
+ );
if (!text) {
return;
}
- this.dispatchEvent(new CustomEvent('update', {
- detail: {
- user: this._user,
- userToken: userToken,
- note: text ? text : undefined,
- },
- }));
+ this.dispatchEvent(
+ new CustomEvent("update", {
+ detail: {
+ user: this._user,
+ userToken: userToken,
+ note: text ? text : undefined,
+ },
+ })
+ );
}
get _formNode() {
- return this._hostNode.querySelector('#create-token-form');
+ return this._hostNode.querySelector("#create-token-form");
}
get _userTokenNoteInputNode() {
- return this._formNode.querySelector('.note input');
+ return this._formNode.querySelector(".note input");
}
get _userTokenExpirationTimeInputNode() {
- return this._formNode.querySelector('.expirationTime input');
+ return this._formNode.querySelector(".expirationTime input");
}
}
diff --git a/client/js/views/user_view.js b/client/js/views/user_view.js
index d96ebe1b..2eebfe93 100644
--- a/client/js/views/user_view.js
+++ b/client/js/views/user_view.js
@@ -1,24 +1,24 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const views = require('../util/views.js');
-const UserDeleteView = require('./user_delete_view.js');
-const UserTokensView = require('./user_tokens_view.js');
-const UserSummaryView = require('./user_summary_view.js');
-const UserEditView = require('./user_edit_view.js');
-const EmptyView = require('../views/empty_view.js');
+const events = require("../events.js");
+const views = require("../util/views.js");
+const UserDeleteView = require("./user_delete_view.js");
+const UserTokensView = require("./user_tokens_view.js");
+const UserSummaryView = require("./user_summary_view.js");
+const UserEditView = require("./user_edit_view.js");
+const EmptyView = require("../views/empty_view.js");
-const template = views.getTemplate('user');
+const template = views.getTemplate("user");
class UserView extends events.EventTarget {
constructor(ctx) {
super();
this._ctx = ctx;
- ctx.user.addEventListener('change', e => this._evtChange(e));
- ctx.section = ctx.section || 'summary';
+ ctx.user.addEventListener("change", (e) => this._evtChange(e));
+ ctx.section = ctx.section || "summary";
- this._hostNode = document.getElementById('content-holder');
+ this._hostNode = document.getElementById("content-holder");
this._install();
}
@@ -26,52 +26,56 @@ class UserView extends events.EventTarget {
const ctx = this._ctx;
views.replaceContent(this._hostNode, template(ctx));
- for (let item of this._hostNode.querySelectorAll('[data-name]')) {
+ for (let item of this._hostNode.querySelectorAll("[data-name]")) {
item.classList.toggle(
- 'active', item.getAttribute('data-name') === ctx.section);
- if (item.getAttribute('data-name') === ctx.section) {
+ "active",
+ item.getAttribute("data-name") === ctx.section
+ );
+ if (item.getAttribute("data-name") === ctx.section) {
item.parentNode.scrollLeft =
item.getBoundingClientRect().left -
- item.parentNode.getBoundingClientRect().left
+ item.parentNode.getBoundingClientRect().left;
}
}
- ctx.hostNode = this._hostNode.querySelector('#user-content-holder');
- if (ctx.section === 'edit') {
+ ctx.hostNode = this._hostNode.querySelector("#user-content-holder");
+ if (ctx.section === "edit") {
if (!this._ctx.canEditAnything) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to edit users.');
+ "You don't have privileges to edit users."
+ );
} else {
this._view = new UserEditView(ctx);
- events.proxyEvent(this._view, this, 'submit');
+ events.proxyEvent(this._view, this, "submit");
}
- } else if (ctx.section === 'list-tokens') {
+ } else if (ctx.section === "list-tokens") {
if (!this._ctx.canListTokens) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to view user tokens.');
+ "You don't have privileges to view user tokens."
+ );
} else {
this._view = new UserTokensView(ctx);
- events.proxyEvent(this._view, this, 'delete', 'delete-token');
- events.proxyEvent(this._view, this, 'submit', 'create-token');
- events.proxyEvent(this._view, this, 'update', 'update-token');
+ events.proxyEvent(this._view, this, "delete", "delete-token");
+ events.proxyEvent(this._view, this, "submit", "create-token");
+ events.proxyEvent(this._view, this, "update", "update-token");
}
- } else if (ctx.section === 'delete') {
+ } else if (ctx.section === "delete") {
if (!this._ctx.canDelete) {
this._view = new EmptyView();
this._view.showError(
- 'You don\'t have privileges to delete users.');
+ "You don't have privileges to delete users."
+ );
} else {
this._view = new UserDeleteView(ctx);
- events.proxyEvent(this._view, this, 'submit', 'delete');
+ events.proxyEvent(this._view, this, "submit", "delete");
}
-
} else {
this._view = new UserSummaryView(ctx);
}
- events.proxyEvent(this._view, this, 'change');
+ events.proxyEvent(this._view, this, "change");
views.syncScrollPosition();
}
diff --git a/client/js/views/users_header_view.js b/client/js/views/users_header_view.js
index 08b7620c..d6b7c6a2 100644
--- a/client/js/views/users_header_view.js
+++ b/client/js/views/users_header_view.js
@@ -1,10 +1,10 @@
-'use strict';
+"use strict";
-const events = require('../events.js');
-const search = require('../util/search.js');
-const views = require('../util/views.js');
+const events = require("../events.js");
+const search = require("../util/search.js");
+const views = require("../util/views.js");
-const template = views.getTemplate('users-header');
+const template = views.getTemplate("users-header");
class UsersHeaderView extends events.EventTarget {
constructor(ctx) {
@@ -15,23 +15,29 @@ class UsersHeaderView extends events.EventTarget {
search.searchInputNodeFocusHelper(this._queryInputNode);
- this._formNode.addEventListener('submit', e => this._evtSubmit(e));
+ this._formNode.addEventListener("submit", (e) => this._evtSubmit(e));
}
get _formNode() {
- return this._hostNode.querySelector('form');
+ return this._hostNode.querySelector("form");
}
get _queryInputNode() {
- return this._formNode.querySelector('[name=search-text]');
+ return this._formNode.querySelector("[name=search-text]");
}
_evtSubmit(e) {
e.preventDefault();
- this.dispatchEvent(new CustomEvent('navigate', {detail: {parameters: {
- query: this._queryInputNode.value,
- page: 1,
- }}}));
+ this.dispatchEvent(
+ new CustomEvent("navigate", {
+ detail: {
+ parameters: {
+ query: this._queryInputNode.value,
+ page: 1,
+ },
+ },
+ })
+ );
}
}
diff --git a/client/js/views/users_page_view.js b/client/js/views/users_page_view.js
index f772d4ed..26894339 100644
--- a/client/js/views/users_page_view.js
+++ b/client/js/views/users_page_view.js
@@ -1,8 +1,8 @@
-'use strict';
+"use strict";
-const views = require('../util/views.js');
+const views = require("../util/views.js");
-const template = views.getTemplate('users-page');
+const template = views.getTemplate("users-page");
class UsersPageView {
constructor(ctx) {
diff --git a/server/.dockerignore b/server/.dockerignore
index 0b6e9fc1..cdfeafc5 100644
--- a/server/.dockerignore
+++ b/server/.dockerignore
@@ -1,5 +1,6 @@
# Linter configs
-setup.cfg
+pyproject.toml
+.flake8
# Python requirements files
requirements.txt
diff --git a/server/.flake8 b/server/.flake8
new file mode 100644
index 00000000..023ae127
--- /dev/null
+++ b/server/.flake8
@@ -0,0 +1,5 @@
+[flake8]
+filename = szurubooru/
+exclude = __pycache__
+ignore = F401, W503, W504, E203, E231
+max-line-length = 79
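[Editor's note] The ignore list grows from "F401, W503, W504" (see the removed setup.cfg below) to also cover E203 and E231, which lines up with the switch to Black formatting; E203 in particular is the clash Black's own documentation recommends silencing. A minimal illustrative sketch of the kind of Black-formatted line the new E203 ignore exists for -- the function and variable names are hypothetical, not from this patch:

    # Illustrative only; assumes Black's default slice formatting.
    def middle_slice(values, lower, upper):
        # Black keeps a space before ":" when slice bounds are expressions,
        # which pycodestyle would otherwise flag as E203 ("whitespace before ':'").
        return values[lower + 1 : upper - 1]

W503/W504 (line break before/after a binary operator) were already ignored in the old configuration, so only the Black-related additions are new here.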
diff --git a/server/Dockerfile.test b/server/Dockerfile.test
index d5b11339..16c61218 100644
--- a/server/Dockerfile.test
+++ b/server/Dockerfile.test
@@ -47,4 +47,4 @@ ENV POSTGRES_HOST=x \
COPY --chown=app:app ./ /opt/app/
ENTRYPOINT ["pytest", "--tb=short"]
-CMD ["--cov-report=term-missing:skip-covered", "--cov=szurubooru", "szurubooru/"]
+CMD ["szurubooru/"]
diff --git a/server/hooks/test b/server/hooks/test
index bf7776dc..1edf7016 100755
--- a/server/hooks/test
+++ b/server/hooks/test
@@ -1,8 +1,8 @@
#!/bin/sh
set -e
-docker build -f ${DOCKERFILE_PATH:-Dockerfile}.test -t ${IMAGE_NAME}-test .
-docker run --rm -t ${IMAGE_NAME}-test
-docker rmi ${IMAGE_NAME}-test
+docker run --rm \
+ -t $(docker build -f ${DOCKERFILE_PATH:-Dockerfile}.test -q .) \
+ --color=no szurubooru/
exit $?
diff --git a/server/pyproject.toml b/server/pyproject.toml
new file mode 100644
index 00000000..ccf47fc4
--- /dev/null
+++ b/server/pyproject.toml
@@ -0,0 +1,10 @@
+[tool.black]
+line-length = 79
+
+[tool.isort]
+known_first_party = ["szurubooru"]
+known_third_party = ["PIL", "alembic", "coloredlogs", "freezegun", "nacl", "numpy", "pyrfc3339", "pytest", "pytz", "sqlalchemy", "yaml", "youtube_dl"]
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
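[Editor's note] The isort settings above (multi_line_output = 3 with include_trailing_comma, use_parentheses, and force_grid_wrap = 0) select the "vertical hanging indent" style that matches Black's wrapping, which is why the szurubooru.func imports later in this patch change shape. A before/after sketch based on one of the imports rewritten below:

    # Old style (wrapped continuation on a single line):
    from szurubooru.func import (
        auth, comments, posts, scores, versions, serialization)

    # After isort with multi_line_output = 3, trailing comma, and parentheses:
    from szurubooru.func import (
        auth,
        comments,
        posts,
        scores,
        serialization,
        versions,
    )

The known_first_party/known_third_party lists only affect section ordering, not the wrapping shown here.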
diff --git a/server/setup.cfg b/server/setup.cfg
deleted file mode 100644
index 21790814..00000000
--- a/server/setup.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-[flake8]
-filename = szurubooru/
-exclude = __pycache__
-ignore = F401, W503, W504
-max-line-length = 79
-
-[mypy]
-ignore_missing_imports = True
-follow_imports = skip
-disallow_untyped_calls = True
-disallow_untyped_defs = True
-check_untyped_defs = True
-disallow_subclassing_any = False
-warn_redundant_casts = True
-warn_unused_ignores = True
-strict_optional = True
-strict_boolean = False
-
-[mypy-szurubooru.tests.*]
-ignore_errors=True
diff --git a/server/szuru-admin b/server/szuru-admin
index 6433fdef..7abc69c0 100755
--- a/server/szuru-admin
+++ b/server/szuru-admin
@@ -13,8 +13,9 @@ from getpass import getpass
from sys import stderr
from szurubooru import config, db, errors, model
-from szurubooru.func import files, images, \
- posts as postfuncs, users as userfuncs
+from szurubooru.func import files, images
+from szurubooru.func import posts as postfuncs
+from szurubooru.func import users as userfuncs
def reset_password(username: str) -> None:
diff --git a/server/szurubooru/api/__init__.py b/server/szurubooru/api/__init__.py
index 28f0e984..d9b7ecba 100644
--- a/server/szurubooru/api/__init__.py
+++ b/server/szurubooru/api/__init__.py
@@ -1,12 +1,12 @@
+import szurubooru.api.comment_api
import szurubooru.api.info_api
-import szurubooru.api.user_api
-import szurubooru.api.user_token_api
-import szurubooru.api.post_api
-import szurubooru.api.tag_api
-import szurubooru.api.tag_category_api
+import szurubooru.api.password_reset_api
import szurubooru.api.pool_api
import szurubooru.api.pool_category_api
-import szurubooru.api.comment_api
-import szurubooru.api.password_reset_api
+import szurubooru.api.post_api
import szurubooru.api.snapshot_api
+import szurubooru.api.tag_api
+import szurubooru.api.tag_category_api
import szurubooru.api.upload_api
+import szurubooru.api.user_api
+import szurubooru.api.user_token_api
diff --git a/server/szurubooru/api/comment_api.py b/server/szurubooru/api/comment_api.py
index cc8350a6..d60d23ed 100644
--- a/server/szurubooru/api/comment_api.py
+++ b/server/szurubooru/api/comment_api.py
@@ -1,44 +1,52 @@
-from typing import Dict
from datetime import datetime
-from szurubooru import search, rest, model
-from szurubooru.func import (
- auth, comments, posts, scores, versions, serialization)
+from typing import Dict
+from szurubooru import model, rest, search
+from szurubooru.func import (
+ auth,
+ comments,
+ posts,
+ scores,
+ serialization,
+ versions,
+)
_search_executor = search.Executor(search.configs.CommentSearchConfig())
def _get_comment(params: Dict[str, str]) -> model.Comment:
try:
- comment_id = int(params['comment_id'])
+ comment_id = int(params["comment_id"])
except TypeError:
raise comments.InvalidCommentIdError(
- 'Invalid comment ID: %r.' % params['comment_id'])
+ "Invalid comment ID: %r." % params["comment_id"]
+ )
return comments.get_comment_by_id(comment_id)
-def _serialize(
- ctx: rest.Context, comment: model.Comment) -> rest.Response:
+def _serialize(ctx: rest.Context, comment: model.Comment) -> rest.Response:
return comments.serialize_comment(
- comment,
- ctx.user,
- options=serialization.get_serialization_options(ctx))
+ comment, ctx.user, options=serialization.get_serialization_options(ctx)
+ )
-@rest.routes.get('/comments/?')
+@rest.routes.get("/comments/?")
def get_comments(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'comments:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "comments:list")
return _search_executor.execute_and_serialize(
- ctx, lambda comment: _serialize(ctx, comment))
+ ctx, lambda comment: _serialize(ctx, comment)
+ )
-@rest.routes.post('/comments/?')
+@rest.routes.post("/comments/?")
def create_comment(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'comments:create')
- text = ctx.get_param_as_string('text')
- post_id = ctx.get_param_as_int('postId')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "comments:create")
+ text = ctx.get_param_as_string("text")
+ post_id = ctx.get_param_as_int("postId")
post = posts.get_post_by_id(post_id)
comment = comments.create_comment(ctx.user, post, text)
ctx.session.add(comment)
@@ -46,53 +54,55 @@ def create_comment(
return _serialize(ctx, comment)
-@rest.routes.get('/comment/(?P<comment_id>[^/]+)/?')
+@rest.routes.get("/comment/(?P<comment_id>[^/]+)/?")
def get_comment(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'comments:view')
+ auth.verify_privilege(ctx.user, "comments:view")
comment = _get_comment(params)
return _serialize(ctx, comment)
-@rest.routes.put('/comment/(?P<comment_id>[^/]+)/?')
+@rest.routes.put("/comment/(?P<comment_id>[^/]+)/?")
def update_comment(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
comment = _get_comment(params)
versions.verify_version(comment, ctx)
versions.bump_version(comment)
- infix = 'own' if ctx.user.user_id == comment.user_id else 'any'
- text = ctx.get_param_as_string('text')
- auth.verify_privilege(ctx.user, 'comments:edit:%s' % infix)
+ infix = "own" if ctx.user.user_id == comment.user_id else "any"
+ text = ctx.get_param_as_string("text")
+ auth.verify_privilege(ctx.user, "comments:edit:%s" % infix)
comments.update_comment_text(comment, text)
comment.last_edit_time = datetime.utcnow()
ctx.session.commit()
return _serialize(ctx, comment)
-@rest.routes.delete('/comment/(?P<comment_id>[^/]+)/?')
+@rest.routes.delete("/comment/(?P<comment_id>[^/]+)/?")
def delete_comment(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
comment = _get_comment(params)
versions.verify_version(comment, ctx)
- infix = 'own' if ctx.user.user_id == comment.user_id else 'any'
- auth.verify_privilege(ctx.user, 'comments:delete:%s' % infix)
+ infix = "own" if ctx.user.user_id == comment.user_id else "any"
+ auth.verify_privilege(ctx.user, "comments:delete:%s" % infix)
ctx.session.delete(comment)
ctx.session.commit()
return {}
-@rest.routes.put('/comment/(?P<comment_id>[^/]+)/score/?')
+@rest.routes.put("/comment/(?P<comment_id>[^/]+)/score/?")
def set_comment_score(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'comments:score')
- score = ctx.get_param_as_int('score')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "comments:score")
+ score = ctx.get_param_as_int("score")
comment = _get_comment(params)
scores.set_score(comment, ctx.user, score)
ctx.session.commit()
return _serialize(ctx, comment)
-@rest.routes.delete('/comment/(?P<comment_id>[^/]+)/score/?')
+@rest.routes.delete("/comment/(?P<comment_id>[^/]+)/score/?")
def delete_comment_score(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'comments:score')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "comments:score")
comment = _get_comment(params)
scores.delete_score(comment, ctx.user)
ctx.session.commit()
diff --git a/server/szurubooru/api/info_api.py b/server/szurubooru/api/info_api.py
index 1e2fd1d4..757b09cf 100644
--- a/server/szurubooru/api/info_api.py
+++ b/server/szurubooru/api/info_api.py
@@ -1,10 +1,10 @@
import os
-from typing import Optional, Dict
from datetime import datetime, timedelta
+from typing import Dict, Optional
+
from szurubooru import config, rest
from szurubooru.func import auth, posts, users, util
-
_cache_time = None # type: Optional[datetime]
_cache_result = None # type: Optional[int]
@@ -17,7 +17,7 @@ def _get_disk_usage() -> int:
assert _cache_result is not None
return _cache_result
total_size = 0
- for dir_path, _, file_names in os.walk(config.config['data_dir']):
+ for dir_path, _, file_names in os.walk(config.config["data_dir"]):
for file_name in file_names:
file_path = os.path.join(dir_path, file_name)
try:
@@ -29,35 +29,38 @@ def _get_disk_usage() -> int:
return total_size
-@rest.routes.get('/info/?')
-def get_info(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
+@rest.routes.get("/info/?")
+def get_info(ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
post_feature = posts.try_get_current_post_feature()
ret = {
- 'postCount': posts.get_post_count(),
- 'diskUsage': _get_disk_usage(),
- 'serverTime': datetime.utcnow(),
- 'config': {
- 'name': config.config['name'],
- 'userNameRegex': config.config['user_name_regex'],
- 'passwordRegex': config.config['password_regex'],
- 'tagNameRegex': config.config['tag_name_regex'],
- 'tagCategoryNameRegex': config.config['tag_category_name_regex'],
- 'defaultUserRank': config.config['default_rank'],
- 'enableSafety': config.config['enable_safety'],
- 'contactEmail': config.config['contact_email'],
- 'canSendMails': bool(config.config['smtp']['host']),
- 'privileges':
- util.snake_case_to_lower_camel_case_keys(
- config.config['privileges']),
+ "postCount": posts.get_post_count(),
+ "diskUsage": _get_disk_usage(),
+ "serverTime": datetime.utcnow(),
+ "config": {
+ "name": config.config["name"],
+ "userNameRegex": config.config["user_name_regex"],
+ "passwordRegex": config.config["password_regex"],
+ "tagNameRegex": config.config["tag_name_regex"],
+ "tagCategoryNameRegex": config.config["tag_category_name_regex"],
+ "defaultUserRank": config.config["default_rank"],
+ "enableSafety": config.config["enable_safety"],
+ "contactEmail": config.config["contact_email"],
+ "canSendMails": bool(config.config["smtp"]["host"]),
+ "privileges": util.snake_case_to_lower_camel_case_keys(
+ config.config["privileges"]
+ ),
},
}
- if auth.has_privilege(ctx.user, 'posts:view:featured'):
- ret['featuredPost'] = (
+ if auth.has_privilege(ctx.user, "posts:view:featured"):
+ ret["featuredPost"] = (
posts.serialize_post(post_feature.post, ctx.user)
- if post_feature else None)
- ret['featuringUser'] = (
+ if post_feature
+ else None
+ )
+ ret["featuringUser"] = (
users.serialize_user(post_feature.user, ctx.user)
- if post_feature else None)
- ret['featuringTime'] = post_feature.time if post_feature else None
+ if post_feature
+ else None
+ )
+ ret["featuringTime"] = post_feature.time if post_feature else None
return ret
diff --git a/server/szurubooru/api/password_reset_api.py b/server/szurubooru/api/password_reset_api.py
index 5296d235..e0e31b7d 100644
--- a/server/szurubooru/api/password_reset_api.py
+++ b/server/szurubooru/api/password_reset_api.py
@@ -1,60 +1,65 @@
+from hashlib import md5
from typing import Dict
+
from szurubooru import config, errors, rest
from szurubooru.func import auth, mailer, users, versions
-from hashlib import md5
-
-MAIL_SUBJECT = 'Password reset for {name}'
+MAIL_SUBJECT = "Password reset for {name}"
MAIL_BODY = (
- 'You (or someone else) requested to reset your password on {name}.\n'
- 'If you wish to proceed, click this link: {url}\n'
- 'Otherwise, please ignore this email.')
+ "You (or someone else) requested to reset your password on {name}.\n"
+ "If you wish to proceed, click this link: {url}\n"
+ "Otherwise, please ignore this email."
+)
-@rest.routes.get('/password-reset/(?P<user_name>[^/]+)/?')
+@rest.routes.get("/password-reset/(?P<user_name>[^/]+)/?")
def start_password_reset(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user_name = params['user_name']
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ user_name = params["user_name"]
user = users.get_user_by_name_or_email(user_name)
if not user.email:
raise errors.ValidationError(
- 'User %r hasn\'t supplied email. Cannot reset password.' % (
- user_name))
+ "User %r hasn't supplied email. Cannot reset password."
+ % (user_name)
+ )
token = auth.generate_authentication_token(user)
- if config.config['domain']:
- url = config.config['domain']
- elif 'HTTP_ORIGIN' in ctx.env:
- url = ctx.env['HTTP_ORIGIN'].rstrip('/')
- elif 'HTTP_REFERER' in ctx.env:
- url = ctx.env['HTTP_REFERER'].rstrip('/')
+ if config.config["domain"]:
+ url = config.config["domain"]
+ elif "HTTP_ORIGIN" in ctx.env:
+ url = ctx.env["HTTP_ORIGIN"].rstrip("/")
+ elif "HTTP_REFERER" in ctx.env:
+ url = ctx.env["HTTP_REFERER"].rstrip("/")
else:
- url = ''
- url += '/password-reset/%s:%s' % (user.name, token)
+ url = ""
+ url += "/password-reset/%s:%s" % (user.name, token)
mailer.send_mail(
- config.config['smtp']['from'],
+ config.config["smtp"]["from"],
user.email,
- MAIL_SUBJECT.format(name=config.config['name']),
- MAIL_BODY.format(name=config.config['name'], url=url))
+ MAIL_SUBJECT.format(name=config.config["name"]),
+ MAIL_BODY.format(name=config.config["name"], url=url),
+ )
return {}
def _hash(token: str) -> str:
- return md5(token.encode('utf-8')).hexdigest()
+ return md5(token.encode("utf-8")).hexdigest()
-@rest.routes.post('/password-reset/(?P<user_name>[^/]+)/?')
+@rest.routes.post("/password-reset/(?P<user_name>[^/]+)/?")
def finish_password_reset(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user_name = params['user_name']
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ user_name = params["user_name"]
user = users.get_user_by_name_or_email(user_name)
good_token = auth.generate_authentication_token(user)
- token = ctx.get_param_as_string('token')
+ token = ctx.get_param_as_string("token")
if _hash(token) != _hash(good_token):
- raise errors.ValidationError('Invalid password reset token.')
+ raise errors.ValidationError("Invalid password reset token.")
new_password = users.reset_user_password(user)
versions.bump_version(user)
ctx.session.commit()
- return {'password': new_password}
+ return {"password": new_password}
diff --git a/server/szurubooru/api/pool_api.py b/server/szurubooru/api/pool_api.py
index 9627ed21..a2fb716b 100644
--- a/server/szurubooru/api/pool_api.py
+++ b/server/szurubooru/api/pool_api.py
@@ -1,38 +1,42 @@
-from typing import Optional, List, Dict
from datetime import datetime
-from szurubooru import db, model, search, rest
-from szurubooru.func import auth, pools, snapshots, serialization, versions
+from typing import Dict, List, Optional
+from szurubooru import db, model, rest, search
+from szurubooru.func import auth, pools, serialization, snapshots, versions
_search_executor = search.Executor(search.configs.PoolSearchConfig())
def _serialize(ctx: rest.Context, pool: model.Pool) -> rest.Response:
return pools.serialize_pool(
- pool, options=serialization.get_serialization_options(ctx))
+ pool, options=serialization.get_serialization_options(ctx)
+ )
def _get_pool(params: Dict[str, str]) -> model.Pool:
- return pools.get_pool_by_id(params['pool_id'])
+ return pools.get_pool_by_id(params["pool_id"])
-@rest.routes.get('/pools/?')
+@rest.routes.get("/pools/?")
def get_pools(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pools:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pools:list")
return _search_executor.execute_and_serialize(
- ctx, lambda pool: _serialize(ctx, pool))
+ ctx, lambda pool: _serialize(ctx, pool)
+ )
-@rest.routes.post('/pool/?')
+@rest.routes.post("/pool/?")
def create_pool(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pools:create')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pools:create")
- names = ctx.get_param_as_string_list('names')
- category = ctx.get_param_as_string('category')
- description = ctx.get_param_as_string('description', default='')
- posts = ctx.get_param_as_int_list('posts', default=[])
+ names = ctx.get_param_as_string_list("names")
+ category = ctx.get_param_as_string("category")
+ description = ctx.get_param_as_string("description", default="")
+ posts = ctx.get_param_as_int_list("posts", default=[])
pool = pools.create_pool(names, category, posts)
pool.last_edit_time = datetime.utcnow()
@@ -44,32 +48,34 @@ def create_pool(
return _serialize(ctx, pool)
-@rest.routes.get('/pool/(?P<pool_id>[^/]+)/?')
+@rest.routes.get("/pool/(?P<pool_id>[^/]+)/?")
def get_pool(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pools:view')
+ auth.verify_privilege(ctx.user, "pools:view")
pool = _get_pool(params)
return _serialize(ctx, pool)
-@rest.routes.put('/pool/(?P<pool_id>[^/]+)/?')
+@rest.routes.put("/pool/(?P<pool_id>[^/]+)/?")
def update_pool(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
pool = _get_pool(params)
versions.verify_version(pool, ctx)
versions.bump_version(pool)
- if ctx.has_param('names'):
- auth.verify_privilege(ctx.user, 'pools:edit:names')
- pools.update_pool_names(pool, ctx.get_param_as_string_list('names'))
- if ctx.has_param('category'):
- auth.verify_privilege(ctx.user, 'pools:edit:category')
+ if ctx.has_param("names"):
+ auth.verify_privilege(ctx.user, "pools:edit:names")
+ pools.update_pool_names(pool, ctx.get_param_as_string_list("names"))
+ if ctx.has_param("category"):
+ auth.verify_privilege(ctx.user, "pools:edit:category")
pools.update_pool_category_name(
- pool, ctx.get_param_as_string('category'))
- if ctx.has_param('description'):
- auth.verify_privilege(ctx.user, 'pools:edit:description')
+ pool, ctx.get_param_as_string("category")
+ )
+ if ctx.has_param("description"):
+ auth.verify_privilege(ctx.user, "pools:edit:description")
pools.update_pool_description(
- pool, ctx.get_param_as_string('description'))
- if ctx.has_param('posts'):
- auth.verify_privilege(ctx.user, 'pools:edit:posts')
- posts = ctx.get_param_as_int_list('posts')
+ pool, ctx.get_param_as_string("description")
+ )
+ if ctx.has_param("posts"):
+ auth.verify_privilege(ctx.user, "pools:edit:posts")
+ posts = ctx.get_param_as_int_list("posts")
pools.update_pool_posts(pool, posts)
pool.last_edit_time = datetime.utcnow()
ctx.session.flush()
@@ -78,28 +84,29 @@ def update_pool(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
return _serialize(ctx, pool)
-@rest.routes.delete('/pool/(?P<pool_id>[^/]+)/?')
+@rest.routes.delete("/pool/(?P<pool_id>[^/]+)/?")
def delete_pool(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
pool = _get_pool(params)
versions.verify_version(pool, ctx)
- auth.verify_privilege(ctx.user, 'pools:delete')
+ auth.verify_privilege(ctx.user, "pools:delete")
snapshots.delete(pool, ctx.user)
pools.delete(pool)
ctx.session.commit()
return {}
-@rest.routes.post('/pool-merge/?')
+@rest.routes.post("/pool-merge/?")
def merge_pools(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- source_pool_id = ctx.get_param_as_string('remove')
- target_pool_id = ctx.get_param_as_string('mergeTo')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ source_pool_id = ctx.get_param_as_string("remove")
+ target_pool_id = ctx.get_param_as_string("mergeTo")
source_pool = pools.get_pool_by_id(source_pool_id)
target_pool = pools.get_pool_by_id(target_pool_id)
- versions.verify_version(source_pool, ctx, 'removeVersion')
- versions.verify_version(target_pool, ctx, 'mergeToVersion')
+ versions.verify_version(source_pool, ctx, "removeVersion")
+ versions.verify_version(target_pool, ctx, "mergeToVersion")
versions.bump_version(target_pool)
- auth.verify_privilege(ctx.user, 'pools:merge')
+ auth.verify_privilege(ctx.user, "pools:merge")
pools.merge_pools(source_pool, target_pool)
snapshots.merge(source_pool, target_pool, ctx.user)
ctx.session.commit()
diff --git a/server/szurubooru/api/pool_category_api.py b/server/szurubooru/api/pool_category_api.py
index f2937247..9af41d41 100644
--- a/server/szurubooru/api/pool_category_api.py
+++ b/server/szurubooru/api/pool_category_api.py
@@ -1,31 +1,42 @@
from typing import Dict
+
from szurubooru import model, rest
from szurubooru.func import (
- auth, pools, pool_categories, snapshots, serialization, versions)
+ auth,
+ pool_categories,
+ pools,
+ serialization,
+ snapshots,
+ versions,
+)
def _serialize(
- ctx: rest.Context, category: model.PoolCategory) -> rest.Response:
+ ctx: rest.Context, category: model.PoolCategory
+) -> rest.Response:
return pool_categories.serialize_category(
- category, options=serialization.get_serialization_options(ctx))
+ category, options=serialization.get_serialization_options(ctx)
+ )
-@rest.routes.get('/pool-categories/?')
+@rest.routes.get("/pool-categories/?")
def get_pool_categories(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pool_categories:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pool_categories:list")
categories = pool_categories.get_all_categories()
return {
- 'results': [_serialize(ctx, category) for category in categories],
+ "results": [_serialize(ctx, category) for category in categories],
}
-@rest.routes.post('/pool-categories/?')
+@rest.routes.post("/pool-categories/?")
def create_pool_category(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pool_categories:create')
- name = ctx.get_param_as_string('name')
- color = ctx.get_param_as_string('color')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pool_categories:create")
+ name = ctx.get_param_as_string("name")
+ color = ctx.get_param_as_string("color")
category = pool_categories.create_category(name, color)
ctx.session.add(category)
ctx.session.flush()
@@ -34,54 +45,63 @@ def create_pool_category(
return _serialize(ctx, category)
-@rest.routes.get('/pool-category/(?P<category_name>[^/]+)/?')
+@rest.routes.get("/pool-category/(?P<category_name>[^/]+)/?")
def get_pool_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pool_categories:view')
- category = pool_categories.get_category_by_name(params['category_name'])
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pool_categories:view")
+ category = pool_categories.get_category_by_name(params["category_name"])
return _serialize(ctx, category)
-@rest.routes.put('/pool-category/(?P<category_name>[^/]+)/?')
+@rest.routes.put("/pool-category/(?P<category_name>[^/]+)/?")
def update_pool_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
category = pool_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
versions.verify_version(category, ctx)
versions.bump_version(category)
- if ctx.has_param('name'):
- auth.verify_privilege(ctx.user, 'pool_categories:edit:name')
+ if ctx.has_param("name"):
+ auth.verify_privilege(ctx.user, "pool_categories:edit:name")
pool_categories.update_category_name(
- category, ctx.get_param_as_string('name'))
- if ctx.has_param('color'):
- auth.verify_privilege(ctx.user, 'pool_categories:edit:color')
+ category, ctx.get_param_as_string("name")
+ )
+ if ctx.has_param("color"):
+ auth.verify_privilege(ctx.user, "pool_categories:edit:color")
pool_categories.update_category_color(
- category, ctx.get_param_as_string('color'))
+ category, ctx.get_param_as_string("color")
+ )
ctx.session.flush()
snapshots.modify(category, ctx.user)
ctx.session.commit()
return _serialize(ctx, category)
-@rest.routes.delete('/pool-category/(?P<category_name>[^/]+)/?')
+@rest.routes.delete("/pool-category/(?P<category_name>[^/]+)/?")
def delete_pool_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
category = pool_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
versions.verify_version(category, ctx)
- auth.verify_privilege(ctx.user, 'pool_categories:delete')
+ auth.verify_privilege(ctx.user, "pool_categories:delete")
pool_categories.delete_category(category)
snapshots.delete(category, ctx.user)
ctx.session.commit()
return {}
-@rest.routes.put('/pool-category/(?P<category_name>[^/]+)/default/?')
+@rest.routes.put("/pool-category/(?P<category_name>[^/]+)/default/?")
def set_pool_category_as_default(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'pool_categories:set_default')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "pool_categories:set_default")
category = pool_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
pool_categories.set_default_category(category)
ctx.session.flush()
snapshots.modify(category, ctx.user)
diff --git a/server/szurubooru/api/post_api.py b/server/szurubooru/api/post_api.py
index 6d48fbd3..bf40bc07 100644
--- a/server/szurubooru/api/post_api.py
+++ b/server/szurubooru/api/post_api.py
@@ -1,10 +1,18 @@
-from typing import Optional, Dict, List
from datetime import datetime
-from szurubooru import db, model, errors, rest, search
-from szurubooru.func import (
- auth, tags, posts, snapshots, favorites, scores,
- serialization, versions, mime)
+from typing import Dict, List, Optional
+from szurubooru import db, errors, model, rest, search
+from szurubooru.func import (
+ auth,
+ favorites,
+ mime,
+ posts,
+ scores,
+ serialization,
+ snapshots,
+ tags,
+ versions,
+)
_search_executor_config = search.configs.PostSearchConfig()
_search_executor = search.Executor(_search_executor_config)
@@ -12,10 +20,11 @@ _search_executor = search.Executor(_search_executor_config)
def _get_post_id(params: Dict[str, str]) -> int:
try:
- return int(params['post_id'])
+ return int(params["post_id"])
except TypeError:
raise posts.InvalidPostIdError(
- 'Invalid post ID: %r.' % params['post_id'])
+ "Invalid post ID: %r." % params["post_id"]
+ )
def _get_post(params: Dict[str, str]) -> model.Post:
@@ -23,56 +32,62 @@ def _get_post(params: Dict[str, str]) -> model.Post:
def _serialize_post(
- ctx: rest.Context, post: Optional[model.Post]) -> rest.Response:
+ ctx: rest.Context, post: Optional[model.Post]
+) -> rest.Response:
return posts.serialize_post(
- post,
- ctx.user,
- options=serialization.get_serialization_options(ctx))
+ post, ctx.user, options=serialization.get_serialization_options(ctx)
+ )
-@rest.routes.get('/posts/?')
+@rest.routes.get("/posts/?")
def get_posts(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:list")
_search_executor_config.user = ctx.user
return _search_executor.execute_and_serialize(
- ctx, lambda post: _serialize_post(ctx, post))
+ ctx, lambda post: _serialize_post(ctx, post)
+ )
-@rest.routes.post('/posts/?')
+@rest.routes.post("/posts/?")
def create_post(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- anonymous = ctx.get_param_as_bool('anonymous', default=False)
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ anonymous = ctx.get_param_as_bool("anonymous", default=False)
if anonymous:
- auth.verify_privilege(ctx.user, 'posts:create:anonymous')
+ auth.verify_privilege(ctx.user, "posts:create:anonymous")
else:
- auth.verify_privilege(ctx.user, 'posts:create:identified')
+ auth.verify_privilege(ctx.user, "posts:create:identified")
content = ctx.get_file(
- 'content',
+ "content",
use_video_downloader=auth.has_privilege(
- ctx.user, 'uploads:use_downloader'))
- tag_names = ctx.get_param_as_string_list('tags', default=[])
- safety = ctx.get_param_as_string('safety')
- source = ctx.get_param_as_string('source', default='')
- if ctx.has_param('contentUrl') and not source:
- source = ctx.get_param_as_string('contentUrl', default='')
- relations = ctx.get_param_as_int_list('relations', default=[])
- notes = ctx.get_param_as_list('notes', default=[])
+ ctx.user, "uploads:use_downloader"
+ ),
+ )
+ tag_names = ctx.get_param_as_string_list("tags", default=[])
+ safety = ctx.get_param_as_string("safety")
+ source = ctx.get_param_as_string("source", default="")
+ if ctx.has_param("contentUrl") and not source:
+ source = ctx.get_param_as_string("contentUrl", default="")
+ relations = ctx.get_param_as_int_list("relations", default=[])
+ notes = ctx.get_param_as_list("notes", default=[])
flags = ctx.get_param_as_string_list(
- 'flags',
- default=posts.get_default_flags(content))
+ "flags", default=posts.get_default_flags(content)
+ )
post, new_tags = posts.create_post(
- content, tag_names, None if anonymous else ctx.user)
+ content, tag_names, None if anonymous else ctx.user
+ )
if len(new_tags):
- auth.verify_privilege(ctx.user, 'tags:create')
+ auth.verify_privilege(ctx.user, "tags:create")
posts.update_post_safety(post, safety)
posts.update_post_source(post, source)
posts.update_post_relations(post, relations)
posts.update_post_notes(post, notes)
posts.update_post_flags(post, flags)
- if ctx.has_file('thumbnail'):
- posts.update_post_thumbnail(post, ctx.get_file('thumbnail'))
+ if ctx.has_file("thumbnail"):
+ posts.update_post_thumbnail(post, ctx.get_file("thumbnail"))
ctx.session.add(post)
ctx.session.flush()
create_snapshots_for_post(post, new_tags, None if anonymous else ctx.user)
@@ -81,68 +96,75 @@ def create_post(
create_snapshots_for_post(
alternate_post,
alternate_post_new_tags,
- None if anonymous else ctx.user)
+ None if anonymous else ctx.user,
+ )
ctx.session.commit()
return _serialize_post(ctx, post)
def create_snapshots_for_post(
- post: model.Post,
- new_tags: List[model.Tag],
- user: Optional[model.User]):
+ post: model.Post, new_tags: List[model.Tag], user: Optional[model.User]
+):
snapshots.create(post, user)
for tag in new_tags:
snapshots.create(tag, user)
-@rest.routes.get('/post/(?P<post_id>[^/]+)/?')
+@rest.routes.get("/post/(?P<post_id>[^/]+)/?")
def get_post(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:view')
+ auth.verify_privilege(ctx.user, "posts:view")
post = _get_post(params)
return _serialize_post(ctx, post)
-@rest.routes.put('/post/(?P<post_id>[^/]+)/?')
+@rest.routes.put("/post/(?P<post_id>[^/]+)/?")
def update_post(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
post = _get_post(params)
versions.verify_version(post, ctx)
versions.bump_version(post)
- if ctx.has_file('content'):
- auth.verify_privilege(ctx.user, 'posts:edit:content')
+ if ctx.has_file("content"):
+ auth.verify_privilege(ctx.user, "posts:edit:content")
posts.update_post_content(
post,
- ctx.get_file('content', use_video_downloader=auth.has_privilege(
- ctx.user, 'uploads:use_downloader')))
- if ctx.has_param('tags'):
- auth.verify_privilege(ctx.user, 'posts:edit:tags')
+ ctx.get_file(
+ "content",
+ use_video_downloader=auth.has_privilege(
+ ctx.user, "uploads:use_downloader"
+ ),
+ ),
+ )
+ if ctx.has_param("tags"):
+ auth.verify_privilege(ctx.user, "posts:edit:tags")
new_tags = posts.update_post_tags(
- post, ctx.get_param_as_string_list('tags'))
+ post, ctx.get_param_as_string_list("tags")
+ )
if len(new_tags):
- auth.verify_privilege(ctx.user, 'tags:create')
+ auth.verify_privilege(ctx.user, "tags:create")
db.session.flush()
for tag in new_tags:
snapshots.create(tag, ctx.user)
- if ctx.has_param('safety'):
- auth.verify_privilege(ctx.user, 'posts:edit:safety')
- posts.update_post_safety(post, ctx.get_param_as_string('safety'))
- if ctx.has_param('source'):
- auth.verify_privilege(ctx.user, 'posts:edit:source')
- posts.update_post_source(post, ctx.get_param_as_string('source'))
- elif ctx.has_param('contentUrl'):
- posts.update_post_source(post, ctx.get_param_as_string('contentUrl'))
- if ctx.has_param('relations'):
- auth.verify_privilege(ctx.user, 'posts:edit:relations')
+ if ctx.has_param("safety"):
+ auth.verify_privilege(ctx.user, "posts:edit:safety")
+ posts.update_post_safety(post, ctx.get_param_as_string("safety"))
+ if ctx.has_param("source"):
+ auth.verify_privilege(ctx.user, "posts:edit:source")
+ posts.update_post_source(post, ctx.get_param_as_string("source"))
+ elif ctx.has_param("contentUrl"):
+ posts.update_post_source(post, ctx.get_param_as_string("contentUrl"))
+ if ctx.has_param("relations"):
+ auth.verify_privilege(ctx.user, "posts:edit:relations")
posts.update_post_relations(
- post, ctx.get_param_as_int_list('relations'))
- if ctx.has_param('notes'):
- auth.verify_privilege(ctx.user, 'posts:edit:notes')
- posts.update_post_notes(post, ctx.get_param_as_list('notes'))
- if ctx.has_param('flags'):
- auth.verify_privilege(ctx.user, 'posts:edit:flags')
- posts.update_post_flags(post, ctx.get_param_as_string_list('flags'))
- if ctx.has_file('thumbnail'):
- auth.verify_privilege(ctx.user, 'posts:edit:thumbnail')
- posts.update_post_thumbnail(post, ctx.get_file('thumbnail'))
+ post, ctx.get_param_as_int_list("relations")
+ )
+ if ctx.has_param("notes"):
+ auth.verify_privilege(ctx.user, "posts:edit:notes")
+ posts.update_post_notes(post, ctx.get_param_as_list("notes"))
+ if ctx.has_param("flags"):
+ auth.verify_privilege(ctx.user, "posts:edit:flags")
+ posts.update_post_flags(post, ctx.get_param_as_string_list("flags"))
+ if ctx.has_file("thumbnail"):
+ auth.verify_privilege(ctx.user, "posts:edit:thumbnail")
+ posts.update_post_thumbnail(post, ctx.get_file("thumbnail"))
post.last_edit_time = datetime.utcnow()
ctx.session.flush()
snapshots.modify(post, ctx.user)
@@ -150,9 +172,9 @@ def update_post(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
return _serialize_post(ctx, post)
-@rest.routes.delete('/post/(?P<post_id>[^/]+)/?')
+@rest.routes.delete("/post/(?P<post_id>[^/]+)/?")
def delete_post(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:delete')
+ auth.verify_privilege(ctx.user, "posts:delete")
post = _get_post(params)
versions.verify_version(post, ctx)
snapshots.delete(post, ctx.user)
@@ -161,103 +183,113 @@ def delete_post(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
return {}
-@rest.routes.post('/post-merge/?')
+@rest.routes.post("/post-merge/?")
def merge_posts(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- source_post_id = ctx.get_param_as_int('remove')
- target_post_id = ctx.get_param_as_int('mergeTo')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ source_post_id = ctx.get_param_as_int("remove")
+ target_post_id = ctx.get_param_as_int("mergeTo")
source_post = posts.get_post_by_id(source_post_id)
target_post = posts.get_post_by_id(target_post_id)
- replace_content = ctx.get_param_as_bool('replaceContent')
- versions.verify_version(source_post, ctx, 'removeVersion')
- versions.verify_version(target_post, ctx, 'mergeToVersion')
+ replace_content = ctx.get_param_as_bool("replaceContent")
+ versions.verify_version(source_post, ctx, "removeVersion")
+ versions.verify_version(target_post, ctx, "mergeToVersion")
versions.bump_version(target_post)
- auth.verify_privilege(ctx.user, 'posts:merge')
+ auth.verify_privilege(ctx.user, "posts:merge")
posts.merge_posts(source_post, target_post, replace_content)
snapshots.merge(source_post, target_post, ctx.user)
ctx.session.commit()
return _serialize_post(ctx, target_post)
-@rest.routes.get('/featured-post/?')
+@rest.routes.get("/featured-post/?")
def get_featured_post(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:view:featured')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:view:featured")
post = posts.try_get_featured_post()
return _serialize_post(ctx, post)
-@rest.routes.post('/featured-post/?')
+@rest.routes.post("/featured-post/?")
def set_featured_post(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:feature')
- post_id = ctx.get_param_as_int('id')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:feature")
+ post_id = ctx.get_param_as_int("id")
post = posts.get_post_by_id(post_id)
featured_post = posts.try_get_featured_post()
if featured_post and featured_post.post_id == post.post_id:
raise posts.PostAlreadyFeaturedError(
- 'Post %r is already featured.' % post_id)
+ "Post %r is already featured." % post_id
+ )
posts.feature_post(post, ctx.user)
snapshots.modify(post, ctx.user)
ctx.session.commit()
return _serialize_post(ctx, post)
-@rest.routes.put('/post/(?P<post_id>[^/]+)/score/?')
+@rest.routes.put("/post/(?P<post_id>[^/]+)/score/?")
def set_post_score(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:score')
+ auth.verify_privilege(ctx.user, "posts:score")
post = _get_post(params)
- score = ctx.get_param_as_int('score')
+ score = ctx.get_param_as_int("score")
scores.set_score(post, ctx.user, score)
ctx.session.commit()
return _serialize_post(ctx, post)
-@rest.routes.delete('/post/(?P<post_id>[^/]+)/score/?')
+@rest.routes.delete("/post/(?P<post_id>[^/]+)/score/?")
def delete_post_score(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:score')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:score")
post = _get_post(params)
scores.delete_score(post, ctx.user)
ctx.session.commit()
return _serialize_post(ctx, post)
-@rest.routes.post('/post/(?P<post_id>[^/]+)/favorite/?')
+@rest.routes.post("/post/(?P<post_id>[^/]+)/favorite/?")
def add_post_to_favorites(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:favorite')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:favorite")
post = _get_post(params)
favorites.set_favorite(post, ctx.user)
ctx.session.commit()
return _serialize_post(ctx, post)
-@rest.routes.delete('/post/(?P<post_id>[^/]+)/favorite/?')
+@rest.routes.delete("/post/(?P<post_id>[^/]+)/favorite/?")
def delete_post_from_favorites(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:favorite')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:favorite")
post = _get_post(params)
favorites.unset_favorite(post, ctx.user)
ctx.session.commit()
return _serialize_post(ctx, post)
-@rest.routes.get('/post/(?P<post_id>[^/]+)/around/?')
+@rest.routes.get("/post/(?P<post_id>[^/]+)/around/?")
def get_posts_around(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:list')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:list")
_search_executor_config.user = ctx.user
post_id = _get_post_id(params)
return _search_executor.get_around_and_serialize(
- ctx, post_id, lambda post: _serialize_post(ctx, post))
+ ctx, post_id, lambda post: _serialize_post(ctx, post)
+ )
-@rest.routes.post('/posts/reverse-search/?')
+@rest.routes.post("/posts/reverse-search/?")
def get_posts_by_image(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'posts:reverse_search')
- content = ctx.get_file('content')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "posts:reverse_search")
+ content = ctx.get_file("content")
try:
lookalikes = posts.search_by_image(content)
@@ -265,14 +297,11 @@ def get_posts_by_image(
lookalikes = []
return {
- 'exactPost':
- _serialize_post(ctx, posts.search_by_image_exact(content)),
- 'similarPosts':
- [
- {
- 'distance': distance,
- 'post': _serialize_post(ctx, post),
- }
- for distance, post in lookalikes
- ],
+ "exactPost": _serialize_post(
+ ctx, posts.search_by_image_exact(content)
+ ),
+ "similarPosts": [
+ {"distance": distance, "post": _serialize_post(ctx, post),}
+ for distance, post in lookalikes
+ ],
}
diff --git a/server/szurubooru/api/snapshot_api.py b/server/szurubooru/api/snapshot_api.py
index 469be7f6..87012a2f 100644
--- a/server/szurubooru/api/snapshot_api.py
+++ b/server/szurubooru/api/snapshot_api.py
@@ -1,14 +1,16 @@
from typing import Dict
-from szurubooru import search, rest
-from szurubooru.func import auth, snapshots
+from szurubooru import rest, search
+from szurubooru.func import auth, snapshots
_search_executor = search.Executor(search.configs.SnapshotSearchConfig())
-@rest.routes.get('/snapshots/?')
+@rest.routes.get("/snapshots/?")
def get_snapshots(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'snapshots:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "snapshots:list")
return _search_executor.execute_and_serialize(
- ctx, lambda snapshot: snapshots.serialize_snapshot(snapshot, ctx.user))
+ ctx, lambda snapshot: snapshots.serialize_snapshot(snapshot, ctx.user)
+ )
diff --git a/server/szurubooru/api/tag_api.py b/server/szurubooru/api/tag_api.py
index f9a15e76..6b4c807e 100644
--- a/server/szurubooru/api/tag_api.py
+++ b/server/szurubooru/api/tag_api.py
@@ -1,19 +1,20 @@
-from typing import Optional, List, Dict
from datetime import datetime
-from szurubooru import db, model, search, rest
-from szurubooru.func import auth, tags, snapshots, serialization, versions
+from typing import Dict, List, Optional
+from szurubooru import db, model, rest, search
+from szurubooru.func import auth, serialization, snapshots, tags, versions
_search_executor = search.Executor(search.configs.TagSearchConfig())
def _serialize(ctx: rest.Context, tag: model.Tag) -> rest.Response:
return tags.serialize_tag(
- tag, options=serialization.get_serialization_options(ctx))
+ tag, options=serialization.get_serialization_options(ctx)
+ )
def _get_tag(params: Dict[str, str]) -> model.Tag:
- return tags.get_tag_by_name(params['tag_name'])
+ return tags.get_tag_by_name(params["tag_name"])
def _create_if_needed(tag_names: List[str], user: model.User) -> None:
@@ -21,29 +22,31 @@ def _create_if_needed(tag_names: List[str], user: model.User) -> None:
return
_existing_tags, new_tags = tags.get_or_create_tags_by_names(tag_names)
if len(new_tags):
- auth.verify_privilege(user, 'tags:create')
+ auth.verify_privilege(user, "tags:create")
db.session.flush()
for tag in new_tags:
snapshots.create(tag, user)
-@rest.routes.get('/tags/?')
+@rest.routes.get("/tags/?")
def get_tags(ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tags:list')
+ auth.verify_privilege(ctx.user, "tags:list")
return _search_executor.execute_and_serialize(
- ctx, lambda tag: _serialize(ctx, tag))
+ ctx, lambda tag: _serialize(ctx, tag)
+ )
-@rest.routes.post('/tags/?')
+@rest.routes.post("/tags/?")
def create_tag(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tags:create')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tags:create")
- names = ctx.get_param_as_string_list('names')
- category = ctx.get_param_as_string('category')
- description = ctx.get_param_as_string('description', default='')
- suggestions = ctx.get_param_as_string_list('suggestions', default=[])
- implications = ctx.get_param_as_string_list('implications', default=[])
+ names = ctx.get_param_as_string_list("names")
+ category = ctx.get_param_as_string("category")
+ description = ctx.get_param_as_string("description", default="")
+ suggestions = ctx.get_param_as_string_list("suggestions", default=[])
+ implications = ctx.get_param_as_string_list("implications", default=[])
_create_if_needed(suggestions, ctx.user)
_create_if_needed(implications, ctx.user)
@@ -57,37 +60,37 @@ def create_tag(
return _serialize(ctx, tag)
-@rest.routes.get('/tag/(?P<tag_name>.+)')
+@rest.routes.get("/tag/(?P<tag_name>.+)")
def get_tag(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tags:view')
+ auth.verify_privilege(ctx.user, "tags:view")
tag = _get_tag(params)
return _serialize(ctx, tag)
-@rest.routes.put('/tag/(?P<tag_name>.+)')
+@rest.routes.put("/tag/(?P<tag_name>.+)")
def update_tag(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
tag = _get_tag(params)
versions.verify_version(tag, ctx)
versions.bump_version(tag)
- if ctx.has_param('names'):
- auth.verify_privilege(ctx.user, 'tags:edit:names')
- tags.update_tag_names(tag, ctx.get_param_as_string_list('names'))
- if ctx.has_param('category'):
- auth.verify_privilege(ctx.user, 'tags:edit:category')
- tags.update_tag_category_name(
- tag, ctx.get_param_as_string('category'))
- if ctx.has_param('description'):
- auth.verify_privilege(ctx.user, 'tags:edit:description')
+ if ctx.has_param("names"):
+ auth.verify_privilege(ctx.user, "tags:edit:names")
+ tags.update_tag_names(tag, ctx.get_param_as_string_list("names"))
+ if ctx.has_param("category"):
+ auth.verify_privilege(ctx.user, "tags:edit:category")
+ tags.update_tag_category_name(tag, ctx.get_param_as_string("category"))
+ if ctx.has_param("description"):
+ auth.verify_privilege(ctx.user, "tags:edit:description")
tags.update_tag_description(
- tag, ctx.get_param_as_string('description'))
- if ctx.has_param('suggestions'):
- auth.verify_privilege(ctx.user, 'tags:edit:suggestions')
- suggestions = ctx.get_param_as_string_list('suggestions')
+ tag, ctx.get_param_as_string("description")
+ )
+ if ctx.has_param("suggestions"):
+ auth.verify_privilege(ctx.user, "tags:edit:suggestions")
+ suggestions = ctx.get_param_as_string_list("suggestions")
_create_if_needed(suggestions, ctx.user)
tags.update_tag_suggestions(tag, suggestions)
- if ctx.has_param('implications'):
- auth.verify_privilege(ctx.user, 'tags:edit:implications')
- implications = ctx.get_param_as_string_list('implications')
+ if ctx.has_param("implications"):
+ auth.verify_privilege(ctx.user, "tags:edit:implications")
+ implications = ctx.get_param_as_string_list("implications")
_create_if_needed(implications, ctx.user)
tags.update_tag_implications(tag, implications)
tag.last_edit_time = datetime.utcnow()
@@ -97,44 +100,45 @@ def update_tag(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
return _serialize(ctx, tag)
-@rest.routes.delete('/tag/(?P<tag_name>.+)')
+@rest.routes.delete("/tag/(?P<tag_name>.+)")
def delete_tag(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
tag = _get_tag(params)
versions.verify_version(tag, ctx)
- auth.verify_privilege(ctx.user, 'tags:delete')
+ auth.verify_privilege(ctx.user, "tags:delete")
snapshots.delete(tag, ctx.user)
tags.delete(tag)
ctx.session.commit()
return {}
-@rest.routes.post('/tag-merge/?')
+@rest.routes.post("/tag-merge/?")
def merge_tags(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- source_tag_name = ctx.get_param_as_string('remove')
- target_tag_name = ctx.get_param_as_string('mergeTo')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ source_tag_name = ctx.get_param_as_string("remove")
+ target_tag_name = ctx.get_param_as_string("mergeTo")
source_tag = tags.get_tag_by_name(source_tag_name)
target_tag = tags.get_tag_by_name(target_tag_name)
- versions.verify_version(source_tag, ctx, 'removeVersion')
- versions.verify_version(target_tag, ctx, 'mergeToVersion')
+ versions.verify_version(source_tag, ctx, "removeVersion")
+ versions.verify_version(target_tag, ctx, "mergeToVersion")
versions.bump_version(target_tag)
- auth.verify_privilege(ctx.user, 'tags:merge')
+ auth.verify_privilege(ctx.user, "tags:merge")
tags.merge_tags(source_tag, target_tag)
snapshots.merge(source_tag, target_tag, ctx.user)
ctx.session.commit()
return _serialize(ctx, target_tag)
-@rest.routes.get('/tag-siblings/(?P<tag_name>.+)')
+@rest.routes.get("/tag-siblings/(?P<tag_name>.+)")
def get_tag_siblings(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tags:view')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tags:view")
tag = _get_tag(params)
result = tags.get_tag_siblings(tag)
serialized_siblings = []
for sibling, occurrences in result:
- serialized_siblings.append({
- 'tag': _serialize(ctx, sibling),
- 'occurrences': occurrences
- })
- return {'results': serialized_siblings}
+ serialized_siblings.append(
+ {"tag": _serialize(ctx, sibling), "occurrences": occurrences}
+ )
+ return {"results": serialized_siblings}
diff --git a/server/szurubooru/api/tag_category_api.py b/server/szurubooru/api/tag_category_api.py
index 07da9993..498289a4 100644
--- a/server/szurubooru/api/tag_category_api.py
+++ b/server/szurubooru/api/tag_category_api.py
@@ -1,31 +1,42 @@
from typing import Dict
+
from szurubooru import model, rest
from szurubooru.func import (
- auth, tags, tag_categories, snapshots, serialization, versions)
+ auth,
+ serialization,
+ snapshots,
+ tag_categories,
+ tags,
+ versions,
+)
def _serialize(
- ctx: rest.Context, category: model.TagCategory) -> rest.Response:
+ ctx: rest.Context, category: model.TagCategory
+) -> rest.Response:
return tag_categories.serialize_category(
- category, options=serialization.get_serialization_options(ctx))
+ category, options=serialization.get_serialization_options(ctx)
+ )
-@rest.routes.get('/tag-categories/?')
+@rest.routes.get("/tag-categories/?")
def get_tag_categories(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tag_categories:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tag_categories:list")
categories = tag_categories.get_all_categories()
return {
- 'results': [_serialize(ctx, category) for category in categories],
+ "results": [_serialize(ctx, category) for category in categories],
}
-@rest.routes.post('/tag-categories/?')
+@rest.routes.post("/tag-categories/?")
def create_tag_category(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tag_categories:create')
- name = ctx.get_param_as_string('name')
- color = ctx.get_param_as_string('color')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tag_categories:create")
+ name = ctx.get_param_as_string("name")
+ color = ctx.get_param_as_string("color")
category = tag_categories.create_category(name, color)
ctx.session.add(category)
ctx.session.flush()
@@ -34,54 +45,63 @@ def create_tag_category(
return _serialize(ctx, category)
-@rest.routes.get('/tag-category/(?P<category_name>[^/]+)/?')
+@rest.routes.get("/tag-category/(?P<category_name>[^/]+)/?")
def get_tag_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tag_categories:view')
- category = tag_categories.get_category_by_name(params['category_name'])
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tag_categories:view")
+ category = tag_categories.get_category_by_name(params["category_name"])
return _serialize(ctx, category)
-@rest.routes.put('/tag-category/(?P<category_name>[^/]+)/?')
+@rest.routes.put("/tag-category/(?P<category_name>[^/]+)/?")
def update_tag_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
category = tag_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
versions.verify_version(category, ctx)
versions.bump_version(category)
- if ctx.has_param('name'):
- auth.verify_privilege(ctx.user, 'tag_categories:edit:name')
+ if ctx.has_param("name"):
+ auth.verify_privilege(ctx.user, "tag_categories:edit:name")
tag_categories.update_category_name(
- category, ctx.get_param_as_string('name'))
- if ctx.has_param('color'):
- auth.verify_privilege(ctx.user, 'tag_categories:edit:color')
+ category, ctx.get_param_as_string("name")
+ )
+ if ctx.has_param("color"):
+ auth.verify_privilege(ctx.user, "tag_categories:edit:color")
tag_categories.update_category_color(
- category, ctx.get_param_as_string('color'))
+ category, ctx.get_param_as_string("color")
+ )
ctx.session.flush()
snapshots.modify(category, ctx.user)
ctx.session.commit()
return _serialize(ctx, category)
-@rest.routes.delete('/tag-category/(?P<category_name>[^/]+)/?')
+@rest.routes.delete("/tag-category/(?P<category_name>[^/]+)/?")
def delete_tag_category(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
category = tag_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
versions.verify_version(category, ctx)
- auth.verify_privilege(ctx.user, 'tag_categories:delete')
+ auth.verify_privilege(ctx.user, "tag_categories:delete")
tag_categories.delete_category(category)
snapshots.delete(category, ctx.user)
ctx.session.commit()
return {}
-@rest.routes.put('/tag-category/(?P<category_name>[^/]+)/default/?')
+@rest.routes.put("/tag-category/(?P<category_name>[^/]+)/default/?")
def set_tag_category_as_default(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- auth.verify_privilege(ctx.user, 'tag_categories:set_default')
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "tag_categories:set_default")
category = tag_categories.get_category_by_name(
- params['category_name'], lock=True)
+ params["category_name"], lock=True
+ )
tag_categories.set_default_category(category)
ctx.session.flush()
snapshots.modify(category, ctx.user)
diff --git a/server/szurubooru/api/upload_api.py b/server/szurubooru/api/upload_api.py
index 16b46406..3b7bca8a 100644
--- a/server/szurubooru/api/upload_api.py
+++ b/server/szurubooru/api/upload_api.py
@@ -1,16 +1,20 @@
from typing import Dict
+
from szurubooru import rest
from szurubooru.func import auth, file_uploads
-@rest.routes.post('/uploads/?')
+@rest.routes.post("/uploads/?")
def create_temporary_file(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'uploads:create')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "uploads:create")
content = ctx.get_file(
- 'content',
+ "content",
allow_tokens=False,
use_video_downloader=auth.has_privilege(
- ctx.user, 'uploads:use_downloader'))
+ ctx.user, "uploads:use_downloader"
+ ),
+ )
token = file_uploads.save(content)
- return {'token': token}
+ return {"token": token}
diff --git a/server/szurubooru/api/user_api.py b/server/szurubooru/api/user_api.py
index 5e14fabe..a6196cb8 100644
--- a/server/szurubooru/api/user_api.py
+++ b/server/szurubooru/api/user_api.py
@@ -1,97 +1,102 @@
from typing import Any, Dict
-from szurubooru import model, search, rest
-from szurubooru.func import auth, users, serialization, versions
+from szurubooru import model, rest, search
+from szurubooru.func import auth, serialization, users, versions
_search_executor = search.Executor(search.configs.UserSearchConfig())
def _serialize(
- ctx: rest.Context, user: model.User, **kwargs: Any) -> rest.Response:
+ ctx: rest.Context, user: model.User, **kwargs: Any
+) -> rest.Response:
return users.serialize_user(
user,
ctx.user,
options=serialization.get_serialization_options(ctx),
- **kwargs)
+ **kwargs
+ )
-@rest.routes.get('/users/?')
+@rest.routes.get("/users/?")
def get_users(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
- auth.verify_privilege(ctx.user, 'users:list')
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
+ auth.verify_privilege(ctx.user, "users:list")
return _search_executor.execute_and_serialize(
- ctx, lambda user: _serialize(ctx, user))
+ ctx, lambda user: _serialize(ctx, user)
+ )
-@rest.routes.post('/users/?')
+@rest.routes.post("/users/?")
def create_user(
- ctx: rest.Context, _params: Dict[str, str] = {}) -> rest.Response:
+ ctx: rest.Context, _params: Dict[str, str] = {}
+) -> rest.Response:
if ctx.user.user_id is None:
- auth.verify_privilege(ctx.user, 'users:create:self')
+ auth.verify_privilege(ctx.user, "users:create:self")
else:
- auth.verify_privilege(ctx.user, 'users:create:any')
+ auth.verify_privilege(ctx.user, "users:create:any")
- name = ctx.get_param_as_string('name')
- password = ctx.get_param_as_string('password')
- email = ctx.get_param_as_string('email', default='')
+ name = ctx.get_param_as_string("name")
+ password = ctx.get_param_as_string("password")
+ email = ctx.get_param_as_string("email", default="")
user = users.create_user(name, password, email)
- if ctx.has_param('rank'):
- users.update_user_rank(user, ctx.get_param_as_string('rank'), ctx.user)
- if ctx.has_param('avatarStyle'):
+ if ctx.has_param("rank"):
+ users.update_user_rank(user, ctx.get_param_as_string("rank"), ctx.user)
+ if ctx.has_param("avatarStyle"):
users.update_user_avatar(
user,
- ctx.get_param_as_string('avatarStyle'),
- ctx.get_file('avatar', default=b''))
+ ctx.get_param_as_string("avatarStyle"),
+ ctx.get_file("avatar", default=b""),
+ )
ctx.session.add(user)
ctx.session.commit()
return _serialize(ctx, user, force_show_email=True)
-@rest.routes.get('/user/(?P<user_name>[^/]+)/?')
+@rest.routes.get("/user/(?P<user_name>[^/]+)/?")
def get_user(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
+ user = users.get_user_by_name(params["user_name"])
if ctx.user.user_id != user.user_id:
- auth.verify_privilege(ctx.user, 'users:view')
+ auth.verify_privilege(ctx.user, "users:view")
return _serialize(ctx, user)
-@rest.routes.put('/user/(?P<user_name>[^/]+)/?')
+@rest.routes.put("/user/(?P<user_name>[^/]+)/?")
def update_user(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
+ user = users.get_user_by_name(params["user_name"])
versions.verify_version(user, ctx)
versions.bump_version(user)
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- if ctx.has_param('name'):
- auth.verify_privilege(ctx.user, 'users:edit:%s:name' % infix)
- users.update_user_name(user, ctx.get_param_as_string('name'))
- if ctx.has_param('password'):
- auth.verify_privilege(ctx.user, 'users:edit:%s:pass' % infix)
- users.update_user_password(
- user, ctx.get_param_as_string('password'))
- if ctx.has_param('email'):
- auth.verify_privilege(ctx.user, 'users:edit:%s:email' % infix)
- users.update_user_email(user, ctx.get_param_as_string('email'))
- if ctx.has_param('rank'):
- auth.verify_privilege(ctx.user, 'users:edit:%s:rank' % infix)
- users.update_user_rank(
- user, ctx.get_param_as_string('rank'), ctx.user)
- if ctx.has_param('avatarStyle'):
- auth.verify_privilege(ctx.user, 'users:edit:%s:avatar' % infix)
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ if ctx.has_param("name"):
+ auth.verify_privilege(ctx.user, "users:edit:%s:name" % infix)
+ users.update_user_name(user, ctx.get_param_as_string("name"))
+ if ctx.has_param("password"):
+ auth.verify_privilege(ctx.user, "users:edit:%s:pass" % infix)
+ users.update_user_password(user, ctx.get_param_as_string("password"))
+ if ctx.has_param("email"):
+ auth.verify_privilege(ctx.user, "users:edit:%s:email" % infix)
+ users.update_user_email(user, ctx.get_param_as_string("email"))
+ if ctx.has_param("rank"):
+ auth.verify_privilege(ctx.user, "users:edit:%s:rank" % infix)
+ users.update_user_rank(user, ctx.get_param_as_string("rank"), ctx.user)
+ if ctx.has_param("avatarStyle"):
+ auth.verify_privilege(ctx.user, "users:edit:%s:avatar" % infix)
users.update_user_avatar(
user,
- ctx.get_param_as_string('avatarStyle'),
- ctx.get_file('avatar', default=b''))
+ ctx.get_param_as_string("avatarStyle"),
+ ctx.get_file("avatar", default=b""),
+ )
ctx.session.commit()
return _serialize(ctx, user)
-@rest.routes.delete('/user/(?P<user_name>[^/]+)/?')
+@rest.routes.delete("/user/(?P<user_name>[^/]+)/?")
def delete_user(ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
+ user = users.get_user_by_name(params["user_name"])
versions.verify_version(user, ctx)
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- auth.verify_privilege(ctx.user, 'users:delete:%s' % infix)
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ auth.verify_privilege(ctx.user, "users:delete:%s" % infix)
ctx.session.delete(user)
ctx.session.commit()
return {}
diff --git a/server/szurubooru/api/user_token_api.py b/server/szurubooru/api/user_token_api.py
index 77398239..772f97a6 100644
--- a/server/szurubooru/api/user_token_api.py
+++ b/server/szurubooru/api/user_token_api.py
@@ -1,82 +1,90 @@
from typing import Dict
+
from szurubooru import model, rest
-from szurubooru.func import auth, users, user_tokens, serialization, versions
+from szurubooru.func import auth, serialization, user_tokens, users, versions
def _serialize(
- ctx: rest.Context, user_token: model.UserToken) -> rest.Response:
+ ctx: rest.Context, user_token: model.UserToken
+) -> rest.Response:
return user_tokens.serialize_user_token(
user_token,
ctx.user,
- options=serialization.get_serialization_options(ctx))
+ options=serialization.get_serialization_options(ctx),
+ )
-@rest.routes.get('/user-tokens/(?P<user_name>[^/]+)/?')
+@rest.routes.get("/user-tokens/(?P<user_name>[^/]+)/?")
def get_user_tokens(
- ctx: rest.Context, params: Dict[str, str] = {}) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- auth.verify_privilege(ctx.user, 'user_tokens:list:%s' % infix)
+ ctx: rest.Context, params: Dict[str, str] = {}
+) -> rest.Response:
+ user = users.get_user_by_name(params["user_name"])
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ auth.verify_privilege(ctx.user, "user_tokens:list:%s" % infix)
user_token_list = user_tokens.get_user_tokens(user)
- return {
- 'results': [_serialize(ctx, token) for token in user_token_list]
- }
+ return {"results": [_serialize(ctx, token) for token in user_token_list]}
-@rest.routes.post('/user-token/(?P<user_name>[^/]+)/?')
+@rest.routes.post("/user-token/(?P<user_name>[^/]+)/?")
def create_user_token(
- ctx: rest.Context, params: Dict[str, str] = {}) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- auth.verify_privilege(ctx.user, 'user_tokens:create:%s' % infix)
- enabled = ctx.get_param_as_bool('enabled', True)
+ ctx: rest.Context, params: Dict[str, str] = {}
+) -> rest.Response:
+ user = users.get_user_by_name(params["user_name"])
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ auth.verify_privilege(ctx.user, "user_tokens:create:%s" % infix)
+ enabled = ctx.get_param_as_bool("enabled", True)
user_token = user_tokens.create_user_token(user, enabled)
- if ctx.has_param('note'):
- note = ctx.get_param_as_string('note')
+ if ctx.has_param("note"):
+ note = ctx.get_param_as_string("note")
user_tokens.update_user_token_note(user_token, note)
- if ctx.has_param('expirationTime'):
- expiration_time = ctx.get_param_as_string('expirationTime')
+ if ctx.has_param("expirationTime"):
+ expiration_time = ctx.get_param_as_string("expirationTime")
user_tokens.update_user_token_expiration_time(
- user_token, expiration_time)
+ user_token, expiration_time
+ )
ctx.session.add(user_token)
ctx.session.commit()
return _serialize(ctx, user_token)
-@rest.routes.put('/user-token/(?P<user_name>[^/]+)/(?P<user_token>[^/]+)/?')
+@rest.routes.put("/user-token/(?P<user_name>[^/]+)/(?P<user_token>[^/]+)/?")
def update_user_token(
- ctx: rest.Context, params: Dict[str, str] = {}) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- auth.verify_privilege(ctx.user, 'user_tokens:edit:%s' % infix)
- user_token = user_tokens.get_by_user_and_token(user, params['user_token'])
+ ctx: rest.Context, params: Dict[str, str] = {}
+) -> rest.Response:
+ user = users.get_user_by_name(params["user_name"])
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ auth.verify_privilege(ctx.user, "user_tokens:edit:%s" % infix)
+ user_token = user_tokens.get_by_user_and_token(user, params["user_token"])
versions.verify_version(user_token, ctx)
versions.bump_version(user_token)
- if ctx.has_param('enabled'):
- auth.verify_privilege(ctx.user, 'user_tokens:edit:%s' % infix)
+ if ctx.has_param("enabled"):
+ auth.verify_privilege(ctx.user, "user_tokens:edit:%s" % infix)
user_tokens.update_user_token_enabled(
- user_token, ctx.get_param_as_bool('enabled'))
- if ctx.has_param('note'):
- auth.verify_privilege(ctx.user, 'user_tokens:edit:%s' % infix)
- note = ctx.get_param_as_string('note')
+ user_token, ctx.get_param_as_bool("enabled")
+ )
+ if ctx.has_param("note"):
+ auth.verify_privilege(ctx.user, "user_tokens:edit:%s" % infix)
+ note = ctx.get_param_as_string("note")
user_tokens.update_user_token_note(user_token, note)
- if ctx.has_param('expirationTime'):
- auth.verify_privilege(ctx.user, 'user_tokens:edit:%s' % infix)
- expiration_time = ctx.get_param_as_string('expirationTime')
+ if ctx.has_param("expirationTime"):
+ auth.verify_privilege(ctx.user, "user_tokens:edit:%s" % infix)
+ expiration_time = ctx.get_param_as_string("expirationTime")
user_tokens.update_user_token_expiration_time(
- user_token, expiration_time)
+ user_token, expiration_time
+ )
user_tokens.update_user_token_edit_time(user_token)
ctx.session.commit()
return _serialize(ctx, user_token)
-@rest.routes.delete('/user-token/(?P<user_name>[^/]+)/(?P<user_token>[^/]+)/?')
+@rest.routes.delete("/user-token/(?P<user_name>[^/]+)/(?P<user_token>[^/]+)/?")
def delete_user_token(
- ctx: rest.Context, params: Dict[str, str]) -> rest.Response:
- user = users.get_user_by_name(params['user_name'])
- infix = 'self' if ctx.user.user_id == user.user_id else 'any'
- auth.verify_privilege(ctx.user, 'user_tokens:delete:%s' % infix)
- user_token = user_tokens.get_by_user_and_token(user, params['user_token'])
+ ctx: rest.Context, params: Dict[str, str]
+) -> rest.Response:
+ user = users.get_user_by_name(params["user_name"])
+ infix = "self" if ctx.user.user_id == user.user_id else "any"
+ auth.verify_privilege(ctx.user, "user_tokens:delete:%s" % infix)
+ user_token = user_tokens.get_by_user_and_token(user, params["user_token"])
if user_token is not None:
ctx.session.delete(user_token)
ctx.session.commit()
diff --git a/server/szurubooru/config.py b/server/szurubooru/config.py
index 72a24b60..a0fc3e7a 100644
--- a/server/szurubooru/config.py
+++ b/server/szurubooru/config.py
@@ -1,9 +1,10 @@
-from typing import Dict
import logging
import os
-import yaml
-from szurubooru import errors
+from typing import Dict
+import yaml
+
+from szurubooru import errors
logger = logging.getLogger(__name__)
@@ -21,21 +22,22 @@ def _merge(left: Dict, right: Dict) -> Dict:
def _docker_config() -> Dict:
- for key in ['POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_HOST']:
+ for key in ["POSTGRES_USER", "POSTGRES_PASSWORD", "POSTGRES_HOST"]:
if not os.getenv(key, False):
raise errors.ConfigError(f'Environment variable "{key}" not set')
return {
- 'debug': True,
- 'show_sql': int(os.getenv('LOG_SQL', 0)),
- 'data_url': os.getenv('DATA_URL', 'data/'),
- 'data_dir': '/data/',
- 'database': 'postgres://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s' % {
- 'user': os.getenv('POSTGRES_USER'),
- 'pass': os.getenv('POSTGRES_PASSWORD'),
- 'host': os.getenv('POSTGRES_HOST'),
- 'port': int(os.getenv('POSTGRES_PORT', 5432)),
- 'db': os.getenv('POSTGRES_DB', os.getenv('POSTGRES_USER'))
- }
+ "debug": True,
+ "show_sql": int(os.getenv("LOG_SQL", 0)),
+ "data_url": os.getenv("DATA_URL", "data/"),
+ "data_dir": "/data/",
+ "database": "postgres://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s"
+ % {
+ "user": os.getenv("POSTGRES_USER"),
+ "pass": os.getenv("POSTGRES_PASSWORD"),
+ "host": os.getenv("POSTGRES_HOST"),
+ "port": int(os.getenv("POSTGRES_PORT", 5432)),
+ "db": os.getenv("POSTGRES_DB", os.getenv("POSTGRES_USER")),
+ },
}
@@ -45,13 +47,14 @@ def _file_config(filename: str) -> Dict:
def _read_config() -> Dict:
- ret = _file_config('config.yaml.dist')
- if os.path.isfile('config.yaml'):
- ret = _merge(ret, _file_config('config.yaml'))
- elif os.path.isdir('config.yaml'):
+ ret = _file_config("config.yaml.dist")
+ if os.path.isfile("config.yaml"):
+ ret = _merge(ret, _file_config("config.yaml"))
+ elif os.path.isdir("config.yaml"):
logger.warning(
- '\'config.yaml\' should be a file, not a directory, skipping')
- if os.path.exists('/.dockerenv'):
+ "'config.yaml' should be a file, not a directory, skipping"
+ )
+ if os.path.exists("/.dockerenv"):
ret = _merge(ret, _docker_config())
return ret
diff --git a/server/szurubooru/db.py b/server/szurubooru/db.py
index 03bfaff4..ed59a70a 100644
--- a/server/szurubooru/db.py
+++ b/server/szurubooru/db.py
@@ -1,12 +1,13 @@
-from typing import Any
import threading
+from typing import Any
+
import sqlalchemy as sa
import sqlalchemy.orm
+
from szurubooru import config
-
_data = threading.local()
-_engine = sa.create_engine(config.config['database']) # type: Any
+_engine = sa.create_engine(config.config["database"]) # type: Any
_sessionmaker = sa.orm.sessionmaker(bind=_engine, autoflush=False) # type: Any
session = sa.orm.scoped_session(_sessionmaker) # type: Any
@@ -30,7 +31,7 @@ def get_query_count() -> int:
def _bump_query_count() -> None:
- _data.query_count = getattr(_data, 'query_count', 0) + 1
+ _data.query_count = getattr(_data, "query_count", 0) + 1
-sa.event.listen(_engine, 'after_execute', lambda *args: _bump_query_count())
+sa.event.listen(_engine, "after_execute", lambda *args: _bump_query_count())
diff --git a/server/szurubooru/errors.py b/server/szurubooru/errors.py
index beeb469c..9106f047 100644
--- a/server/szurubooru/errors.py
+++ b/server/szurubooru/errors.py
@@ -3,9 +3,10 @@ from typing import Dict
class BaseError(RuntimeError):
def __init__(
- self,
- message: str = 'Unknown error',
- extra_fields: Dict[str, str] = None) -> None:
+ self,
+ message: str = "Unknown error",
+ extra_fields: Dict[str, str] = None,
+ ) -> None:
super().__init__(message)
self.extra_fields = extra_fields
diff --git a/server/szurubooru/facade.py b/server/szurubooru/facade.py
index d8d0b634..f1a9a072 100644
--- a/server/szurubooru/facade.py
+++ b/server/szurubooru/facade.py
@@ -1,109 +1,114 @@
-import os
-import time
import logging
+import os
import threading
-from typing import Callable, Any, Type
+import time
+from typing import Any, Callable, Type
import coloredlogs
import sqlalchemy as sa
import sqlalchemy.orm.exc
-from szurubooru import config, db, errors, rest
-from szurubooru.func.posts import update_all_post_signatures
+
+from szurubooru import api, config, db, errors, middleware, rest
from szurubooru.func.file_uploads import purge_old_uploads
-from szurubooru import api, middleware
+from szurubooru.func.posts import update_all_post_signatures
def _map_error(
- ex: Exception,
- target_class: Type[rest.errors.BaseHttpError],
- title: str) -> rest.errors.BaseHttpError:
+ ex: Exception, target_class: Type[rest.errors.BaseHttpError], title: str
+) -> rest.errors.BaseHttpError:
return target_class(
name=type(ex).__name__,
title=title,
description=str(ex),
- extra_fields=getattr(ex, 'extra_fields', {}))
+ extra_fields=getattr(ex, "extra_fields", {}),
+ )
def _on_auth_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpForbidden, 'Authentication error')
+ raise _map_error(ex, rest.errors.HttpForbidden, "Authentication error")
def _on_validation_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpBadRequest, 'Validation error')
+ raise _map_error(ex, rest.errors.HttpBadRequest, "Validation error")
def _on_search_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpBadRequest, 'Search error')
+ raise _map_error(ex, rest.errors.HttpBadRequest, "Search error")
def _on_integrity_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpConflict, 'Integrity violation')
+ raise _map_error(ex, rest.errors.HttpConflict, "Integrity violation")
def _on_not_found_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpNotFound, 'Not found')
+ raise _map_error(ex, rest.errors.HttpNotFound, "Not found")
def _on_processing_error(ex: Exception) -> None:
- raise _map_error(ex, rest.errors.HttpBadRequest, 'Processing error')
+ raise _map_error(ex, rest.errors.HttpBadRequest, "Processing error")
def _on_third_party_error(ex: Exception) -> None:
raise _map_error(
- ex,
- rest.errors.HttpInternalServerError,
- 'Server configuration error')
+ ex, rest.errors.HttpInternalServerError, "Server configuration error"
+ )
def _on_stale_data_error(_ex: Exception) -> None:
raise rest.errors.HttpConflict(
- name='IntegrityError',
- title='Integrity violation',
+ name="IntegrityError",
+ title="Integrity violation",
description=(
- 'Someone else modified this in the meantime. '
- 'Please try again.'))
+ "Someone else modified this in the meantime. " "Please try again."
+ ),
+ )
def validate_config() -> None:
- '''
+ """
Check whether config doesn't contain errors that might prove
lethal at runtime.
- '''
+ """
from szurubooru.func.auth import RANK_MAP
- for privilege, rank in config.config['privileges'].items():
+
+ for privilege, rank in config.config["privileges"].items():
if rank not in RANK_MAP.values():
raise errors.ConfigError(
- 'Rank %r for privilege %r is missing' % (rank, privilege))
- if config.config['default_rank'] not in RANK_MAP.values():
+ "Rank %r for privilege %r is missing" % (rank, privilege)
+ )
+ if config.config["default_rank"] not in RANK_MAP.values():
raise errors.ConfigError(
- 'Default rank %r is not on the list of known ranks' % (
- config.config['default_rank']))
+ "Default rank %r is not on the list of known ranks"
+ % (config.config["default_rank"])
+ )
- for key in ['data_url', 'data_dir']:
+ for key in ["data_url", "data_dir"]:
if not config.config[key]:
raise errors.ConfigError(
- 'Service is not configured: %r is missing' % key)
+ "Service is not configured: %r is missing" % key
+ )
- if not os.path.isabs(config.config['data_dir']):
- raise errors.ConfigError(
- 'data_dir must be an absolute path')
+ if not os.path.isabs(config.config["data_dir"]):
+ raise errors.ConfigError("data_dir must be an absolute path")
- if not config.config['database']:
- raise errors.ConfigError('Database is not configured')
+ if not config.config["database"]:
+ raise errors.ConfigError("Database is not configured")
- if config.config['smtp']['host']:
- if not config.config['smtp']['port']:
+ if config.config["smtp"]["host"]:
+ if not config.config["smtp"]["port"]:
+ raise errors.ConfigError("SMTP host is set but port is not set")
+ if not config.config["smtp"]["user"]:
raise errors.ConfigError(
- 'SMTP host is set but port is not set')
- if not config.config['smtp']['user']:
+ "SMTP host is set but username is not set"
+ )
+ if not config.config["smtp"]["pass"]:
raise errors.ConfigError(
- 'SMTP host is set but username is not set')
- if not config.config['smtp']['pass']:
+ "SMTP host is set but password is not set"
+ )
+ if not config.config["smtp"]["from"]:
raise errors.ConfigError(
- 'SMTP host is set but password is not set')
- if not config.config['smtp']['from']:
- raise errors.ConfigError(
- 'From address must be set to use mail-based password reset')
+ "From address must be set to use mail-based password reset"
+ )
def purge_old_uploads_daemon() -> None:
@@ -116,13 +121,13 @@ def purge_old_uploads_daemon() -> None:
def create_app() -> Callable[[Any, Any], Any]:
- ''' Create a WSGI compatible App object. '''
+ """ Create a WSGI compatible App object. """
validate_config()
- coloredlogs.install(fmt='[%(asctime)-15s] %(name)s %(message)s')
- if config.config['debug']:
- logging.getLogger('szurubooru').setLevel(logging.INFO)
- if config.config['show_sql']:
- logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+ coloredlogs.install(fmt="[%(asctime)-15s] %(name)s %(message)s")
+ if config.config["debug"]:
+ logging.getLogger("szurubooru").setLevel(logging.INFO)
+ if config.config["show_sql"]:
+ logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
purge_thread = threading.Thread(target=purge_old_uploads_daemon)
purge_thread.daemon = True
diff --git a/server/szurubooru/func/auth.py b/server/szurubooru/func/auth.py
index 504fe61b..d0137756 100644
--- a/server/szurubooru/func/auth.py
+++ b/server/szurubooru/func/auth.py
@@ -1,60 +1,67 @@
-from typing import Tuple, Optional
import hashlib
import random
import uuid
from collections import OrderedDict
from datetime import datetime
+from typing import Optional, Tuple
+
from nacl import pwhash
from nacl.exceptions import InvalidkeyError
-from szurubooru import config, db, model, errors
+
+from szurubooru import config, db, errors, model
from szurubooru.func import util
-
-RANK_MAP = OrderedDict([
- (model.User.RANK_ANONYMOUS, 'anonymous'),
- (model.User.RANK_RESTRICTED, 'restricted'),
- (model.User.RANK_REGULAR, 'regular'),
- (model.User.RANK_POWER, 'power'),
- (model.User.RANK_MODERATOR, 'moderator'),
- (model.User.RANK_ADMINISTRATOR, 'administrator'),
- (model.User.RANK_NOBODY, 'nobody'),
-])
+RANK_MAP = OrderedDict(
+ [
+ (model.User.RANK_ANONYMOUS, "anonymous"),
+ (model.User.RANK_RESTRICTED, "restricted"),
+ (model.User.RANK_REGULAR, "regular"),
+ (model.User.RANK_POWER, "power"),
+ (model.User.RANK_MODERATOR, "moderator"),
+ (model.User.RANK_ADMINISTRATOR, "administrator"),
+ (model.User.RANK_NOBODY, "nobody"),
+ ]
+)
def get_password_hash(salt: str, password: str) -> Tuple[str, int]:
- ''' Retrieve argon2id password hash. '''
- return pwhash.argon2id.str(
- (config.config['secret'] + salt + password).encode('utf8')
- ).decode('utf8'), 3
+ """ Retrieve argon2id password hash. """
+ return (
+ pwhash.argon2id.str(
+ (config.config["secret"] + salt + password).encode("utf8")
+ ).decode("utf8"),
+ 3,
+ )
def get_sha256_legacy_password_hash(
- salt: str, password: str) -> Tuple[str, int]:
- ''' Retrieve old-style sha256 password hash. '''
+ salt: str, password: str
+) -> Tuple[str, int]:
+ """ Retrieve old-style sha256 password hash. """
digest = hashlib.sha256()
- digest.update(config.config['secret'].encode('utf8'))
- digest.update(salt.encode('utf8'))
- digest.update(password.encode('utf8'))
+ digest.update(config.config["secret"].encode("utf8"))
+ digest.update(salt.encode("utf8"))
+ digest.update(password.encode("utf8"))
return digest.hexdigest(), 2
def get_sha1_legacy_password_hash(salt: str, password: str) -> Tuple[str, int]:
- ''' Retrieve old-style sha1 password hash. '''
+ """ Retrieve old-style sha1 password hash. """
digest = hashlib.sha1()
- digest.update(b'1A2/$_4xVa')
- digest.update(salt.encode('utf8'))
- digest.update(password.encode('utf8'))
+ digest.update(b"1A2/$_4xVa")
+ digest.update(salt.encode("utf8"))
+ digest.update(password.encode("utf8"))
return digest.hexdigest(), 1
def create_password() -> str:
alphabet = {
- 'c': list('bcdfghijklmnpqrstvwxyz'),
- 'v': list('aeiou'),
- 'n': list('0123456789'),
+ "c": list("bcdfghijklmnpqrstvwxyz"),
+ "v": list("aeiou"),
+ "n": list("0123456789"),
}
- pattern = 'cvcvnncvcv'
- return ''.join(random.choice(alphabet[type]) for type in list(pattern))
+ pattern = "cvcvnncvcv"
+ return "".join(random.choice(alphabet[type]) for type in list(pattern))
def is_valid_password(user: model.User, password: str) -> bool:
@@ -63,12 +70,13 @@ def is_valid_password(user: model.User, password: str) -> bool:
try:
return pwhash.verify(
- user.password_hash.encode('utf8'),
- (config.config['secret'] + salt + password).encode('utf8'))
+ user.password_hash.encode("utf8"),
+ (config.config["secret"] + salt + password).encode("utf8"),
+ )
except InvalidkeyError:
possible_hashes = [
get_sha256_legacy_password_hash(salt, password)[0],
- get_sha1_legacy_password_hash(salt, password)[0]
+ get_sha1_legacy_password_hash(salt, password)[0],
]
if valid_hash in possible_hashes:
# Convert the user password hash to the new hash
@@ -82,16 +90,18 @@ def is_valid_password(user: model.User, password: str) -> bool:
def is_valid_token(user_token: Optional[model.UserToken]) -> bool:
- '''
+ """
Token must be enabled and if it has an expiration, it must be
greater than now.
- '''
+ """
if user_token is None:
return False
if not user_token.enabled:
return False
- if (user_token.expiration_time is not None
- and user_token.expiration_time < datetime.utcnow()):
+ if (
+ user_token.expiration_time is not None
+ and user_token.expiration_time < datetime.utcnow()
+ ):
return False
return True
@@ -99,26 +109,27 @@ def is_valid_token(user_token: Optional[model.UserToken]) -> bool:
def has_privilege(user: model.User, privilege_name: str) -> bool:
assert user
all_ranks = list(RANK_MAP.keys())
- assert privilege_name in config.config['privileges']
+ assert privilege_name in config.config["privileges"]
assert user.rank in all_ranks
minimal_rank = util.flip(RANK_MAP)[
- config.config['privileges'][privilege_name]]
- good_ranks = all_ranks[all_ranks.index(minimal_rank):]
+ config.config["privileges"][privilege_name]
+ ]
+ good_ranks = all_ranks[all_ranks.index(minimal_rank) :]
return user.rank in good_ranks
def verify_privilege(user: model.User, privilege_name: str) -> None:
assert user
if not has_privilege(user, privilege_name):
- raise errors.AuthError('Insufficient privileges to do this.')
+ raise errors.AuthError("Insufficient privileges to do this.")
def generate_authentication_token(user: model.User) -> str:
- ''' Generate nonguessable challenge (e.g. links in password reminder). '''
+ """ Generate nonguessable challenge (e.g. links in password reminder). """
assert user
digest = hashlib.md5()
- digest.update(config.config['secret'].encode('utf8'))
- digest.update(user.password_salt.encode('utf8'))
+ digest.update(config.config["secret"].encode("utf8"))
+ digest.update(user.password_salt.encode("utf8"))
return digest.hexdigest()
diff --git a/server/szurubooru/func/cache.py b/server/szurubooru/func/cache.py
index 01e46592..65e99e16 100644
--- a/server/szurubooru/func/cache.py
+++ b/server/szurubooru/func/cache.py
@@ -1,5 +1,5 @@
-from typing import Any, List, Dict
from datetime import datetime
+from typing import Any, Dict, List
class LruCacheItem:
@@ -18,12 +18,11 @@ class LruCache:
def insert_item(self, item: LruCacheItem) -> None:
if item.key in self.hash:
item_index = next(
- i
- for i, v in enumerate(self.item_list)
- if v.key == item.key)
+ i for i, v in enumerate(self.item_list) if v.key == item.key
+ )
self.item_list[:] = (
- self.item_list[:item_index] +
- self.item_list[item_index + 1:])
+ self.item_list[:item_index] + self.item_list[item_index + 1 :]
+ )
self.item_list.insert(0, item)
else:
if len(self.item_list) > self.length:
diff --git a/server/szurubooru/func/comments.py b/server/szurubooru/func/comments.py
index 9f882831..5eb7c8ea 100644
--- a/server/szurubooru/func/comments.py
+++ b/server/szurubooru/func/comments.py
@@ -1,7 +1,8 @@
from datetime import datetime
-from typing import Any, Optional, List, Dict, Callable
-from szurubooru import db, model, errors, rest
-from szurubooru.func import users, scores, serialization
+from typing import Any, Callable, Dict, List, Optional
+
+from szurubooru import db, errors, model, rest
+from szurubooru.func import scores, serialization, users
class InvalidCommentIdError(errors.ValidationError):
@@ -23,15 +24,15 @@ class CommentSerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'id': self.serialize_id,
- 'user': self.serialize_user,
- 'postId': self.serialize_post_id,
- 'version': self.serialize_version,
- 'text': self.serialize_text,
- 'creationTime': self.serialize_creation_time,
- 'lastEditTime': self.serialize_last_edit_time,
- 'score': self.serialize_score,
- 'ownScore': self.serialize_own_score,
+ "id": self.serialize_id,
+ "user": self.serialize_user,
+ "postId": self.serialize_post_id,
+ "version": self.serialize_version,
+ "text": self.serialize_text,
+ "creationTime": self.serialize_creation_time,
+ "lastEditTime": self.serialize_last_edit_time,
+ "score": self.serialize_score,
+ "ownScore": self.serialize_own_score,
}
def serialize_id(self) -> Any:
@@ -63,9 +64,8 @@ class CommentSerializer(serialization.BaseSerializer):
def serialize_comment(
- comment: model.Comment,
- auth_user: model.User,
- options: List[str] = []) -> rest.Response:
+ comment: model.Comment, auth_user: model.User, options: List[str] = []
+) -> rest.Response:
if comment is None:
return None
return CommentSerializer(comment, auth_user).serialize(options)
@@ -74,21 +74,22 @@ def serialize_comment(
def try_get_comment_by_id(comment_id: int) -> Optional[model.Comment]:
comment_id = int(comment_id)
return (
- db.session
- .query(model.Comment)
+ db.session.query(model.Comment)
.filter(model.Comment.comment_id == comment_id)
- .one_or_none())
+ .one_or_none()
+ )
def get_comment_by_id(comment_id: int) -> model.Comment:
comment = try_get_comment_by_id(comment_id)
if comment:
return comment
- raise CommentNotFoundError('Comment %r not found.' % comment_id)
+ raise CommentNotFoundError("Comment %r not found." % comment_id)
def create_comment(
- user: model.User, post: model.Post, text: str) -> model.Comment:
+ user: model.User, post: model.Post, text: str
+) -> model.Comment:
comment = model.Comment()
comment.user = user
comment.post = post
@@ -100,5 +101,5 @@ def create_comment(
def update_comment_text(comment: model.Comment, text: str) -> None:
assert comment
if not text:
- raise EmptyCommentTextError('Comment text cannot be empty.')
+ raise EmptyCommentTextError("Comment text cannot be empty.")
comment.text = text
diff --git a/server/szurubooru/func/diff.py b/server/szurubooru/func/diff.py
index 90014f7e..3282ebb0 100644
--- a/server/szurubooru/func/diff.py
+++ b/server/szurubooru/func/diff.py
@@ -1,4 +1,4 @@
-from typing import List, Dict, Any
+from typing import Any, Dict, List
def get_list_diff(old: List[Any], new: List[Any]) -> Any:
@@ -16,8 +16,11 @@ def get_list_diff(old: List[Any], new: List[Any]) -> Any:
equal = False
added.append(item)
- return None if equal else {
- 'type': 'list change', 'added': added, 'removed': removed}
+ return (
+ None
+ if equal
+ else {"type": "list change", "added": added, "removed": removed}
+ )
def get_dict_diff(old: Dict[str, Any], new: Dict[str, Any]) -> Any:
@@ -40,23 +43,20 @@ def get_dict_diff(old: Dict[str, Any], new: Dict[str, Any]) -> Any:
else:
equal = False
value[key] = {
- 'type': 'primitive change',
- 'old-value': old[key],
- 'new-value': new[key],
+ "type": "primitive change",
+ "old-value": old[key],
+ "new-value": new[key],
}
else:
equal = False
- value[key] = {
- 'type': 'deleted property',
- 'value': old[key]
- }
+ value[key] = {"type": "deleted property", "value": old[key]}
for key in new.keys():
if key not in old:
equal = False
value[key] = {
- 'type': 'added property',
- 'value': new[key],
+ "type": "added property",
+ "value": new[key],
}
- return None if equal else {'type': 'object change', 'value': value}
+ return None if equal else {"type": "object change", "value": value}
diff --git a/server/szurubooru/func/favorites.py b/server/szurubooru/func/favorites.py
index f567bfad..a0125377 100644
--- a/server/szurubooru/func/favorites.py
+++ b/server/szurubooru/func/favorites.py
@@ -1,6 +1,7 @@
-from typing import Any, Optional, Callable, Tuple
from datetime import datetime
-from szurubooru import db, model, errors
+from typing import Any, Callable, Optional, Tuple
+
+from szurubooru import db, errors, model
class InvalidFavoriteTargetError(errors.ValidationError):
@@ -8,10 +9,11 @@ class InvalidFavoriteTargetError(errors.ValidationError):
def _get_table_info(
- entity: model.Base) -> Tuple[model.Base, Callable[[model.Base], Any]]:
+ entity: model.Base,
+) -> Tuple[model.Base, Callable[[model.Base], Any]]:
assert entity
resource_type, _, _ = model.util.get_resource_info(entity)
- if resource_type == 'post':
+ if resource_type == "post":
return model.PostFavorite, lambda table: table.post_id
raise InvalidFavoriteTargetError()
@@ -38,6 +40,7 @@ def unset_favorite(entity: model.Base, user: Optional[model.User]) -> None:
def set_favorite(entity: model.Base, user: Optional[model.User]) -> None:
from szurubooru.func import scores
+
assert entity
assert user
try:
diff --git a/server/szurubooru/func/file_uploads.py b/server/szurubooru/func/file_uploads.py
index e7f93d83..800397df 100644
--- a/server/szurubooru/func/file_uploads.py
+++ b/server/szurubooru/func/file_uploads.py
@@ -1,25 +1,25 @@
-from typing import Optional
from datetime import datetime, timedelta
-from szurubooru.func import files, util
+from typing import Optional
+from szurubooru.func import files, util
MAX_MINUTES = 60
def _get_path(checksum: str) -> str:
- return 'temporary-uploads/%s.dat' % checksum
+ return "temporary-uploads/%s.dat" % checksum
def purge_old_uploads() -> None:
now = datetime.now()
- for file in files.scan('temporary-uploads'):
+ for file in files.scan("temporary-uploads"):
file_time = datetime.fromtimestamp(file.stat().st_ctime)
if now - file_time > timedelta(minutes=MAX_MINUTES):
- files.delete('temporary-uploads/%s' % file.name)
+ files.delete("temporary-uploads/%s" % file.name)
def get(checksum: str) -> Optional[bytes]:
- return files.get('temporary-uploads/%s.dat' % checksum)
+ return files.get("temporary-uploads/%s.dat" % checksum)
def save(content: bytes) -> str:
diff --git a/server/szurubooru/func/files.py b/server/szurubooru/func/files.py
index fa9f36fd..6a898269 100644
--- a/server/szurubooru/func/files.py
+++ b/server/szurubooru/func/files.py
@@ -1,10 +1,11 @@
-from typing import Any, Optional, List
import os
+from typing import Any, List, Optional
+
from szurubooru import config
def _get_full_path(path: str) -> str:
- return os.path.join(config.config['data_dir'], path)
+ return os.path.join(config.config["data_dir"], path)
def delete(path: str) -> None:
@@ -31,12 +32,12 @@ def get(path: str) -> Optional[bytes]:
full_path = _get_full_path(path)
if not os.path.exists(full_path):
return None
- with open(full_path, 'rb') as handle:
+ with open(full_path, "rb") as handle:
return handle.read()
def save(path: str, content: bytes) -> None:
full_path = _get_full_path(path)
os.makedirs(os.path.dirname(full_path), exist_ok=True)
- with open(full_path, 'wb') as handle:
+ with open(full_path, "wb") as handle:
handle.write(content)
diff --git a/server/szurubooru/func/image_hash.py b/server/szurubooru/func/image_hash.py
index 771302d2..fc7d141b 100644
--- a/server/szurubooru/func/image_hash.py
+++ b/server/szurubooru/func/image_hash.py
@@ -1,12 +1,13 @@
import logging
-from io import BytesIO
-from datetime import datetime
-from typing import Any, Optional, Tuple, Set, List, Callable
import math
+from datetime import datetime
+from io import BytesIO
+from typing import Any, Callable, List, Optional, Set, Tuple
+
import numpy as np
from PIL import Image
-from szurubooru import config, errors
+from szurubooru import config, errors
logger = logging.getLogger(__name__)
@@ -16,7 +17,7 @@ logger = logging.getLogger(__name__)
LOWER_PERCENTILE = 5
UPPER_PERCENTILE = 95
-IDENTICAL_TOLERANCE = 2 / 255.
+IDENTICAL_TOLERANCE = 2 / 255.0
DISTANCE_CUTOFF = 0.45
N_LEVELS = 2
N = 9
@@ -38,68 +39,74 @@ NpMatrix = np.ndarray
def _preprocess_image(content: bytes) -> NpMatrix:
try:
img = Image.open(BytesIO(content))
- return np.asarray(img.convert('L'), dtype=np.uint8)
+ return np.asarray(img.convert("L"), dtype=np.uint8)
except IOError:
raise errors.ProcessingError(
- 'Unable to generate a signature hash '
- 'for this image.')
+ "Unable to generate a signature hash " "for this image."
+ )
def _crop_image(
- image: NpMatrix,
- lower_percentile: float,
- upper_percentile: float) -> Window:
+ image: NpMatrix, lower_percentile: float, upper_percentile: float
+) -> Window:
rw = np.cumsum(np.sum(np.abs(np.diff(image, axis=1)), axis=1))
cw = np.cumsum(np.sum(np.abs(np.diff(image, axis=0)), axis=0))
upper_column_limit = np.searchsorted(
- cw, np.percentile(cw, upper_percentile), side='left')
+ cw, np.percentile(cw, upper_percentile), side="left"
+ )
lower_column_limit = np.searchsorted(
- cw, np.percentile(cw, lower_percentile), side='right')
+ cw, np.percentile(cw, lower_percentile), side="right"
+ )
upper_row_limit = np.searchsorted(
- rw, np.percentile(rw, upper_percentile), side='left')
+ rw, np.percentile(rw, upper_percentile), side="left"
+ )
lower_row_limit = np.searchsorted(
- rw, np.percentile(rw, lower_percentile), side='right')
+ rw, np.percentile(rw, lower_percentile), side="right"
+ )
if lower_row_limit > upper_row_limit:
- lower_row_limit = int(lower_percentile / 100. * image.shape[0])
- upper_row_limit = int(upper_percentile / 100. * image.shape[0])
+ lower_row_limit = int(lower_percentile / 100.0 * image.shape[0])
+ upper_row_limit = int(upper_percentile / 100.0 * image.shape[0])
if lower_column_limit > upper_column_limit:
- lower_column_limit = int(lower_percentile / 100. * image.shape[1])
- upper_column_limit = int(upper_percentile / 100. * image.shape[1])
+ lower_column_limit = int(lower_percentile / 100.0 * image.shape[1])
+ upper_column_limit = int(upper_percentile / 100.0 * image.shape[1])
return (
(lower_row_limit, upper_row_limit),
- (lower_column_limit, upper_column_limit))
+ (lower_column_limit, upper_column_limit),
+ )
def _normalize_and_threshold(
- diff_array: NpMatrix,
- identical_tolerance: float,
- n_levels: int) -> None:
+ diff_array: NpMatrix, identical_tolerance: float, n_levels: int
+) -> None:
mask = np.abs(diff_array) < identical_tolerance
- diff_array[mask] = 0.
+ diff_array[mask] = 0.0
if np.all(mask):
return
positive_cutoffs = np.percentile(
- diff_array[diff_array > 0.], np.linspace(0, 100, n_levels + 1))
+ diff_array[diff_array > 0.0], np.linspace(0, 100, n_levels + 1)
+ )
negative_cutoffs = np.percentile(
- diff_array[diff_array < 0.], np.linspace(100, 0, n_levels + 1))
+ diff_array[diff_array < 0.0], np.linspace(100, 0, n_levels + 1)
+ )
for level, interval in enumerate(
- positive_cutoffs[i:i + 2]
- for i in range(positive_cutoffs.shape[0] - 1)):
+ positive_cutoffs[i : i + 2]
+ for i in range(positive_cutoffs.shape[0] - 1)
+ ):
diff_array[
- (diff_array >= interval[0]) & (diff_array <= interval[1])] = \
- level + 1
+ (diff_array >= interval[0]) & (diff_array <= interval[1])
+ ] = (level + 1)
for level, interval in enumerate(
- negative_cutoffs[i:i + 2]
- for i in range(negative_cutoffs.shape[0] - 1)):
+ negative_cutoffs[i : i + 2]
+ for i in range(negative_cutoffs.shape[0] - 1)
+ ):
diff_array[
- (diff_array <= interval[0]) & (diff_array >= interval[1])] = \
- -(level + 1)
+ (diff_array <= interval[0]) & (diff_array >= interval[1])
+ ] = -(level + 1)
def _compute_grid_points(
- image: NpMatrix,
- n: float,
- window: Window = None) -> Tuple[NpMatrix, NpMatrix]:
+ image: NpMatrix, n: float, window: Window = None
+) -> Tuple[NpMatrix, NpMatrix]:
if window is None:
window = ((0, image.shape[0]), (0, image.shape[1]))
x_coords = np.linspace(window[0][0], window[0][1], n + 2, dtype=int)[1:-1]
@@ -108,12 +115,10 @@ def _compute_grid_points(
def _compute_mean_level(
- image: NpMatrix,
- x_coords: NpMatrix,
- y_coords: NpMatrix,
- p: Optional[float]) -> NpMatrix:
+ image: NpMatrix, x_coords: NpMatrix, y_coords: NpMatrix, p: Optional[float]
+) -> NpMatrix:
if p is None:
- p = max([2.0, int(0.5 + min(image.shape) / 20.)])
+ p = max([2.0, int(0.5 + min(image.shape) / 20.0)])
avg_grey = np.zeros((x_coords.shape[0], y_coords.shape[0]))
for i, x in enumerate(x_coords):
lower_x_lim = int(max([x - p / 2, 0]))
@@ -122,7 +127,8 @@ def _compute_mean_level(
lower_y_lim = int(max([y - p / 2, 0]))
upper_y_lim = int(min([lower_y_lim + p, image.shape[1]]))
avg_grey[i, j] = np.mean(
- image[lower_x_lim:upper_x_lim, lower_y_lim:upper_y_lim])
+ image[lower_x_lim:upper_x_lim, lower_y_lim:upper_y_lim]
+ )
return avg_grey
@@ -132,59 +138,82 @@ def _compute_differentials(grey_level_matrix: NpMatrix) -> NpMatrix:
(
np.diff(grey_level_matrix),
(
- np.zeros(grey_level_matrix.shape[0])
- .reshape((grey_level_matrix.shape[0], 1))
- )
- ), axis=1)
+ np.zeros(grey_level_matrix.shape[0]).reshape(
+ (grey_level_matrix.shape[0], 1)
+ )
+ ),
+ ),
+ axis=1,
+ )
down_neighbors = -np.concatenate(
(
np.diff(grey_level_matrix, axis=0),
(
- np.zeros(grey_level_matrix.shape[1])
- .reshape((1, grey_level_matrix.shape[1]))
- )
- ))
+ np.zeros(grey_level_matrix.shape[1]).reshape(
+ (1, grey_level_matrix.shape[1])
+ )
+ ),
+ )
+ )
left_neighbors = -np.concatenate(
- (right_neighbors[:, -1:], right_neighbors[:, :-1]), axis=1)
+ (right_neighbors[:, -1:], right_neighbors[:, :-1]), axis=1
+ )
up_neighbors = -np.concatenate((down_neighbors[-1:], down_neighbors[:-1]))
diagonals = np.arange(
- -grey_level_matrix.shape[0] + 1, grey_level_matrix.shape[0])
- upper_left_neighbors = sum([
- np.diagflat(np.insert(np.diff(np.diag(grey_level_matrix, i)), 0, 0), i)
- for i in diagonals])
- upper_right_neighbors = sum([
- np.diagflat(np.insert(np.diff(np.diag(flipped, i)), 0, 0), i)
- for i in diagonals])
+ -grey_level_matrix.shape[0] + 1, grey_level_matrix.shape[0]
+ )
+ upper_left_neighbors = sum(
+ [
+ np.diagflat(
+ np.insert(np.diff(np.diag(grey_level_matrix, i)), 0, 0), i
+ )
+ for i in diagonals
+ ]
+ )
+ upper_right_neighbors = sum(
+ [
+ np.diagflat(np.insert(np.diff(np.diag(flipped, i)), 0, 0), i)
+ for i in diagonals
+ ]
+ )
lower_right_neighbors = -np.pad(
- upper_left_neighbors[1:, 1:], (0, 1), mode='constant')
+ upper_left_neighbors[1:, 1:], (0, 1), mode="constant"
+ )
lower_left_neighbors = -np.pad(
- upper_right_neighbors[1:, 1:], (0, 1), mode='constant')
- return np.dstack(np.array([
- upper_left_neighbors,
- up_neighbors,
- np.fliplr(upper_right_neighbors),
- left_neighbors,
- right_neighbors,
- np.fliplr(lower_left_neighbors),
- down_neighbors,
- lower_right_neighbors]))
+ upper_right_neighbors[1:, 1:], (0, 1), mode="constant"
+ )
+ return np.dstack(
+ np.array(
+ [
+ upper_left_neighbors,
+ up_neighbors,
+ np.fliplr(upper_right_neighbors),
+ left_neighbors,
+ right_neighbors,
+ np.fliplr(lower_left_neighbors),
+ down_neighbors,
+ lower_right_neighbors,
+ ]
+ )
+ )
def _words_to_int(word_array: NpMatrix) -> List[int]:
width = word_array.shape[1]
- coding_vector = 3**np.arange(width)
+ coding_vector = 3 ** np.arange(width)
return np.dot(word_array + 1, coding_vector).astype(int).tolist()
def _get_words(array: NpMatrix, k: int, n: int) -> NpMatrix:
- word_positions = np.linspace(
- 0, array.shape[0], n, endpoint=False).astype('int')
+ word_positions = np.linspace(0, array.shape[0], n, endpoint=False).astype(
+ "int"
+ )
assert k <= array.shape[0]
assert word_positions.shape[0] <= array.shape[0]
- words = np.zeros((n, k)).astype('int8')
+ words = np.zeros((n, k)).astype("int8")
for i, pos in enumerate(word_positions):
if pos + k <= array.shape[0]:
- words[i] = array[pos:pos + k]
+ words[i] = array[pos : pos + k]
else:
temp = array[pos:].copy()
temp.resize(k, refcheck=False)
@@ -199,16 +228,17 @@ def generate_signature(content: bytes) -> NpMatrix:
image_limits = _crop_image(
im_array,
lower_percentile=LOWER_PERCENTILE,
- upper_percentile=UPPER_PERCENTILE)
+ upper_percentile=UPPER_PERCENTILE,
+ )
x_coords, y_coords = _compute_grid_points(
- im_array, n=N, window=image_limits)
+ im_array, n=N, window=image_limits
+ )
avg_grey = _compute_mean_level(im_array, x_coords, y_coords, p=P)
diff_matrix = _compute_differentials(avg_grey)
_normalize_and_threshold(
- diff_matrix,
- identical_tolerance=IDENTICAL_TOLERANCE,
- n_levels=N_LEVELS)
- return np.ravel(diff_matrix).astype('int8')
+ diff_matrix, identical_tolerance=IDENTICAL_TOLERANCE, n_levels=N_LEVELS
+ )
+ return np.ravel(diff_matrix).astype("int8")
def generate_words(signature: NpMatrix) -> List[int]:
@@ -216,9 +246,8 @@ def generate_words(signature: NpMatrix) -> List[int]:
def normalized_distance(
- target_array: Any,
- vec: NpMatrix,
- nan_value: float = 1.0) -> List[float]:
+ target_array: Any, vec: NpMatrix, nan_value: float = 1.0
+) -> List[float]:
target_array = np.array(target_array).astype(int)
vec = vec.astype(int)
topvec = np.linalg.norm(vec - target_array, axis=1)
@@ -230,7 +259,7 @@ def normalized_distance(
def pack_signature(signature: NpMatrix) -> bytes:
- '''
+ """
Serializes the signature vector for efficient storage in a database.
Shifts the range of the signature vector from [-N_LEVELS,+N_LEVELS]
@@ -241,24 +270,38 @@ def pack_signature(signature: NpMatrix) -> bytes:
This is then converted into a more packed array consisting of
uint32 elements (for SIG_CHUNK_BITS = 32).
- '''
- coding_vector = np.flipud(SIG_BASE**np.arange(SIG_CHUNK_WIDTH))
- return np.array([
- np.dot(x, coding_vector) for x in
- np.reshape(signature + N_LEVELS, (-1, SIG_CHUNK_WIDTH))
- ]).astype(f'uint{SIG_CHUNK_BITS}').tobytes()
+ """
+ coding_vector = np.flipud(SIG_BASE ** np.arange(SIG_CHUNK_WIDTH))
+ return (
+ np.array(
+ [
+ np.dot(x, coding_vector)
+ for x in np.reshape(
+ signature + N_LEVELS, (-1, SIG_CHUNK_WIDTH)
+ )
+ ]
+ )
+ .astype(f"uint{SIG_CHUNK_BITS}")
+ .tobytes()
+ )
def unpack_signature(packed: bytes) -> NpMatrix:
- '''
+ """
    Deserializes the signature vector once received from the database.
Functions as an inverse transformation of pack_signature()
- '''
- return np.ravel(np.array([
- [
- int(digit) - N_LEVELS for digit in
- np.base_repr(e, base=SIG_BASE).zfill(SIG_CHUNK_WIDTH)
- ] for e in
- np.frombuffer(packed, dtype=f'uint{SIG_CHUNK_BITS}')
- ]).astype('int8'))
+ """
+ return np.ravel(
+ np.array(
+ [
+ [
+ int(digit) - N_LEVELS
+ for digit in np.base_repr(e, base=SIG_BASE).zfill(
+ SIG_CHUNK_WIDTH
+ )
+ ]
+ for e in np.frombuffer(packed, dtype=f"uint{SIG_CHUNK_BITS}")
+ ]
+ ).astype("int8")
+ )
diff --git a/server/szurubooru/func/images.py b/server/szurubooru/func/images.py
index 1a96dcc3..440fc06d 100644
--- a/server/szurubooru/func/images.py
+++ b/server/szurubooru/func/images.py
@@ -1,14 +1,14 @@
-from typing import List
-import logging
import json
-import shlex
-import subprocess
+import logging
import math
import re
+import shlex
+import subprocess
+from typing import List
+
from szurubooru import errors
from szurubooru.func import mime, util
-
logger = logging.getLogger(__name__)
@@ -19,97 +19,139 @@ class Image:
@property
def width(self) -> int:
- return self.info['streams'][0]['width']
+ return self.info["streams"][0]["width"]
@property
def height(self) -> int:
- return self.info['streams'][0]['height']
+ return self.info["streams"][0]["height"]
@property
def frames(self) -> int:
- return self.info['streams'][0]['nb_read_frames']
+ return self.info["streams"][0]["nb_read_frames"]
def resize_fill(self, width: int, height: int) -> None:
width_greater = self.width > self.height
width, height = (-1, height) if width_greater else (width, -1)
cli = [
- '-i', '{path}',
- '-f', 'image2',
- '-filter:v', "scale='{width}:{height}'".format(
- width=width, height=height),
- '-map', '0:v:0',
- '-vframes', '1',
- '-vcodec', 'png',
- '-',
+ "-i",
+ "{path}",
+ "-f",
+ "image2",
+ "-filter:v",
+ "scale='{width}:{height}'".format(width=width, height=height),
+ "-map",
+ "0:v:0",
+ "-vframes",
+ "1",
+ "-vcodec",
+ "png",
+ "-",
]
- if 'duration' in self.info['format'] \
- and self.info['format']['format_name'] != 'swf':
- duration = float(self.info['format']['duration'])
+ if (
+ "duration" in self.info["format"]
+ and self.info["format"]["format_name"] != "swf"
+ ):
+ duration = float(self.info["format"]["duration"])
if duration > 3:
- cli = [
- '-ss',
- '%d' % math.floor(duration * 0.3),
- ] + cli
+ cli = ["-ss", "%d" % math.floor(duration * 0.3),] + cli
content = self._execute(cli, ignore_error_if_data=True)
if not content:
- raise errors.ProcessingError('Error while resizing image.')
+ raise errors.ProcessingError("Error while resizing image.")
self.content = content
self._reload_info()
def to_png(self) -> bytes:
- return self._execute([
- '-i', '{path}',
- '-f', 'image2',
- '-map', '0:v:0',
- '-vframes', '1',
- '-vcodec', 'png',
- '-',
- ])
+ return self._execute(
+ [
+ "-i",
+ "{path}",
+ "-f",
+ "image2",
+ "-map",
+ "0:v:0",
+ "-vframes",
+ "1",
+ "-vcodec",
+ "png",
+ "-",
+ ]
+ )
def to_jpeg(self) -> bytes:
- return self._execute([
- '-f', 'lavfi',
- '-i', 'color=white:s=%dx%d' % (self.width, self.height),
- '-i', '{path}',
- '-f', 'image2',
- '-filter_complex', 'overlay',
- '-map', '0:v:0',
- '-vframes', '1',
- '-vcodec', 'mjpeg',
- '-',
- ])
+ return self._execute(
+ [
+ "-f",
+ "lavfi",
+ "-i",
+ "color=white:s=%dx%d" % (self.width, self.height),
+ "-i",
+ "{path}",
+ "-f",
+ "image2",
+ "-filter_complex",
+ "overlay",
+ "-map",
+ "0:v:0",
+ "-vframes",
+ "1",
+ "-vcodec",
+ "mjpeg",
+ "-",
+ ]
+ )
def to_webm(self) -> bytes:
- with util.create_temp_file_path(suffix='.log') as phase_log_path:
+ with util.create_temp_file_path(suffix=".log") as phase_log_path:
# Pass 1
- self._execute([
- '-i', '{path}',
- '-pass', '1',
- '-passlogfile', phase_log_path,
- '-vcodec', 'libvpx-vp9',
- '-crf', '4',
- '-b:v', '2500K',
- '-acodec', 'libvorbis',
- '-f', 'webm',
- '-y', '/dev/null'
- ])
+ self._execute(
+ [
+ "-i",
+ "{path}",
+ "-pass",
+ "1",
+ "-passlogfile",
+ phase_log_path,
+ "-vcodec",
+ "libvpx-vp9",
+ "-crf",
+ "4",
+ "-b:v",
+ "2500K",
+ "-acodec",
+ "libvorbis",
+ "-f",
+ "webm",
+ "-y",
+ "/dev/null",
+ ]
+ )
# Pass 2
- return self._execute([
- '-i', '{path}',
- '-pass', '2',
- '-passlogfile', phase_log_path,
- '-vcodec', 'libvpx-vp9',
- '-crf', '4',
- '-b:v', '2500K',
- '-acodec', 'libvorbis',
- '-f', 'webm',
- '-'
- ])
+ return self._execute(
+ [
+ "-i",
+ "{path}",
+ "-pass",
+ "2",
+ "-passlogfile",
+ phase_log_path,
+ "-vcodec",
+ "libvpx-vp9",
+ "-crf",
+ "4",
+ "-b:v",
+ "2500K",
+ "-acodec",
+ "libvorbis",
+ "-f",
+ "webm",
+ "-",
+ ]
+ )
def to_mp4(self) -> bytes:
- with util.create_temp_file_path(suffix='.dat') as mp4_temp_path:
+ with util.create_temp_file_path(suffix=".dat") as mp4_temp_path:
width = self.width
height = self.height
altered_dimensions = False
@@ -123,97 +165,138 @@ class Image:
altered_dimensions = True
args = [
- '-i', '{path}',
- '-vcodec', 'libx264',
- '-preset', 'slow',
- '-crf', '22',
- '-b:v', '200K',
- '-profile:v', 'main',
- '-pix_fmt', 'yuv420p',
- '-acodec', 'aac',
- '-f', 'mp4'
+ "-i",
+ "{path}",
+ "-vcodec",
+ "libx264",
+ "-preset",
+ "slow",
+ "-crf",
+ "22",
+ "-b:v",
+ "200K",
+ "-profile:v",
+ "main",
+ "-pix_fmt",
+ "yuv420p",
+ "-acodec",
+ "aac",
+ "-f",
+ "mp4",
]
if altered_dimensions:
- args += ['-filter:v', 'scale=\'%d:%d\'' % (width, height)]
+ args += ["-filter:v", "scale='%d:%d'" % (width, height)]
- self._execute(args + ['-y', mp4_temp_path])
+ self._execute(args + ["-y", mp4_temp_path])
- with open(mp4_temp_path, 'rb') as mp4_temp:
+ with open(mp4_temp_path, "rb") as mp4_temp:
return mp4_temp.read()
def check_for_sound(self) -> bool:
- audioinfo = json.loads(self._execute([
- '-i', '{path}',
- '-of', 'json',
- '-select_streams', 'a',
- '-show_streams',
- ], program='ffprobe').decode('utf-8'))
- assert 'streams' in audioinfo
- if len(audioinfo['streams']) < 1:
+ audioinfo = json.loads(
+ self._execute(
+ [
+ "-i",
+ "{path}",
+ "-of",
+ "json",
+ "-select_streams",
+ "a",
+ "-show_streams",
+ ],
+ program="ffprobe",
+ ).decode("utf-8")
+ )
+ assert "streams" in audioinfo
+ if len(audioinfo["streams"]) < 1:
return False
- log = self._execute([
- '-hide_banner',
- '-progress', '-',
- '-i', '{path}',
- '-af', 'volumedetect',
- '-max_muxing_queue_size', '99999',
- '-vn', '-sn',
- '-f', 'null',
- '-y', '/dev/null',
- ], get_logs=True).decode('utf-8', errors='replace')
- log_match = re.search(r'.*volumedetect.*mean_volume: (.*) dB', log)
+ log = self._execute(
+ [
+ "-hide_banner",
+ "-progress",
+ "-",
+ "-i",
+ "{path}",
+ "-af",
+ "volumedetect",
+ "-max_muxing_queue_size",
+ "99999",
+ "-vn",
+ "-sn",
+ "-f",
+ "null",
+ "-y",
+ "/dev/null",
+ ],
+ get_logs=True,
+ ).decode("utf-8", errors="replace")
+ log_match = re.search(r".*volumedetect.*mean_volume: (.*) dB", log)
if not log_match or not log_match.groups():
raise errors.ProcessingError(
- 'A problem occured when trying to check for audio')
+                "A problem occurred when trying to check for audio"
+ )
meanvol = float(log_match.groups()[0])
# -91.0 dB is the minimum for 16-bit audio, assume sound if > -80.0 dB
return meanvol > -80.0
def _execute(
- self,
- cli: List[str],
- program: str = 'ffmpeg',
- ignore_error_if_data: bool = False,
- get_logs: bool = False) -> bytes:
+ self,
+ cli: List[str],
+ program: str = "ffmpeg",
+ ignore_error_if_data: bool = False,
+ get_logs: bool = False,
+ ) -> bytes:
extension = mime.get_extension(mime.get_mime_type(self.content))
assert extension
- with util.create_temp_file(suffix='.' + extension) as handle:
+ with util.create_temp_file(suffix="." + extension) as handle:
handle.write(self.content)
handle.flush()
- cli = [program, '-loglevel', '32' if get_logs else '24'] + cli
+ cli = [program, "-loglevel", "32" if get_logs else "24"] + cli
cli = [part.format(path=handle.name) for part in cli]
proc = subprocess.Popen(
cli,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ stderr=subprocess.PIPE,
+ )
out, err = proc.communicate(input=self.content)
if proc.returncode != 0:
logger.warning(
- 'Failed to execute ffmpeg command (cli=%r, err=%r)',
- ' '.join(shlex.quote(arg) for arg in cli),
- err)
- if ((len(out) > 0 and not ignore_error_if_data)
- or len(out) == 0):
+ "Failed to execute ffmpeg command (cli=%r, err=%r)",
+ " ".join(shlex.quote(arg) for arg in cli),
+ err,
+ )
+ if (len(out) > 0 and not ignore_error_if_data) or len(
+ out
+ ) == 0:
raise errors.ProcessingError(
- 'Error while processing image.\n'
- + err.decode('utf-8'))
+ "Error while processing image.\n" + err.decode("utf-8")
+ )
return err if get_logs else out
def _reload_info(self) -> None:
- self.info = json.loads(self._execute([
- '-i', '{path}',
- '-of', 'json',
- '-select_streams', 'v',
- '-show_format',
- '-show_streams',
- ], program='ffprobe').decode('utf-8'))
- assert 'format' in self.info
- assert 'streams' in self.info
- if len(self.info['streams']) < 1:
- logger.warning('The video contains no video streams.')
+ self.info = json.loads(
+ self._execute(
+ [
+ "-i",
+ "{path}",
+ "-of",
+ "json",
+ "-select_streams",
+ "v",
+ "-show_format",
+ "-show_streams",
+ ],
+ program="ffprobe",
+ ).decode("utf-8")
+ )
+ assert "format" in self.info
+ assert "streams" in self.info
+ if len(self.info["streams"]) < 1:
+ logger.warning("The video contains no video streams.")
raise errors.ProcessingError(
- 'The video contains no video streams.')
+ "The video contains no video streams."
+ )
diff --git a/server/szurubooru/func/mailer.py b/server/szurubooru/func/mailer.py
index 76682f11..c4cf9db3 100644
--- a/server/szurubooru/func/mailer.py
+++ b/server/szurubooru/func/mailer.py
@@ -1,16 +1,18 @@
-import smtplib
import email.mime.text
+import smtplib
+
from szurubooru import config
def send_mail(sender: str, recipient: str, subject: str, body: str) -> None:
msg = email.mime.text.MIMEText(body)
- msg['Subject'] = subject
- msg['From'] = sender
- msg['To'] = recipient
+ msg["Subject"] = subject
+ msg["From"] = sender
+ msg["To"] = recipient
smtp = smtplib.SMTP(
- config.config['smtp']['host'], int(config.config['smtp']['port']))
- smtp.login(config.config['smtp']['user'], config.config['smtp']['pass'])
+ config.config["smtp"]["host"], int(config.config["smtp"]["port"])
+ )
+ smtp.login(config.config["smtp"]["user"], config.config["smtp"]["pass"])
smtp.send_message(msg)
smtp.quit()
diff --git a/server/szurubooru/func/mime.py b/server/szurubooru/func/mime.py
index afab817e..5f6279b4 100644
--- a/server/szurubooru/func/mime.py
+++ b/server/szurubooru/func/mime.py
@@ -4,60 +4,66 @@ from typing import Optional
def get_mime_type(content: bytes) -> str:
if not content:
- return 'application/octet-stream'
+ return "application/octet-stream"
- if content[0:3] in (b'CWS', b'FWS', b'ZWS'):
- return 'application/x-shockwave-flash'
+ if content[0:3] in (b"CWS", b"FWS", b"ZWS"):
+ return "application/x-shockwave-flash"
- if content[0:3] == b'\xFF\xD8\xFF':
- return 'image/jpeg'
+ if content[0:3] == b"\xFF\xD8\xFF":
+ return "image/jpeg"
- if content[0:6] == b'\x89PNG\x0D\x0A':
- return 'image/png'
+ if content[0:6] == b"\x89PNG\x0D\x0A":
+ return "image/png"
- if content[0:6] in (b'GIF87a', b'GIF89a'):
- return 'image/gif'
+ if content[0:6] in (b"GIF87a", b"GIF89a"):
+ return "image/gif"
- if content[8:12] == b'WEBP':
- return 'image/webp'
+ if content[8:12] == b"WEBP":
+ return "image/webp"
- if content[0:4] == b'\x1A\x45\xDF\xA3':
- return 'video/webm'
+ if content[0:4] == b"\x1A\x45\xDF\xA3":
+ return "video/webm"
- if content[4:12] in (b'ftypisom', b'ftypiso5', b'ftypmp42'):
- return 'video/mp4'
+ if content[4:12] in (b"ftypisom", b"ftypiso5", b"ftypmp42"):
+ return "video/mp4"
- return 'application/octet-stream'
+ return "application/octet-stream"
def get_extension(mime_type: str) -> Optional[str]:
extension_map = {
- 'application/x-shockwave-flash': 'swf',
- 'image/gif': 'gif',
- 'image/jpeg': 'jpg',
- 'image/png': 'png',
- 'image/webp': 'webp',
- 'video/mp4': 'mp4',
- 'video/webm': 'webm',
- 'application/octet-stream': 'dat',
+ "application/x-shockwave-flash": "swf",
+ "image/gif": "gif",
+ "image/jpeg": "jpg",
+ "image/png": "png",
+ "image/webp": "webp",
+ "video/mp4": "mp4",
+ "video/webm": "webm",
+ "application/octet-stream": "dat",
}
- return extension_map.get((mime_type or '').strip().lower(), None)
+ return extension_map.get((mime_type or "").strip().lower(), None)
def is_flash(mime_type: str) -> bool:
- return mime_type.lower() == 'application/x-shockwave-flash'
+ return mime_type.lower() == "application/x-shockwave-flash"
def is_video(mime_type: str) -> bool:
- return mime_type.lower() in ('application/ogg', 'video/mp4', 'video/webm')
+ return mime_type.lower() in ("application/ogg", "video/mp4", "video/webm")
def is_image(mime_type: str) -> bool:
return mime_type.lower() in (
- 'image/jpeg', 'image/png', 'image/gif', 'image/webp')
+ "image/jpeg",
+ "image/png",
+ "image/gif",
+ "image/webp",
+ )
def is_animated_gif(content: bytes) -> bool:
- pattern = b'\x21\xF9\x04[\x00-\xFF]{4}\x00[\x2C\x21]'
- return get_mime_type(content) == 'image/gif' \
+ pattern = b"\x21\xF9\x04[\x00-\xFF]{4}\x00[\x2C\x21]"
+ return (
+ get_mime_type(content) == "image/gif"
and len(re.findall(pattern, content)) > 1
+ )
diff --git a/server/szurubooru/func/net.py b/server/szurubooru/func/net.py
index 81103400..ddac65c8 100644
--- a/server/szurubooru/func/net.py
+++ b/server/szurubooru/func/net.py
@@ -1,12 +1,13 @@
import logging
-import urllib.request
import os
+import urllib.request
from tempfile import NamedTemporaryFile
-from szurubooru import config, errors
-from szurubooru.func import mime, util
+
from youtube_dl import YoutubeDL
from youtube_dl.utils import YoutubeDLError
+from szurubooru import config, errors
+from szurubooru.func import mime, util
logger = logging.getLogger(__name__)
@@ -14,41 +15,46 @@ logger = logging.getLogger(__name__)
def download(url: str, use_video_downloader: bool = False) -> bytes:
assert url
request = urllib.request.Request(url)
- if config.config['user_agent']:
- request.add_header('User-Agent', config.config['user_agent'])
- request.add_header('Referer', url)
+ if config.config["user_agent"]:
+ request.add_header("User-Agent", config.config["user_agent"])
+ request.add_header("Referer", url)
try:
with urllib.request.urlopen(request) as handle:
content = handle.read()
except Exception as ex:
- raise errors.ProcessingError('Error downloading %s (%s)' % (url, ex))
- if (use_video_downloader and
- mime.get_mime_type(content) == 'application/octet-stream'):
+ raise errors.ProcessingError("Error downloading %s (%s)" % (url, ex))
+ if (
+ use_video_downloader
+ and mime.get_mime_type(content) == "application/octet-stream"
+ ):
return _youtube_dl_wrapper(url)
return content
def _youtube_dl_wrapper(url: str) -> bytes:
outpath = os.path.join(
- config.config['data_dir'],
- 'temporary-uploads',
- 'youtubedl-' + util.get_sha1(url)[0:8] + '.dat')
+ config.config["data_dir"],
+ "temporary-uploads",
+ "youtubedl-" + util.get_sha1(url)[0:8] + ".dat",
+ )
options = {
- 'ignoreerrors': False,
- 'format': 'best[ext=webm]/best[ext=mp4]/best[ext=flv]',
- 'logger': logger,
- 'max_filesize': config.config['max_dl_filesize'],
- 'max_downloads': 1,
- 'outtmpl': outpath,
+ "ignoreerrors": False,
+ "format": "best[ext=webm]/best[ext=mp4]/best[ext=flv]",
+ "logger": logger,
+ "max_filesize": config.config["max_dl_filesize"],
+ "max_downloads": 1,
+ "outtmpl": outpath,
}
try:
with YoutubeDL(options) as ydl:
ydl.extract_info(url, download=True)
- with open(outpath, 'rb') as f:
+ with open(outpath, "rb") as f:
return f.read()
except YoutubeDLError as ex:
raise errors.ThirdPartyError(
- 'Error downloading video %s (%s)' % (url, ex))
+ "Error downloading video %s (%s)" % (url, ex)
+ )
except FileNotFoundError:
raise errors.ThirdPartyError(
- 'Error downloading video %s (file could not be saved)' % (url))
+ "Error downloading video %s (file could not be saved)" % (url)
+ )
diff --git a/server/szurubooru/func/pool_categories.py b/server/szurubooru/func/pool_categories.py
index 83305de0..c13857cd 100644
--- a/server/szurubooru/func/pool_categories.py
+++ b/server/szurubooru/func/pool_categories.py
@@ -1,11 +1,12 @@
import re
-from typing import Any, Optional, Dict, List, Callable
+from typing import Any, Callable, Dict, List, Optional
+
import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import util, serialization, cache
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import cache, serialization, util
-DEFAULT_CATEGORY_NAME_CACHE_KEY = 'default-pool-category'
+DEFAULT_CATEGORY_NAME_CACHE_KEY = "default-pool-category"
class PoolCategoryNotFoundError(errors.NotFoundError):
@@ -29,10 +30,11 @@ class InvalidPoolCategoryColorError(errors.ValidationError):
def _verify_name_validity(name: str) -> None:
- name_regex = config.config['pool_category_name_regex']
+ name_regex = config.config["pool_category_name_regex"]
if not re.match(name_regex, name):
raise InvalidPoolCategoryNameError(
- 'Name must satisfy regex %r.' % name_regex)
+ "Name must satisfy regex %r." % name_regex
+ )
class PoolCategorySerializer(serialization.BaseSerializer):
@@ -41,11 +43,11 @@ class PoolCategorySerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'name': self.serialize_name,
- 'version': self.serialize_version,
- 'color': self.serialize_color,
- 'usages': self.serialize_usages,
- 'default': self.serialize_default,
+ "name": self.serialize_name,
+ "version": self.serialize_version,
+ "color": self.serialize_color,
+ "usages": self.serialize_usages,
+ "default": self.serialize_default,
}
def serialize_name(self) -> Any:
@@ -65,8 +67,8 @@ class PoolCategorySerializer(serialization.BaseSerializer):
def serialize_category(
- category: Optional[model.PoolCategory],
- options: List[str] = []) -> Optional[rest.Response]:
+ category: Optional[model.PoolCategory], options: List[str] = []
+) -> Optional[rest.Response]:
if not category:
return None
return PoolCategorySerializer(category).serialize(options)
@@ -84,18 +86,21 @@ def create_category(name: str, color: str) -> model.PoolCategory:
def update_category_name(category: model.PoolCategory, name: str) -> None:
assert category
if not name:
- raise InvalidPoolCategoryNameError('Name cannot be empty.')
+ raise InvalidPoolCategoryNameError("Name cannot be empty.")
expr = sa.func.lower(model.PoolCategory.name) == name.lower()
if category.pool_category_id:
expr = expr & (
- model.PoolCategory.pool_category_id != category.pool_category_id)
+ model.PoolCategory.pool_category_id != category.pool_category_id
+ )
already_exists = (
- db.session.query(model.PoolCategory).filter(expr).count() > 0)
+ db.session.query(model.PoolCategory).filter(expr).count() > 0
+ )
if already_exists:
raise PoolCategoryAlreadyExistsError(
- 'A category with this name already exists.')
+ "A category with this name already exists."
+ )
if util.value_exceeds_column_size(name, model.PoolCategory.name):
- raise InvalidPoolCategoryNameError('Name is too long.')
+ raise InvalidPoolCategoryNameError("Name is too long.")
_verify_name_validity(name)
category.name = name
cache.remove(DEFAULT_CATEGORY_NAME_CACHE_KEY)
@@ -104,20 +109,20 @@ def update_category_name(category: model.PoolCategory, name: str) -> None:
def update_category_color(category: model.PoolCategory, color: str) -> None:
assert category
if not color:
- raise InvalidPoolCategoryColorError('Color cannot be empty.')
- if not re.match(r'^#?[0-9a-z]+$', color):
- raise InvalidPoolCategoryColorError('Invalid color.')
+ raise InvalidPoolCategoryColorError("Color cannot be empty.")
+ if not re.match(r"^#?[0-9a-z]+$", color):
+ raise InvalidPoolCategoryColorError("Invalid color.")
if util.value_exceeds_column_size(color, model.PoolCategory.color):
- raise InvalidPoolCategoryColorError('Color is too long.')
+ raise InvalidPoolCategoryColorError("Color is too long.")
category.color = color
def try_get_category_by_name(
- name: str, lock: bool = False) -> Optional[model.PoolCategory]:
- query = (
- db.session
- .query(model.PoolCategory)
- .filter(sa.func.lower(model.PoolCategory.name) == name.lower()))
+ name: str, lock: bool = False
+) -> Optional[model.PoolCategory]:
+ query = db.session.query(model.PoolCategory).filter(
+ sa.func.lower(model.PoolCategory.name) == name.lower()
+ )
if lock:
query = query.with_for_update()
return query.one_or_none()
@@ -126,7 +131,7 @@ def try_get_category_by_name(
def get_category_by_name(name: str, lock: bool = False) -> model.PoolCategory:
category = try_get_category_by_name(name, lock)
if not category:
- raise PoolCategoryNotFoundError('Pool category %r not found.' % name)
+ raise PoolCategoryNotFoundError("Pool category %r not found." % name)
return category
@@ -135,26 +140,28 @@ def get_all_category_names() -> List[str]:
def get_all_categories() -> List[model.PoolCategory]:
- return db.session.query(model.PoolCategory).order_by(
- model.PoolCategory.name.asc()).all()
+ return (
+ db.session.query(model.PoolCategory)
+ .order_by(model.PoolCategory.name.asc())
+ .all()
+ )
def try_get_default_category(
- lock: bool = False) -> Optional[model.PoolCategory]:
- query = (
- db.session
- .query(model.PoolCategory)
- .filter(model.PoolCategory.default))
+ lock: bool = False,
+) -> Optional[model.PoolCategory]:
+ query = db.session.query(model.PoolCategory).filter(
+ model.PoolCategory.default
+ )
if lock:
query = query.with_for_update()
category = query.first()
# if for some reason (e.g. as a result of migration) there's no default
# category, get the first record available.
if not category:
- query = (
- db.session
- .query(model.PoolCategory)
- .order_by(model.PoolCategory.pool_category_id.asc()))
+ query = db.session.query(model.PoolCategory).order_by(
+ model.PoolCategory.pool_category_id.asc()
+ )
if lock:
query = query.with_for_update()
category = query.first()
@@ -164,7 +171,7 @@ def try_get_default_category(
def get_default_category(lock: bool = False) -> model.PoolCategory:
category = try_get_default_category(lock)
if not category:
- raise PoolCategoryNotFoundError('No pool category created yet.')
+ raise PoolCategoryNotFoundError("No pool category created yet.")
return category
@@ -191,9 +198,10 @@ def set_default_category(category: model.PoolCategory) -> None:
def delete_category(category: model.PoolCategory) -> None:
assert category
if len(get_all_category_names()) == 1:
- raise PoolCategoryIsInUseError('Cannot delete the last category.')
+ raise PoolCategoryIsInUseError("Cannot delete the last category.")
if (category.pool_count or 0) > 0:
raise PoolCategoryIsInUseError(
- 'Pool category has some usages and cannot be deleted. ' +
- 'Please remove this category from relevant pools first.')
+ "Pool category has some usages and cannot be deleted. "
+ + "Please remove this category from relevant pools first."
+ )
db.session.delete(category)
diff --git a/server/szurubooru/func/pools.py b/server/szurubooru/func/pools.py
index 4acf9dfd..8bd50908 100644
--- a/server/szurubooru/func/pools.py
+++ b/server/szurubooru/func/pools.py
@@ -1,9 +1,11 @@
import re
-from typing import Any, Optional, Tuple, List, Dict, Callable
from datetime import datetime
+from typing import Any, Callable, Dict, List, Optional, Tuple
+
import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import util, pool_categories, posts, serialization
+
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import pool_categories, posts, serialization, util
class PoolNotFoundError(errors.NotFoundError):
@@ -44,10 +46,10 @@ class InvalidPoolNonexistentPostError(errors.ValidationError):
def _verify_name_validity(name: str) -> None:
if util.value_exceeds_column_size(name, model.PoolName.name):
- raise InvalidPoolNameError('Name is too long.')
- name_regex = config.config['pool_name_regex']
+ raise InvalidPoolNameError("Name is too long.")
+ name_regex = config.config["pool_name_regex"]
if not re.match(name_regex, name):
- raise InvalidPoolNameError('Name must satisfy regex %r.' % name_regex)
+ raise InvalidPoolNameError("Name must satisfy regex %r." % name_regex)
def _get_names(pool: model.Pool) -> List[str]:
@@ -60,7 +62,8 @@ def _lower_list(names: List[str]) -> List[str]:
def _check_name_intersection(
- names1: List[str], names2: List[str], case_sensitive: bool) -> bool:
+ names1: List[str], names2: List[str], case_sensitive: bool
+) -> bool:
if not case_sensitive:
names1 = _lower_list(names1)
names2 = _lower_list(names2)
@@ -85,7 +88,8 @@ def sort_pools(pools: List[model.Pool]) -> List[model.Pool]:
key=lambda pool: (
default_category_name == pool.category.name,
pool.category.name,
- pool.names[0].name)
+ pool.names[0].name,
+ ),
)
@@ -95,15 +99,15 @@ class PoolSerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'id': self.serialize_id,
- 'names': self.serialize_names,
- 'category': self.serialize_category,
- 'version': self.serialize_version,
- 'description': self.serialize_description,
- 'creationTime': self.serialize_creation_time,
- 'lastEditTime': self.serialize_last_edit_time,
- 'postCount': self.serialize_post_count,
- 'posts': self.serialize_posts
+ "id": self.serialize_id,
+ "names": self.serialize_names,
+ "category": self.serialize_category,
+ "version": self.serialize_version,
+ "description": self.serialize_description,
+ "creationTime": self.serialize_creation_time,
+ "lastEditTime": self.serialize_last_edit_time,
+ "postCount": self.serialize_post_count,
+ "posts": self.serialize_posts,
}
def serialize_id(self) -> Any:
@@ -132,7 +136,8 @@ class PoolSerializer(serialization.BaseSerializer):
def serialize_posts(self) -> Any:
return [
- post for post in [
+ post
+ for post in [
posts.serialize_micro_post(rel, None)
for rel in self.pool.posts
]
@@ -140,7 +145,8 @@ class PoolSerializer(serialization.BaseSerializer):
def serialize_pool(
- pool: model.Pool, options: List[str] = []) -> Optional[rest.Response]:
+ pool: model.Pool, options: List[str] = []
+) -> Optional[rest.Response]:
if not pool:
return None
return PoolSerializer(pool).serialize(options)
@@ -148,32 +154,32 @@ def serialize_pool(
def try_get_pool_by_id(pool_id: int) -> Optional[model.Pool]:
return (
- db.session
- .query(model.Pool)
+ db.session.query(model.Pool)
.filter(model.Pool.pool_id == pool_id)
- .one_or_none())
+ .one_or_none()
+ )
def get_pool_by_id(pool_id: int) -> model.Pool:
pool = try_get_pool_by_id(pool_id)
if not pool:
- raise PoolNotFoundError('Pool %r not found.' % pool_id)
+ raise PoolNotFoundError("Pool %r not found." % pool_id)
return pool
def try_get_pool_by_name(name: str) -> Optional[model.Pool]:
return (
- db.session
- .query(model.Pool)
+ db.session.query(model.Pool)
.join(model.PoolName)
.filter(sa.func.lower(model.PoolName.name) == name.lower())
- .one_or_none())
+ .one_or_none()
+ )
def get_pool_by_name(name: str) -> model.Pool:
pool = try_get_pool_by_name(name)
if not pool:
- raise PoolNotFoundError('Pool %r not found.' % name)
+ raise PoolNotFoundError("Pool %r not found." % name)
return pool
@@ -187,12 +193,16 @@ def get_pools_by_names(names: List[str]) -> List[model.Pool]:
.filter(
sa.sql.or_(
sa.func.lower(model.PoolName.name) == name.lower()
- for name in names))
- .all())
+ for name in names
+ )
+ )
+ .all()
+ )
def get_or_create_pools_by_names(
- names: List[str]) -> Tuple[List[model.Pool], List[model.Pool]]:
+ names: List[str],
+) -> Tuple[List[model.Pool], List[model.Pool]]:
names = util.icase_unique(names)
existing_pools = get_pools_by_names(names)
new_pools = []
@@ -201,14 +211,14 @@ def get_or_create_pools_by_names(
found = False
for existing_pool in existing_pools:
if _check_name_intersection(
- _get_names(existing_pool), [name], False):
+ _get_names(existing_pool), [name], False
+ ):
found = True
break
if not found:
new_pool = create_pool(
- names=[name],
- category_name=pool_category_name,
- post_ids=[])
+ names=[name], category_name=pool_category_name, post_ids=[]
+ )
db.session.add(new_pool)
new_pools.append(new_pool)
return existing_pools, new_pools
@@ -223,20 +233,19 @@ def merge_pools(source_pool: model.Pool, target_pool: model.Pool) -> None:
assert source_pool
assert target_pool
if source_pool.pool_id == target_pool.pool_id:
- raise InvalidPoolRelationError('Cannot merge pool with itself.')
+ raise InvalidPoolRelationError("Cannot merge pool with itself.")
def merge_pool_posts(source_pool_id: int, target_pool_id: int) -> None:
alias1 = model.PoolPost
alias2 = sa.orm.util.aliased(model.PoolPost)
- update_stmt = (
- sa.sql.expression.update(alias1)
- .where(alias1.pool_id == source_pool_id))
- update_stmt = (
- update_stmt
- .where(
- ~sa.exists()
- .where(alias1.post_id == alias2.post_id)
- .where(alias2.pool_id == target_pool_id)))
+ update_stmt = sa.sql.expression.update(alias1).where(
+ alias1.pool_id == source_pool_id
+ )
+ update_stmt = update_stmt.where(
+ ~sa.exists()
+ .where(alias1.post_id == alias2.post_id)
+ .where(alias2.pool_id == target_pool_id)
+ )
update_stmt = update_stmt.values(pool_id=target_pool_id)
db.session.execute(update_stmt)
@@ -245,9 +254,8 @@ def merge_pools(source_pool: model.Pool, target_pool: model.Pool) -> None:
def create_pool(
- names: List[str],
- category_name: str,
- post_ids: List[int]) -> model.Pool:
+ names: List[str], category_name: str, post_ids: List[int]
+) -> model.Pool:
pool = model.Pool()
pool.creation_time = datetime.utcnow()
update_pool_names(pool, names)
@@ -266,7 +274,7 @@ def update_pool_names(pool: model.Pool, names: List[str]) -> None:
assert pool
names = util.icase_unique([name for name in names if name])
if not len(names):
- raise InvalidPoolNameError('At least one name must be specified.')
+ raise InvalidPoolNameError("At least one name must be specified.")
for name in names:
_verify_name_validity(name)
@@ -279,7 +287,8 @@ def update_pool_names(pool: model.Pool, names: List[str]) -> None:
existing_pools = db.session.query(model.PoolName).filter(expr).all()
if len(existing_pools):
raise PoolAlreadyExistsError(
- 'One of names is already used by another pool.')
+ "One of names is already used by another pool."
+ )
# remove unwanted items
for pool_name in pool.names[:]:
@@ -300,7 +309,7 @@ def update_pool_names(pool: model.Pool, names: List[str]) -> None:
def update_pool_description(pool: model.Pool, description: str) -> None:
assert pool
if util.value_exceeds_column_size(description, model.Pool.description):
- raise InvalidPoolDescriptionError('Description is too long.')
+ raise InvalidPoolDescriptionError("Description is too long.")
pool.description = description or None
@@ -308,14 +317,15 @@ def update_pool_posts(pool: model.Pool, post_ids: List[int]) -> None:
assert pool
dupes = _duplicates(post_ids)
if len(dupes) > 0:
- dupes = ', '.join(list(str(x) for x in dupes))
- raise InvalidPoolDuplicateError('Duplicate post(s) in pool: ' + dupes)
+ dupes = ", ".join(list(str(x) for x in dupes))
+ raise InvalidPoolDuplicateError("Duplicate post(s) in pool: " + dupes)
ret = posts.get_posts_by_ids(post_ids)
if len(post_ids) != len(ret):
missing = set(post_ids) - set(post.post_id for post in ret)
- missing = ', '.join(list(str(x) for x in missing))
+ missing = ", ".join(list(str(x) for x in missing))
raise InvalidPoolNonexistentPostError(
- 'The following posts do not exist: ' + missing)
+ "The following posts do not exist: " + missing
+ )
pool.posts.clear()
for post in ret:
pool.posts.append(post)
diff --git a/server/szurubooru/func/posts.py b/server/szurubooru/func/posts.py
index e18656ac..67996b64 100644
--- a/server/szurubooru/func/posts.py
+++ b/server/szurubooru/func/posts.py
@@ -1,21 +1,34 @@
-import logging
import hmac
-from typing import Any, Optional, Tuple, List, Dict, Callable
+import logging
from datetime import datetime
-import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import (
- users, scores, comments, tags, pools, util,
- mime, images, files, image_hash, serialization, snapshots)
+from typing import Any, Callable, Dict, List, Optional, Tuple
+import sqlalchemy as sa
+
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import (
+ comments,
+ files,
+ image_hash,
+ images,
+ mime,
+ pools,
+ scores,
+ serialization,
+ snapshots,
+ tags,
+ users,
+ util,
+)
logger = logging.getLogger(__name__)
EMPTY_PIXEL = (
- b'\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00'
- b'\xff\xff\xff\x21\xf9\x04\x01\x00\x00\x01\x00\x2c\x00\x00\x00\x00'
- b'\x01\x00\x01\x00\x00\x02\x02\x4c\x01\x00\x3b')
+ b"\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00"
+ b"\xff\xff\xff\x21\xf9\x04\x01\x00\x00\x01\x00\x2c\x00\x00\x00\x00"
+ b"\x01\x00\x01\x00\x00\x02\x02\x4c\x01\x00\x3b"
+)
class PostNotFoundError(errors.NotFoundError):
@@ -29,11 +42,12 @@ class PostAlreadyFeaturedError(errors.ValidationError):
class PostAlreadyUploadedError(errors.ValidationError):
def __init__(self, other_post: model.Post) -> None:
super().__init__(
- 'Post already uploaded (%d)' % other_post.post_id,
+ "Post already uploaded (%d)" % other_post.post_id,
{
- 'otherPostUrl': get_post_content_url(other_post),
- 'otherPostId': other_post.post_id,
- })
+ "otherPostUrl": get_post_content_url(other_post),
+ "otherPostId": other_post.post_id,
+ },
+ )
class InvalidPostIdError(errors.ValidationError):
@@ -65,75 +79,82 @@ class InvalidPostFlagError(errors.ValidationError):
SAFETY_MAP = {
- model.Post.SAFETY_SAFE: 'safe',
- model.Post.SAFETY_SKETCHY: 'sketchy',
- model.Post.SAFETY_UNSAFE: 'unsafe',
+ model.Post.SAFETY_SAFE: "safe",
+ model.Post.SAFETY_SKETCHY: "sketchy",
+ model.Post.SAFETY_UNSAFE: "unsafe",
}
TYPE_MAP = {
- model.Post.TYPE_IMAGE: 'image',
- model.Post.TYPE_ANIMATION: 'animation',
- model.Post.TYPE_VIDEO: 'video',
- model.Post.TYPE_FLASH: 'flash',
+ model.Post.TYPE_IMAGE: "image",
+ model.Post.TYPE_ANIMATION: "animation",
+ model.Post.TYPE_VIDEO: "video",
+ model.Post.TYPE_FLASH: "flash",
}
FLAG_MAP = {
- model.Post.FLAG_LOOP: 'loop',
- model.Post.FLAG_SOUND: 'sound',
+ model.Post.FLAG_LOOP: "loop",
+ model.Post.FLAG_SOUND: "sound",
}
def get_post_security_hash(id: int) -> str:
return hmac.new(
- config.config['secret'].encode('utf8'),
- msg=str(id).encode('utf-8'),
- digestmod='md5').hexdigest()[0:16]
+ config.config["secret"].encode("utf8"),
+ msg=str(id).encode("utf-8"),
+ digestmod="md5",
+ ).hexdigest()[0:16]
def get_post_content_url(post: model.Post) -> str:
assert post
- return '%s/posts/%d_%s.%s' % (
- config.config['data_url'].rstrip('/'),
+ return "%s/posts/%d_%s.%s" % (
+ config.config["data_url"].rstrip("/"),
post.post_id,
get_post_security_hash(post.post_id),
- mime.get_extension(post.mime_type) or 'dat')
+ mime.get_extension(post.mime_type) or "dat",
+ )
def get_post_thumbnail_url(post: model.Post) -> str:
assert post
- return '%s/generated-thumbnails/%d_%s.jpg' % (
- config.config['data_url'].rstrip('/'),
+ return "%s/generated-thumbnails/%d_%s.jpg" % (
+ config.config["data_url"].rstrip("/"),
post.post_id,
- get_post_security_hash(post.post_id))
+ get_post_security_hash(post.post_id),
+ )
def get_post_content_path(post: model.Post) -> str:
assert post
assert post.post_id
- return 'posts/%d_%s.%s' % (
+ return "posts/%d_%s.%s" % (
post.post_id,
get_post_security_hash(post.post_id),
- mime.get_extension(post.mime_type) or 'dat')
+ mime.get_extension(post.mime_type) or "dat",
+ )
def get_post_thumbnail_path(post: model.Post) -> str:
assert post
- return 'generated-thumbnails/%d_%s.jpg' % (
+ return "generated-thumbnails/%d_%s.jpg" % (
post.post_id,
- get_post_security_hash(post.post_id))
+ get_post_security_hash(post.post_id),
+ )
def get_post_thumbnail_backup_path(post: model.Post) -> str:
assert post
- return 'posts/custom-thumbnails/%d_%s.dat' % (
- post.post_id, get_post_security_hash(post.post_id))
+ return "posts/custom-thumbnails/%d_%s.dat" % (
+ post.post_id,
+ get_post_security_hash(post.post_id),
+ )
def serialize_note(note: model.PostNote) -> rest.Response:
assert note
return {
- 'polygon': note.polygon,
- 'text': note.text,
+ "polygon": note.polygon,
+ "text": note.text,
}
@@ -144,39 +165,39 @@ class PostSerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'id': self.serialize_id,
- 'version': self.serialize_version,
- 'creationTime': self.serialize_creation_time,
- 'lastEditTime': self.serialize_last_edit_time,
- 'safety': self.serialize_safety,
- 'source': self.serialize_source,
- 'type': self.serialize_type,
- 'mimeType': self.serialize_mime,
- 'checksum': self.serialize_checksum,
- 'fileSize': self.serialize_file_size,
- 'canvasWidth': self.serialize_canvas_width,
- 'canvasHeight': self.serialize_canvas_height,
- 'contentUrl': self.serialize_content_url,
- 'thumbnailUrl': self.serialize_thumbnail_url,
- 'flags': self.serialize_flags,
- 'tags': self.serialize_tags,
- 'relations': self.serialize_relations,
- 'user': self.serialize_user,
- 'score': self.serialize_score,
- 'ownScore': self.serialize_own_score,
- 'ownFavorite': self.serialize_own_favorite,
- 'tagCount': self.serialize_tag_count,
- 'favoriteCount': self.serialize_favorite_count,
- 'commentCount': self.serialize_comment_count,
- 'noteCount': self.serialize_note_count,
- 'relationCount': self.serialize_relation_count,
- 'featureCount': self.serialize_feature_count,
- 'lastFeatureTime': self.serialize_last_feature_time,
- 'favoritedBy': self.serialize_favorited_by,
- 'hasCustomThumbnail': self.serialize_has_custom_thumbnail,
- 'notes': self.serialize_notes,
- 'comments': self.serialize_comments,
- 'pools': self.serialize_pools,
+ "id": self.serialize_id,
+ "version": self.serialize_version,
+ "creationTime": self.serialize_creation_time,
+ "lastEditTime": self.serialize_last_edit_time,
+ "safety": self.serialize_safety,
+ "source": self.serialize_source,
+ "type": self.serialize_type,
+ "mimeType": self.serialize_mime,
+ "checksum": self.serialize_checksum,
+ "fileSize": self.serialize_file_size,
+ "canvasWidth": self.serialize_canvas_width,
+ "canvasHeight": self.serialize_canvas_height,
+ "contentUrl": self.serialize_content_url,
+ "thumbnailUrl": self.serialize_thumbnail_url,
+ "flags": self.serialize_flags,
+ "tags": self.serialize_tags,
+ "relations": self.serialize_relations,
+ "user": self.serialize_user,
+ "score": self.serialize_score,
+ "ownScore": self.serialize_own_score,
+ "ownFavorite": self.serialize_own_favorite,
+ "tagCount": self.serialize_tag_count,
+ "favoriteCount": self.serialize_favorite_count,
+ "commentCount": self.serialize_comment_count,
+ "noteCount": self.serialize_note_count,
+ "relationCount": self.serialize_relation_count,
+ "featureCount": self.serialize_feature_count,
+ "lastFeatureTime": self.serialize_last_feature_time,
+ "favoritedBy": self.serialize_favorited_by,
+ "hasCustomThumbnail": self.serialize_has_custom_thumbnail,
+ "notes": self.serialize_notes,
+ "comments": self.serialize_comments,
+ "pools": self.serialize_pools,
}
def serialize_id(self) -> Any:
@@ -227,21 +248,24 @@ class PostSerializer(serialization.BaseSerializer):
def serialize_tags(self) -> Any:
return [
{
- 'names': [name.name for name in tag.names],
- 'category': tag.category.name,
- 'usages': tag.post_count,
+ "names": [name.name for name in tag.names],
+ "category": tag.category.name,
+ "usages": tag.post_count,
}
- for tag in tags.sort_tags(self.post.tags)]
+ for tag in tags.sort_tags(self.post.tags)
+ ]
def serialize_relations(self) -> Any:
return sorted(
{
- post['id']: post
+ post["id"]: post
for post in [
serialize_micro_post(rel, self.auth_user)
- for rel in self.post.relations]
+ for rel in self.post.relations
+ ]
}.values(),
- key=lambda post: post['id'])
+ key=lambda post: post["id"],
+ )
def serialize_user(self) -> Any:
return users.serialize_micro_user(self.post.user, self.auth_user)
@@ -253,10 +277,16 @@ class PostSerializer(serialization.BaseSerializer):
return scores.get_score(self.post, self.auth_user)
def serialize_own_favorite(self) -> Any:
- return len([
- user for user in self.post.favorited_by
- if user.user_id == self.auth_user.user_id]
- ) > 0
+ return (
+ len(
+ [
+ user
+ for user in self.post.favorited_by
+ if user.user_id == self.auth_user.user_id
+ ]
+ )
+ > 0
+ )
def serialize_tag_count(self) -> Any:
return self.post.tag_count
@@ -291,36 +321,40 @@ class PostSerializer(serialization.BaseSerializer):
def serialize_notes(self) -> Any:
return sorted(
[serialize_note(note) for note in self.post.notes],
- key=lambda x: x['polygon'])
+ key=lambda x: x["polygon"],
+ )
def serialize_comments(self) -> Any:
return [
comments.serialize_comment(comment, self.auth_user)
for comment in sorted(
- self.post.comments,
- key=lambda comment: comment.creation_time)]
+ self.post.comments, key=lambda comment: comment.creation_time
+ )
+ ]
def serialize_pools(self) -> List[Any]:
return [
pools.serialize_pool(pool)
for pool in sorted(
- self.post.pools,
- key=lambda pool: pool.creation_time)]
+ self.post.pools, key=lambda pool: pool.creation_time
+ )
+ ]
def serialize_post(
- post: Optional[model.Post],
- auth_user: model.User,
- options: List[str] = []) -> Optional[rest.Response]:
+ post: Optional[model.Post], auth_user: model.User, options: List[str] = []
+) -> Optional[rest.Response]:
if not post:
return None
return PostSerializer(post, auth_user).serialize(options)
def serialize_micro_post(
- post: model.Post, auth_user: model.User) -> Optional[rest.Response]:
+ post: model.Post, auth_user: model.User
+) -> Optional[rest.Response]:
return serialize_post(
- post, auth_user=auth_user, options=['id', 'thumbnailUrl'])
+ post, auth_user=auth_user, options=["id", "thumbnailUrl"]
+ )
def get_post_count() -> int:
@@ -329,16 +363,16 @@ def get_post_count() -> int:
def try_get_post_by_id(post_id: int) -> Optional[model.Post]:
return (
- db.session
- .query(model.Post)
+ db.session.query(model.Post)
.filter(model.Post.post_id == post_id)
- .one_or_none())
+ .one_or_none()
+ )
def get_post_by_id(post_id: int) -> model.Post:
post = try_get_post_by_id(post_id)
if not post:
- raise PostNotFoundError('Post %r not found.' % post_id)
+ raise PostNotFoundError("Post %r not found." % post_id)
return post
@@ -347,23 +381,19 @@ def get_posts_by_ids(ids: List[int]) -> List[model.Post]:
return []
posts = (
db.session.query(model.Post)
- .filter(
- sa.sql.or_(
- model.Post.post_id == post_id
- for post_id in ids))
- .all())
- id_order = {
- v: k for k, v in enumerate(ids)
- }
+ .filter(sa.sql.or_(model.Post.post_id == post_id for post_id in ids))
+ .all()
+ )
+ id_order = {v: k for k, v in enumerate(ids)}
return sorted(posts, key=lambda post: id_order.get(post.post_id))
def try_get_current_post_feature() -> Optional[model.PostFeature]:
return (
- db.session
- .query(model.PostFeature)
+ db.session.query(model.PostFeature)
.order_by(model.PostFeature.time.desc())
- .first())
+ .first()
+ )
def try_get_featured_post() -> Optional[model.Post]:
@@ -372,18 +402,17 @@ def try_get_featured_post() -> Optional[model.Post]:
def create_post(
- content: bytes,
- tag_names: List[str],
- user: Optional[model.User]) -> Tuple[model.Post, List[model.Tag]]:
+ content: bytes, tag_names: List[str], user: Optional[model.User]
+) -> Tuple[model.Post, List[model.Tag]]:
post = model.Post()
post.safety = model.Post.SAFETY_SAFE
post.user = user
post.creation_time = datetime.utcnow()
post.flags = []
- post.type = ''
- post.checksum = ''
- post.mime_type = ''
+ post.type = ""
+ post.checksum = ""
+ post.mime_type = ""
update_post_content(post, content)
new_tags = update_post_tags(post, tag_names)
@@ -397,34 +426,38 @@ def update_post_safety(post: model.Post, safety: str) -> None:
safety = util.flip(SAFETY_MAP).get(safety, None)
if not safety:
raise InvalidPostSafetyError(
- 'Safety can be either of %r.' % list(SAFETY_MAP.values()))
+ "Safety can be either of %r." % list(SAFETY_MAP.values())
+ )
post.safety = safety
def update_post_source(post: model.Post, source: Optional[str]) -> None:
assert post
if util.value_exceeds_column_size(source, model.Post.source):
- raise InvalidPostSourceError('Source is too long.')
+ raise InvalidPostSourceError("Source is too long.")
post.source = source or None
-@sa.events.event.listens_for(model.Post, 'after_insert')
+@sa.events.event.listens_for(model.Post, "after_insert")
def _after_post_insert(
- _mapper: Any, _connection: Any, post: model.Post) -> None:
+ _mapper: Any, _connection: Any, post: model.Post
+) -> None:
_sync_post_content(post)
-@sa.events.event.listens_for(model.Post, 'after_update')
+@sa.events.event.listens_for(model.Post, "after_update")
def _after_post_update(
- _mapper: Any, _connection: Any, post: model.Post) -> None:
+ _mapper: Any, _connection: Any, post: model.Post
+) -> None:
_sync_post_content(post)
-@sa.events.event.listens_for(model.Post, 'before_delete')
+@sa.events.event.listens_for(model.Post, "before_delete")
def _before_post_delete(
- _mapper: Any, _connection: Any, post: model.Post) -> None:
+ _mapper: Any, _connection: Any, post: model.Post
+) -> None:
if post.post_id:
- if config.config['delete_source_files']:
+ if config.config["delete_source_files"]:
files.delete(get_post_content_path(post))
files.delete(get_post_thumbnail_path(post))
@@ -432,50 +465,50 @@ def _before_post_delete(
def _sync_post_content(post: model.Post) -> None:
regenerate_thumb = False
- if hasattr(post, '__content'):
- content = getattr(post, '__content')
+ if hasattr(post, "__content"):
+ content = getattr(post, "__content")
files.save(get_post_content_path(post), content)
- delattr(post, '__content')
+ delattr(post, "__content")
regenerate_thumb = True
- if hasattr(post, '__thumbnail'):
- if getattr(post, '__thumbnail'):
+ if hasattr(post, "__thumbnail"):
+ if getattr(post, "__thumbnail"):
files.save(
get_post_thumbnail_backup_path(post),
- getattr(post, '__thumbnail'))
+ getattr(post, "__thumbnail"),
+ )
else:
files.delete(get_post_thumbnail_backup_path(post))
- delattr(post, '__thumbnail')
+ delattr(post, "__thumbnail")
regenerate_thumb = True
if regenerate_thumb:
generate_post_thumbnail(post)
-def generate_alternate_formats(post: model.Post, content: bytes) \
- -> List[Tuple[model.Post, List[model.Tag]]]:
+def generate_alternate_formats(
+ post: model.Post, content: bytes
+) -> List[Tuple[model.Post, List[model.Tag]]]:
assert post
assert content
new_posts = []
if mime.is_animated_gif(content):
tag_names = [tag.first_name for tag in post.tags]
- if config.config['convert']['gif']['to_mp4']:
+ if config.config["convert"]["gif"]["to_mp4"]:
mp4_post, new_tags = create_post(
- images.Image(content).to_mp4(),
- tag_names,
- post.user)
- update_post_flags(mp4_post, ['loop'])
+ images.Image(content).to_mp4(), tag_names, post.user
+ )
+ update_post_flags(mp4_post, ["loop"])
update_post_safety(mp4_post, post.safety)
update_post_source(mp4_post, post.source)
new_posts += [(mp4_post, new_tags)]
- if config.config['convert']['gif']['to_webm']:
+ if config.config["convert"]["gif"]["to_webm"]:
webm_post, new_tags = create_post(
- images.Image(content).to_webm(),
- tag_names,
- post.user)
- update_post_flags(webm_post, ['loop'])
+ images.Image(content).to_webm(), tag_names, post.user
+ )
+ update_post_flags(webm_post, ["loop"])
update_post_safety(webm_post, post.safety)
update_post_source(webm_post, post.source)
new_posts += [(webm_post, new_tags)]
@@ -502,10 +535,11 @@ def get_default_flags(content: bytes) -> List[str]:
def purge_post_signature(post: model.Post) -> None:
- (db.session
- .query(model.PostSignature)
+ (
+ db.session.query(model.PostSignature)
.filter(model.PostSignature.post_id == post.post_id)
- .delete())
+ .delete()
+ )
def generate_post_signature(post: model.Post, content: bytes) -> None:
@@ -514,30 +548,36 @@ def generate_post_signature(post: model.Post, content: bytes) -> None:
packed_signature = image_hash.pack_signature(unpacked_signature)
words = image_hash.generate_words(unpacked_signature)
- db.session.add(model.PostSignature(
- post=post, signature=packed_signature, words=words))
+ db.session.add(
+ model.PostSignature(
+ post=post, signature=packed_signature, words=words
+ )
+ )
except errors.ProcessingError:
- if not config.config['allow_broken_uploads']:
+ if not config.config["allow_broken_uploads"]:
raise InvalidPostContentError(
- 'Unable to generate image hash data.')
+ "Unable to generate image hash data."
+ )
def update_all_post_signatures() -> None:
posts_to_hash = (
- db.session
- .query(model.Post)
+ db.session.query(model.Post)
.filter(
- (model.Post.type == model.Post.TYPE_IMAGE) |
- (model.Post.type == model.Post.TYPE_ANIMATION))
+ (model.Post.type == model.Post.TYPE_IMAGE)
+ | (model.Post.type == model.Post.TYPE_ANIMATION)
+ )
.filter(model.Post.signature == None) # noqa: E711
.order_by(model.Post.post_id.asc())
- .all())
+ .all()
+ )
for post in posts_to_hash:
try:
generate_post_signature(
- post, files.get(get_post_content_path(post)))
+ post, files.get(get_post_content_path(post))
+ )
db.session.commit()
- logger.info('Hashed Post %d', post.post_id)
+ logger.info("Hashed Post %d", post.post_id)
except Exception as ex:
logger.exception(ex)
@@ -545,7 +585,7 @@ def update_all_post_signatures() -> None:
def update_post_content(post: model.Post, content: Optional[bytes]) -> None:
assert post
if not content:
- raise InvalidPostContentError('Post content missing.')
+ raise InvalidPostContentError("Post content missing.")
update_signature = False
post.mime_type = mime.get_mime_type(content)
@@ -561,18 +601,21 @@ def update_post_content(post: model.Post, content: Optional[bytes]) -> None:
post.type = model.Post.TYPE_VIDEO
else:
raise InvalidPostContentError(
- 'Unhandled file type: %r' % post.mime_type)
+ "Unhandled file type: %r" % post.mime_type
+ )
post.checksum = util.get_sha1(content)
other_post = (
- db.session
- .query(model.Post)
+ db.session.query(model.Post)
.filter(model.Post.checksum == post.checksum)
.filter(model.Post.post_id != post.post_id)
- .one_or_none())
- if other_post \
- and other_post.post_id \
- and other_post.post_id != post.post_id:
+ .one_or_none()
+ )
+ if (
+ other_post
+ and other_post.post_id
+ and other_post.post_id != post.post_id
+ ):
raise PostAlreadyUploadedError(other_post)
if update_signature:
@@ -585,27 +628,29 @@ def update_post_content(post: model.Post, content: Optional[bytes]) -> None:
post.canvas_width = image.width
post.canvas_height = image.height
except errors.ProcessingError:
- if not config.config['allow_broken_uploads']:
- raise InvalidPostContentError(
- 'Unable to process image metadata')
+ if not config.config["allow_broken_uploads"]:
+ raise InvalidPostContentError("Unable to process image metadata")
else:
post.canvas_width = None
post.canvas_height = None
- if (post.canvas_width is not None and post.canvas_width <= 0) \
- or (post.canvas_height is not None and post.canvas_height <= 0):
- if not config.config['allow_broken_uploads']:
+ if (post.canvas_width is not None and post.canvas_width <= 0) or (
+ post.canvas_height is not None and post.canvas_height <= 0
+ ):
+ if not config.config["allow_broken_uploads"]:
raise InvalidPostContentError(
- 'Invalid image dimensions returned during processing')
+ "Invalid image dimensions returned during processing"
+ )
else:
post.canvas_width = None
post.canvas_height = None
- setattr(post, '__content', content)
+ setattr(post, "__content", content)
def update_post_thumbnail(
- post: model.Post, content: Optional[bytes] = None) -> None:
+ post: model.Post, content: Optional[bytes] = None
+) -> None:
assert post
- setattr(post, '__thumbnail', content)
+ setattr(post, "__thumbnail", content)
def generate_post_thumbnail(post: model.Post) -> None:
@@ -618,15 +663,17 @@ def generate_post_thumbnail(post: model.Post) -> None:
assert content
image = images.Image(content)
image.resize_fill(
- int(config.config['thumbnails']['post_width']),
- int(config.config['thumbnails']['post_height']))
+ int(config.config["thumbnails"]["post_width"]),
+ int(config.config["thumbnails"]["post_height"]),
+ )
files.save(get_post_thumbnail_path(post), image.to_jpeg())
except errors.ProcessingError:
files.save(get_post_thumbnail_path(post), EMPTY_PIXEL)
def update_post_tags(
- post: model.Post, tag_names: List[str]) -> List[model.Tag]:
+ post: model.Post, tag_names: List[str]
+) -> List[model.Tag]:
assert post
existing_tags, new_tags = tags.get_or_create_tags_by_names(tag_names)
post.tags = existing_tags + new_tags
@@ -638,22 +685,21 @@ def update_post_relations(post: model.Post, new_post_ids: List[int]) -> None:
try:
new_post_ids = [int(id) for id in new_post_ids]
except ValueError:
- raise InvalidPostRelationError(
- 'A relation must be numeric post ID.')
+ raise InvalidPostRelationError("A relation must be a numeric post ID.")
old_posts = post.relations
old_post_ids = [int(p.post_id) for p in old_posts]
if new_post_ids:
new_posts = (
- db.session
- .query(model.Post)
+ db.session.query(model.Post)
.filter(model.Post.post_id.in_(new_post_ids))
- .all())
+ .all()
+ )
else:
new_posts = []
if len(new_posts) != len(new_post_ids):
- raise InvalidPostRelationError('One of relations does not exist.')
+ raise InvalidPostRelationError("One of the relations does not exist.")
if post.post_id in new_post_ids:
- raise InvalidPostRelationError('Post cannot relate to itself.')
+ raise InvalidPostRelationError("Post cannot relate to itself.")
relations_to_del = [p for p in old_posts if p.post_id not in new_post_ids]
relations_to_add = [p for p in new_posts if p.post_id not in old_post_ids]
@@ -669,37 +715,44 @@ def update_post_notes(post: model.Post, notes: Any) -> None:
assert post
post.notes = []
for note in notes:
- for field in ('polygon', 'text'):
+ for field in ("polygon", "text"):
if field not in note:
- raise InvalidPostNoteError('Note is missing %r field.' % field)
- if not note['text']:
- raise InvalidPostNoteError('A note\'s text cannot be empty.')
- if not isinstance(note['polygon'], (list, tuple)):
+ raise InvalidPostNoteError("Note is missing %r field." % field)
+ if not note["text"]:
+ raise InvalidPostNoteError("A note's text cannot be empty.")
+ if not isinstance(note["polygon"], (list, tuple)):
raise InvalidPostNoteError(
- 'A note\'s polygon must be a list of points.')
- if len(note['polygon']) < 3:
+ "A note's polygon must be a list of points."
+ )
+ if len(note["polygon"]) < 3:
raise InvalidPostNoteError(
- 'A note\'s polygon must have at least 3 points.')
- for point in note['polygon']:
+ "A note's polygon must have at least 3 points."
+ )
+ for point in note["polygon"]:
if not isinstance(point, (list, tuple)):
raise InvalidPostNoteError(
- 'A note\'s polygon point must be a list of length 2.')
+ "A note's polygon point must be a list of length 2."
+ )
if len(point) != 2:
raise InvalidPostNoteError(
- 'A point in note\'s polygon must have two coordinates.')
+ "A point in note's polygon must have two coordinates."
+ )
try:
pos_x = float(point[0])
pos_y = float(point[1])
if not 0 <= pos_x <= 1 or not 0 <= pos_y <= 1:
raise InvalidPostNoteError(
- 'All points must fit in the image (0..1 range).')
+ "All points must fit in the image (0..1 range)."
+ )
except ValueError:
raise InvalidPostNoteError(
- 'A point in note\'s polygon must be numeric.')
- if util.value_exceeds_column_size(note['text'], model.PostNote.text):
- raise InvalidPostNoteError('Note text is too long.')
+ "A point in note's polygon must be numeric."
+ )
+ if util.value_exceeds_column_size(note["text"], model.PostNote.text):
+ raise InvalidPostNoteError("Note text is too long.")
post.notes.append(
- model.PostNote(polygon=note['polygon'], text=str(note['text'])))
+ model.PostNote(polygon=note["polygon"], text=str(note["text"]))
+ )
def update_post_flags(post: model.Post, flags: List[str]) -> None:
@@ -709,7 +762,8 @@ def update_post_flags(post: model.Post, flags: List[str]) -> None:
flag = util.flip(FLAG_MAP).get(flag, None)
if not flag:
raise InvalidPostFlagError(
- 'Flag must be one of %r.' % list(FLAG_MAP.values()))
+ "Flag must be one of %r." % list(FLAG_MAP.values())
+ )
target_flags.append(flag)
post.flags = target_flags
@@ -729,32 +783,31 @@ def delete(post: model.Post) -> None:
def merge_posts(
- source_post: model.Post,
- target_post: model.Post,
- replace_content: bool) -> None:
+ source_post: model.Post, target_post: model.Post, replace_content: bool
+) -> None:
assert source_post
assert target_post
if source_post.post_id == target_post.post_id:
- raise InvalidPostRelationError('Cannot merge post with itself.')
+ raise InvalidPostRelationError("Cannot merge post with itself.")
def merge_tables(
- table: model.Base,
- anti_dup_func: Optional[Callable[[model.Base, model.Base], bool]],
- source_post_id: int,
- target_post_id: int) -> None:
+ table: model.Base,
+ anti_dup_func: Optional[Callable[[model.Base, model.Base], bool]],
+ source_post_id: int,
+ target_post_id: int,
+ ) -> None:
alias1 = table
alias2 = sa.orm.util.aliased(table)
- update_stmt = (
- sa.sql.expression.update(alias1)
- .where(alias1.post_id == source_post_id))
+ update_stmt = sa.sql.expression.update(alias1).where(
+ alias1.post_id == source_post_id
+ )
if anti_dup_func is not None:
- update_stmt = (
- update_stmt
- .where(
- ~sa.exists()
- .where(anti_dup_func(alias1, alias2))
- .where(alias2.post_id == target_post_id)))
+ update_stmt = update_stmt.where(
+ ~sa.exists()
+ .where(anti_dup_func(alias1, alias2))
+ .where(alias2.post_id == target_post_id)
+ )
update_stmt = update_stmt.values(post_id=target_post_id)
db.session.execute(update_stmt)
@@ -764,21 +817,24 @@ def merge_posts(
model.PostTag,
lambda alias1, alias2: alias1.tag_id == alias2.tag_id,
source_post_id,
- target_post_id)
+ target_post_id,
+ )
def merge_scores(source_post_id: int, target_post_id: int) -> None:
merge_tables(
model.PostScore,
lambda alias1, alias2: alias1.user_id == alias2.user_id,
source_post_id,
- target_post_id)
+ target_post_id,
+ )
def merge_favorites(source_post_id: int, target_post_id: int) -> None:
merge_tables(
model.PostFavorite,
lambda alias1, alias2: alias1.user_id == alias2.user_id,
source_post_id,
- target_post_id)
+ target_post_id,
+ )
def merge_comments(source_post_id: int, target_post_id: int) -> None:
merge_tables(model.Comment, None, source_post_id, target_post_id)
@@ -793,8 +849,10 @@ def merge_posts(
.where(
~sa.exists()
.where(alias2.child_id == alias1.child_id)
- .where(alias2.parent_id == target_post_id))
- .values(parent_id=target_post_id))
+ .where(alias2.parent_id == target_post_id)
+ )
+ .values(parent_id=target_post_id)
+ )
db.session.execute(update_stmt)
update_stmt = (
@@ -804,8 +862,10 @@ def merge_posts(
.where(
~sa.exists()
.where(alias2.parent_id == alias1.parent_id)
- .where(alias2.child_id == target_post_id))
- .values(child_id=target_post_id))
+ .where(alias2.child_id == target_post_id)
+ )
+ .values(child_id=target_post_id)
+ )
db.session.execute(update_stmt)
merge_tags(source_post.post_id, target_post.post_id)
@@ -837,44 +897,49 @@ def merge_posts(
def search_by_image_exact(image_content: bytes) -> Optional[model.Post]:
checksum = util.get_sha1(image_content)
return (
- db.session
- .query(model.Post)
+ db.session.query(model.Post)
.filter(model.Post.checksum == checksum)
- .one_or_none())
+ .one_or_none()
+ )
def search_by_image(image_content: bytes) -> List[Tuple[float, model.Post]]:
query_signature = image_hash.generate_signature(image_content)
query_words = image_hash.generate_words(query_signature)
- '''
+ """
The unnest function is used here to expand one row containing the 'words'
array into multiple rows each containing a singular word.
Documentation of the unnest function can be found here:
https://www.postgresql.org/docs/9.2/functions-array.html
- '''
+ """
- dbquery = '''
+ dbquery = """
SELECT s.post_id, s.signature, count(a.query) AS score
FROM post_signature AS s, unnest(s.words, :q) AS a(word, query)
WHERE a.word = a.query
GROUP BY s.post_id
ORDER BY score DESC LIMIT 100;
- '''
+ """
- candidates = db.session.execute(dbquery, {'q': query_words})
- data = tuple(zip(*[
- (post_id, image_hash.unpack_signature(packedsig))
- for post_id, packedsig, score in candidates
- ]))
+ candidates = db.session.execute(dbquery, {"q": query_words})
+ data = tuple(
+ zip(
+ *[
+ (post_id, image_hash.unpack_signature(packedsig))
+ for post_id, packedsig, score in candidates
+ ]
+ )
+ )
if data:
candidate_post_ids, sigarray = data
distances = image_hash.normalized_distance(sigarray, query_signature)
return [
(distance, try_get_post_by_id(candidate_post_id))
- for candidate_post_id, distance
- in zip(candidate_post_ids, distances)
+ for candidate_post_id, distance in zip(
+ candidate_post_ids, distances
+ )
if distance < image_hash.DISTANCE_CUTOFF
]
else:
diff --git a/server/szurubooru/func/scores.py b/server/szurubooru/func/scores.py
index 615fd981..b095f487 100644
--- a/server/szurubooru/func/scores.py
+++ b/server/szurubooru/func/scores.py
@@ -1,6 +1,7 @@
import datetime
-from typing import Any, Tuple, Callable
-from szurubooru import db, model, errors
+from typing import Any, Callable, Tuple
+
+from szurubooru import db, errors, model
class InvalidScoreTargetError(errors.ValidationError):
@@ -12,12 +13,13 @@ class InvalidScoreValueError(errors.ValidationError):
def _get_table_info(
- entity: model.Base) -> Tuple[model.Base, Callable[[model.Base], Any]]:
+ entity: model.Base,
+) -> Tuple[model.Base, Callable[[model.Base], Any]]:
assert entity
resource_type, _, _ = model.util.get_resource_info(entity)
- if resource_type == 'post':
+ if resource_type == "post":
return model.PostScore, lambda table: table.post_id
- elif resource_type == 'comment':
+ elif resource_type == "comment":
return model.CommentScore, lambda table: table.comment_id
raise InvalidScoreTargetError()
@@ -40,16 +42,17 @@ def get_score(entity: model.Base, user: model.User) -> int:
assert user
table, get_column = _get_table_info(entity)
row = (
- db.session
- .query(table.score)
+ db.session.query(table.score)
.filter(get_column(table) == get_column(entity))
.filter(table.user_id == user.user_id)
- .one_or_none())
+ .one_or_none()
+ )
return row[0] if row else 0
def set_score(entity: model.Base, user: model.User, score: int) -> None:
from szurubooru.func import favorites
+
assert entity
assert user
if not score:
@@ -61,7 +64,8 @@ def set_score(entity: model.Base, user: model.User, score: int) -> None:
return
if score not in (-1, 1):
raise InvalidScoreValueError(
- 'Score %r is invalid. Valid scores: %r.' % (score, (-1, 1)))
+ "Score %r is invalid. Valid scores: %r." % (score, (-1, 1))
+ )
score_entity = _get_score_entity(entity, user)
if score_entity:
score_entity.score = score
diff --git a/server/szurubooru/func/serialization.py b/server/szurubooru/func/serialization.py
index 699fb473..d2fadc06 100644
--- a/server/szurubooru/func/serialization.py
+++ b/server/szurubooru/func/serialization.py
@@ -1,9 +1,10 @@
-from typing import Any, List, Dict, Callable
-from szurubooru import model, rest, errors
+from typing import Any, Callable, Dict, List
+
+from szurubooru import errors, model, rest
def get_serialization_options(ctx: rest.Context) -> List[str]:
- return ctx.get_param_as_list('fields', default=[])
+ return ctx.get_param_as_list("fields", default=[])
class BaseSerializer:
@@ -17,8 +18,9 @@ class BaseSerializer:
for key in options:
if key not in field_factories:
raise errors.ValidationError(
- 'Invalid key: %r. Valid keys: %r.' % (
- key, list(sorted(field_factories.keys()))))
+ "Invalid key: %r. Valid keys: %r."
+ % (key, list(sorted(field_factories.keys())))
+ )
factory = field_factories[key]
ret[key] = factory()
return ret
diff --git a/server/szurubooru/func/snapshots.py b/server/szurubooru/func/snapshots.py
index 38d42a97..b34b7022 100644
--- a/server/szurubooru/func/snapshots.py
+++ b/server/szurubooru/func/snapshots.py
@@ -1,6 +1,8 @@
-from typing import Any, Optional, Dict, Callable
from datetime import datetime
+from typing import Any, Callable, Dict, Optional
+
import sqlalchemy as sa
+
from szurubooru import db, model
from szurubooru.func import diff, users
@@ -8,86 +10,95 @@ from szurubooru.func import diff, users
def get_tag_category_snapshot(category: model.TagCategory) -> Dict[str, Any]:
assert category
return {
- 'name': category.name,
- 'color': category.color,
- 'default': True if category.default else False,
+ "name": category.name,
+ "color": category.color,
+ "default": True if category.default else False,
}
def get_tag_snapshot(tag: model.Tag) -> Dict[str, Any]:
assert tag
return {
- 'names': [tag_name.name for tag_name in tag.names],
- 'category': tag.category.name,
- 'suggestions': sorted(rel.first_name for rel in tag.suggestions),
- 'implications': sorted(rel.first_name for rel in tag.implications),
+ "names": [tag_name.name for tag_name in tag.names],
+ "category": tag.category.name,
+ "suggestions": sorted(rel.first_name for rel in tag.suggestions),
+ "implications": sorted(rel.first_name for rel in tag.implications),
}
def get_pool_category_snapshot(category: model.PoolCategory) -> Dict[str, Any]:
assert category
return {
- 'name': category.name,
- 'color': category.color,
- 'default': True if category.default else False,
+ "name": category.name,
+ "color": category.color,
+ "default": True if category.default else False,
}
def get_pool_snapshot(pool: model.Pool) -> Dict[str, Any]:
assert pool
return {
- 'names': [pool_name.name for pool_name in pool.names],
- 'category': pool.category.name,
- 'posts': [post.post_id for post in pool.posts]
+ "names": [pool_name.name for pool_name in pool.names],
+ "category": pool.category.name,
+ "posts": [post.post_id for post in pool.posts],
}
def get_post_snapshot(post: model.Post) -> Dict[str, Any]:
assert post
return {
- 'source': post.source,
- 'safety': post.safety,
- 'checksum': post.checksum,
- 'flags': post.flags,
- 'featured': post.is_featured,
- 'tags': sorted([tag.first_name for tag in post.tags]),
- 'relations': sorted([rel.post_id for rel in post.relations]),
- 'notes': sorted([{
- 'polygon': [[point[0], point[1]] for point in note.polygon],
- 'text': note.text,
- } for note in post.notes], key=lambda x: x['polygon']),
+ "source": post.source,
+ "safety": post.safety,
+ "checksum": post.checksum,
+ "flags": post.flags,
+ "featured": post.is_featured,
+ "tags": sorted([tag.first_name for tag in post.tags]),
+ "relations": sorted([rel.post_id for rel in post.relations]),
+ "notes": sorted(
+ [
+ {
+ "polygon": [
+ [point[0], point[1]] for point in note.polygon
+ ],
+ "text": note.text,
+ }
+ for note in post.notes
+ ],
+ key=lambda x: x["polygon"],
+ ),
}
_snapshot_factories = {
# lambdas allow mocking target functions in the tests
- 'tag_category': lambda entity: get_tag_category_snapshot(entity),
- 'tag': lambda entity: get_tag_snapshot(entity),
- 'post': lambda entity: get_post_snapshot(entity),
- 'pool_category': lambda entity: get_pool_category_snapshot(entity),
- 'pool': lambda entity: get_pool_snapshot(entity),
+ "tag_category": lambda entity: get_tag_category_snapshot(entity),
+ "tag": lambda entity: get_tag_snapshot(entity),
+ "post": lambda entity: get_post_snapshot(entity),
+ "pool_category": lambda entity: get_pool_category_snapshot(entity),
+ "pool": lambda entity: get_pool_snapshot(entity),
} # type: Dict[model.Base, Callable[[model.Base], Dict[str, Any]]]
def serialize_snapshot(
- snapshot: model.Snapshot, auth_user: model.User) -> Dict[str, Any]:
+ snapshot: model.Snapshot, auth_user: model.User
+) -> Dict[str, Any]:
assert snapshot
return {
- 'operation': snapshot.operation,
- 'type': snapshot.resource_type,
- 'id': snapshot.resource_name,
- 'user': users.serialize_micro_user(snapshot.user, auth_user),
- 'data': snapshot.data,
- 'time': snapshot.creation_time,
+ "operation": snapshot.operation,
+ "type": snapshot.resource_type,
+ "id": snapshot.resource_name,
+ "user": users.serialize_micro_user(snapshot.user, auth_user),
+ "data": snapshot.data,
+ "time": snapshot.creation_time,
}
def _create(
- operation: str,
- entity: model.Base,
- auth_user: Optional[model.User]) -> model.Snapshot:
- resource_type, resource_pkey, resource_name = (
- model.util.get_resource_info(entity))
+ operation: str, entity: model.Base, auth_user: Optional[model.User]
+) -> model.Snapshot:
+ resource_type, resource_pkey, resource_name = model.util.get_resource_info(
+ entity
+ )
snapshot = model.Snapshot()
snapshot.creation_time = datetime.utcnow()
@@ -114,10 +125,11 @@ def modify(entity: model.Base, auth_user: Optional[model.User]) -> None:
(
cls
for cls in model.Base._decl_class_registry.values()
- if hasattr(cls, '__table__')
+ if hasattr(cls, "__table__")
and cls.__table__.fullname == entity.__table__.fullname
),
- None)
+ None,
+ )
assert table
snapshot = _create(model.Snapshot.OPERATION_MODIFIED, entity, auth_user)
@@ -125,7 +137,7 @@ def modify(entity: model.Base, auth_user: Optional[model.User]) -> None:
detached_session = sa.orm.sessionmaker(bind=db.session.get_bind())()
detached_entity = detached_session.query(table).get(snapshot.resource_pkey)
- assert detached_entity, 'Entity not found in DB, have you committed it?'
+ assert detached_entity, "Entity not found in DB, have you committed it?"
detached_snapshot = snapshot_factory(detached_entity)
detached_session.close()
@@ -146,14 +158,19 @@ def delete(entity: model.Base, auth_user: Optional[model.User]) -> None:
def merge(
- source_entity: model.Base,
- target_entity: model.Base,
- auth_user: Optional[model.User]) -> None:
+ source_entity: model.Base,
+ target_entity: model.Base,
+ auth_user: Optional[model.User],
+) -> None:
assert source_entity
assert target_entity
snapshot = _create(
- model.Snapshot.OPERATION_MERGED, source_entity, auth_user)
- resource_type, _resource_pkey, resource_name = (
- model.util.get_resource_info(target_entity))
+ model.Snapshot.OPERATION_MERGED, source_entity, auth_user
+ )
+ (
+ resource_type,
+ _resource_pkey,
+ resource_name,
+ ) = model.util.get_resource_info(target_entity)
snapshot.data = [resource_type, resource_name]
db.session.add(snapshot)
diff --git a/server/szurubooru/func/tag_categories.py b/server/szurubooru/func/tag_categories.py
index ab0fc321..bbf72978 100644
--- a/server/szurubooru/func/tag_categories.py
+++ b/server/szurubooru/func/tag_categories.py
@@ -1,11 +1,12 @@
import re
-from typing import Any, Optional, Dict, List, Callable
+from typing import Any, Callable, Dict, List, Optional
+
import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import util, serialization, cache
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import cache, serialization, util
-DEFAULT_CATEGORY_NAME_CACHE_KEY = 'default-tag-category'
+DEFAULT_CATEGORY_NAME_CACHE_KEY = "default-tag-category"
class TagCategoryNotFoundError(errors.NotFoundError):
@@ -29,10 +30,11 @@ class InvalidTagCategoryColorError(errors.ValidationError):
def _verify_name_validity(name: str) -> None:
- name_regex = config.config['tag_category_name_regex']
+ name_regex = config.config["tag_category_name_regex"]
if not re.match(name_regex, name):
raise InvalidTagCategoryNameError(
- 'Name must satisfy regex %r.' % name_regex)
+ "Name must satisfy regex %r." % name_regex
+ )
class TagCategorySerializer(serialization.BaseSerializer):
@@ -41,11 +43,11 @@ class TagCategorySerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'name': self.serialize_name,
- 'version': self.serialize_version,
- 'color': self.serialize_color,
- 'usages': self.serialize_usages,
- 'default': self.serialize_default,
+ "name": self.serialize_name,
+ "version": self.serialize_version,
+ "color": self.serialize_color,
+ "usages": self.serialize_usages,
+ "default": self.serialize_default,
}
def serialize_name(self) -> Any:
@@ -65,8 +67,8 @@ class TagCategorySerializer(serialization.BaseSerializer):
def serialize_category(
- category: Optional[model.TagCategory],
- options: List[str] = []) -> Optional[rest.Response]:
+ category: Optional[model.TagCategory], options: List[str] = []
+) -> Optional[rest.Response]:
if not category:
return None
return TagCategorySerializer(category).serialize(options)
@@ -84,18 +86,21 @@ def create_category(name: str, color: str) -> model.TagCategory:
def update_category_name(category: model.TagCategory, name: str) -> None:
assert category
if not name:
- raise InvalidTagCategoryNameError('Name cannot be empty.')
+ raise InvalidTagCategoryNameError("Name cannot be empty.")
expr = sa.func.lower(model.TagCategory.name) == name.lower()
if category.tag_category_id:
expr = expr & (
- model.TagCategory.tag_category_id != category.tag_category_id)
+ model.TagCategory.tag_category_id != category.tag_category_id
+ )
already_exists = (
- db.session.query(model.TagCategory).filter(expr).count() > 0)
+ db.session.query(model.TagCategory).filter(expr).count() > 0
+ )
if already_exists:
raise TagCategoryAlreadyExistsError(
- 'A category with this name already exists.')
+ "A category with this name already exists."
+ )
if util.value_exceeds_column_size(name, model.TagCategory.name):
- raise InvalidTagCategoryNameError('Name is too long.')
+ raise InvalidTagCategoryNameError("Name is too long.")
_verify_name_validity(name)
category.name = name
cache.remove(DEFAULT_CATEGORY_NAME_CACHE_KEY)
@@ -104,20 +109,20 @@ def update_category_name(category: model.TagCategory, name: str) -> None:
def update_category_color(category: model.TagCategory, color: str) -> None:
assert category
if not color:
- raise InvalidTagCategoryColorError('Color cannot be empty.')
- if not re.match(r'^#?[0-9a-z]+$', color):
- raise InvalidTagCategoryColorError('Invalid color.')
+ raise InvalidTagCategoryColorError("Color cannot be empty.")
+ if not re.match(r"^#?[0-9a-z]+$", color):
+ raise InvalidTagCategoryColorError("Invalid color.")
if util.value_exceeds_column_size(color, model.TagCategory.color):
- raise InvalidTagCategoryColorError('Color is too long.')
+ raise InvalidTagCategoryColorError("Color is too long.")
category.color = color
def try_get_category_by_name(
- name: str, lock: bool = False) -> Optional[model.TagCategory]:
- query = (
- db.session
- .query(model.TagCategory)
- .filter(sa.func.lower(model.TagCategory.name) == name.lower()))
+ name: str, lock: bool = False
+) -> Optional[model.TagCategory]:
+ query = db.session.query(model.TagCategory).filter(
+ sa.func.lower(model.TagCategory.name) == name.lower()
+ )
if lock:
query = query.with_for_update()
return query.one_or_none()
@@ -126,7 +131,7 @@ def try_get_category_by_name(
def get_category_by_name(name: str, lock: bool = False) -> model.TagCategory:
category = try_get_category_by_name(name, lock)
if not category:
- raise TagCategoryNotFoundError('Tag category %r not found.' % name)
+ raise TagCategoryNotFoundError("Tag category %r not found." % name)
return category
@@ -135,26 +140,28 @@ def get_all_category_names() -> List[str]:
def get_all_categories() -> List[model.TagCategory]:
- return db.session.query(model.TagCategory).order_by(
- model.TagCategory.name.asc()).all()
+ return (
+ db.session.query(model.TagCategory)
+ .order_by(model.TagCategory.name.asc())
+ .all()
+ )
def try_get_default_category(
- lock: bool = False) -> Optional[model.TagCategory]:
- query = (
- db.session
- .query(model.TagCategory)
- .filter(model.TagCategory.default))
+ lock: bool = False,
+) -> Optional[model.TagCategory]:
+ query = db.session.query(model.TagCategory).filter(
+ model.TagCategory.default
+ )
if lock:
query = query.with_for_update()
category = query.first()
# if for some reason (e.g. as a result of migration) there's no default
# category, get the first record available.
if not category:
- query = (
- db.session
- .query(model.TagCategory)
- .order_by(model.TagCategory.tag_category_id.asc()))
+ query = db.session.query(model.TagCategory).order_by(
+ model.TagCategory.tag_category_id.asc()
+ )
if lock:
query = query.with_for_update()
category = query.first()
@@ -164,7 +171,7 @@ def try_get_default_category(
def get_default_category(lock: bool = False) -> model.TagCategory:
category = try_get_default_category(lock)
if not category:
- raise TagCategoryNotFoundError('No tag category created yet.')
+ raise TagCategoryNotFoundError("No tag category created yet.")
return category
@@ -191,9 +198,10 @@ def set_default_category(category: model.TagCategory) -> None:
def delete_category(category: model.TagCategory) -> None:
assert category
if len(get_all_category_names()) == 1:
- raise TagCategoryIsInUseError('Cannot delete the last category.')
+ raise TagCategoryIsInUseError("Cannot delete the last category.")
if (category.tag_count or 0) > 0:
raise TagCategoryIsInUseError(
- 'Tag category has some usages and cannot be deleted. ' +
- 'Please remove this category from relevant tags first..')
+ "Tag category has some usages and cannot be deleted. "
+ + "Please remove this category from relevant tags first.."
+ )
db.session.delete(category)
diff --git a/server/szurubooru/func/tags.py b/server/szurubooru/func/tags.py
index 60eda50a..e15a78cc 100644
--- a/server/szurubooru/func/tags.py
+++ b/server/szurubooru/func/tags.py
@@ -1,9 +1,11 @@
import re
-from typing import Any, Optional, Tuple, List, Dict, Callable
from datetime import datetime
+from typing import Any, Callable, Dict, List, Optional, Tuple
+
import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import util, tag_categories, serialization
+
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import serialization, tag_categories, util
class TagNotFoundError(errors.NotFoundError):
@@ -36,10 +38,10 @@ class InvalidTagDescriptionError(errors.ValidationError):
def _verify_name_validity(name: str) -> None:
if util.value_exceeds_column_size(name, model.TagName.name):
- raise InvalidTagNameError('Name is too long.')
- name_regex = config.config['tag_name_regex']
+ raise InvalidTagNameError("Name is too long.")
+ name_regex = config.config["tag_name_regex"]
if not re.match(name_regex, name):
- raise InvalidTagNameError('Name must satisfy regex %r.' % name_regex)
+ raise InvalidTagNameError("Name must satisfy regex %r." % name_regex)
def _get_names(tag: model.Tag) -> List[str]:
@@ -52,7 +54,8 @@ def _lower_list(names: List[str]) -> List[str]:
def _check_name_intersection(
- names1: List[str], names2: List[str], case_sensitive: bool) -> bool:
+ names1: List[str], names2: List[str], case_sensitive: bool
+) -> bool:
if not case_sensitive:
names1 = _lower_list(names1)
names2 = _lower_list(names2)
@@ -66,15 +69,16 @@ def sort_tags(tags: List[model.Tag]) -> List[model.Tag]:
key=lambda tag: (
default_category_name == tag.category.name,
tag.category.name,
- tag.names[0].name)
+ tag.names[0].name,
+ ),
)
def serialize_relation(tag):
return {
- 'names': [tag_name.name for tag_name in tag.names],
- 'category': tag.category.name,
- 'usages': tag.post_count,
+ "names": [tag_name.name for tag_name in tag.names],
+ "category": tag.category.name,
+ "usages": tag.post_count,
}
@@ -84,15 +88,15 @@ class TagSerializer(serialization.BaseSerializer):
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'names': self.serialize_names,
- 'category': self.serialize_category,
- 'version': self.serialize_version,
- 'description': self.serialize_description,
- 'creationTime': self.serialize_creation_time,
- 'lastEditTime': self.serialize_last_edit_time,
- 'usages': self.serialize_usages,
- 'suggestions': self.serialize_suggestions,
- 'implications': self.serialize_implications,
+ "names": self.serialize_names,
+ "category": self.serialize_category,
+ "version": self.serialize_version,
+ "description": self.serialize_description,
+ "creationTime": self.serialize_creation_time,
+ "lastEditTime": self.serialize_last_edit_time,
+ "usages": self.serialize_usages,
+ "suggestions": self.serialize_suggestions,
+ "implications": self.serialize_implications,
}
def serialize_names(self) -> Any:
@@ -119,16 +123,19 @@ class TagSerializer(serialization.BaseSerializer):
def serialize_suggestions(self) -> Any:
return [
serialize_relation(relation)
- for relation in sort_tags(self.tag.suggestions)]
+ for relation in sort_tags(self.tag.suggestions)
+ ]
def serialize_implications(self) -> Any:
return [
serialize_relation(relation)
- for relation in sort_tags(self.tag.implications)]
+ for relation in sort_tags(self.tag.implications)
+ ]
def serialize_tag(
- tag: model.Tag, options: List[str] = []) -> Optional[rest.Response]:
+ tag: model.Tag, options: List[str] = []
+) -> Optional[rest.Response]:
if not tag:
return None
return TagSerializer(tag).serialize(options)
@@ -136,17 +143,17 @@ def serialize_tag(
def try_get_tag_by_name(name: str) -> Optional[model.Tag]:
return (
- db.session
- .query(model.Tag)
+ db.session.query(model.Tag)
.join(model.TagName)
.filter(sa.func.lower(model.TagName.name) == name.lower())
- .one_or_none())
+ .one_or_none()
+ )
def get_tag_by_name(name: str) -> model.Tag:
tag = try_get_tag_by_name(name)
if not tag:
- raise TagNotFoundError('Tag %r not found.' % name)
+ raise TagNotFoundError("Tag %r not found." % name)
return tag
@@ -160,12 +167,16 @@ def get_tags_by_names(names: List[str]) -> List[model.Tag]:
.filter(
sa.sql.or_(
sa.func.lower(model.TagName.name) == name.lower()
- for name in names))
- .all())
+ for name in names
+ )
+ )
+ .all()
+ )
def get_or_create_tags_by_names(
- names: List[str]) -> Tuple[List[model.Tag], List[model.Tag]]:
+ names: List[str],
+) -> Tuple[List[model.Tag], List[model.Tag]]:
names = util.icase_unique(names)
existing_tags = get_tags_by_names(names)
new_tags = []
@@ -174,7 +185,8 @@ def get_or_create_tags_by_names(
found = False
for existing_tag in existing_tags:
if _check_name_intersection(
- _get_names(existing_tag), [name], False):
+ _get_names(existing_tag), [name], False
+ ):
found = True
break
if not found:
@@ -182,7 +194,8 @@ def get_or_create_tags_by_names(
names=[name],
category_name=tag_category_name,
suggestions=[],
- implications=[])
+ implications=[],
+ )
db.session.add(new_tag)
new_tags.append(new_tag)
return existing_tags, new_tags
@@ -194,8 +207,7 @@ def get_tag_siblings(tag: model.Tag) -> List[model.Tag]:
pt_alias1 = sa.orm.aliased(model.PostTag)
pt_alias2 = sa.orm.aliased(model.PostTag)
result = (
- db.session
- .query(tag_alias, sa.func.count(pt_alias2.post_id))
+ db.session.query(tag_alias, sa.func.count(pt_alias2.post_id))
.join(pt_alias1, pt_alias1.tag_id == tag_alias.tag_id)
.join(pt_alias2, pt_alias2.post_id == pt_alias1.post_id)
.filter(pt_alias2.tag_id == tag.tag_id)
@@ -203,18 +215,23 @@ def get_tag_siblings(tag: model.Tag) -> List[model.Tag]:
.group_by(tag_alias.tag_id)
.order_by(sa.func.count(pt_alias2.post_id).desc())
.order_by(tag_alias.first_name)
- .limit(50))
+ .limit(50)
+ )
return result
def delete(source_tag: model.Tag) -> None:
assert source_tag
db.session.execute(
- sa.sql.expression.delete(model.TagSuggestion)
- .where(model.TagSuggestion.child_id == source_tag.tag_id))
+ sa.sql.expression.delete(model.TagSuggestion).where(
+ model.TagSuggestion.child_id == source_tag.tag_id
+ )
+ )
db.session.execute(
- sa.sql.expression.delete(model.TagImplication)
- .where(model.TagImplication.child_id == source_tag.tag_id))
+ sa.sql.expression.delete(model.TagImplication).where(
+ model.TagImplication.child_id == source_tag.tag_id
+ )
+ )
db.session.delete(source_tag)
@@ -222,25 +239,25 @@ def merge_tags(source_tag: model.Tag, target_tag: model.Tag) -> None:
assert source_tag
assert target_tag
if source_tag.tag_id == target_tag.tag_id:
- raise InvalidTagRelationError('Cannot merge tag with itself.')
+ raise InvalidTagRelationError("Cannot merge tag with itself.")
def merge_posts(source_tag_id: int, target_tag_id: int) -> None:
alias1 = model.PostTag
alias2 = sa.orm.util.aliased(model.PostTag)
- update_stmt = (
- sa.sql.expression.update(alias1)
- .where(alias1.tag_id == source_tag_id))
- update_stmt = (
- update_stmt
- .where(
- ~sa.exists()
- .where(alias1.post_id == alias2.post_id)
- .where(alias2.tag_id == target_tag_id)))
+ update_stmt = sa.sql.expression.update(alias1).where(
+ alias1.tag_id == source_tag_id
+ )
+ update_stmt = update_stmt.where(
+ ~sa.exists()
+ .where(alias1.post_id == alias2.post_id)
+ .where(alias2.tag_id == target_tag_id)
+ )
update_stmt = update_stmt.values(tag_id=target_tag_id)
db.session.execute(update_stmt)
def merge_relations(
- table: model.Base, source_tag_id: int, target_tag_id: int) -> None:
+ table: model.Base, source_tag_id: int, target_tag_id: int
+ ) -> None:
alias1 = table
alias2 = sa.orm.util.aliased(table)
update_stmt = (
@@ -250,8 +267,10 @@ def merge_tags(source_tag: model.Tag, target_tag: model.Tag) -> None:
.where(
~sa.exists()
.where(alias2.child_id == alias1.child_id)
- .where(alias2.parent_id == target_tag_id))
- .values(parent_id=target_tag_id))
+ .where(alias2.parent_id == target_tag_id)
+ )
+ .values(parent_id=target_tag_id)
+ )
db.session.execute(update_stmt)
update_stmt = (
@@ -261,8 +280,10 @@ def merge_tags(source_tag: model.Tag, target_tag: model.Tag) -> None:
.where(
~sa.exists()
.where(alias2.parent_id == alias1.parent_id)
- .where(alias2.child_id == target_tag_id))
- .values(child_id=target_tag_id))
+ .where(alias2.child_id == target_tag_id)
+ )
+ .values(child_id=target_tag_id)
+ )
db.session.execute(update_stmt)
def merge_suggestions(source_tag_id: int, target_tag_id: int) -> None:
@@ -278,10 +299,11 @@ def merge_tags(source_tag: model.Tag, target_tag: model.Tag) -> None:
def create_tag(
- names: List[str],
- category_name: str,
- suggestions: List[str],
- implications: List[str]) -> model.Tag:
+ names: List[str],
+ category_name: str,
+ suggestions: List[str],
+ implications: List[str],
+) -> model.Tag:
tag = model.Tag()
tag.creation_time = datetime.utcnow()
update_tag_names(tag, names)
@@ -301,7 +323,7 @@ def update_tag_names(tag: model.Tag, names: List[str]) -> None:
assert tag
names = util.icase_unique([name for name in names if name])
if not len(names):
- raise InvalidTagNameError('At least one name must be specified.')
+ raise InvalidTagNameError("At least one name must be specified.")
for name in names:
_verify_name_validity(name)
@@ -314,7 +336,8 @@ def update_tag_names(tag: model.Tag, names: List[str]) -> None:
existing_tags = db.session.query(model.TagName).filter(expr).all()
if len(existing_tags):
raise TagAlreadyExistsError(
- 'One of names is already used by another tag.')
+ "One of names is already used by another tag."
+ )
# remove unwanted items
for tag_name in tag.names[:]:
@@ -336,7 +359,7 @@ def update_tag_names(tag: model.Tag, names: List[str]) -> None:
def update_tag_implications(tag: model.Tag, relations: List[str]) -> None:
assert tag
if _check_name_intersection(_get_names(tag), relations, False):
- raise InvalidTagRelationError('Tag cannot imply itself.')
+ raise InvalidTagRelationError("Tag cannot imply itself.")
tag.implications = get_tags_by_names(relations)
@@ -344,12 +367,12 @@ def update_tag_implications(tag: model.Tag, relations: List[str]) -> None:
def update_tag_suggestions(tag: model.Tag, relations: List[str]) -> None:
assert tag
if _check_name_intersection(_get_names(tag), relations, False):
- raise InvalidTagRelationError('Tag cannot suggest itself.')
+ raise InvalidTagRelationError("Tag cannot suggest itself.")
tag.suggestions = get_tags_by_names(relations)
def update_tag_description(tag: model.Tag, description: str) -> None:
assert tag
if util.value_exceeds_column_size(description, model.Tag.description):
- raise InvalidTagDescriptionError('Description is too long.')
+ raise InvalidTagDescriptionError("Description is too long.")
tag.description = description or None
diff --git a/server/szurubooru/func/user_tokens.py b/server/szurubooru/func/user_tokens.py
index c0f4badb..8d977e49 100644
--- a/server/szurubooru/func/user_tokens.py
+++ b/server/szurubooru/func/user_tokens.py
@@ -1,8 +1,10 @@
from datetime import datetime
-from typing import Any, Optional, List, Dict, Callable
-from pyrfc3339 import parser as rfc3339_parser
+from typing import Any, Callable, Dict, List, Optional
+
import pytz
-from szurubooru import db, model, rest, errors
+from pyrfc3339 import parser as rfc3339_parser
+
+from szurubooru import db, errors, model, rest
from szurubooru.func import auth, serialization, users, util
@@ -16,23 +18,22 @@ class InvalidNoteError(errors.ValidationError):
class UserTokenSerializer(serialization.BaseSerializer):
def __init__(
- self,
- user_token: model.UserToken,
- auth_user: model.User) -> None:
+ self, user_token: model.UserToken, auth_user: model.User
+ ) -> None:
self.user_token = user_token
self.auth_user = auth_user
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'user': self.serialize_user,
- 'token': self.serialize_token,
- 'note': self.serialize_note,
- 'enabled': self.serialize_enabled,
- 'expirationTime': self.serialize_expiration_time,
- 'creationTime': self.serialize_creation_time,
- 'lastEditTime': self.serialize_last_edit_time,
- 'lastUsageTime': self.serialize_last_usage_time,
- 'version': self.serialize_version,
+ "user": self.serialize_user,
+ "token": self.serialize_token,
+ "note": self.serialize_note,
+ "enabled": self.serialize_enabled,
+ "expirationTime": self.serialize_expiration_time,
+ "creationTime": self.serialize_creation_time,
+ "lastEditTime": self.serialize_last_edit_time,
+ "lastUsageTime": self.serialize_last_usage_time,
+ "version": self.serialize_version,
}
def serialize_user(self) -> Any:
@@ -64,31 +65,31 @@ class UserTokenSerializer(serialization.BaseSerializer):
def serialize_user_token(
- user_token: Optional[model.UserToken],
- auth_user: model.User,
- options: List[str] = []) -> Optional[rest.Response]:
+ user_token: Optional[model.UserToken],
+ auth_user: model.User,
+ options: List[str] = [],
+) -> Optional[rest.Response]:
if not user_token:
return None
return UserTokenSerializer(user_token, auth_user).serialize(options)
-def get_by_user_and_token(
- user: model.User, token: str) -> model.UserToken:
+def get_by_user_and_token(user: model.User, token: str) -> model.UserToken:
return (
- db.session
- .query(model.UserToken)
+ db.session.query(model.UserToken)
.filter(model.UserToken.user_id == user.user_id)
.filter(model.UserToken.token == token)
- .one_or_none())
+ .one_or_none()
+ )
def get_user_tokens(user: model.User) -> List[model.UserToken]:
assert user
return (
- db.session
- .query(model.UserToken)
+ db.session.query(model.UserToken)
.filter(model.UserToken.user_id == user.user_id)
- .all())
+ .all()
+ )
def create_user_token(user: model.User, enabled: bool) -> model.UserToken:
@@ -103,7 +104,8 @@ def create_user_token(user: model.User, enabled: bool) -> model.UserToken:
def update_user_token_enabled(
- user_token: model.UserToken, enabled: bool) -> None:
+ user_token: model.UserToken, enabled: bool
+) -> None:
assert user_token
user_token.enabled = enabled
update_user_token_edit_time(user_token)
@@ -115,28 +117,30 @@ def update_user_token_edit_time(user_token: model.UserToken) -> None:
def update_user_token_expiration_time(
- user_token: model.UserToken, expiration_time_str: str) -> None:
+ user_token: model.UserToken, expiration_time_str: str
+) -> None:
assert user_token
try:
expiration_time = rfc3339_parser.parse(expiration_time_str, utc=True)
expiration_time = expiration_time.astimezone(pytz.UTC)
if expiration_time < datetime.utcnow().replace(tzinfo=pytz.UTC):
raise InvalidExpirationError(
- 'Expiration cannot happen in the past')
+ "Expiration cannot happen in the past"
+ )
user_token.expiration_time = expiration_time
update_user_token_edit_time(user_token)
except ValueError:
raise InvalidExpirationError(
- 'Expiration is in an invalid format {}'.format(
- expiration_time_str))
+ "Expiration is in an invalid format {}".format(expiration_time_str)
+ )
def update_user_token_note(user_token: model.UserToken, note: str) -> None:
assert user_token
- note = note.strip() if note is not None else ''
+ note = note.strip() if note is not None else ""
note = None if len(note) == 0 else note
if util.value_exceeds_column_size(note, model.UserToken.note):
- raise InvalidNoteError('Note is too long.')
+ raise InvalidNoteError("Note is too long.")
user_token.note = note
update_user_token_edit_time(user_token)
diff --git a/server/szurubooru/func/users.py b/server/szurubooru/func/users.py
index e5946dc9..5cbe3cc0 100644
--- a/server/szurubooru/func/users.py
+++ b/server/szurubooru/func/users.py
@@ -1,9 +1,11 @@
-from datetime import datetime
-from typing import Any, Optional, Union, List, Dict, Callable
import re
+from datetime import datetime
+from typing import Any, Callable, Dict, List, Optional, Union
+
import sqlalchemy as sa
-from szurubooru import config, db, model, errors, rest
-from szurubooru.func import auth, util, serialization, files, images
+
+from szurubooru import config, db, errors, model, rest
+from szurubooru.func import auth, files, images, serialization, util
class UserNotFoundError(errors.NotFoundError):
@@ -35,36 +37,41 @@ class InvalidAvatarError(errors.ValidationError):
def get_avatar_path(user_name: str) -> str:
- return 'avatars/' + user_name.lower() + '.png'
+ return "avatars/" + user_name.lower() + ".png"
def get_avatar_url(user: model.User) -> str:
assert user
if user.avatar_style == user.AVATAR_GRAVATAR:
assert user.email or user.name
- return 'https://gravatar.com/avatar/%s?d=retro&s=%d' % (
+ return "https://gravatar.com/avatar/%s?d=retro&s=%d" % (
util.get_md5((user.email or user.name).lower()),
- config.config['thumbnails']['avatar_width'])
+ config.config["thumbnails"]["avatar_width"],
+ )
assert user.name
- return '%s/avatars/%s.png' % (
- config.config['data_url'].rstrip('/'), user.name.lower())
+ return "%s/avatars/%s.png" % (
+ config.config["data_url"].rstrip("/"),
+ user.name.lower(),
+ )
def get_email(
- user: model.User,
- auth_user: model.User,
- force_show_email: bool) -> Union[bool, str]:
+ user: model.User, auth_user: model.User, force_show_email: bool
+) -> Union[bool, str]:
assert user
assert auth_user
- if not force_show_email \
- and auth_user.user_id != user.user_id \
- and not auth.has_privilege(auth_user, 'users:edit:any:email'):
+ if (
+ not force_show_email
+ and auth_user.user_id != user.user_id
+ and not auth.has_privilege(auth_user, "users:edit:any:email")
+ ):
return False
return user.email
def get_liked_post_count(
- user: model.User, auth_user: model.User) -> Union[bool, int]:
+ user: model.User, auth_user: model.User
+) -> Union[bool, int]:
assert user
assert auth_user
if auth_user.user_id != user.user_id:
@@ -73,7 +80,8 @@ def get_liked_post_count(
def get_disliked_post_count(
- user: model.User, auth_user: model.User) -> Union[bool, int]:
+ user: model.User, auth_user: model.User
+) -> Union[bool, int]:
assert user
assert auth_user
if auth_user.user_id != user.user_id:
@@ -83,29 +91,30 @@ def get_disliked_post_count(
class UserSerializer(serialization.BaseSerializer):
def __init__(
- self,
- user: model.User,
- auth_user: model.User,
- force_show_email: bool = False) -> None:
+ self,
+ user: model.User,
+ auth_user: model.User,
+ force_show_email: bool = False,
+ ) -> None:
self.user = user
self.auth_user = auth_user
self.force_show_email = force_show_email
def _serializers(self) -> Dict[str, Callable[[], Any]]:
return {
- 'name': self.serialize_name,
- 'creationTime': self.serialize_creation_time,
- 'lastLoginTime': self.serialize_last_login_time,
- 'version': self.serialize_version,
- 'rank': self.serialize_rank,
- 'avatarStyle': self.serialize_avatar_style,
- 'avatarUrl': self.serialize_avatar_url,
- 'commentCount': self.serialize_comment_count,
- 'uploadedPostCount': self.serialize_uploaded_post_count,
- 'favoritePostCount': self.serialize_favorite_post_count,
- 'likedPostCount': self.serialize_liked_post_count,
- 'dislikedPostCount': self.serialize_disliked_post_count,
- 'email': self.serialize_email,
+ "name": self.serialize_name,
+ "creationTime": self.serialize_creation_time,
+ "lastLoginTime": self.serialize_last_login_time,
+ "version": self.serialize_version,
+ "rank": self.serialize_rank,
+ "avatarStyle": self.serialize_avatar_style,
+ "avatarUrl": self.serialize_avatar_url,
+ "commentCount": self.serialize_comment_count,
+ "uploadedPostCount": self.serialize_uploaded_post_count,
+ "favoritePostCount": self.serialize_favorite_post_count,
+ "likedPostCount": self.serialize_liked_post_count,
+ "dislikedPostCount": self.serialize_disliked_post_count,
+ "email": self.serialize_email,
}
def serialize_name(self) -> Any:
@@ -149,20 +158,22 @@ class UserSerializer(serialization.BaseSerializer):
def serialize_user(
- user: Optional[model.User],
- auth_user: model.User,
- options: List[str] = [],
- force_show_email: bool = False) -> Optional[rest.Response]:
+ user: Optional[model.User],
+ auth_user: model.User,
+ options: List[str] = [],
+ force_show_email: bool = False,
+) -> Optional[rest.Response]:
if not user:
return None
return UserSerializer(user, auth_user, force_show_email).serialize(options)
def serialize_micro_user(
- user: Optional[model.User],
- auth_user: model.User) -> Optional[rest.Response]:
+ user: Optional[model.User], auth_user: model.User
+) -> Optional[rest.Response]:
return serialize_user(
- user, auth_user=auth_user, options=['name', 'avatarUrl'])
+ user, auth_user=auth_user, options=["name", "avatarUrl"]
+ )
def get_user_count() -> int:
@@ -171,33 +182,34 @@ def get_user_count() -> int:
def try_get_user_by_name(name: str) -> Optional[model.User]:
return (
- db.session
- .query(model.User)
+ db.session.query(model.User)
.filter(sa.func.lower(model.User.name) == sa.func.lower(name))
- .one_or_none())
+ .one_or_none()
+ )
def get_user_by_name(name: str) -> model.User:
user = try_get_user_by_name(name)
if not user:
- raise UserNotFoundError('User %r not found.' % name)
+ raise UserNotFoundError("User %r not found." % name)
return user
def try_get_user_by_name_or_email(name_or_email: str) -> Optional[model.User]:
return (
- db.session
- .query(model.User)
+ db.session.query(model.User)
.filter(
- (sa.func.lower(model.User.name) == sa.func.lower(name_or_email)) |
- (sa.func.lower(model.User.email) == sa.func.lower(name_or_email)))
- .one_or_none())
+ (sa.func.lower(model.User.name) == sa.func.lower(name_or_email))
+ | (sa.func.lower(model.User.email) == sa.func.lower(name_or_email))
+ )
+ .one_or_none()
+ )
def get_user_by_name_or_email(name_or_email: str) -> model.User:
user = try_get_user_by_name_or_email(name_or_email)
if not user:
- raise UserNotFoundError('User %r not found.' % name_or_email)
+ raise UserNotFoundError("User %r not found." % name_or_email)
return user
@@ -207,7 +219,7 @@ def create_user(name: str, password: str, email: str) -> model.User:
update_user_password(user, password)
update_user_email(user, email)
if get_user_count() > 0:
- user.rank = util.flip(auth.RANK_MAP)[config.config['default_rank']]
+ user.rank = util.flip(auth.RANK_MAP)[config.config["default_rank"]]
else:
user.rank = model.User.RANK_ADMINISTRATOR
user.creation_time = datetime.utcnow()
@@ -218,17 +230,18 @@ def create_user(name: str, password: str, email: str) -> model.User:
def update_user_name(user: model.User, name: str) -> None:
assert user
if not name:
- raise InvalidUserNameError('Name cannot be empty.')
+ raise InvalidUserNameError("Name cannot be empty.")
if util.value_exceeds_column_size(name, model.User.name):
- raise InvalidUserNameError('User name is too long.')
+ raise InvalidUserNameError("User name is too long.")
name = name.strip()
- name_regex = config.config['user_name_regex']
+ name_regex = config.config["user_name_regex"]
if not re.match(name_regex, name):
raise InvalidUserNameError(
- 'User name %r must satisfy regex %r.' % (name, name_regex))
+ "User name %r must satisfy regex %r." % (name, name_regex)
+ )
other_user = try_get_user_by_name(name)
if other_user and other_user.user_id != user.user_id:
- raise UserAlreadyExistsError('User %r already exists.' % name)
+ raise UserAlreadyExistsError("User %r already exists." % name)
if user.name and files.has(get_avatar_path(user.name)):
files.move(get_avatar_path(user.name), get_avatar_path(name))
user.name = name
@@ -237,14 +250,16 @@ def update_user_name(user: model.User, name: str) -> None:
def update_user_password(user: model.User, password: str) -> None:
assert user
if not password:
- raise InvalidPasswordError('Password cannot be empty.')
- password_regex = config.config['password_regex']
+ raise InvalidPasswordError("Password cannot be empty.")
+ password_regex = config.config["password_regex"]
if not re.match(password_regex, password):
raise InvalidPasswordError(
- 'Password must satisfy regex %r.' % password_regex)
+ "Password must satisfy regex %r." % password_regex
+ )
user.password_salt = auth.create_password()
password_hash, revision = auth.get_password_hash(
- user.password_salt, password)
+ user.password_salt, password
+ )
user.password_hash = password_hash
user.password_revision = revision
@@ -253,53 +268,56 @@ def update_user_email(user: model.User, email: str) -> None:
assert user
email = email.strip()
if util.value_exceeds_column_size(email, model.User.email):
- raise InvalidEmailError('Email is too long.')
+ raise InvalidEmailError("Email is too long.")
if not util.is_valid_email(email):
- raise InvalidEmailError('E-mail is invalid.')
+ raise InvalidEmailError("E-mail is invalid.")
user.email = email or None
def update_user_rank(
- user: model.User, rank: str, auth_user: model.User) -> None:
+ user: model.User, rank: str, auth_user: model.User
+) -> None:
assert user
if not rank:
- raise InvalidRankError('Rank cannot be empty.')
+ raise InvalidRankError("Rank cannot be empty.")
rank = util.flip(auth.RANK_MAP).get(rank.strip(), None)
all_ranks = list(auth.RANK_MAP.values())
if not rank:
- raise InvalidRankError(
- 'Rank can be either of %r.' % all_ranks)
+ raise InvalidRankError("Rank can be either of %r." % all_ranks)
if rank in (model.User.RANK_ANONYMOUS, model.User.RANK_NOBODY):
- raise InvalidRankError('Rank %r cannot be used.' % auth.RANK_MAP[rank])
- if all_ranks.index(auth_user.rank) \
- < all_ranks.index(rank) and get_user_count() > 0:
- raise errors.AuthError('Trying to set higher rank than your own.')
+ raise InvalidRankError("Rank %r cannot be used." % auth.RANK_MAP[rank])
+ if (
+ all_ranks.index(auth_user.rank) < all_ranks.index(rank)
+ and get_user_count() > 0
+ ):
+ raise errors.AuthError("Trying to set higher rank than your own.")
user.rank = rank
def update_user_avatar(
- user: model.User,
- avatar_style: str,
- avatar_content: Optional[bytes] = None) -> None:
+ user: model.User, avatar_style: str, avatar_content: Optional[bytes] = None
+) -> None:
assert user
- if avatar_style == 'gravatar':
+ if avatar_style == "gravatar":
user.avatar_style = user.AVATAR_GRAVATAR
- elif avatar_style == 'manual':
+ elif avatar_style == "manual":
user.avatar_style = user.AVATAR_MANUAL
- avatar_path = 'avatars/' + user.name.lower() + '.png'
+ avatar_path = "avatars/" + user.name.lower() + ".png"
if not avatar_content:
if files.has(avatar_path):
return
- raise InvalidAvatarError('Avatar content missing.')
+ raise InvalidAvatarError("Avatar content missing.")
image = images.Image(avatar_content)
image.resize_fill(
- int(config.config['thumbnails']['avatar_width']),
- int(config.config['thumbnails']['avatar_height']))
+ int(config.config["thumbnails"]["avatar_width"]),
+ int(config.config["thumbnails"]["avatar_height"]),
+ )
files.save(avatar_path, image.to_png())
else:
raise InvalidAvatarError(
- 'Avatar style %r is invalid. Valid avatar styles: %r.' % (
- avatar_style, ['gravatar', 'manual']))
+ "Avatar style %r is invalid. Valid avatar styles: %r."
+ % (avatar_style, ["gravatar", "manual"])
+ )
def bump_user_login_time(user: model.User) -> None:
@@ -312,7 +330,8 @@ def reset_user_password(user: model.User) -> str:
password = auth.create_password()
user.password_salt = auth.create_password()
password_hash, revision = auth.get_password_hash(
- user.password_salt, password)
+ user.password_salt, password
+ )
user.password_hash = password_hash
user.password_revision = revision
return password
diff --git a/server/szurubooru/func/util.py b/server/szurubooru/func/util.py
index 1bf34fa6..f8391365 100644
--- a/server/szurubooru/func/util.py
+++ b/server/szurubooru/func/util.py
@@ -1,29 +1,32 @@
-import os
import hashlib
+import os
import re
import tempfile
-from typing import Any, Optional, Union, Tuple, List, Dict, Generator, TypeVar
-from datetime import datetime, timedelta
from contextlib import contextmanager
+from datetime import datetime, timedelta
+from typing import Any, Dict, Generator, List, Optional, Tuple, TypeVar, Union
+
from szurubooru import errors
-
-T = TypeVar('T')
+T = TypeVar("T")
def snake_case_to_lower_camel_case(text: str) -> str:
- components = text.split('_')
- return components[0].lower() + \
- ''.join(word[0].upper() + word[1:].lower() for word in components[1:])
+ components = text.split("_")
+ return components[0].lower() + "".join(
+ word[0].upper() + word[1:].lower() for word in components[1:]
+ )
def snake_case_to_upper_train_case(text: str) -> str:
- return '-'.join(
- word[0].upper() + word[1:].lower() for word in text.split('_'))
+ return "-".join(
+ word[0].upper() + word[1:].lower() for word in text.split("_")
+ )
def snake_case_to_lower_camel_case_keys(
- source: Dict[str, Any]) -> Dict[str, Any]:
+ source: Dict[str, Any]
+) -> Dict[str, Any]:
target = {}
for key, value in source.items():
target[snake_case_to_lower_camel_case(key)] = value
@@ -35,7 +38,7 @@ def create_temp_file(**kwargs: Any) -> Generator:
(descriptor, path) = tempfile.mkstemp(**kwargs)
os.close(descriptor)
try:
- with open(path, 'r+b') as handle:
+ with open(path, "r+b") as handle:
yield handle
finally:
os.remove(path)
@@ -61,7 +64,7 @@ def unalias_dict(source: List[Tuple[List[str], T]]) -> Dict[str, T]:
def get_md5(source: Union[str, bytes]) -> str:
if not isinstance(source, bytes):
- source = source.encode('utf-8')
+ source = source.encode("utf-8")
md5 = hashlib.md5()
md5.update(source)
return md5.hexdigest()
@@ -69,7 +72,7 @@ def get_md5(source: Union[str, bytes]) -> str:
def get_sha1(source: Union[str, bytes]) -> str:
if not isinstance(source, bytes):
- source = source.encode('utf-8')
+ source = source.encode("utf-8")
sha1 = hashlib.sha1()
sha1.update(source)
return sha1.hexdigest()
@@ -80,12 +83,13 @@ def flip(source: Dict[Any, Any]) -> Dict[Any, Any]:
def is_valid_email(email: Optional[str]) -> bool:
- ''' Return whether given email address is valid or empty. '''
- return not email or re.match(r'^[^@]*@[^@]*\.[^@]*$', email) is not None
+ """ Return whether given email address is valid or empty. """
+ return not email or re.match(r"^[^@]*@[^@]*\.[^@]*$", email) is not None
class dotdict(dict):
- ''' dot.notation access to dictionary attributes. '''
+ """ dot.notation access to dictionary attributes. """
+
def __getattr__(self, attr: str) -> Any:
return self.get(attr)
@@ -94,51 +98,54 @@ class dotdict(dict):
def parse_time_range(value: str) -> Tuple[datetime, datetime]:
- ''' Return tuple containing min/max time for given text representation. '''
+ """ Return tuple containing min/max time for given text representation. """
one_day = timedelta(days=1)
one_second = timedelta(seconds=1)
almost_one_day = one_day - one_second
value = value.lower()
if not value:
- raise errors.ValidationError('Empty date format.')
+ raise errors.ValidationError("Empty date format.")
- if value == 'today':
+ if value == "today":
now = datetime.utcnow()
return (
datetime(now.year, now.month, now.day, 0, 0, 0),
- datetime(now.year, now.month, now.day, 0, 0, 0) + almost_one_day
+ datetime(now.year, now.month, now.day, 0, 0, 0) + almost_one_day,
)
- if value == 'yesterday':
+ if value == "yesterday":
now = datetime.utcnow()
return (
datetime(now.year, now.month, now.day, 0, 0, 0) - one_day,
- datetime(now.year, now.month, now.day, 0, 0, 0) - one_second)
+ datetime(now.year, now.month, now.day, 0, 0, 0) - one_second,
+ )
- match = re.match(r'^(\d{4})$', value)
+ match = re.match(r"^(\d{4})$", value)
if match:
year = int(match.group(1))
return (datetime(year, 1, 1), datetime(year + 1, 1, 1) - one_second)
- match = re.match(r'^(\d{4})-(\d{1,2})$', value)
+ match = re.match(r"^(\d{4})-(\d{1,2})$", value)
if match:
year = int(match.group(1))
month = int(match.group(2))
return (
datetime(year, month, 1),
- datetime(year, month + 1, 1) - one_second)
+ datetime(year, month + 1, 1) - one_second,
+ )
- match = re.match(r'^(\d{4})-(\d{1,2})-(\d{1,2})$', value)
+ match = re.match(r"^(\d{4})-(\d{1,2})-(\d{1,2})$", value)
if match:
year = int(match.group(1))
month = int(match.group(2))
day = int(match.group(3))
return (
datetime(year, month, day),
- datetime(year, month, day + 1) - one_second)
+ datetime(year, month, day + 1) - one_second,
+ )
- raise errors.ValidationError('Invalid date format: %r.' % value)
+ raise errors.ValidationError("Invalid date format: %r." % value)
def icase_unique(source: List[str]) -> List[str]:
@@ -168,4 +175,4 @@ def get_column_size(column: Any) -> Optional[int]:
def chunks(source_list: List[Any], part_size: int) -> Generator:
for i in range(0, len(source_list), part_size):
- yield source_list[i:i + part_size]
+ yield source_list[i : i + part_size]
diff --git a/server/szurubooru/func/versions.py b/server/szurubooru/func/versions.py
index 6e5a3670..790b05bf 100644
--- a/server/szurubooru/func/versions.py
+++ b/server/szurubooru/func/versions.py
@@ -1,16 +1,16 @@
-from szurubooru import errors, rest, model
+from szurubooru import errors, model, rest
def verify_version(
- entity: model.Base,
- context: rest.Context,
- field_name: str = 'version') -> None:
+ entity: model.Base, context: rest.Context, field_name: str = "version"
+) -> None:
actual_version = context.get_param_as_int(field_name)
expected_version = entity.version
if actual_version != expected_version:
raise errors.IntegrityError(
- 'Someone else modified this in the meantime. ' +
- 'Please try again.')
+ "Someone else modified this in the meantime. "
+ + "Please try again."
+ )
def bump_version(entity: model.Base) -> None:
diff --git a/server/szurubooru/middleware/__init__.py b/server/szurubooru/middleware/__init__.py
index c5a90d8a..71775644 100644
--- a/server/szurubooru/middleware/__init__.py
+++ b/server/szurubooru/middleware/__init__.py
@@ -1,4 +1,4 @@
-''' Various hooks that get executed for each request. '''
+""" Various hooks that get executed for each request. """
import szurubooru.middleware.authenticator
import szurubooru.middleware.cache_purger
diff --git a/server/szurubooru/middleware/authenticator.py b/server/szurubooru/middleware/authenticator.py
index 4340ec94..e73b235e 100644
--- a/server/szurubooru/middleware/authenticator.py
+++ b/server/szurubooru/middleware/authenticator.py
@@ -1,55 +1,66 @@
import base64
from typing import Optional, Tuple
-from szurubooru import model, errors, rest
-from szurubooru.func import auth, users, user_tokens
+
+from szurubooru import errors, model, rest
+from szurubooru.func import auth, user_tokens, users
from szurubooru.rest.errors import HttpBadRequest
def _authenticate_basic_auth(username: str, password: str) -> model.User:
- ''' Try to authenticate user. Throw AuthError for invalid users. '''
+ """ Try to authenticate user. Throw AuthError for invalid users. """
user = users.get_user_by_name(username)
if not auth.is_valid_password(user, password):
- raise errors.AuthError('Invalid password.')
+ raise errors.AuthError("Invalid password.")
return user
def _authenticate_token(
- username: str, token: str) -> Tuple[model.User, model.UserToken]:
- ''' Try to authenticate user. Throw AuthError for invalid users. '''
+ username: str, token: str
+) -> Tuple[model.User, model.UserToken]:
+ """ Try to authenticate user. Throw AuthError for invalid users. """
user = users.get_user_by_name(username)
user_token = user_tokens.get_by_user_and_token(user, token)
if not auth.is_valid_token(user_token):
- raise errors.AuthError('Invalid token.')
+ raise errors.AuthError("Invalid token.")
return user, user_token
def _get_user(ctx: rest.Context, bump_login: bool) -> Optional[model.User]:
- if not ctx.has_header('Authorization'):
+ if not ctx.has_header("Authorization"):
return None
auth_token = None
try:
- auth_type, credentials = ctx.get_header('Authorization').split(' ', 1)
- if auth_type.lower() == 'basic':
- username, password = base64.decodebytes(
- credentials.encode('ascii')).decode('utf8').split(':', 1)
+ auth_type, credentials = ctx.get_header("Authorization").split(" ", 1)
+ if auth_type.lower() == "basic":
+ username, password = (
+ base64.decodebytes(credentials.encode("ascii"))
+ .decode("utf8")
+ .split(":", 1)
+ )
auth_user = _authenticate_basic_auth(username, password)
- elif auth_type.lower() == 'token':
- username, token = base64.decodebytes(
- credentials.encode('ascii')).decode('utf8').split(':', 1)
+ elif auth_type.lower() == "token":
+ username, token = (
+ base64.decodebytes(credentials.encode("ascii"))
+ .decode("utf8")
+ .split(":", 1)
+ )
auth_user, auth_token = _authenticate_token(username, token)
else:
raise HttpBadRequest(
- 'ValidationError',
- 'Only basic or token HTTP authentication is supported.')
+ "ValidationError",
+ "Only basic or token HTTP authentication is supported.",
+ )
except ValueError as err:
msg = (
- 'Authorization header values are not properly formed. '
- 'Supplied header {0}. Got error: {1}')
+ "Authorization header values are not properly formed. "
+ "Supplied header {0}. Got error: {1}"
+ )
raise HttpBadRequest(
- 'ValidationError',
- msg.format(ctx.get_header('Authorization'), str(err)))
+ "ValidationError",
+ msg.format(ctx.get_header("Authorization"), str(err)),
+ )
if bump_login and auth_user.user_id:
users.bump_user_login_time(auth_user)
@@ -61,8 +72,8 @@ def _get_user(ctx: rest.Context, bump_login: bool) -> Optional[model.User]:
def process_request(ctx: rest.Context) -> None:
- ''' Bind the user to request. Update last login time if needed. '''
- bump_login = ctx.get_param_as_bool('bump-login', default=False)
+ """ Bind the user to request. Update last login time if needed. """
+ bump_login = ctx.get_param_as_bool("bump-login", default=False)
auth_user = _get_user(ctx, bump_login)
if auth_user:
ctx.user = auth_user
diff --git a/server/szurubooru/middleware/cache_purger.py b/server/szurubooru/middleware/cache_purger.py
index d83fb845..e3d05a73 100644
--- a/server/szurubooru/middleware/cache_purger.py
+++ b/server/szurubooru/middleware/cache_purger.py
@@ -5,5 +5,5 @@ from szurubooru.rest import middleware
@middleware.pre_hook
def process_request(ctx: rest.Context) -> None:
- if ctx.method != 'GET':
+ if ctx.method != "GET":
cache.purge()
diff --git a/server/szurubooru/middleware/request_logger.py b/server/szurubooru/middleware/request_logger.py
index 54e40e4a..79fffbdd 100644
--- a/server/szurubooru/middleware/request_logger.py
+++ b/server/szurubooru/middleware/request_logger.py
@@ -1,8 +1,8 @@
import logging
+
from szurubooru import db, rest
from szurubooru.rest import middleware
-
logger = logging.getLogger(__name__)
@@ -14,8 +14,9 @@ def process_request(_ctx: rest.Context) -> None:
@middleware.post_hook
def process_response(ctx: rest.Context) -> None:
logger.info(
- '%s %s (user=%s, queries=%d)',
+ "%s %s (user=%s, queries=%d)",
ctx.method,
ctx.url,
ctx.user.name,
- db.get_query_count())
+ db.get_query_count(),
+ )
diff --git a/server/szurubooru/migrations/env.py b/server/szurubooru/migrations/env.py
index f0a06dd7..cd4f6ad1 100644
--- a/server/szurubooru/migrations/env.py
+++ b/server/szurubooru/migrations/env.py
@@ -1,29 +1,40 @@
+"""
+Alembic setup and configuration script
+
+isort:skip_file
+"""
+
+
+import logging.config
import os
import sys
+from time import sleep
import alembic
import sqlalchemy as sa
-import logging.config
-from time import sleep
+
+# fmt: off
# make szurubooru module importable
dir_to_self = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(dir_to_self, *[os.pardir] * 2))
-import szurubooru.model.base # noqa: E402
import szurubooru.config # noqa: E402
+import szurubooru.model.base # noqa: E402
+# fmt: on
+
alembic_config = alembic.context.config
logging.config.fileConfig(alembic_config.config_file_name)
szuru_config = szurubooru.config.config
-alembic_config.set_main_option('sqlalchemy.url', szuru_config['database'])
+alembic_config.set_main_option("sqlalchemy.url", szuru_config["database"])
target_metadata = szurubooru.model.Base.metadata
def run_migrations_offline():
- '''
+ """
Run migrations in 'offline' mode.
This configures the context with just a URL
@@ -33,29 +44,31 @@ def run_migrations_offline():
Calls to context.execute() here emit the given string to the
script output.
- '''
- url = alembic_config.get_main_option('sqlalchemy.url')
+ """
+ url = alembic_config.get_main_option("sqlalchemy.url")
alembic.context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
- compare_type=True)
+ compare_type=True,
+ )
with alembic.context.begin_transaction():
alembic.context.run_migrations()
def run_migrations_online():
- '''
+ """
Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
- '''
+ """
connectable = sa.engine_from_config(
alembic_config.get_section(alembic_config.config_ini_section),
- prefix='sqlalchemy.',
- poolclass=sa.pool.NullPool)
+ prefix="sqlalchemy.",
+ poolclass=sa.pool.NullPool,
+ )
def connect_with_timeout(connectable, timeout=45):
dt = 5
@@ -70,7 +83,8 @@ def run_migrations_online():
alembic.context.configure(
connection=connection,
target_metadata=target_metadata,
- compare_type=True)
+ compare_type=True,
+ )
with alembic.context.begin_transaction():
alembic.context.run_migrations()
diff --git a/server/szurubooru/migrations/script.py.mako b/server/szurubooru/migrations/script.py.mako
index 13adc519..f065447c 100644
--- a/server/szurubooru/migrations/script.py.mako
+++ b/server/szurubooru/migrations/script.py.mako
@@ -7,6 +7,7 @@ Created at: ${create_date}
import sqlalchemy as sa
from alembic import op
+
${imports if imports else ""}
revision = ${repr(up_revision)}
diff --git a/server/szurubooru/migrations/versions/00cb3a2734db_create_tag_tables.py b/server/szurubooru/migrations/versions/00cb3a2734db_create_tag_tables.py
index 77d76414..a817044f 100644
--- a/server/szurubooru/migrations/versions/00cb3a2734db_create_tag_tables.py
+++ b/server/szurubooru/migrations/versions/00cb3a2734db_create_tag_tables.py
@@ -1,65 +1,70 @@
-'''
+"""
Create tag tables
Revision ID: 00cb3a2734db
Created at: 2016-04-15 23:15:36.255429
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '00cb3a2734db'
-down_revision = 'e5c1216a8503'
+revision = "00cb3a2734db"
+down_revision = "e5c1216a8503"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'tag_category',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('name', sa.Unicode(length=32), nullable=False),
- sa.Column('color', sa.Unicode(length=32), nullable=False),
- sa.PrimaryKeyConstraint('id'))
+ "tag_category",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.Unicode(length=32), nullable=False),
+ sa.Column("color", sa.Unicode(length=32), nullable=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'tag',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('category_id', sa.Integer(), nullable=False),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_edit_time', sa.DateTime(), nullable=True),
- sa.ForeignKeyConstraint(['category_id'], ['tag_category.id']),
- sa.PrimaryKeyConstraint('id'))
+ "tag",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("category_id", sa.Integer(), nullable=False),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_edit_time", sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(["category_id"], ["tag_category.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'tag_name',
- sa.Column('tag_name_id', sa.Integer(), nullable=False),
- sa.Column('tag_id', sa.Integer(), nullable=False),
- sa.Column('name', sa.Unicode(length=64), nullable=False),
- sa.ForeignKeyConstraint(['tag_id'], ['tag.id']),
- sa.PrimaryKeyConstraint('tag_name_id'),
- sa.UniqueConstraint('name'))
+ "tag_name",
+ sa.Column("tag_name_id", sa.Integer(), nullable=False),
+ sa.Column("tag_id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.Unicode(length=64), nullable=False),
+ sa.ForeignKeyConstraint(["tag_id"], ["tag.id"]),
+ sa.PrimaryKeyConstraint("tag_name_id"),
+ sa.UniqueConstraint("name"),
+ )
op.create_table(
- 'tag_implication',
- sa.Column('parent_id', sa.Integer(), nullable=False),
- sa.Column('child_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['parent_id'], ['tag.id']),
- sa.ForeignKeyConstraint(['child_id'], ['tag.id']),
- sa.PrimaryKeyConstraint('parent_id', 'child_id'))
+ "tag_implication",
+ sa.Column("parent_id", sa.Integer(), nullable=False),
+ sa.Column("child_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["parent_id"], ["tag.id"]),
+ sa.ForeignKeyConstraint(["child_id"], ["tag.id"]),
+ sa.PrimaryKeyConstraint("parent_id", "child_id"),
+ )
op.create_table(
- 'tag_suggestion',
- sa.Column('parent_id', sa.Integer(), nullable=False),
- sa.Column('child_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['parent_id'], ['tag.id']),
- sa.ForeignKeyConstraint(['child_id'], ['tag.id']),
- sa.PrimaryKeyConstraint('parent_id', 'child_id'))
+ "tag_suggestion",
+ sa.Column("parent_id", sa.Integer(), nullable=False),
+ sa.Column("child_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["parent_id"], ["tag.id"]),
+ sa.ForeignKeyConstraint(["child_id"], ["tag.id"]),
+ sa.PrimaryKeyConstraint("parent_id", "child_id"),
+ )
def downgrade():
- op.drop_table('tag_suggestion')
- op.drop_table('tag_implication')
- op.drop_table('tag_name')
- op.drop_table('tag')
- op.drop_table('tag_category')
+ op.drop_table("tag_suggestion")
+ op.drop_table("tag_implication")
+ op.drop_table("tag_name")
+ op.drop_table("tag")
+ op.drop_table("tag_category")
diff --git a/server/szurubooru/migrations/versions/02ef5f73f4ab_add_hashes_to_post_file_names.py b/server/szurubooru/migrations/versions/02ef5f73f4ab_add_hashes_to_post_file_names.py
index 1e09e340..fb4f09c8 100644
--- a/server/szurubooru/migrations/versions/02ef5f73f4ab_add_hashes_to_post_file_names.py
+++ b/server/szurubooru/migrations/versions/02ef5f73f4ab_add_hashes_to_post_file_names.py
@@ -1,43 +1,45 @@
-'''
+"""
Add hashes to post file names
Revision ID: 02ef5f73f4ab
Created at: 2017-08-24 13:30:46.766928
-'''
+"""
import os
import re
+
from szurubooru.func import files, posts
-revision = '02ef5f73f4ab'
-down_revision = '5f00af3004a4'
+revision = "02ef5f73f4ab"
+down_revision = "5f00af3004a4"
branch_labels = None
depends_on = None
def upgrade():
- for name in ['posts', 'posts/custom-thumbnails', 'generated-thumbnails']:
+ for name in ["posts", "posts/custom-thumbnails", "generated-thumbnails"]:
for entry in list(files.scan(name)):
-            match = re.match(r'^(?P<name>\d+)\.(?P<ext>\w+)$', entry.name)
+            match = re.match(r"^(?P<name>\d+)\.(?P<ext>\w+)$", entry.name)
if match:
- post_id = int(match.group('name'))
+ post_id = int(match.group("name"))
security_hash = posts.get_post_security_hash(post_id)
- ext = match.group('ext')
- new_name = '%s_%s.%s' % (post_id, security_hash, ext)
+ ext = match.group("ext")
+ new_name = "%s_%s.%s" % (post_id, security_hash, ext)
new_path = os.path.join(os.path.dirname(entry.path), new_name)
os.rename(entry.path, new_path)
def downgrade():
- for name in ['posts', 'posts/custom-thumbnails', 'generated-thumbnails']:
+ for name in ["posts", "posts/custom-thumbnails", "generated-thumbnails"]:
for entry in list(files.scan(name)):
match = re.match(
-                r'^(?P<name>\d+)_(?P<hash>[0-9A-Fa-f]+)\.(?P<ext>\w+)$',
- entry.name)
+                r"^(?P<name>\d+)_(?P<hash>[0-9A-Fa-f]+)\.(?P<ext>\w+)$",
+ entry.name,
+ )
if match:
- post_id = int(match.group('name'))
- security_hash = match.group('hash') # noqa: F841
- ext = match.group('ext')
- new_name = '%s.%s' % (post_id, ext)
+ post_id = int(match.group("name"))
+ security_hash = match.group("hash") # noqa: F841
+ ext = match.group("ext")
+ new_name = "%s.%s" % (post_id, ext)
new_path = os.path.join(os.path.dirname(entry.path), new_name)
os.rename(entry.path, new_path)
diff --git a/server/szurubooru/migrations/versions/055d0e048fb3_add_default_column_to_tag_categories.py b/server/szurubooru/migrations/versions/055d0e048fb3_add_default_column_to_tag_categories.py
index 1ced1596..0b4efb8d 100644
--- a/server/szurubooru/migrations/versions/055d0e048fb3_add_default_column_to_tag_categories.py
+++ b/server/szurubooru/migrations/versions/055d0e048fb3_add_default_column_to_tag_categories.py
@@ -1,28 +1,30 @@
-'''
+"""
Add default column to tag categories
Revision ID: 055d0e048fb3
Created at: 2016-05-22 18:12:58.149678
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '055d0e048fb3'
-down_revision = '49ab4e1139ef'
+revision = "055d0e048fb3"
+down_revision = "49ab4e1139ef"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
- 'tag_category', sa.Column('default', sa.Boolean(), nullable=True))
+ "tag_category", sa.Column("default", sa.Boolean(), nullable=True)
+ )
op.execute(
- sa.table('tag_category', sa.column('default'))
+ sa.table("tag_category", sa.column("default"))
.update()
- .values(default=False))
- op.alter_column('tag_category', 'default', nullable=False)
+ .values(default=False)
+ )
+ op.alter_column("tag_category", "default", nullable=False)
def downgrade():
- op.drop_column('tag_category', 'default')
+ op.drop_column("tag_category", "default")
diff --git a/server/szurubooru/migrations/versions/1cd4c7b22846_change_flags_column_to_string.py b/server/szurubooru/migrations/versions/1cd4c7b22846_change_flags_column_to_string.py
index ce017519..ef9c5f3d 100644
--- a/server/szurubooru/migrations/versions/1cd4c7b22846_change_flags_column_to_string.py
+++ b/server/szurubooru/migrations/versions/1cd4c7b22846_change_flags_column_to_string.py
@@ -1,61 +1,54 @@
-'''
+"""
Change flags column to string
Revision ID: 1cd4c7b22846
Created at: 2018-09-21 19:37:27.686568
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '1cd4c7b22846'
-down_revision = 'a39c7f98a7fa'
+revision = "1cd4c7b22846"
+down_revision = "a39c7f98a7fa"
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
- op.alter_column('post', 'flags', new_column_name='oldflags')
- op.add_column('post', sa.Column(
- 'flags', sa.Unicode(200), default='', nullable=True))
+ op.alter_column("post", "flags", new_column_name="oldflags")
+ op.add_column(
+ "post", sa.Column("flags", sa.Unicode(200), default="", nullable=True)
+ )
posts = sa.Table(
- 'post',
+ "post",
sa.MetaData(),
- sa.Column('id', sa.Integer, primary_key=True),
- sa.Column('flags', sa.Unicode(200), default='', nullable=True),
- sa.Column('oldflags', sa.PickleType(), nullable=True),
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column("flags", sa.Unicode(200), default="", nullable=True),
+ sa.Column("oldflags", sa.PickleType(), nullable=True),
)
for row in conn.execute(posts.select()):
- newflag = ','.join(row.oldflags) if row.oldflags else ''
+ newflag = ",".join(row.oldflags) if row.oldflags else ""
conn.execute(
- posts.update().where(
- posts.c.id == row.id
- ).values(
- flags=newflag
- )
+ posts.update().where(posts.c.id == row.id).values(flags=newflag)
)
- op.drop_column('post', 'oldflags')
+ op.drop_column("post", "oldflags")
def downgrade():
conn = op.get_bind()
- op.alter_column('post', 'flags', new_column_name='oldflags')
- op.add_column('post', sa.Column('flags', sa.PickleType(), nullable=True))
+ op.alter_column("post", "flags", new_column_name="oldflags")
+ op.add_column("post", sa.Column("flags", sa.PickleType(), nullable=True))
posts = sa.Table(
- 'post',
+ "post",
sa.MetaData(),
- sa.Column('id', sa.Integer, primary_key=True),
- sa.Column('flags', sa.PickleType(), nullable=True),
- sa.Column('oldflags', sa.Unicode(200), default='', nullable=True),
+ sa.Column("id", sa.Integer, primary_key=True),
+ sa.Column("flags", sa.PickleType(), nullable=True),
+ sa.Column("oldflags", sa.Unicode(200), default="", nullable=True),
)
for row in conn.execute(posts.select()):
- newflag = [x for x in row.oldflags.split(',') if x]
+ newflag = [x for x in row.oldflags.split(",") if x]
conn.execute(
- posts.update().where(
- posts.c.id == row.id
- ).values(
- flags=newflag
- )
+ posts.update().where(posts.c.id == row.id).values(flags=newflag)
)
- op.drop_column('post', 'oldflags')
+ op.drop_column("post", "oldflags")
diff --git a/server/szurubooru/migrations/versions/1e280b5d5df1_longer_tag_names.py b/server/szurubooru/migrations/versions/1e280b5d5df1_longer_tag_names.py
index 17702914..5a791031 100644
--- a/server/szurubooru/migrations/versions/1e280b5d5df1_longer_tag_names.py
+++ b/server/szurubooru/migrations/versions/1e280b5d5df1_longer_tag_names.py
@@ -1,43 +1,50 @@
-'''
+"""
Longer tag names
Revision ID: 1e280b5d5df1
Created at: 2020-03-15 18:57:12.901148
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '1e280b5d5df1'
-down_revision = '52d6ea6584b8'
+revision = "1e280b5d5df1"
+down_revision = "52d6ea6584b8"
branch_labels = None
depends_on = None
def upgrade():
op.alter_column(
- 'tag_name', 'name',
+ "tag_name",
+ "name",
type_=sa.Unicode(128),
existing_type=sa.Unicode(64),
- existing_nullable=False)
+ existing_nullable=False,
+ )
op.alter_column(
- 'snapshot', 'resource_name',
+ "snapshot",
+ "resource_name",
type_=sa.Unicode(128),
existing_type=sa.Unicode(64),
- existing_nullable=False)
+ existing_nullable=False,
+ )
def downgrade():
op.alter_column(
- 'tag_name', 'name',
+ "tag_name",
+ "name",
type_=sa.Unicode(64),
existing_type=sa.Unicode(128),
- existing_nullable=False)
+ existing_nullable=False,
+ )
op.alter_column(
- 'snapshot', 'resource_name',
+ "snapshot",
+ "resource_name",
type_=sa.Unicode(64),
existing_type=sa.Unicode(128),
- existing_nullable=False)
+ existing_nullable=False,
+ )
diff --git a/server/szurubooru/migrations/versions/23abaf4a0a4b_add_mime_type_to_posts.py b/server/szurubooru/migrations/versions/23abaf4a0a4b_add_mime_type_to_posts.py
index e18119b2..c8c40317 100644
--- a/server/szurubooru/migrations/versions/23abaf4a0a4b_add_mime_type_to_posts.py
+++ b/server/szurubooru/migrations/versions/23abaf4a0a4b_add_mime_type_to_posts.py
@@ -1,23 +1,24 @@
-'''
+"""
Add mime type to posts
Revision ID: 23abaf4a0a4b
Created at: 2016-05-02 00:02:33.024885
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '23abaf4a0a4b'
-down_revision = 'ed6dd16a30f3'
+revision = "23abaf4a0a4b"
+down_revision = "ed6dd16a30f3"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
- 'post', sa.Column('mime-type', sa.Unicode(length=32), nullable=False))
+ "post", sa.Column("mime-type", sa.Unicode(length=32), nullable=False)
+ )
def downgrade():
- op.drop_column('post', 'mime-type')
+ op.drop_column("post", "mime-type")
diff --git a/server/szurubooru/migrations/versions/336a76ec1338_create_post_tables.py b/server/szurubooru/migrations/versions/336a76ec1338_create_post_tables.py
index b767c98f..aea6cc78 100644
--- a/server/szurubooru/migrations/versions/336a76ec1338_create_post_tables.py
+++ b/server/szurubooru/migrations/versions/336a76ec1338_create_post_tables.py
@@ -1,64 +1,67 @@
-'''
+"""
Create post tables
Revision ID: 336a76ec1338
Created at: 2016-04-19 12:06:08.649503
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '336a76ec1338'
-down_revision = '00cb3a2734db'
+revision = "336a76ec1338"
+down_revision = "00cb3a2734db"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'post',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=True),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_edit_time', sa.DateTime(), nullable=True),
- sa.Column('safety', sa.Unicode(length=32), nullable=False),
- sa.Column('type', sa.Unicode(length=32), nullable=False),
- sa.Column('checksum', sa.Unicode(length=64), nullable=False),
- sa.Column('source', sa.Unicode(length=200), nullable=True),
- sa.Column('file_size', sa.Integer(), nullable=True),
- sa.Column('image_width', sa.Integer(), nullable=True),
- sa.Column('image_height', sa.Integer(), nullable=True),
- sa.Column('flags', sa.Integer(), nullable=False),
- sa.Column('auto_fav_count', sa.Integer(), nullable=False),
- sa.Column('auto_score', sa.Integer(), nullable=False),
- sa.Column('auto_feature_count', sa.Integer(), nullable=False),
- sa.Column('auto_comment_count', sa.Integer(), nullable=False),
- sa.Column('auto_note_count', sa.Integer(), nullable=False),
- sa.Column('auto_fav_time', sa.Integer(), nullable=False),
- sa.Column('auto_feature_time', sa.Integer(), nullable=False),
- sa.Column('auto_comment_creation_time', sa.Integer(), nullable=False),
- sa.Column('auto_comment_edit_time', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('id'))
+ "post",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=True),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_edit_time", sa.DateTime(), nullable=True),
+ sa.Column("safety", sa.Unicode(length=32), nullable=False),
+ sa.Column("type", sa.Unicode(length=32), nullable=False),
+ sa.Column("checksum", sa.Unicode(length=64), nullable=False),
+ sa.Column("source", sa.Unicode(length=200), nullable=True),
+ sa.Column("file_size", sa.Integer(), nullable=True),
+ sa.Column("image_width", sa.Integer(), nullable=True),
+ sa.Column("image_height", sa.Integer(), nullable=True),
+ sa.Column("flags", sa.Integer(), nullable=False),
+ sa.Column("auto_fav_count", sa.Integer(), nullable=False),
+ sa.Column("auto_score", sa.Integer(), nullable=False),
+ sa.Column("auto_feature_count", sa.Integer(), nullable=False),
+ sa.Column("auto_comment_count", sa.Integer(), nullable=False),
+ sa.Column("auto_note_count", sa.Integer(), nullable=False),
+ sa.Column("auto_fav_time", sa.Integer(), nullable=False),
+ sa.Column("auto_feature_time", sa.Integer(), nullable=False),
+ sa.Column("auto_comment_creation_time", sa.Integer(), nullable=False),
+ sa.Column("auto_comment_edit_time", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'post_relation',
- sa.Column('parent_id', sa.Integer(), nullable=False),
- sa.Column('child_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['child_id'], ['post.id']),
- sa.ForeignKeyConstraint(['parent_id'], ['post.id']),
- sa.PrimaryKeyConstraint('parent_id', 'child_id'))
+ "post_relation",
+ sa.Column("parent_id", sa.Integer(), nullable=False),
+ sa.Column("child_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["child_id"], ["post.id"]),
+ sa.ForeignKeyConstraint(["parent_id"], ["post.id"]),
+ sa.PrimaryKeyConstraint("parent_id", "child_id"),
+ )
op.create_table(
- 'post_tag',
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('tag_id', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.ForeignKeyConstraint(['tag_id'], ['tag.id']),
- sa.PrimaryKeyConstraint('post_id', 'tag_id'))
+ "post_tag",
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("tag_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.ForeignKeyConstraint(["tag_id"], ["tag.id"]),
+ sa.PrimaryKeyConstraint("post_id", "tag_id"),
+ )
def downgrade():
- op.drop_table('post_tag')
- op.drop_table('post_relation')
- op.drop_table('post')
+ op.drop_table("post_tag")
+ op.drop_table("post_relation")
+ op.drop_table("post")
diff --git a/server/szurubooru/migrations/versions/3c1f0316fa7f_resize_post_columns.py b/server/szurubooru/migrations/versions/3c1f0316fa7f_resize_post_columns.py
index 417803e4..17e30d5f 100644
--- a/server/szurubooru/migrations/versions/3c1f0316fa7f_resize_post_columns.py
+++ b/server/szurubooru/migrations/versions/3c1f0316fa7f_resize_post_columns.py
@@ -1,38 +1,34 @@
-'''
+"""
resize post columns
Revision ID: 3c1f0316fa7f
Created at: 2019-07-27 22:29:33.874837
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '3c1f0316fa7f'
-down_revision = '1cd4c7b22846'
+revision = "3c1f0316fa7f"
+down_revision = "1cd4c7b22846"
branch_labels = None
depends_on = None
def upgrade():
op.alter_column(
- 'post', 'flags',
- type_=sa.Unicode(32),
- existing_type=sa.Unicode(200))
+ "post", "flags", type_=sa.Unicode(32), existing_type=sa.Unicode(200)
+ )
op.alter_column(
- 'post', 'source',
- type_=sa.Unicode(2048),
- existing_type=sa.Unicode(200))
+ "post", "source", type_=sa.Unicode(2048), existing_type=sa.Unicode(200)
+ )
def downgrade():
op.alter_column(
- 'post', 'flags',
- type_=sa.Unicode(200),
- existing_type=sa.Unicode(32))
+ "post", "flags", type_=sa.Unicode(200), existing_type=sa.Unicode(32)
+ )
op.alter_column(
- 'post', 'source',
- type_=sa.Unicode(200),
- existing_type=sa.Unicode(2048))
+ "post", "source", type_=sa.Unicode(200), existing_type=sa.Unicode(2048)
+ )
diff --git a/server/szurubooru/migrations/versions/46cd5229839b_add_snapshot_resource_repr.py b/server/szurubooru/migrations/versions/46cd5229839b_add_snapshot_resource_repr.py
index 0a46fbcc..7620e90f 100644
--- a/server/szurubooru/migrations/versions/46cd5229839b_add_snapshot_resource_repr.py
+++ b/server/szurubooru/migrations/versions/46cd5229839b_add_snapshot_resource_repr.py
@@ -1,24 +1,25 @@
-'''
+"""
Add snapshot resource_repr column
Revision ID: 46cd5229839b
Created at: 2016-04-21 19:00:48.087069
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '46cd5229839b'
-down_revision = '565e01e3cf6d'
+revision = "46cd5229839b"
+down_revision = "565e01e3cf6d"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
- 'snapshot',
- sa.Column('resource_repr', sa.Unicode(length=64), nullable=False))
+ "snapshot",
+ sa.Column("resource_repr", sa.Unicode(length=64), nullable=False),
+ )
def downgrade():
- op.drop_column('snapshot', 'resource_repr')
+ op.drop_column("snapshot", "resource_repr")
diff --git a/server/szurubooru/migrations/versions/46df355634dc_add_comment_tables.py b/server/szurubooru/migrations/versions/46df355634dc_add_comment_tables.py
index 49971fef..c2a00923 100644
--- a/server/szurubooru/migrations/versions/46df355634dc_add_comment_tables.py
+++ b/server/szurubooru/migrations/versions/46df355634dc_add_comment_tables.py
@@ -1,43 +1,45 @@
-'''
+"""
Add comment tables
Revision ID: 46df355634dc
Created at: 2016-04-24 09:02:05.008648
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '46df355634dc'
-down_revision = '84bd402f15f0'
+revision = "46df355634dc"
+down_revision = "84bd402f15f0"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'comment',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=True),
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_edit_time', sa.DateTime(), nullable=True),
- sa.Column('text', sa.UnicodeText(), nullable=True),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.PrimaryKeyConstraint('id'))
+ "comment",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=True),
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_edit_time", sa.DateTime(), nullable=True),
+ sa.Column("text", sa.UnicodeText(), nullable=True),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'comment_score',
- sa.Column('comment_id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=False),
- sa.Column('time', sa.DateTime(), nullable=False),
- sa.Column('score', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['comment_id'], ['comment.id']),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('comment_id', 'user_id'))
+ "comment_score",
+ sa.Column("comment_id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("time", sa.DateTime(), nullable=False),
+ sa.Column("score", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["comment_id"], ["comment.id"]),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("comment_id", "user_id"),
+ )
def downgrade():
- op.drop_table('comment_score')
- op.drop_table('comment')
+ op.drop_table("comment_score")
+ op.drop_table("comment")
diff --git a/server/szurubooru/migrations/versions/49ab4e1139ef_create_indexes.py b/server/szurubooru/migrations/versions/49ab4e1139ef_create_indexes.py
index b18e4108..73a0ad15 100644
--- a/server/szurubooru/migrations/versions/49ab4e1139ef_create_indexes.py
+++ b/server/szurubooru/migrations/versions/49ab4e1139ef_create_indexes.py
@@ -1,71 +1,74 @@
-'''
+"""
Create indexes
Revision ID: 49ab4e1139ef
Created at: 2016-05-09 09:38:28.078936
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '49ab4e1139ef'
-down_revision = '23abaf4a0a4b'
+revision = "49ab4e1139ef"
+down_revision = "23abaf4a0a4b"
branch_labels = None
depends_on = None
def upgrade():
for index_name, table_name, column_name in [
- ('ix_comment_post_id', 'comment', 'post_id'),
- ('ix_comment_user_id', 'comment', 'user_id'),
- ('ix_comment_score_user_id', 'comment_score', 'user_id'),
- ('ix_post_user_id', 'post', 'user_id'),
- ('ix_post_favorite_post_id', 'post_favorite', 'post_id'),
- ('ix_post_favorite_user_id', 'post_favorite', 'user_id'),
- ('ix_post_feature_post_id', 'post_feature', 'post_id'),
- ('ix_post_feature_user_id', 'post_feature', 'user_id'),
- ('ix_post_note_post_id', 'post_note', 'post_id'),
- ('ix_post_relation_child_id', 'post_relation', 'child_id'),
- ('ix_post_relation_parent_id', 'post_relation', 'parent_id'),
- ('ix_post_score_post_id', 'post_score', 'post_id'),
- ('ix_post_score_user_id', 'post_score', 'user_id'),
- ('ix_post_tag_post_id', 'post_tag', 'post_id'),
- ('ix_post_tag_tag_id', 'post_tag', 'tag_id'),
- ('ix_snapshot_resource_id', 'snapshot', 'resource_id'),
- ('ix_snapshot_resource_type', 'snapshot', 'resource_type'),
- ('ix_tag_category_id', 'tag', 'category_id'),
- ('ix_tag_implication_child_id', 'tag_implication', 'child_id'),
- ('ix_tag_implication_parent_id', 'tag_implication', 'parent_id'),
- ('ix_tag_name_tag_id', 'tag_name', 'tag_id'),
- ('ix_tag_suggestion_child_id', 'tag_suggestion', 'child_id'),
- ('ix_tag_suggestion_parent_id', 'tag_suggestion', 'parent_id')]:
+ ("ix_comment_post_id", "comment", "post_id"),
+ ("ix_comment_user_id", "comment", "user_id"),
+ ("ix_comment_score_user_id", "comment_score", "user_id"),
+ ("ix_post_user_id", "post", "user_id"),
+ ("ix_post_favorite_post_id", "post_favorite", "post_id"),
+ ("ix_post_favorite_user_id", "post_favorite", "user_id"),
+ ("ix_post_feature_post_id", "post_feature", "post_id"),
+ ("ix_post_feature_user_id", "post_feature", "user_id"),
+ ("ix_post_note_post_id", "post_note", "post_id"),
+ ("ix_post_relation_child_id", "post_relation", "child_id"),
+ ("ix_post_relation_parent_id", "post_relation", "parent_id"),
+ ("ix_post_score_post_id", "post_score", "post_id"),
+ ("ix_post_score_user_id", "post_score", "user_id"),
+ ("ix_post_tag_post_id", "post_tag", "post_id"),
+ ("ix_post_tag_tag_id", "post_tag", "tag_id"),
+ ("ix_snapshot_resource_id", "snapshot", "resource_id"),
+ ("ix_snapshot_resource_type", "snapshot", "resource_type"),
+ ("ix_tag_category_id", "tag", "category_id"),
+ ("ix_tag_implication_child_id", "tag_implication", "child_id"),
+ ("ix_tag_implication_parent_id", "tag_implication", "parent_id"),
+ ("ix_tag_name_tag_id", "tag_name", "tag_id"),
+ ("ix_tag_suggestion_child_id", "tag_suggestion", "child_id"),
+ ("ix_tag_suggestion_parent_id", "tag_suggestion", "parent_id"),
+ ]:
op.create_index(
- op.f(index_name), table_name, [column_name], unique=False)
+ op.f(index_name), table_name, [column_name], unique=False
+ )
def downgrade():
for index_name, table_name in [
- ('ix_tag_suggestion_parent_id', 'tag_suggestion'),
- ('ix_tag_suggestion_child_id', 'tag_suggestion'),
- ('ix_tag_name_tag_id', 'tag_name'),
- ('ix_tag_implication_parent_id', 'tag_implication'),
- ('ix_tag_implication_child_id', 'tag_implication'),
- ('ix_tag_category_id', 'tag'),
- ('ix_snapshot_resource_type', 'snapshot'),
- ('ix_snapshot_resource_id', 'snapshot'),
- ('ix_post_tag_tag_id', 'post_tag'),
- ('ix_post_tag_post_id', 'post_tag'),
- ('ix_post_score_user_id', 'post_score'),
- ('ix_post_score_post_id', 'post_score'),
- ('ix_post_relation_parent_id', 'post_relation'),
- ('ix_post_relation_child_id', 'post_relation'),
- ('ix_post_note_post_id', 'post_note'),
- ('ix_post_feature_user_id', 'post_feature'),
- ('ix_post_feature_post_id', 'post_feature'),
- ('ix_post_favorite_user_id', 'post_favorite'),
- ('ix_post_favorite_post_id', 'post_favorite'),
- ('ix_post_user_id', 'post'),
- ('ix_comment_score_user_id', 'comment_score'),
- ('ix_comment_user_id', 'comment'),
- ('ix_comment_post_id', 'comment')]:
+ ("ix_tag_suggestion_parent_id", "tag_suggestion"),
+ ("ix_tag_suggestion_child_id", "tag_suggestion"),
+ ("ix_tag_name_tag_id", "tag_name"),
+ ("ix_tag_implication_parent_id", "tag_implication"),
+ ("ix_tag_implication_child_id", "tag_implication"),
+ ("ix_tag_category_id", "tag"),
+ ("ix_snapshot_resource_type", "snapshot"),
+ ("ix_snapshot_resource_id", "snapshot"),
+ ("ix_post_tag_tag_id", "post_tag"),
+ ("ix_post_tag_post_id", "post_tag"),
+ ("ix_post_score_user_id", "post_score"),
+ ("ix_post_score_post_id", "post_score"),
+ ("ix_post_relation_parent_id", "post_relation"),
+ ("ix_post_relation_child_id", "post_relation"),
+ ("ix_post_note_post_id", "post_note"),
+ ("ix_post_feature_user_id", "post_feature"),
+ ("ix_post_feature_post_id", "post_feature"),
+ ("ix_post_favorite_user_id", "post_favorite"),
+ ("ix_post_favorite_post_id", "post_favorite"),
+ ("ix_post_user_id", "post"),
+ ("ix_comment_score_user_id", "comment_score"),
+ ("ix_comment_user_id", "comment"),
+ ("ix_comment_post_id", "comment"),
+ ]:
op.drop_index(op.f(index_name), table_name=table_name)
diff --git a/server/szurubooru/migrations/versions/4a020f1d271a_rename_snapshot_columns.py b/server/szurubooru/migrations/versions/4a020f1d271a_rename_snapshot_columns.py
index e1c98ca9..957bede3 100644
--- a/server/szurubooru/migrations/versions/4a020f1d271a_rename_snapshot_columns.py
+++ b/server/szurubooru/migrations/versions/4a020f1d271a_rename_snapshot_columns.py
@@ -1,54 +1,57 @@
-'''
+"""
Rename snapshot columns
Revision ID: 4a020f1d271a
Created at: 2016-08-16 09:25:38.350861
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '4a020f1d271a'
-down_revision = '840b460c5613'
+revision = "4a020f1d271a"
+down_revision = "840b460c5613"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
- 'snapshot',
- sa.Column('resource_name', sa.Unicode(length=64), nullable=False))
+ "snapshot",
+ sa.Column("resource_name", sa.Unicode(length=64), nullable=False),
+ )
op.add_column(
- 'snapshot',
- sa.Column('resource_pkey', sa.Integer(), nullable=False))
+ "snapshot", sa.Column("resource_pkey", sa.Integer(), nullable=False)
+ )
op.create_index(
- op.f('ix_snapshot_resource_pkey'),
- 'snapshot',
- ['resource_pkey'],
- unique=False)
- op.drop_index('ix_snapshot_resource_id', table_name='snapshot')
- op.drop_column('snapshot', 'resource_id')
- op.drop_column('snapshot', 'resource_repr')
+ op.f("ix_snapshot_resource_pkey"),
+ "snapshot",
+ ["resource_pkey"],
+ unique=False,
+ )
+ op.drop_index("ix_snapshot_resource_id", table_name="snapshot")
+ op.drop_column("snapshot", "resource_id")
+ op.drop_column("snapshot", "resource_repr")
def downgrade():
op.add_column(
- 'snapshot',
+ "snapshot",
sa.Column(
- 'resource_repr',
+ "resource_repr",
sa.VARCHAR(length=64),
autoincrement=False,
- nullable=False))
+ nullable=False,
+ ),
+ )
op.add_column(
- 'snapshot',
+ "snapshot",
sa.Column(
- 'resource_id',
- sa.INTEGER(),
- autoincrement=False,
- nullable=False))
+ "resource_id", sa.INTEGER(), autoincrement=False, nullable=False
+ ),
+ )
op.create_index(
- 'ix_snapshot_resource_id', 'snapshot', ['resource_id'], unique=False)
- op.drop_index(op.f('ix_snapshot_resource_pkey'), table_name='snapshot')
- op.drop_column('snapshot', 'resource_pkey')
- op.drop_column('snapshot', 'resource_name')
+ "ix_snapshot_resource_id", "snapshot", ["resource_id"], unique=False
+ )
+ op.drop_index(op.f("ix_snapshot_resource_pkey"), table_name="snapshot")
+ op.drop_column("snapshot", "resource_pkey")
+ op.drop_column("snapshot", "resource_name")
diff --git a/server/szurubooru/migrations/versions/4c526f869323_add_description_to_tags.py b/server/szurubooru/migrations/versions/4c526f869323_add_description_to_tags.py
index f53866f1..443bc3c4 100644
--- a/server/szurubooru/migrations/versions/4c526f869323_add_description_to_tags.py
+++ b/server/szurubooru/migrations/versions/4c526f869323_add_description_to_tags.py
@@ -1,23 +1,24 @@
-'''
+"""
Add description to tags
Revision ID: 4c526f869323
Created at: 2016-06-21 17:56:34.979741
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '4c526f869323'
-down_revision = '055d0e048fb3'
+revision = "4c526f869323"
+down_revision = "055d0e048fb3"
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
- 'tag', sa.Column('description', sa.UnicodeText(), nullable=True))
+ "tag", sa.Column("description", sa.UnicodeText(), nullable=True)
+ )
def downgrade():
- op.drop_column('tag', 'description')
+ op.drop_column("tag", "description")
diff --git a/server/szurubooru/migrations/versions/52d6ea6584b8_generate_post_signature_table.py b/server/szurubooru/migrations/versions/52d6ea6584b8_generate_post_signature_table.py
index 5a865739..e544f808 100644
--- a/server/szurubooru/migrations/versions/52d6ea6584b8_generate_post_signature_table.py
+++ b/server/szurubooru/migrations/versions/52d6ea6584b8_generate_post_signature_table.py
@@ -1,16 +1,15 @@
-'''
+"""
Generate post signature table
Revision ID: 52d6ea6584b8
Created at: 2020-03-07 17:03:40.193512
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '52d6ea6584b8'
-down_revision = '3c1f0316fa7f'
+revision = "52d6ea6584b8"
+down_revision = "3c1f0316fa7f"
branch_labels = None
depends_on = None
@@ -18,13 +17,14 @@ depends_on = None
def upgrade():
ArrayType = sa.dialects.postgresql.ARRAY(sa.Integer, dimensions=1)
op.create_table(
- 'post_signature',
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('signature', sa.LargeBinary(), nullable=False),
- sa.Column('words', ArrayType, nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.PrimaryKeyConstraint('post_id'))
+ "post_signature",
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("signature", sa.LargeBinary(), nullable=False),
+ sa.Column("words", ArrayType, nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.PrimaryKeyConstraint("post_id"),
+ )
def downgrade():
- op.drop_table('post_signature')
+ op.drop_table("post_signature")
diff --git a/server/szurubooru/migrations/versions/54de8acc6cef_add_default_pool_category.py b/server/szurubooru/migrations/versions/54de8acc6cef_add_default_pool_category.py
index eaa68f27..73bf3420 100644
--- a/server/szurubooru/migrations/versions/54de8acc6cef_add_default_pool_category.py
+++ b/server/szurubooru/migrations/versions/54de8acc6cef_add_default_pool_category.py
@@ -1,16 +1,15 @@
-'''
+"""
add default pool category
Revision ID: 54de8acc6cef
Created at: 2020-05-03 14:57:46.825766
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '54de8acc6cef'
-down_revision = '6a2f424ec9d2'
+revision = "54de8acc6cef"
+down_revision = "6a2f424ec9d2"
branch_labels = None
depends_on = None
@@ -19,18 +18,18 @@ Base = sa.ext.declarative.declarative_base()
class PoolCategory(Base):
- __tablename__ = 'pool_category'
- __table_args__ = {'extend_existing': True}
+ __tablename__ = "pool_category"
+ __table_args__ = {"extend_existing": True}
- pool_category_id = sa.Column('id', sa.Integer, primary_key=True)
- version = sa.Column('version', sa.Integer, nullable=False)
- name = sa.Column('name', sa.Unicode(32), nullable=False)
- color = sa.Column('color', sa.Unicode(32), nullable=False)
- default = sa.Column('default', sa.Boolean, nullable=False)
+ pool_category_id = sa.Column("id", sa.Integer, primary_key=True)
+ version = sa.Column("version", sa.Integer, nullable=False)
+ name = sa.Column("name", sa.Unicode(32), nullable=False)
+ color = sa.Column("color", sa.Unicode(32), nullable=False)
+ default = sa.Column("default", sa.Boolean, nullable=False)
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
@@ -38,8 +37,8 @@ def upgrade():
session = sa.orm.session.Session(bind=op.get_bind())
if session.query(PoolCategory).count() == 0:
category = PoolCategory()
- category.name = 'default'
- category.color = 'default'
+ category.name = "default"
+ category.color = "default"
category.version = 1
category.default = True
session.add(category)
@@ -49,13 +48,13 @@ def upgrade():
def downgrade():
session = sa.orm.session.Session(bind=op.get_bind())
default_category = (
- session
- .query(PoolCategory)
- .filter(PoolCategory.name == 'default')
- .filter(PoolCategory.color == 'default')
+ session.query(PoolCategory)
+ .filter(PoolCategory.name == "default")
+ .filter(PoolCategory.color == "default")
.filter(PoolCategory.version == 1)
.filter(PoolCategory.default == 1)
- .one_or_none())
+ .one_or_none()
+ )
if default_category:
session.delete(default_category)
session.commit()
diff --git a/server/szurubooru/migrations/versions/565e01e3cf6d_create_snapshot_table.py b/server/szurubooru/migrations/versions/565e01e3cf6d_create_snapshot_table.py
index 475fe96b..a4fb89fd 100644
--- a/server/szurubooru/migrations/versions/565e01e3cf6d_create_snapshot_table.py
+++ b/server/szurubooru/migrations/versions/565e01e3cf6d_create_snapshot_table.py
@@ -1,32 +1,33 @@
-'''
+"""
Create snapshot table
Revision ID: 565e01e3cf6d
Created at: 2016-04-19 12:07:58.372426
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '565e01e3cf6d'
-down_revision = '336a76ec1338'
+revision = "565e01e3cf6d"
+down_revision = "336a76ec1338"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'snapshot',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('resource_type', sa.Unicode(length=32), nullable=False),
- sa.Column('resource_id', sa.Integer(), nullable=False),
- sa.Column('operation', sa.Unicode(length=16), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=True),
- sa.Column('data', sa.PickleType(), nullable=True),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('id'))
+ "snapshot",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("resource_type", sa.Unicode(length=32), nullable=False),
+ sa.Column("resource_id", sa.Integer(), nullable=False),
+ sa.Column("operation", sa.Unicode(length=16), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=True),
+ sa.Column("data", sa.PickleType(), nullable=True),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
def downgrade():
- op.drop_table('snapshot')
+ op.drop_table("snapshot")
diff --git a/server/szurubooru/migrations/versions/5f00af3004a4_add_default_tag_category.py b/server/szurubooru/migrations/versions/5f00af3004a4_add_default_tag_category.py
index e097e675..716a04e8 100644
--- a/server/szurubooru/migrations/versions/5f00af3004a4_add_default_tag_category.py
+++ b/server/szurubooru/migrations/versions/5f00af3004a4_add_default_tag_category.py
@@ -1,18 +1,17 @@
-'''
+"""
Add default tag category
Revision ID: 5f00af3004a4
Created at: 2017-02-02 20:06:13.336380
-'''
+"""
import sqlalchemy as sa
-from alembic import op
import sqlalchemy.ext.declarative
import sqlalchemy.orm.session
+from alembic import op
-
-revision = '5f00af3004a4'
-down_revision = '9837fc981ec7'
+revision = "5f00af3004a4"
+down_revision = "9837fc981ec7"
branch_labels = None
depends_on = None
@@ -21,18 +20,18 @@ Base = sa.ext.declarative.declarative_base()
class TagCategory(Base):
- __tablename__ = 'tag_category'
- __table_args__ = {'extend_existing': True}
+ __tablename__ = "tag_category"
+ __table_args__ = {"extend_existing": True}
- tag_category_id = sa.Column('id', sa.Integer, primary_key=True)
- version = sa.Column('version', sa.Integer, nullable=False)
- name = sa.Column('name', sa.Unicode(32), nullable=False)
- color = sa.Column('color', sa.Unicode(32), nullable=False)
- default = sa.Column('default', sa.Boolean, nullable=False)
+ tag_category_id = sa.Column("id", sa.Integer, primary_key=True)
+ version = sa.Column("version", sa.Integer, nullable=False)
+ name = sa.Column("name", sa.Unicode(32), nullable=False)
+ color = sa.Column("color", sa.Unicode(32), nullable=False)
+ default = sa.Column("default", sa.Boolean, nullable=False)
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
@@ -40,8 +39,8 @@ def upgrade():
session = sa.orm.session.Session(bind=op.get_bind())
if session.query(TagCategory).count() == 0:
category = TagCategory()
- category.name = 'default'
- category.color = 'default'
+ category.name = "default"
+ category.color = "default"
category.version = 1
category.default = True
session.add(category)
@@ -51,13 +50,13 @@ def upgrade():
def downgrade():
session = sa.orm.session.Session(bind=op.get_bind())
default_category = (
- session
- .query(TagCategory)
- .filter(TagCategory.name == 'default')
- .filter(TagCategory.color == 'default')
+ session.query(TagCategory)
+ .filter(TagCategory.name == "default")
+ .filter(TagCategory.color == "default")
.filter(TagCategory.version == 1)
.filter(TagCategory.default == 1)
- .one_or_none())
+ .one_or_none()
+ )
if default_category:
session.delete(default_category)
session.commit()
diff --git a/server/szurubooru/migrations/versions/6a2f424ec9d2_create_pool_tables.py b/server/szurubooru/migrations/versions/6a2f424ec9d2_create_pool_tables.py
index 18a0d7af..197373d7 100644
--- a/server/szurubooru/migrations/versions/6a2f424ec9d2_create_pool_tables.py
+++ b/server/szurubooru/migrations/versions/6a2f424ec9d2_create_pool_tables.py
@@ -1,64 +1,67 @@
-'''
+"""
create pool tables
Revision ID: 6a2f424ec9d2
Created at: 2020-05-03 14:47:59.136410
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '6a2f424ec9d2'
-down_revision = '1e280b5d5df1'
+revision = "6a2f424ec9d2"
+down_revision = "1e280b5d5df1"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'pool_category',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('version', sa.Integer(), nullable=False, default=1),
- sa.Column('name', sa.Unicode(length=32), nullable=False),
- sa.Column('color', sa.Unicode(length=32), nullable=False),
- sa.Column('default', sa.Boolean(), nullable=False, default=False),
- sa.PrimaryKeyConstraint('id'))
+ "pool_category",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("version", sa.Integer(), nullable=False, default=1),
+ sa.Column("name", sa.Unicode(length=32), nullable=False),
+ sa.Column("color", sa.Unicode(length=32), nullable=False),
+ sa.Column("default", sa.Boolean(), nullable=False, default=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'pool',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('version', sa.Integer(), nullable=False, default=1),
- sa.Column('description', sa.UnicodeText(), nullable=True),
- sa.Column('category_id', sa.Integer(), nullable=False),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_edit_time', sa.DateTime(), nullable=True),
- sa.ForeignKeyConstraint(['category_id'], ['pool_category.id']),
- sa.PrimaryKeyConstraint('id'))
+ "pool",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("version", sa.Integer(), nullable=False, default=1),
+ sa.Column("description", sa.UnicodeText(), nullable=True),
+ sa.Column("category_id", sa.Integer(), nullable=False),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_edit_time", sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(["category_id"], ["pool_category.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'pool_name',
- sa.Column('pool_name_id', sa.Integer(), nullable=False),
- sa.Column('pool_id', sa.Integer(), nullable=False),
- sa.Column('name', sa.Unicode(length=256), nullable=False),
- sa.Column('ord', sa.Integer(), nullable=False, index=True),
- sa.ForeignKeyConstraint(['pool_id'], ['pool.id']),
- sa.PrimaryKeyConstraint('pool_name_id'),
- sa.UniqueConstraint('name'))
+ "pool_name",
+ sa.Column("pool_name_id", sa.Integer(), nullable=False),
+ sa.Column("pool_id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.Unicode(length=256), nullable=False),
+ sa.Column("ord", sa.Integer(), nullable=False, index=True),
+ sa.ForeignKeyConstraint(["pool_id"], ["pool.id"]),
+ sa.PrimaryKeyConstraint("pool_name_id"),
+ sa.UniqueConstraint("name"),
+ )
op.create_table(
- 'pool_post',
- sa.Column('pool_id', sa.Integer(), nullable=False),
- sa.Column('post_id', sa.Integer(), nullable=False, index=True),
- sa.Column('ord', sa.Integer(), nullable=False, index=True),
- sa.ForeignKeyConstraint(['pool_id'], ['pool.id'], ondelete='CASCADE'),
- sa.ForeignKeyConstraint(['post_id'], ['post.id'], ondelete='CASCADE'),
- sa.PrimaryKeyConstraint('pool_id', 'post_id'))
+ "pool_post",
+ sa.Column("pool_id", sa.Integer(), nullable=False),
+ sa.Column("post_id", sa.Integer(), nullable=False, index=True),
+ sa.Column("ord", sa.Integer(), nullable=False, index=True),
+ sa.ForeignKeyConstraint(["pool_id"], ["pool.id"], ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"], ondelete="CASCADE"),
+ sa.PrimaryKeyConstraint("pool_id", "post_id"),
+ )
def downgrade():
- op.drop_index(op.f('ix_pool_name_ord'), table_name='pool_name')
- op.drop_table('pool_post')
- op.drop_table('pool_name')
- op.drop_table('pool')
- op.drop_table('pool_category')
+ op.drop_index(op.f("ix_pool_name_ord"), table_name="pool_name")
+ op.drop_table("pool_post")
+ op.drop_table("pool_name")
+ op.drop_table("pool")
+ op.drop_table("pool_category")
diff --git a/server/szurubooru/migrations/versions/7f6baf38c27c_add_versions.py b/server/szurubooru/migrations/versions/7f6baf38c27c_add_versions.py
index 22360260..24056c02 100644
--- a/server/szurubooru/migrations/versions/7f6baf38c27c_add_versions.py
+++ b/server/szurubooru/migrations/versions/7f6baf38c27c_add_versions.py
@@ -1,31 +1,30 @@
-'''
+"""
Add entity versions
Revision ID: 7f6baf38c27c
Created at: 2016-08-06 22:26:58.111763
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '7f6baf38c27c'
-down_revision = '4c526f869323'
+revision = "7f6baf38c27c"
+down_revision = "4c526f869323"
branch_labels = None
depends_on = None
-tables = ['tag_category', 'tag', 'user', 'post', 'comment']
+tables = ["tag_category", "tag", "user", "post", "comment"]
def upgrade():
for table in tables:
- op.add_column(table, sa.Column('version', sa.Integer(), nullable=True))
+ op.add_column(table, sa.Column("version", sa.Integer(), nullable=True))
op.execute(
- sa.table(table, sa.column('version'))
- .update()
- .values(version=1))
- op.alter_column(table, 'version', nullable=False)
+ sa.table(table, sa.column("version")).update().values(version=1)
+ )
+ op.alter_column(table, "version", nullable=False)
def downgrade():
for table in tables:
- op.drop_column(table, 'version')
+ op.drop_column(table, "version")
diff --git a/server/szurubooru/migrations/versions/840b460c5613_fix_foreignkeys.py b/server/szurubooru/migrations/versions/840b460c5613_fix_foreignkeys.py
index 8821dd8e..b2b5df5a 100644
--- a/server/szurubooru/migrations/versions/840b460c5613_fix_foreignkeys.py
+++ b/server/szurubooru/migrations/versions/840b460c5613_fix_foreignkeys.py
@@ -1,33 +1,36 @@
-'''
+"""
Fix ForeignKey constraint definitions
Revision ID: 840b460c5613
Created at: 2016-08-15 18:39:30.909867
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = '840b460c5613'
-down_revision = '7f6baf38c27c'
+revision = "840b460c5613"
+down_revision = "7f6baf38c27c"
branch_labels = None
depends_on = None
def upgrade():
- op.drop_constraint('post_user_id_fkey', 'post', type_='foreignkey')
- op.drop_constraint('snapshot_user_id_fkey', 'snapshot', type_='foreignkey')
+ op.drop_constraint("post_user_id_fkey", "post", type_="foreignkey")
+ op.drop_constraint("snapshot_user_id_fkey", "snapshot", type_="foreignkey")
op.create_foreign_key(
- None, 'post', 'user', ['user_id'], ['id'], ondelete='SET NULL')
+ None, "post", "user", ["user_id"], ["id"], ondelete="SET NULL"
+ )
op.create_foreign_key(
- None, 'snapshot', 'user', ['user_id'], ['id'], ondelete='set null')
+ None, "snapshot", "user", ["user_id"], ["id"], ondelete="set null"
+ )
def downgrade():
- op.drop_constraint(None, 'snapshot', type_='foreignkey')
- op.drop_constraint(None, 'post', type_='foreignkey')
+ op.drop_constraint(None, "snapshot", type_="foreignkey")
+ op.drop_constraint(None, "post", type_="foreignkey")
op.create_foreign_key(
- 'snapshot_user_id_fkey', 'snapshot', 'user', ['user_id'], ['id'])
+ "snapshot_user_id_fkey", "snapshot", "user", ["user_id"], ["id"]
+ )
op.create_foreign_key(
- 'post_user_id_fkey', 'post', 'user', ['user_id'], ['id'])
+ "post_user_id_fkey", "post", "user", ["user_id"], ["id"]
+ )
diff --git a/server/szurubooru/migrations/versions/84bd402f15f0_change_flags_column_type.py b/server/szurubooru/migrations/versions/84bd402f15f0_change_flags_column_type.py
index 72366413..d495b822 100644
--- a/server/szurubooru/migrations/versions/84bd402f15f0_change_flags_column_type.py
+++ b/server/szurubooru/migrations/versions/84bd402f15f0_change_flags_column_type.py
@@ -1,26 +1,27 @@
-'''
+"""
Change flags column type
Revision ID: 84bd402f15f0
Created at: 2016-04-22 20:48:32.386159
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '84bd402f15f0'
-down_revision = '9587de88a84b'
+revision = "84bd402f15f0"
+down_revision = "9587de88a84b"
branch_labels = None
depends_on = None
def upgrade():
- op.drop_column('post', 'flags')
- op.add_column('post', sa.Column('flags', sa.PickleType(), nullable=True))
+ op.drop_column("post", "flags")
+ op.add_column("post", sa.Column("flags", sa.PickleType(), nullable=True))
def downgrade():
- op.drop_column('post', 'flags')
+ op.drop_column("post", "flags")
op.add_column(
- 'post',
- sa.Column('flags', sa.Integer(), autoincrement=False, nullable=False))
+ "post",
+ sa.Column("flags", sa.Integer(), autoincrement=False, nullable=False),
+ )
diff --git a/server/szurubooru/migrations/versions/9587de88a84b_create_aux_post_tables.py b/server/szurubooru/migrations/versions/9587de88a84b_create_aux_post_tables.py
index eddec24f..46647cff 100644
--- a/server/szurubooru/migrations/versions/9587de88a84b_create_aux_post_tables.py
+++ b/server/szurubooru/migrations/versions/9587de88a84b_create_aux_post_tables.py
@@ -1,61 +1,65 @@
-'''
+"""
Create auxilliary post tables
Revision ID: 9587de88a84b
Created at: 2016-04-22 17:42:57.697229
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = '9587de88a84b'
-down_revision = '46cd5229839b'
+revision = "9587de88a84b"
+down_revision = "46cd5229839b"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'post_favorite',
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=False),
- sa.Column('time', sa.DateTime(), nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('post_id', 'user_id'))
+ "post_favorite",
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("time", sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("post_id", "user_id"),
+ )
op.create_table(
- 'post_feature',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=False),
- sa.Column('time', sa.DateTime(), nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('id'))
+ "post_feature",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("time", sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'post_note',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('text', sa.UnicodeText(), nullable=False),
- sa.Column('polygon', sa.PickleType(), nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.PrimaryKeyConstraint('id'))
+ "post_note",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("text", sa.UnicodeText(), nullable=False),
+ sa.Column("polygon", sa.PickleType(), nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_table(
- 'post_score',
- sa.Column('post_id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=False),
- sa.Column('time', sa.DateTime(), nullable=False),
- sa.Column('score', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['post_id'], ['post.id']),
- sa.ForeignKeyConstraint(['user_id'], ['user.id']),
- sa.PrimaryKeyConstraint('post_id', 'user_id'))
+ "post_score",
+ sa.Column("post_id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("time", sa.DateTime(), nullable=False),
+ sa.Column("score", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["post_id"], ["post.id"]),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"]),
+ sa.PrimaryKeyConstraint("post_id", "user_id"),
+ )
def downgrade():
- op.drop_table('post_score')
- op.drop_table('post_note')
- op.drop_table('post_feature')
- op.drop_table('post_favorite')
+ op.drop_table("post_score")
+ op.drop_table("post_note")
+ op.drop_table("post_feature")
+ op.drop_table("post_favorite")
diff --git a/server/szurubooru/migrations/versions/9837fc981ec7_add_order_to_tag_names.py b/server/szurubooru/migrations/versions/9837fc981ec7_add_order_to_tag_names.py
index 39f9edff..d1e74e95 100644
--- a/server/szurubooru/migrations/versions/9837fc981ec7_add_order_to_tag_names.py
+++ b/server/szurubooru/migrations/versions/9837fc981ec7_add_order_to_tag_names.py
@@ -1,17 +1,16 @@
-'''
+"""
Add order to tag names
Revision ID: 9837fc981ec7
Created at: 2016-08-28 19:03:59.831527
-'''
+"""
import sqlalchemy as sa
-from alembic import op
import sqlalchemy.ext.declarative
+from alembic import op
-
-revision = '9837fc981ec7'
-down_revision = '4a020f1d271a'
+revision = "9837fc981ec7"
+down_revision = "4a020f1d271a"
branch_labels = None
depends_on = None
@@ -20,21 +19,20 @@ Base = sa.ext.declarative.declarative_base()
class TagName(Base):
- __tablename__ = 'tag_name'
- __table_args__ = {'extend_existing': True}
+ __tablename__ = "tag_name"
+ __table_args__ = {"extend_existing": True}
- tag_name_id = sa.Column('tag_name_id', sa.Integer, primary_key=True)
- ord = sa.Column('ord', sa.Integer, nullable=False, index=True)
+ tag_name_id = sa.Column("tag_name_id", sa.Integer, primary_key=True)
+ ord = sa.Column("ord", sa.Integer, nullable=False, index=True)
def upgrade():
- op.add_column('tag_name', sa.Column('ord', sa.Integer(), nullable=True))
+ op.add_column("tag_name", sa.Column("ord", sa.Integer(), nullable=True))
op.execute(TagName.__table__.update().values(ord=TagName.tag_name_id))
- op.alter_column('tag_name', 'ord', nullable=False)
- op.create_index(
- op.f('ix_tag_name_ord'), 'tag_name', ['ord'], unique=False)
+ op.alter_column("tag_name", "ord", nullable=False)
+ op.create_index(op.f("ix_tag_name_ord"), "tag_name", ["ord"], unique=False)
def downgrade():
- op.drop_index(op.f('ix_tag_name_ord'), table_name='tag_name')
- op.drop_column('tag_name', 'ord')
+ op.drop_index(op.f("ix_tag_name_ord"), table_name="tag_name")
+ op.drop_column("tag_name", "ord")
diff --git a/server/szurubooru/migrations/versions/9ef1a1643c2a_update_user_table_for_hardened_passwords.py b/server/szurubooru/migrations/versions/9ef1a1643c2a_update_user_table_for_hardened_passwords.py
index 38057728..b8763a7d 100644
--- a/server/szurubooru/migrations/versions/9ef1a1643c2a_update_user_table_for_hardened_passwords.py
+++ b/server/szurubooru/migrations/versions/9ef1a1643c2a_update_user_table_for_hardened_passwords.py
@@ -1,19 +1,18 @@
-'''
+"""
Alter the password_hash field to work with larger output.
Particularly libsodium output for greater password security.
Revision ID: 9ef1a1643c2a
Created at: 2018-02-24 23:00:32.848575
-'''
+"""
import sqlalchemy as sa
import sqlalchemy.ext.declarative
import sqlalchemy.orm.session
from alembic import op
-
-revision = '9ef1a1643c2a'
-down_revision = '02ef5f73f4ab'
+revision = "9ef1a1643c2a"
+down_revision = "02ef5f73f4ab"
branch_labels = None
depends_on = None
@@ -21,43 +20,46 @@ Base = sa.ext.declarative.declarative_base()
class User(Base):
- __tablename__ = 'user'
+ __tablename__ = "user"
- AVATAR_GRAVATAR = 'gravatar'
+ AVATAR_GRAVATAR = "gravatar"
- user_id = sa.Column('id', sa.Integer, primary_key=True)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_login_time = sa.Column('last_login_time', sa.DateTime)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- name = sa.Column('name', sa.Unicode(50), nullable=False, unique=True)
- password_hash = sa.Column('password_hash', sa.Unicode(128), nullable=False)
- password_salt = sa.Column('password_salt', sa.Unicode(32))
+ user_id = sa.Column("id", sa.Integer, primary_key=True)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_login_time = sa.Column("last_login_time", sa.DateTime)
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ name = sa.Column("name", sa.Unicode(50), nullable=False, unique=True)
+ password_hash = sa.Column("password_hash", sa.Unicode(128), nullable=False)
+ password_salt = sa.Column("password_salt", sa.Unicode(32))
password_revision = sa.Column(
- 'password_revision', sa.SmallInteger, default=0, nullable=False)
- email = sa.Column('email', sa.Unicode(64), nullable=True)
- rank = sa.Column('rank', sa.Unicode(32), nullable=False)
+ "password_revision", sa.SmallInteger, default=0, nullable=False
+ )
+ email = sa.Column("email", sa.Unicode(64), nullable=True)
+ rank = sa.Column("rank", sa.Unicode(32), nullable=False)
avatar_style = sa.Column(
- 'avatar_style', sa.Unicode(32), nullable=False,
- default=AVATAR_GRAVATAR)
+ "avatar_style", sa.Unicode(32), nullable=False, default=AVATAR_GRAVATAR
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
def upgrade():
op.alter_column(
- 'user',
- 'password_hash',
+ "user",
+ "password_hash",
existing_type=sa.VARCHAR(length=64),
type_=sa.Unicode(length=128),
- existing_nullable=False)
- op.add_column('user', sa.Column(
- 'password_revision',
- sa.SmallInteger(),
- nullable=True,
- default=0))
+ existing_nullable=False,
+ )
+ op.add_column(
+ "user",
+ sa.Column(
+ "password_revision", sa.SmallInteger(), nullable=True, default=0
+ ),
+ )
session = sa.orm.session.Session(bind=op.get_bind())
if session.query(User).count() >= 0:
@@ -73,17 +75,16 @@ def upgrade():
session.commit()
op.alter_column(
- 'user',
- 'password_revision',
- existing_nullable=True,
- nullable=False)
+ "user", "password_revision", existing_nullable=True, nullable=False
+ )
def downgrade():
op.alter_column(
- 'user',
- 'password_hash',
+ "user",
+ "password_hash",
existing_type=sa.Unicode(length=128),
type_=sa.VARCHAR(length=64),
- existing_nullable=False)
- op.drop_column('user', 'password_revision')
+ existing_nullable=False,
+ )
+ op.drop_column("user", "password_revision")
diff --git a/server/szurubooru/migrations/versions/a39c7f98a7fa_add_user_token_table.py b/server/szurubooru/migrations/versions/a39c7f98a7fa_add_user_token_table.py
index 899eaa70..57dda5c0 100644
--- a/server/szurubooru/migrations/versions/a39c7f98a7fa_add_user_token_table.py
+++ b/server/szurubooru/migrations/versions/a39c7f98a7fa_add_user_token_table.py
@@ -1,39 +1,40 @@
-'''
+"""
Added a user_token table for API authorization
Revision ID: a39c7f98a7fa
Created at: 2018-02-25 01:31:27.345595
-'''
+"""
import sqlalchemy as sa
from alembic import op
-
-revision = 'a39c7f98a7fa'
-down_revision = '9ef1a1643c2a'
+revision = "a39c7f98a7fa"
+down_revision = "9ef1a1643c2a"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
- 'user_token',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('user_id', sa.Integer(), nullable=False),
- sa.Column('token', sa.Unicode(length=36), nullable=False),
- sa.Column('note', sa.Unicode(length=128), nullable=True),
- sa.Column('enabled', sa.Boolean(), nullable=False),
- sa.Column('expiration_time', sa.DateTime(), nullable=True),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_edit_time', sa.DateTime(), nullable=True),
- sa.Column('last_usage_time', sa.DateTime(), nullable=True),
- sa.Column('version', sa.Integer(), nullable=False),
- sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
- sa.PrimaryKeyConstraint('id'))
+ "user_token",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("token", sa.Unicode(length=36), nullable=False),
+ sa.Column("note", sa.Unicode(length=128), nullable=True),
+ sa.Column("enabled", sa.Boolean(), nullable=False),
+ sa.Column("expiration_time", sa.DateTime(), nullable=True),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_edit_time", sa.DateTime(), nullable=True),
+ sa.Column("last_usage_time", sa.DateTime(), nullable=True),
+ sa.Column("version", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
+ sa.PrimaryKeyConstraint("id"),
+ )
op.create_index(
- op.f('ix_user_token_user_id'), 'user_token', ['user_id'], unique=False)
+ op.f("ix_user_token_user_id"), "user_token", ["user_id"], unique=False
+ )
def downgrade():
- op.drop_index(op.f('ix_user_token_user_id'), table_name='user_token')
- op.drop_table('user_token')
+ op.drop_index(op.f("ix_user_token_user_id"), table_name="user_token")
+ op.drop_table("user_token")
diff --git a/server/szurubooru/migrations/versions/e5c1216a8503_create_user_table.py b/server/szurubooru/migrations/versions/e5c1216a8503_create_user_table.py
index a84e31ba..672d4eb9 100644
--- a/server/szurubooru/migrations/versions/e5c1216a8503_create_user_table.py
+++ b/server/szurubooru/migrations/versions/e5c1216a8503_create_user_table.py
@@ -1,14 +1,14 @@
-'''
+"""
Create user table
Revision ID: e5c1216a8503
Created at: 2016-03-20 15:53:25.030415
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = 'e5c1216a8503'
+revision = "e5c1216a8503"
down_revision = None
branch_labels = None
depends_on = None
@@ -16,19 +16,20 @@ depends_on = None
def upgrade():
op.create_table(
- 'user',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('name', sa.Unicode(length=50), nullable=False),
- sa.Column('password_hash', sa.Unicode(length=64), nullable=False),
- sa.Column('password_salt', sa.Unicode(length=32), nullable=True),
- sa.Column('email', sa.Unicode(length=64), nullable=True),
- sa.Column('rank', sa.Unicode(length=32), nullable=False),
- sa.Column('creation_time', sa.DateTime(), nullable=False),
- sa.Column('last_login_time', sa.DateTime()),
- sa.Column('avatar_style', sa.Unicode(length=32), nullable=False),
- sa.PrimaryKeyConstraint('id'))
- op.create_unique_constraint('uq_user_name', 'user', ['name'])
+ "user",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.Unicode(length=50), nullable=False),
+ sa.Column("password_hash", sa.Unicode(length=64), nullable=False),
+ sa.Column("password_salt", sa.Unicode(length=32), nullable=True),
+ sa.Column("email", sa.Unicode(length=64), nullable=True),
+ sa.Column("rank", sa.Unicode(length=32), nullable=False),
+ sa.Column("creation_time", sa.DateTime(), nullable=False),
+ sa.Column("last_login_time", sa.DateTime()),
+ sa.Column("avatar_style", sa.Unicode(length=32), nullable=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
def downgrade():
- op.drop_table('user')
+ op.drop_table("user")
diff --git a/server/szurubooru/migrations/versions/ed6dd16a30f3_delete_post_columns.py b/server/szurubooru/migrations/versions/ed6dd16a30f3_delete_post_columns.py
index dffc6c0d..3b36c6ad 100644
--- a/server/szurubooru/migrations/versions/ed6dd16a30f3_delete_post_columns.py
+++ b/server/szurubooru/migrations/versions/ed6dd16a30f3_delete_post_columns.py
@@ -1,48 +1,49 @@
-'''
+"""
Delete post columns
Revision ID: ed6dd16a30f3
Created at: 2016-04-24 16:29:25.309154
-'''
+"""
import sqlalchemy as sa
from alembic import op
-revision = 'ed6dd16a30f3'
-down_revision = '46df355634dc'
+revision = "ed6dd16a30f3"
+down_revision = "46df355634dc"
branch_labels = None
depends_on = None
def upgrade():
for column_name in [
- 'auto_comment_edit_time',
- 'auto_fav_count',
- 'auto_comment_creation_time',
- 'auto_feature_count',
- 'auto_comment_count',
- 'auto_score',
- 'auto_fav_time',
- 'auto_feature_time',
- 'auto_note_count']:
- op.drop_column('post', column_name)
+ "auto_comment_edit_time",
+ "auto_fav_count",
+ "auto_comment_creation_time",
+ "auto_feature_count",
+ "auto_comment_count",
+ "auto_score",
+ "auto_fav_time",
+ "auto_feature_time",
+ "auto_note_count",
+ ]:
+ op.drop_column("post", column_name)
def downgrade():
for column_name in [
- 'auto_note_count',
- 'auto_feature_time',
- 'auto_fav_time',
- 'auto_score',
- 'auto_comment_count',
- 'auto_feature_count',
- 'auto_comment_creation_time',
- 'auto_fav_count',
- 'auto_comment_edit_time']:
+ "auto_note_count",
+ "auto_feature_time",
+ "auto_fav_time",
+ "auto_score",
+ "auto_comment_count",
+ "auto_feature_count",
+ "auto_comment_creation_time",
+ "auto_fav_count",
+ "auto_comment_edit_time",
+ ]:
op.add_column(
- 'post',
+ "post",
sa.Column(
- column_name,
- sa.INTEGER(),
- autoincrement=False,
- nullable=False))
+ column_name, sa.INTEGER(), autoincrement=False, nullable=False
+ ),
+ )
diff --git a/server/szurubooru/model/__init__.py b/server/szurubooru/model/__init__.py
index 4f6cb2a6..21a178ef 100644
--- a/server/szurubooru/model/__init__.py
+++ b/server/szurubooru/model/__init__.py
@@ -1,18 +1,19 @@
+import szurubooru.model.util
from szurubooru.model.base import Base
-from szurubooru.model.user import User, UserToken
-from szurubooru.model.tag_category import TagCategory
-from szurubooru.model.tag import Tag, TagName, TagSuggestion, TagImplication
-from szurubooru.model.post import (
- Post,
- PostTag,
- PostRelation,
- PostFavorite,
- PostScore,
- PostNote,
- PostFeature,
- PostSignature)
+from szurubooru.model.comment import Comment, CommentScore
from szurubooru.model.pool import Pool, PoolName, PoolPost
from szurubooru.model.pool_category import PoolCategory
-from szurubooru.model.comment import Comment, CommentScore
+from szurubooru.model.post import (
+ Post,
+ PostFavorite,
+ PostFeature,
+ PostNote,
+ PostRelation,
+ PostScore,
+ PostSignature,
+ PostTag,
+)
from szurubooru.model.snapshot import Snapshot
-import szurubooru.model.util
+from szurubooru.model.tag import Tag, TagImplication, TagName, TagSuggestion
+from szurubooru.model.tag_category import TagCategory
+from szurubooru.model.user import User, UserToken
diff --git a/server/szurubooru/model/base.py b/server/szurubooru/model/base.py
index 00ea8e13..860e5425 100644
--- a/server/szurubooru/model/base.py
+++ b/server/szurubooru/model/base.py
@@ -1,4 +1,3 @@
from sqlalchemy.ext.declarative import declarative_base
-
Base = declarative_base()
diff --git a/server/szurubooru/model/comment.py b/server/szurubooru/model/comment.py
index 17b76a05..e64961e6 100644
--- a/server/szurubooru/model/comment.py
+++ b/server/szurubooru/model/comment.py
@@ -1,58 +1,65 @@
import sqlalchemy as sa
+
from szurubooru.db import get_session
from szurubooru.model.base import Base
class CommentScore(Base):
- __tablename__ = 'comment_score'
+ __tablename__ = "comment_score"
comment_id = sa.Column(
- 'comment_id',
+ "comment_id",
sa.Integer,
- sa.ForeignKey('comment.id'),
- nullable=False,
- primary_key=True)
- user_id = sa.Column(
- 'user_id',
- sa.Integer,
- sa.ForeignKey('user.id'),
+ sa.ForeignKey("comment.id"),
nullable=False,
primary_key=True,
- index=True)
- time = sa.Column('time', sa.DateTime, nullable=False)
- score = sa.Column('score', sa.Integer, nullable=False)
+ )
+ user_id = sa.Column(
+ "user_id",
+ sa.Integer,
+ sa.ForeignKey("user.id"),
+ nullable=False,
+ primary_key=True,
+ index=True,
+ )
+ time = sa.Column("time", sa.DateTime, nullable=False)
+ score = sa.Column("score", sa.Integer, nullable=False)
- comment = sa.orm.relationship('Comment')
+ comment = sa.orm.relationship("Comment")
user = sa.orm.relationship(
- 'User',
- backref=sa.orm.backref('comment_scores', cascade='all, delete-orphan'))
+ "User",
+ backref=sa.orm.backref("comment_scores", cascade="all, delete-orphan"),
+ )
class Comment(Base):
- __tablename__ = 'comment'
+ __tablename__ = "comment"
- comment_id = sa.Column('id', sa.Integer, primary_key=True)
+ comment_id = sa.Column("id", sa.Integer, primary_key=True)
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
nullable=False,
- index=True)
+ index=True,
+ )
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id'),
+ sa.ForeignKey("user.id"),
nullable=True,
- index=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_edit_time = sa.Column('last_edit_time', sa.DateTime)
- text = sa.Column('text', sa.UnicodeText, default=None)
+ index=True,
+ )
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_edit_time = sa.Column("last_edit_time", sa.DateTime)
+ text = sa.Column("text", sa.UnicodeText, default=None)
- user = sa.orm.relationship('User')
- post = sa.orm.relationship('Post')
+ user = sa.orm.relationship("User")
+ post = sa.orm.relationship("Post")
scores = sa.orm.relationship(
- 'CommentScore', cascade='all, delete-orphan', lazy='joined')
+ "CommentScore", cascade="all, delete-orphan", lazy="joined"
+ )
@property
def score(self) -> int:
@@ -60,9 +67,11 @@ class Comment(Base):
get_session()
.query(sa.sql.expression.func.sum(CommentScore.score))
.filter(CommentScore.comment_id == self.comment_id)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/pool.py b/server/szurubooru/model/pool.py
index 6fc83610..3dcdd353 100644
--- a/server/szurubooru/model/pool.py
+++ b/server/szurubooru/model/pool.py
@@ -1,21 +1,23 @@
import sqlalchemy as sa
-from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.orderinglist import ordering_list
+
from szurubooru.model.base import Base
class PoolName(Base):
- __tablename__ = 'pool_name'
+ __tablename__ = "pool_name"
- pool_name_id = sa.Column('pool_name_id', sa.Integer, primary_key=True)
+ pool_name_id = sa.Column("pool_name_id", sa.Integer, primary_key=True)
pool_id = sa.Column(
- 'pool_id',
+ "pool_id",
sa.Integer,
- sa.ForeignKey('pool.id'),
+ sa.ForeignKey("pool.id"),
nullable=False,
- index=True)
- name = sa.Column('name', sa.Unicode(128), nullable=False, unique=True)
- order = sa.Column('ord', sa.Integer, nullable=False, index=True)
+ index=True,
+ )
+ name = sa.Column("name", sa.Unicode(128), nullable=False, unique=True)
+ order = sa.Column("ord", sa.Integer, nullable=False, index=True)
def __init__(self, name: str, order: int) -> None:
self.name = name
@@ -23,69 +25,76 @@ class PoolName(Base):
class PoolPost(Base):
- __tablename__ = 'pool_post'
+ __tablename__ = "pool_post"
pool_id = sa.Column(
- 'pool_id',
+ "pool_id",
sa.Integer,
- sa.ForeignKey('pool.id'),
+ sa.ForeignKey("pool.id"),
nullable=False,
primary_key=True,
- index=True)
+ index=True,
+ )
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
nullable=False,
primary_key=True,
- index=True)
- order = sa.Column('ord', sa.Integer, nullable=False, index=True)
+ index=True,
+ )
+ order = sa.Column("ord", sa.Integer, nullable=False, index=True)
- pool = sa.orm.relationship('Pool', back_populates='_posts')
- post = sa.orm.relationship('Post', back_populates='_pools')
+ pool = sa.orm.relationship("Pool", back_populates="_posts")
+ post = sa.orm.relationship("Post", back_populates="_pools")
def __init__(self, post) -> None:
self.post_id = post.post_id
class Pool(Base):
- __tablename__ = 'pool'
+ __tablename__ = "pool"
- pool_id = sa.Column('id', sa.Integer, primary_key=True)
+ pool_id = sa.Column("id", sa.Integer, primary_key=True)
category_id = sa.Column(
- 'category_id',
+ "category_id",
sa.Integer,
- sa.ForeignKey('pool_category.id'),
+ sa.ForeignKey("pool_category.id"),
nullable=False,
- index=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_edit_time = sa.Column('last_edit_time', sa.DateTime)
- description = sa.Column('description', sa.UnicodeText, default=None)
+ index=True,
+ )
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_edit_time = sa.Column("last_edit_time", sa.DateTime)
+ description = sa.Column("description", sa.UnicodeText, default=None)
- category = sa.orm.relationship('PoolCategory', lazy='joined')
+ category = sa.orm.relationship("PoolCategory", lazy="joined")
names = sa.orm.relationship(
- 'PoolName',
- cascade='all,delete-orphan',
- lazy='joined',
- order_by='PoolName.order')
+ "PoolName",
+ cascade="all,delete-orphan",
+ lazy="joined",
+ order_by="PoolName.order",
+ )
_posts = sa.orm.relationship(
- 'PoolPost',
- cascade='all,delete-orphan',
- lazy='joined',
- back_populates='pool',
- order_by='PoolPost.order',
- collection_class=ordering_list('order'))
- posts = association_proxy('_posts', 'post')
+ "PoolPost",
+ cascade="all,delete-orphan",
+ lazy="joined",
+ back_populates="pool",
+ order_by="PoolPost.order",
+ collection_class=ordering_list("order"),
+ )
+ posts = association_proxy("_posts", "post")
post_count = sa.orm.column_property(
(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PoolPost.post_id)])
+ [sa.sql.expression.func.count(PoolPost.post_id)]
+ )
.where(PoolPost.pool_id == pool_id)
.as_scalar()
),
- deferred=True)
+ deferred=True,
+ )
first_name = sa.orm.column_property(
(
@@ -95,9 +104,10 @@ class Pool(Base):
.limit(1)
.as_scalar()
),
- deferred=True)
+ deferred=True,
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/pool_category.py b/server/szurubooru/model/pool_category.py
index a54e5a66..f527d2d8 100644
--- a/server/szurubooru/model/pool_category.py
+++ b/server/szurubooru/model/pool_category.py
@@ -1,29 +1,34 @@
from typing import Optional
+
import sqlalchemy as sa
+
from szurubooru.model.base import Base
from szurubooru.model.pool import Pool
class PoolCategory(Base):
- __tablename__ = 'pool_category'
+ __tablename__ = "pool_category"
- pool_category_id = sa.Column('id', sa.Integer, primary_key=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- name = sa.Column('name', sa.Unicode(32), nullable=False)
+ pool_category_id = sa.Column("id", sa.Integer, primary_key=True)
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ name = sa.Column("name", sa.Unicode(32), nullable=False)
color = sa.Column(
- 'color', sa.Unicode(32), nullable=False, default='#000000')
- default = sa.Column('default', sa.Boolean, nullable=False, default=False)
+ "color", sa.Unicode(32), nullable=False, default="#000000"
+ )
+ default = sa.Column("default", sa.Boolean, nullable=False, default=False)
def __init__(self, name: Optional[str] = None) -> None:
self.name = name
pool_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count('Pool.pool_id')])
+ [sa.sql.expression.func.count("Pool.pool_id")]
+ )
.where(Pool.category_id == pool_category_id)
- .correlate_except(sa.table('Pool')))
+ .correlate_except(sa.table("Pool"))
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/post.py b/server/szurubooru/model/post.py
index 11da8660..9e06a743 100644
--- a/server/szurubooru/model/post.py
+++ b/server/szurubooru/model/post.py
@@ -1,122 +1,135 @@
from typing import List
+
import sqlalchemy as sa
-from szurubooru.model.base import Base
-from szurubooru.model.comment import Comment
-from szurubooru.model.pool import PoolPost
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.orderinglist import ordering_list
+from szurubooru.model.base import Base
+from szurubooru.model.comment import Comment
+from szurubooru.model.pool import PoolPost
+
class PostFeature(Base):
- __tablename__ = 'post_feature'
+ __tablename__ = "post_feature"
- post_feature_id = sa.Column('id', sa.Integer, primary_key=True)
+ post_feature_id = sa.Column("id", sa.Integer, primary_key=True)
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
nullable=False,
- index=True)
+ index=True,
+ )
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id'),
+ sa.ForeignKey("user.id"),
nullable=False,
- index=True)
- time = sa.Column('time', sa.DateTime, nullable=False)
+ index=True,
+ )
+ time = sa.Column("time", sa.DateTime, nullable=False)
- post = sa.orm.relationship('Post') # type: Post
+ post = sa.orm.relationship("Post") # type: Post
user = sa.orm.relationship(
- 'User',
- backref=sa.orm.backref(
- 'post_features', cascade='all, delete-orphan'))
+ "User",
+ backref=sa.orm.backref("post_features", cascade="all, delete-orphan"),
+ )
class PostScore(Base):
- __tablename__ = 'post_score'
+ __tablename__ = "post_score"
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id'),
+ sa.ForeignKey("user.id"),
primary_key=True,
nullable=False,
- index=True)
- time = sa.Column('time', sa.DateTime, nullable=False)
- score = sa.Column('score', sa.Integer, nullable=False)
+ index=True,
+ )
+ time = sa.Column("time", sa.DateTime, nullable=False)
+ score = sa.Column("score", sa.Integer, nullable=False)
- post = sa.orm.relationship('Post')
+ post = sa.orm.relationship("Post")
user = sa.orm.relationship(
- 'User',
- backref=sa.orm.backref('post_scores', cascade='all, delete-orphan'))
+ "User",
+ backref=sa.orm.backref("post_scores", cascade="all, delete-orphan"),
+ )
class PostFavorite(Base):
- __tablename__ = 'post_favorite'
+ __tablename__ = "post_favorite"
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id'),
+ sa.ForeignKey("user.id"),
primary_key=True,
nullable=False,
- index=True)
- time = sa.Column('time', sa.DateTime, nullable=False)
+ index=True,
+ )
+ time = sa.Column("time", sa.DateTime, nullable=False)
- post = sa.orm.relationship('Post')
+ post = sa.orm.relationship("Post")
user = sa.orm.relationship(
- 'User',
- backref=sa.orm.backref('post_favorites', cascade='all, delete-orphan'))
+ "User",
+ backref=sa.orm.backref("post_favorites", cascade="all, delete-orphan"),
+ )
class PostNote(Base):
- __tablename__ = 'post_note'
+ __tablename__ = "post_note"
- post_note_id = sa.Column('id', sa.Integer, primary_key=True)
+ post_note_id = sa.Column("id", sa.Integer, primary_key=True)
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
nullable=False,
- index=True)
- polygon = sa.Column('polygon', sa.PickleType, nullable=False)
- text = sa.Column('text', sa.UnicodeText, nullable=False)
+ index=True,
+ )
+ polygon = sa.Column("polygon", sa.PickleType, nullable=False)
+ text = sa.Column("text", sa.UnicodeText, nullable=False)
- post = sa.orm.relationship('Post')
+ post = sa.orm.relationship("Post")
class PostRelation(Base):
- __tablename__ = 'post_relation'
+ __tablename__ = "post_relation"
parent_id = sa.Column(
- 'parent_id',
+ "parent_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
child_id = sa.Column(
- 'child_id',
+ "child_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
def __init__(self, parent_id: int, child_id: int) -> None:
self.parent_id = parent_id
@@ -124,22 +137,24 @@ class PostRelation(Base):
class PostTag(Base):
- __tablename__ = 'post_tag'
+ __tablename__ = "post_tag"
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
tag_id = sa.Column(
- 'tag_id',
+ "tag_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
primary_key=True,
nullable=False,
- index=True)
+ index=True,
+ )
def __init__(self, post_id: int, tag_id: int) -> None:
self.post_id = post_id
@@ -147,105 +162,119 @@ class PostTag(Base):
class PostSignature(Base):
- __tablename__ = 'post_signature'
+ __tablename__ = "post_signature"
post_id = sa.Column(
- 'post_id',
+ "post_id",
sa.Integer,
- sa.ForeignKey('post.id'),
+ sa.ForeignKey("post.id"),
primary_key=True,
nullable=False,
- index=True)
- signature = sa.Column('signature', sa.LargeBinary, nullable=False)
+ index=True,
+ )
+ signature = sa.Column("signature", sa.LargeBinary, nullable=False)
words = sa.Column(
- 'words',
+ "words",
sa.dialects.postgresql.ARRAY(sa.Integer, dimensions=1),
nullable=False,
- index=True)
+ index=True,
+ )
- post = sa.orm.relationship('Post')
+ post = sa.orm.relationship("Post")
class Post(Base):
- __tablename__ = 'post'
+ __tablename__ = "post"
- SAFETY_SAFE = 'safe'
- SAFETY_SKETCHY = 'sketchy'
- SAFETY_UNSAFE = 'unsafe'
+ SAFETY_SAFE = "safe"
+ SAFETY_SKETCHY = "sketchy"
+ SAFETY_UNSAFE = "unsafe"
- TYPE_IMAGE = 'image'
- TYPE_ANIMATION = 'animation'
- TYPE_VIDEO = 'video'
- TYPE_FLASH = 'flash'
+ TYPE_IMAGE = "image"
+ TYPE_ANIMATION = "animation"
+ TYPE_VIDEO = "video"
+ TYPE_FLASH = "flash"
- FLAG_LOOP = 'loop'
- FLAG_SOUND = 'sound'
+ FLAG_LOOP = "loop"
+ FLAG_SOUND = "sound"
# basic meta
- post_id = sa.Column('id', sa.Integer, primary_key=True)
+ post_id = sa.Column("id", sa.Integer, primary_key=True)
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id', ondelete='SET NULL'),
+ sa.ForeignKey("user.id", ondelete="SET NULL"),
nullable=True,
- index=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_edit_time = sa.Column('last_edit_time', sa.DateTime)
- safety = sa.Column('safety', sa.Unicode(32), nullable=False)
- source = sa.Column('source', sa.Unicode(2048))
- flags_string = sa.Column('flags', sa.Unicode(32), default='')
+ index=True,
+ )
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_edit_time = sa.Column("last_edit_time", sa.DateTime)
+ safety = sa.Column("safety", sa.Unicode(32), nullable=False)
+ source = sa.Column("source", sa.Unicode(2048))
+ flags_string = sa.Column("flags", sa.Unicode(32), default="")
# content description
- type = sa.Column('type', sa.Unicode(32), nullable=False)
- checksum = sa.Column('checksum', sa.Unicode(64), nullable=False)
- file_size = sa.Column('file_size', sa.Integer)
- canvas_width = sa.Column('image_width', sa.Integer)
- canvas_height = sa.Column('image_height', sa.Integer)
- mime_type = sa.Column('mime-type', sa.Unicode(32), nullable=False)
+ type = sa.Column("type", sa.Unicode(32), nullable=False)
+ checksum = sa.Column("checksum", sa.Unicode(64), nullable=False)
+ file_size = sa.Column("file_size", sa.Integer)
+ canvas_width = sa.Column("image_width", sa.Integer)
+ canvas_height = sa.Column("image_height", sa.Integer)
+ mime_type = sa.Column("mime-type", sa.Unicode(32), nullable=False)
# foreign tables
- user = sa.orm.relationship('User')
- tags = sa.orm.relationship('Tag', backref='posts', secondary='post_tag')
+ user = sa.orm.relationship("User")
+ tags = sa.orm.relationship("Tag", backref="posts", secondary="post_tag")
signature = sa.orm.relationship(
- 'PostSignature',
+ "PostSignature",
uselist=False,
- cascade='all, delete, delete-orphan',
- lazy='joined')
+ cascade="all, delete, delete-orphan",
+ lazy="joined",
+ )
relations = sa.orm.relationship(
- 'Post',
- secondary='post_relation',
+ "Post",
+ secondary="post_relation",
primaryjoin=post_id == PostRelation.parent_id,
- secondaryjoin=post_id == PostRelation.child_id, lazy='joined',
- backref='related_by')
+ secondaryjoin=post_id == PostRelation.child_id,
+ lazy="joined",
+ backref="related_by",
+ )
features = sa.orm.relationship(
- 'PostFeature', cascade='all, delete-orphan', lazy='joined')
+ "PostFeature", cascade="all, delete-orphan", lazy="joined"
+ )
scores = sa.orm.relationship(
- 'PostScore', cascade='all, delete-orphan', lazy='joined')
+ "PostScore", cascade="all, delete-orphan", lazy="joined"
+ )
favorited_by = sa.orm.relationship(
- 'PostFavorite', cascade='all, delete-orphan', lazy='joined')
+ "PostFavorite", cascade="all, delete-orphan", lazy="joined"
+ )
notes = sa.orm.relationship(
- 'PostNote', cascade='all, delete-orphan', lazy='joined')
- comments = sa.orm.relationship('Comment', cascade='all, delete-orphan')
+ "PostNote", cascade="all, delete-orphan", lazy="joined"
+ )
+ comments = sa.orm.relationship("Comment", cascade="all, delete-orphan")
_pools = sa.orm.relationship(
- 'PoolPost',
- cascade='all,delete-orphan',
- lazy='select',
- order_by='PoolPost.order',
- back_populates='post')
- pools = association_proxy('_pools', 'pool')
+ "PoolPost",
+ cascade="all,delete-orphan",
+ lazy="select",
+ order_by="PoolPost.order",
+ back_populates="post",
+ )
+ pools = association_proxy("_pools", "pool")
# dynamic columns
tag_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostTag.tag_id)])
+ [sa.sql.expression.func.count(PostTag.tag_id)]
+ )
.where(PostTag.post_id == post_id)
- .correlate_except(PostTag))
+ .correlate_except(PostTag)
+ )
canvas_area = sa.orm.column_property(canvas_width * canvas_height)
canvas_aspect_ratio = sa.orm.column_property(
- sa.sql.expression.func.cast(canvas_width, sa.Float) /
- sa.sql.expression.func.cast(canvas_height, sa.Float))
+ sa.sql.expression.func.cast(canvas_width, sa.Float)
+ / sa.sql.expression.func.cast(canvas_height, sa.Float)
+ )
@property
def is_featured(self) -> bool:
@@ -253,81 +282,106 @@ class Post(Base):
sa.orm.object_session(self)
.query(PostFeature)
.order_by(PostFeature.time.desc())
- .first())
+ .first()
+ )
return featured_post and featured_post.post_id == self.post_id
@hybrid_property
def flags(self) -> List[str]:
- return sorted([x for x in self.flags_string.split(',') if x])
+ return sorted([x for x in self.flags_string.split(",") if x])
@flags.setter
def flags(self, data: List[str]) -> None:
- self.flags_string = ','.join([x for x in data if x])
+ self.flags_string = ",".join([x for x in data if x])
score = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.coalesce(
- sa.sql.expression.func.sum(PostScore.score), 0)])
+ [
+ sa.sql.expression.func.coalesce(
+ sa.sql.expression.func.sum(PostScore.score), 0
+ )
+ ]
+ )
.where(PostScore.post_id == post_id)
- .correlate_except(PostScore))
+ .correlate_except(PostScore)
+ )
favorite_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostFavorite.post_id)])
+ [sa.sql.expression.func.count(PostFavorite.post_id)]
+ )
.where(PostFavorite.post_id == post_id)
- .correlate_except(PostFavorite))
+ .correlate_except(PostFavorite)
+ )
last_favorite_time = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.max(PostFavorite.time)])
+ [sa.sql.expression.func.max(PostFavorite.time)]
+ )
.where(PostFavorite.post_id == post_id)
- .correlate_except(PostFavorite))
+ .correlate_except(PostFavorite)
+ )
feature_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostFeature.post_id)])
+ [sa.sql.expression.func.count(PostFeature.post_id)]
+ )
.where(PostFeature.post_id == post_id)
- .correlate_except(PostFeature))
+ .correlate_except(PostFeature)
+ )
last_feature_time = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.max(PostFeature.time)])
+ [sa.sql.expression.func.max(PostFeature.time)]
+ )
.where(PostFeature.post_id == post_id)
- .correlate_except(PostFeature))
+ .correlate_except(PostFeature)
+ )
comment_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(Comment.post_id)])
+ [sa.sql.expression.func.count(Comment.post_id)]
+ )
.where(Comment.post_id == post_id)
- .correlate_except(Comment))
+ .correlate_except(Comment)
+ )
last_comment_creation_time = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.max(Comment.creation_time)])
+ [sa.sql.expression.func.max(Comment.creation_time)]
+ )
.where(Comment.post_id == post_id)
- .correlate_except(Comment))
+ .correlate_except(Comment)
+ )
last_comment_edit_time = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.max(Comment.last_edit_time)])
+ [sa.sql.expression.func.max(Comment.last_edit_time)]
+ )
.where(Comment.post_id == post_id)
- .correlate_except(Comment))
+ .correlate_except(Comment)
+ )
note_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostNote.post_id)])
+ [sa.sql.expression.func.count(PostNote.post_id)]
+ )
.where(PostNote.post_id == post_id)
- .correlate_except(PostNote))
+ .correlate_except(PostNote)
+ )
relation_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostRelation.child_id)])
+ [sa.sql.expression.func.count(PostRelation.child_id)]
+ )
.where(
- (PostRelation.parent_id == post_id) |
- (PostRelation.child_id == post_id))
- .correlate_except(PostRelation))
+ (PostRelation.parent_id == post_id)
+ | (PostRelation.child_id == post_id)
+ )
+ .correlate_except(PostRelation)
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/snapshot.py b/server/szurubooru/model/snapshot.py
index 7ee3ba6f..d4e3fc8c 100644
--- a/server/szurubooru/model/snapshot.py
+++ b/server/szurubooru/model/snapshot.py
@@ -1,29 +1,32 @@
import sqlalchemy as sa
+
from szurubooru.model.base import Base
class Snapshot(Base):
- __tablename__ = 'snapshot'
+ __tablename__ = "snapshot"
- OPERATION_CREATED = 'created'
- OPERATION_MODIFIED = 'modified'
- OPERATION_DELETED = 'deleted'
- OPERATION_MERGED = 'merged'
+ OPERATION_CREATED = "created"
+ OPERATION_MODIFIED = "modified"
+ OPERATION_DELETED = "deleted"
+ OPERATION_MERGED = "merged"
- snapshot_id = sa.Column('id', sa.Integer, primary_key=True)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- operation = sa.Column('operation', sa.Unicode(16), nullable=False)
+ snapshot_id = sa.Column("id", sa.Integer, primary_key=True)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ operation = sa.Column("operation", sa.Unicode(16), nullable=False)
resource_type = sa.Column(
- 'resource_type', sa.Unicode(32), nullable=False, index=True)
+ "resource_type", sa.Unicode(32), nullable=False, index=True
+ )
resource_pkey = sa.Column(
- 'resource_pkey', sa.Integer, nullable=False, index=True)
- resource_name = sa.Column(
- 'resource_name', sa.Unicode(128), nullable=False)
+ "resource_pkey", sa.Integer, nullable=False, index=True
+ )
+ resource_name = sa.Column("resource_name", sa.Unicode(128), nullable=False)
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id', ondelete='set null'),
- nullable=True)
- data = sa.Column('data', sa.PickleType)
+ sa.ForeignKey("user.id", ondelete="set null"),
+ nullable=True,
+ )
+ data = sa.Column("data", sa.PickleType)
- user = sa.orm.relationship('User')
+ user = sa.orm.relationship("User")
diff --git a/server/szurubooru/model/tag.py b/server/szurubooru/model/tag.py
index 2af87e74..61dbf83e 100644
--- a/server/szurubooru/model/tag.py
+++ b/server/szurubooru/model/tag.py
@@ -1,25 +1,28 @@
import sqlalchemy as sa
+
from szurubooru.model.base import Base
from szurubooru.model.post import PostTag
class TagSuggestion(Base):
- __tablename__ = 'tag_suggestion'
+ __tablename__ = "tag_suggestion"
parent_id = sa.Column(
- 'parent_id',
+ "parent_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
nullable=False,
primary_key=True,
- index=True)
+ index=True,
+ )
child_id = sa.Column(
- 'child_id',
+ "child_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
nullable=False,
primary_key=True,
- index=True)
+ index=True,
+ )
def __init__(self, parent_id: int, child_id: int) -> None:
self.parent_id = parent_id
@@ -27,22 +30,24 @@ class TagSuggestion(Base):
class TagImplication(Base):
- __tablename__ = 'tag_implication'
+ __tablename__ = "tag_implication"
parent_id = sa.Column(
- 'parent_id',
+ "parent_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
nullable=False,
primary_key=True,
- index=True)
+ index=True,
+ )
child_id = sa.Column(
- 'child_id',
+ "child_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
nullable=False,
primary_key=True,
- index=True)
+ index=True,
+ )
def __init__(self, parent_id: int, child_id: int) -> None:
self.parent_id = parent_id
@@ -50,17 +55,18 @@ class TagImplication(Base):
class TagName(Base):
- __tablename__ = 'tag_name'
+ __tablename__ = "tag_name"
- tag_name_id = sa.Column('tag_name_id', sa.Integer, primary_key=True)
+ tag_name_id = sa.Column("tag_name_id", sa.Integer, primary_key=True)
tag_id = sa.Column(
- 'tag_id',
+ "tag_id",
sa.Integer,
- sa.ForeignKey('tag.id'),
+ sa.ForeignKey("tag.id"),
nullable=False,
- index=True)
- name = sa.Column('name', sa.Unicode(128), nullable=False, unique=True)
- order = sa.Column('ord', sa.Integer, nullable=False, index=True)
+ index=True,
+ )
+ name = sa.Column("name", sa.Unicode(128), nullable=False, unique=True)
+ order = sa.Column("ord", sa.Integer, nullable=False, index=True)
def __init__(self, name: str, order: int) -> None:
self.name = name
@@ -68,44 +74,50 @@ class TagName(Base):
class Tag(Base):
- __tablename__ = 'tag'
+ __tablename__ = "tag"
- tag_id = sa.Column('id', sa.Integer, primary_key=True)
+ tag_id = sa.Column("id", sa.Integer, primary_key=True)
category_id = sa.Column(
- 'category_id',
+ "category_id",
sa.Integer,
- sa.ForeignKey('tag_category.id'),
+ sa.ForeignKey("tag_category.id"),
nullable=False,
- index=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_edit_time = sa.Column('last_edit_time', sa.DateTime)
- description = sa.Column('description', sa.UnicodeText, default=None)
+ index=True,
+ )
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_edit_time = sa.Column("last_edit_time", sa.DateTime)
+ description = sa.Column("description", sa.UnicodeText, default=None)
- category = sa.orm.relationship('TagCategory', lazy='joined')
+ category = sa.orm.relationship("TagCategory", lazy="joined")
names = sa.orm.relationship(
- 'TagName',
- cascade='all,delete-orphan',
- lazy='joined',
- order_by='TagName.order')
+ "TagName",
+ cascade="all,delete-orphan",
+ lazy="joined",
+ order_by="TagName.order",
+ )
suggestions = sa.orm.relationship(
- 'Tag',
- secondary='tag_suggestion',
+ "Tag",
+ secondary="tag_suggestion",
primaryjoin=tag_id == TagSuggestion.parent_id,
secondaryjoin=tag_id == TagSuggestion.child_id,
- lazy='joined')
+ lazy="joined",
+ )
implications = sa.orm.relationship(
- 'Tag',
- secondary='tag_implication',
+ "Tag",
+ secondary="tag_implication",
primaryjoin=tag_id == TagImplication.parent_id,
secondaryjoin=tag_id == TagImplication.child_id,
- lazy='joined')
+ lazy="joined",
+ )
post_count = sa.orm.column_property(
sa.sql.expression.select(
- [sa.sql.expression.func.count(PostTag.post_id)])
+ [sa.sql.expression.func.count(PostTag.post_id)]
+ )
.where(PostTag.tag_id == tag_id)
- .correlate_except(PostTag))
+ .correlate_except(PostTag)
+ )
first_name = sa.orm.column_property(
(
@@ -115,27 +127,32 @@ class Tag(Base):
.limit(1)
.as_scalar()
),
- deferred=True)
+ deferred=True,
+ )
suggestion_count = sa.orm.column_property(
(
sa.sql.expression.select(
- [sa.sql.expression.func.count(TagSuggestion.child_id)])
+ [sa.sql.expression.func.count(TagSuggestion.child_id)]
+ )
.where(TagSuggestion.parent_id == tag_id)
.as_scalar()
),
- deferred=True)
+ deferred=True,
+ )
implication_count = sa.orm.column_property(
(
sa.sql.expression.select(
- [sa.sql.expression.func.count(TagImplication.child_id)])
+ [sa.sql.expression.func.count(TagImplication.child_id)]
+ )
.where(TagImplication.parent_id == tag_id)
.as_scalar()
),
- deferred=True)
+ deferred=True,
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/tag_category.py b/server/szurubooru/model/tag_category.py
index 2c961ed6..1faf74ca 100644
--- a/server/szurubooru/model/tag_category.py
+++ b/server/szurubooru/model/tag_category.py
@@ -1,28 +1,32 @@
from typing import Optional
+
import sqlalchemy as sa
+
from szurubooru.model.base import Base
from szurubooru.model.tag import Tag
class TagCategory(Base):
- __tablename__ = 'tag_category'
+ __tablename__ = "tag_category"
- tag_category_id = sa.Column('id', sa.Integer, primary_key=True)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- name = sa.Column('name', sa.Unicode(32), nullable=False)
+ tag_category_id = sa.Column("id", sa.Integer, primary_key=True)
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ name = sa.Column("name", sa.Unicode(32), nullable=False)
color = sa.Column(
- 'color', sa.Unicode(32), nullable=False, default='#000000')
- default = sa.Column('default', sa.Boolean, nullable=False, default=False)
+ "color", sa.Unicode(32), nullable=False, default="#000000"
+ )
+ default = sa.Column("default", sa.Boolean, nullable=False, default=False)
def __init__(self, name: Optional[str] = None) -> None:
self.name = name
tag_count = sa.orm.column_property(
- sa.sql.expression.select([sa.sql.expression.func.count('Tag.tag_id')])
+ sa.sql.expression.select([sa.sql.expression.func.count("Tag.tag_id")])
.where(Tag.category_id == tag_category_id)
- .correlate_except(sa.table('Tag')))
+ .correlate_except(sa.table("Tag"))
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
diff --git a/server/szurubooru/model/user.py b/server/szurubooru/model/user.py
index 2d599e85..41a9b30b 100644
--- a/server/szurubooru/model/user.py
+++ b/server/szurubooru/model/user.py
@@ -1,110 +1,123 @@
import sqlalchemy as sa
+
from szurubooru.model.base import Base
-from szurubooru.model.post import Post, PostScore, PostFavorite
from szurubooru.model.comment import Comment
+from szurubooru.model.post import Post, PostFavorite, PostScore
class User(Base):
- __tablename__ = 'user'
+ __tablename__ = "user"
- AVATAR_GRAVATAR = 'gravatar'
- AVATAR_MANUAL = 'manual'
+ AVATAR_GRAVATAR = "gravatar"
+ AVATAR_MANUAL = "manual"
- RANK_ANONYMOUS = 'anonymous'
- RANK_RESTRICTED = 'restricted'
- RANK_REGULAR = 'regular'
- RANK_POWER = 'power'
- RANK_MODERATOR = 'moderator'
- RANK_ADMINISTRATOR = 'administrator'
- RANK_NOBODY = 'nobody' # unattainable, used for privileges
+ RANK_ANONYMOUS = "anonymous"
+ RANK_RESTRICTED = "restricted"
+ RANK_REGULAR = "regular"
+ RANK_POWER = "power"
+ RANK_MODERATOR = "moderator"
+ RANK_ADMINISTRATOR = "administrator"
+ RANK_NOBODY = "nobody" # unattainable, used for privileges
- user_id = sa.Column('id', sa.Integer, primary_key=True)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_login_time = sa.Column('last_login_time', sa.DateTime)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
- name = sa.Column('name', sa.Unicode(50), nullable=False, unique=True)
- password_hash = sa.Column('password_hash', sa.Unicode(128), nullable=False)
- password_salt = sa.Column('password_salt', sa.Unicode(32))
+ user_id = sa.Column("id", sa.Integer, primary_key=True)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_login_time = sa.Column("last_login_time", sa.DateTime)
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
+ name = sa.Column("name", sa.Unicode(50), nullable=False, unique=True)
+ password_hash = sa.Column("password_hash", sa.Unicode(128), nullable=False)
+ password_salt = sa.Column("password_salt", sa.Unicode(32))
password_revision = sa.Column(
- 'password_revision', sa.SmallInteger, default=0, nullable=False)
- email = sa.Column('email', sa.Unicode(64), nullable=True)
- rank = sa.Column('rank', sa.Unicode(32), nullable=False)
+ "password_revision", sa.SmallInteger, default=0, nullable=False
+ )
+ email = sa.Column("email", sa.Unicode(64), nullable=True)
+ rank = sa.Column("rank", sa.Unicode(32), nullable=False)
avatar_style = sa.Column(
- 'avatar_style', sa.Unicode(32), nullable=False,
- default=AVATAR_GRAVATAR)
+ "avatar_style", sa.Unicode(32), nullable=False, default=AVATAR_GRAVATAR
+ )
- comments = sa.orm.relationship('Comment')
+ comments = sa.orm.relationship("Comment")
@property
def post_count(self) -> int:
from szurubooru.db import session
+
return (
- session
- .query(sa.sql.expression.func.sum(1))
+ session.query(sa.sql.expression.func.sum(1))
.filter(Post.user_id == self.user_id)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
@property
def comment_count(self) -> int:
from szurubooru.db import session
+
return (
- session
- .query(sa.sql.expression.func.sum(1))
+ session.query(sa.sql.expression.func.sum(1))
.filter(Comment.user_id == self.user_id)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
@property
def favorite_post_count(self) -> int:
from szurubooru.db import session
+
return (
- session
- .query(sa.sql.expression.func.sum(1))
+ session.query(sa.sql.expression.func.sum(1))
.filter(PostFavorite.user_id == self.user_id)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
@property
def liked_post_count(self) -> int:
from szurubooru.db import session
+
return (
- session
- .query(sa.sql.expression.func.sum(1))
+ session.query(sa.sql.expression.func.sum(1))
.filter(PostScore.user_id == self.user_id)
.filter(PostScore.score == 1)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
@property
def disliked_post_count(self) -> int:
from szurubooru.db import session
+
return (
- session
- .query(sa.sql.expression.func.sum(1))
+ session.query(sa.sql.expression.func.sum(1))
.filter(PostScore.user_id == self.user_id)
.filter(PostScore.score == -1)
- .one()[0] or 0)
+ .one()[0]
+ or 0
+ )
__mapper_args__ = {
- 'version_id_col': version,
- 'version_id_generator': False,
+ "version_id_col": version,
+ "version_id_generator": False,
}
class UserToken(Base):
- __tablename__ = 'user_token'
+ __tablename__ = "user_token"
- user_token_id = sa.Column('id', sa.Integer, primary_key=True)
+ user_token_id = sa.Column("id", sa.Integer, primary_key=True)
user_id = sa.Column(
- 'user_id',
+ "user_id",
sa.Integer,
- sa.ForeignKey('user.id', ondelete='CASCADE'),
+ sa.ForeignKey("user.id", ondelete="CASCADE"),
nullable=False,
- index=True)
- token = sa.Column('token', sa.Unicode(36), nullable=False)
- note = sa.Column('note', sa.Unicode(128), nullable=True)
- enabled = sa.Column('enabled', sa.Boolean, nullable=False, default=True)
- expiration_time = sa.Column('expiration_time', sa.DateTime, nullable=True)
- creation_time = sa.Column('creation_time', sa.DateTime, nullable=False)
- last_edit_time = sa.Column('last_edit_time', sa.DateTime)
- last_usage_time = sa.Column('last_usage_time', sa.DateTime)
- version = sa.Column('version', sa.Integer, default=1, nullable=False)
+ index=True,
+ )
+ token = sa.Column("token", sa.Unicode(36), nullable=False)
+ note = sa.Column("note", sa.Unicode(128), nullable=True)
+ enabled = sa.Column("enabled", sa.Boolean, nullable=False, default=True)
+ expiration_time = sa.Column("expiration_time", sa.DateTime, nullable=True)
+ creation_time = sa.Column("creation_time", sa.DateTime, nullable=False)
+ last_edit_time = sa.Column("last_edit_time", sa.DateTime)
+ last_usage_time = sa.Column("last_usage_time", sa.DateTime)
+ version = sa.Column("version", sa.Integer, default=1, nullable=False)
- user = sa.orm.relationship('User')
+ user = sa.orm.relationship("User")
diff --git a/server/szurubooru/model/util.py b/server/szurubooru/model/util.py
index ab86100f..bece9e96 100644
--- a/server/szurubooru/model/util.py
+++ b/server/szurubooru/model/util.py
@@ -1,17 +1,19 @@
-from typing import Tuple, Any, Dict, Callable, Union, Optional
+from typing import Any, Callable, Dict, Optional, Tuple, Union
+
import sqlalchemy as sa
+
from szurubooru.model.base import Base
from szurubooru.model.user import User
def get_resource_info(entity: Base) -> Tuple[Any, Any, Union[str, int]]:
serializers = {
- 'tag': lambda tag: tag.first_name,
- 'tag_category': lambda category: category.name,
- 'comment': lambda comment: comment.comment_id,
- 'post': lambda post: post.post_id,
- 'pool': lambda pool: pool.pool_id,
- 'pool_category': lambda category: category.name,
+ "tag": lambda tag: tag.first_name,
+ "tag_category": lambda category: category.name,
+ "comment": lambda comment: comment.comment_id,
+ "post": lambda post: post.post_id,
+ "pool": lambda pool: pool.pool_id,
+ "pool_category": lambda category: category.name,
} # type: Dict[str, Callable[[Base], Any]]
resource_type = entity.__table__.name
@@ -31,14 +33,15 @@ def get_resource_info(entity: Base) -> Tuple[Any, Any, Union[str, int]]:
def get_aux_entity(
- session: Any,
- get_table_info: Callable[[Base], Tuple[Base, Callable[[Base], Any]]],
- entity: Base,
- user: User) -> Optional[Base]:
+ session: Any,
+ get_table_info: Callable[[Base], Tuple[Base, Callable[[Base], Any]]],
+ entity: Base,
+ user: User,
+) -> Optional[Base]:
table, get_column = get_table_info(entity)
return (
- session
- .query(table)
+ session.query(table)
.filter(get_column(table) == get_column(entity))
.filter(table.user_id == user.user_id)
- .one_or_none())
+ .one_or_none()
+ )
diff --git a/server/szurubooru/rest/__init__.py b/server/szurubooru/rest/__init__.py
index d6b3ef28..6db22e28 100644
--- a/server/szurubooru/rest/__init__.py
+++ b/server/szurubooru/rest/__init__.py
@@ -1,3 +1,3 @@
+import szurubooru.rest.routes
from szurubooru.rest.app import application
from szurubooru.rest.context import Context, Response
-import szurubooru.rest.routes
diff --git a/server/szurubooru/rest/app.py b/server/szurubooru/rest/app.py
index 8c9efba1..a6f10fbc 100644
--- a/server/szurubooru/rest/app.py
+++ b/server/szurubooru/rest/app.py
@@ -1,20 +1,21 @@
-import urllib.parse
import cgi
import json
import re
-from typing import Dict, Any, Callable, Tuple
+import urllib.parse
from datetime import datetime
+from typing import Any, Callable, Dict, Tuple
+
from szurubooru import db
from szurubooru.func import util
-from szurubooru.rest import errors, middleware, routes, context
+from szurubooru.rest import context, errors, middleware, routes
def _json_serializer(obj: Any) -> str:
- ''' JSON serializer for objects not serializable by default JSON code '''
+ """ JSON serializer for objects not serializable by default JSON code """
if isinstance(obj, datetime):
- serial = obj.isoformat('T') + 'Z'
+ serial = obj.isoformat("T") + "Z"
return serial
- raise TypeError('Type not serializable')
+ raise TypeError("Type not serializable")
def _dump_json(obj: Any) -> str:
@@ -24,71 +25,75 @@ def _dump_json(obj: Any) -> str:
def _get_headers(env: Dict[str, Any]) -> Dict[str, str]:
headers = {} # type: Dict[str, str]
for key, value in env.items():
- if key.startswith('HTTP_'):
+ if key.startswith("HTTP_"):
key = util.snake_case_to_upper_train_case(key[5:])
headers[key] = value
return headers
def _create_context(env: Dict[str, Any]) -> context.Context:
- method = env['REQUEST_METHOD']
- path = '/' + env['PATH_INFO'].lstrip('/')
- path = path.encode('latin-1').decode('utf-8') # PEP-3333
+ method = env["REQUEST_METHOD"]
+ path = "/" + env["PATH_INFO"].lstrip("/")
+ path = path.encode("latin-1").decode("utf-8") # PEP-3333
headers = _get_headers(env)
files = {}
- params = dict(urllib.parse.parse_qsl(env.get('QUERY_STRING', '')))
+ params = dict(urllib.parse.parse_qsl(env.get("QUERY_STRING", "")))
- if 'multipart' in env.get('CONTENT_TYPE', ''):
- form = cgi.FieldStorage(fp=env['wsgi.input'], environ=env)
+ if "multipart" in env.get("CONTENT_TYPE", ""):
+ form = cgi.FieldStorage(fp=env["wsgi.input"], environ=env)
if not form.list:
raise errors.HttpBadRequest(
- 'ValidationError', 'No files attached.')
- body = form.getvalue('metadata')
+ "ValidationError", "No files attached."
+ )
+ body = form.getvalue("metadata")
for key in form:
files[key] = form.getvalue(key)
else:
- body = env['wsgi.input'].read()
+ body = env["wsgi.input"].read()
if body:
try:
if isinstance(body, bytes):
- body = body.decode('utf-8')
+ body = body.decode("utf-8")
for key, value in json.loads(body).items():
params[key] = value
except (ValueError, UnicodeDecodeError):
raise errors.HttpBadRequest(
- 'ValidationError',
- 'Could not decode the request body. The JSON '
- 'was incorrect or was not encoded as UTF-8.')
+ "ValidationError",
+ "Could not decode the request body. The JSON "
+ "was incorrect or was not encoded as UTF-8.",
+ )
return context.Context(env, method, path, headers, params, files)
def application(
- env: Dict[str, Any],
- start_response: Callable[[str, Any], Any]) -> Tuple[bytes]:
+ env: Dict[str, Any], start_response: Callable[[str, Any], Any]
+) -> Tuple[bytes]:
try:
ctx = _create_context(env)
- if 'application/json' not in ctx.get_header('Accept'):
+ if "application/json" not in ctx.get_header("Accept"):
raise errors.HttpNotAcceptable(
- 'ValidationError',
- 'This API only supports JSON responses.')
+ "ValidationError", "This API only supports JSON responses."
+ )
for url, allowed_methods in routes.routes.items():
match = re.fullmatch(url, ctx.url)
if match:
if ctx.method not in allowed_methods:
raise errors.HttpMethodNotAllowed(
- 'ValidationError',
- 'Allowed methods: %r' % allowed_methods)
+ "ValidationError",
+ "Allowed methods: %r" % allowed_methods,
+ )
handler = allowed_methods[ctx.method]
break
else:
raise errors.HttpNotFound(
- 'ValidationError',
- 'Requested path ' + ctx.url + ' was not found.')
+ "ValidationError",
+ "Requested path " + ctx.url + " was not found.",
+ )
try:
ctx.session = db.session()
@@ -106,8 +111,8 @@ def application(
finally:
db.session.remove()
- start_response('200', [('content-type', 'application/json')])
- return (_dump_json(response).encode('utf-8'),)
+ start_response("200", [("content-type", "application/json")])
+ return (_dump_json(response).encode("utf-8"),)
except Exception as ex:
for exception_type, ex_handler in errors.error_handlers.items():
@@ -117,14 +122,15 @@ def application(
except errors.BaseHttpError as ex:
start_response(
- '%d %s' % (ex.code, ex.reason),
- [('content-type', 'application/json')])
+ "%d %s" % (ex.code, ex.reason),
+ [("content-type", "application/json")],
+ )
blob = {
- 'name': ex.name,
- 'title': ex.title,
- 'description': ex.description,
+ "name": ex.name,
+ "title": ex.title,
+ "description": ex.description,
}
if ex.extra_fields is not None:
for key, value in ex.extra_fields.items():
blob[key] = value
- return (_dump_json(blob).encode('utf-8'),)
+ return (_dump_json(blob).encode("utf-8"),)
diff --git a/server/szurubooru/rest/context.py b/server/szurubooru/rest/context.py
index 3e902436..40ba0bcb 100644
--- a/server/szurubooru/rest/context.py
+++ b/server/szurubooru/rest/context.py
@@ -1,7 +1,7 @@
-from typing import Any, Union, List, Dict, Optional, cast
-from szurubooru import model, errors
-from szurubooru.func import net, file_uploads
+from typing import Any, Dict, List, Optional, Union, cast
+from szurubooru import errors, model
+from szurubooru.func import file_uploads, net
MISSING = object()
Request = Dict[str, Any]
@@ -10,13 +10,14 @@ Response = Optional[Dict[str, Any]]
class Context:
def __init__(
- self,
- env: Dict[str, Any],
- method: str,
- url: str,
- headers: Dict[str, str] = None,
- params: Request = None,
- files: Dict[str, bytes] = None) -> None:
+ self,
+ env: Dict[str, Any],
+ method: str,
+ url: str,
+ headers: Dict[str, str] = None,
+ params: Request = None,
+ files: Dict[str, bytes] = None,
+ ) -> None:
self.env = env
self.method = method
self.url = url
@@ -26,7 +27,7 @@ class Context:
self.user = model.User()
self.user.name = None
- self.user.rank = 'anonymous'
+ self.user.rank = "anonymous"
self.session = None # type: Any
@@ -34,100 +35,106 @@ class Context:
return name in self._headers
def get_header(self, name: str) -> str:
- return self._headers.get(name, '')
+ return self._headers.get(name, "")
def has_file(self, name: str, allow_tokens: bool = True) -> bool:
return (
- name in self._files or
- name + 'Url' in self._params or
- (allow_tokens and name + 'Token' in self._params))
+ name in self._files
+ or name + "Url" in self._params
+ or (allow_tokens and name + "Token" in self._params)
+ )
def get_file(
- self,
- name: str,
- default: Union[object, bytes] = MISSING,
- use_video_downloader: bool = False,
- allow_tokens: bool = True) -> bytes:
+ self,
+ name: str,
+ default: Union[object, bytes] = MISSING,
+ use_video_downloader: bool = False,
+ allow_tokens: bool = True,
+ ) -> bytes:
if name in self._files and self._files[name]:
return self._files[name]
- if name + 'Url' in self._params:
+ if name + "Url" in self._params:
return net.download(
- self._params[name + 'Url'],
- use_video_downloader=use_video_downloader)
+ self._params[name + "Url"],
+ use_video_downloader=use_video_downloader,
+ )
- if allow_tokens and name + 'Token' in self._params:
- ret = file_uploads.get(self._params[name + 'Token'])
+ if allow_tokens and name + "Token" in self._params:
+ ret = file_uploads.get(self._params[name + "Token"])
if ret:
return ret
elif default is not MISSING:
raise errors.MissingOrExpiredRequiredFileError(
- 'Required file %r is missing or has expired.' % name)
+ "Required file %r is missing or has expired." % name
+ )
if default is not MISSING:
return cast(bytes, default)
raise errors.MissingRequiredFileError(
- 'Required file %r is missing.' % name)
+ "Required file %r is missing." % name
+ )
def has_param(self, name: str) -> bool:
return name in self._params
def get_param_as_list(
- self,
- name: str,
- default: Union[object, List[Any]] = MISSING) -> List[Any]:
+ self, name: str, default: Union[object, List[Any]] = MISSING
+ ) -> List[Any]:
if name not in self._params:
if default is not MISSING:
return cast(List[Any], default)
raise errors.MissingRequiredParameterError(
- 'Required parameter %r is missing.' % name)
+ "Required parameter %r is missing." % name
+ )
value = self._params[name]
if type(value) is str:
- if ',' in value:
- return value.split(',')
+ if "," in value:
+ return value.split(",")
return [value]
if type(value) is list:
return value
raise errors.InvalidParameterError(
- 'Parameter %r must be a list.' % name)
+ "Parameter %r must be a list." % name
+ )
def get_param_as_int_list(
- self,
- name: str,
- default: Union[object, List[int]] = MISSING) -> List[int]:
+ self, name: str, default: Union[object, List[int]] = MISSING
+ ) -> List[int]:
ret = self.get_param_as_list(name, default)
for item in ret:
if type(item) is not int:
raise errors.InvalidParameterError(
- 'Parameter %r must be a list of integer values.' % name)
+ "Parameter %r must be a list of integer values." % name
+ )
return ret
def get_param_as_string_list(
- self,
- name: str,
- default: Union[object, List[str]] = MISSING) -> List[str]:
+ self, name: str, default: Union[object, List[str]] = MISSING
+ ) -> List[str]:
ret = self.get_param_as_list(name, default)
for item in ret:
if type(item) is not str:
raise errors.InvalidParameterError(
- 'Parameter %r must be a list of string values.' % name)
+ "Parameter %r must be a list of string values." % name
+ )
return ret
def get_param_as_string(
- self,
- name: str,
- default: Union[object, str] = MISSING) -> str:
+ self, name: str, default: Union[object, str] = MISSING
+ ) -> str:
if name not in self._params:
if default is not MISSING:
return cast(str, default)
raise errors.MissingRequiredParameterError(
- 'Required parameter %r is missing.' % name)
+ "Required parameter %r is missing." % name
+ )
value = self._params[name]
try:
if value is None:
- return ''
+ return ""
if type(value) is list:
- return ','.join(value)
+ return ",".join(value)
if type(value) is int or type(value) is float:
return str(value)
if type(value) is str:
@@ -135,51 +142,58 @@ class Context:
except TypeError:
pass
raise errors.InvalidParameterError(
- 'Parameter %r must be a string value.' % name)
+ "Parameter %r must be a string value." % name
+ )
def get_param_as_int(
- self,
- name: str,
- default: Union[object, int] = MISSING,
- min: Optional[int] = None,
- max: Optional[int] = None) -> int:
+ self,
+ name: str,
+ default: Union[object, int] = MISSING,
+ min: Optional[int] = None,
+ max: Optional[int] = None,
+ ) -> int:
if name not in self._params:
if default is not MISSING:
return cast(int, default)
raise errors.MissingRequiredParameterError(
- 'Required parameter %r is missing.' % name)
+ "Required parameter %r is missing." % name
+ )
value = self._params[name]
try:
value = int(value)
if min is not None and value < min:
raise errors.InvalidParameterError(
- 'Parameter %r must be at least %r.' % (name, min))
+ "Parameter %r must be at least %r." % (name, min)
+ )
if max is not None and value > max:
raise errors.InvalidParameterError(
- 'Parameter %r may not exceed %r.' % (name, max))
+ "Parameter %r may not exceed %r." % (name, max)
+ )
return value
except (ValueError, TypeError):
pass
raise errors.InvalidParameterError(
- 'Parameter %r must be an integer value.' % name)
+ "Parameter %r must be an integer value." % name
+ )
def get_param_as_bool(
- self,
- name: str,
- default: Union[object, bool] = MISSING) -> bool:
+ self, name: str, default: Union[object, bool] = MISSING
+ ) -> bool:
if name not in self._params:
if default is not MISSING:
return cast(bool, default)
raise errors.MissingRequiredParameterError(
- 'Required parameter %r is missing.' % name)
+ "Required parameter %r is missing." % name
+ )
value = self._params[name]
try:
value = str(value).lower()
except TypeError:
pass
- if value in ['1', 'y', 'yes', 'yeah', 'yep', 'yup', 't', 'true']:
+ if value in ["1", "y", "yes", "yeah", "yep", "yup", "t", "true"]:
return True
- if value in ['0', 'n', 'no', 'nope', 'f', 'false']:
+ if value in ["0", "n", "no", "nope", "f", "false"]:
return False
raise errors.InvalidParameterError(
- 'Parameter %r must be a boolean value.' % name)
+ "Parameter %r must be a boolean value." % name
+ )
diff --git a/server/szurubooru/rest/errors.py b/server/szurubooru/rest/errors.py
index 45dc615f..446b7571 100644
--- a/server/szurubooru/rest/errors.py
+++ b/server/szurubooru/rest/errors.py
@@ -1,19 +1,19 @@
-from typing import Optional, Callable, Type, Dict
-
+from typing import Callable, Dict, Optional, Type
error_handlers = {}
class BaseHttpError(RuntimeError):
code = -1
- reason = ''
+ reason = ""
def __init__(
- self,
- name: str,
- description: str,
- title: Optional[str] = None,
- extra_fields: Optional[Dict[str, str]] = None) -> None:
+ self,
+ name: str,
+ description: str,
+ title: Optional[str] = None,
+ extra_fields: Optional[Dict[str, str]] = None,
+ ) -> None:
super().__init__()
# error name for programmers
self.name = name
@@ -27,40 +27,40 @@ class BaseHttpError(RuntimeError):
class HttpBadRequest(BaseHttpError):
code = 400
- reason = 'Bad Request'
+ reason = "Bad Request"
class HttpForbidden(BaseHttpError):
code = 403
- reason = 'Forbidden'
+ reason = "Forbidden"
class HttpNotFound(BaseHttpError):
code = 404
- reason = 'Not Found'
+ reason = "Not Found"
class HttpNotAcceptable(BaseHttpError):
code = 406
- reason = 'Not Acceptable'
+ reason = "Not Acceptable"
class HttpConflict(BaseHttpError):
code = 409
- reason = 'Conflict'
+ reason = "Conflict"
class HttpMethodNotAllowed(BaseHttpError):
code = 405
- reason = 'Method Not Allowed'
+ reason = "Method Not Allowed"
class HttpInternalServerError(BaseHttpError):
code = 500
- reason = 'Internal Server Error'
+ reason = "Internal Server Error"
def handle(
- exception_type: Type[Exception],
- handler: Callable[[Exception], None]) -> None:
+ exception_type: Type[Exception], handler: Callable[[Exception], None]
+) -> None:
error_handlers[exception_type] = handler
diff --git a/server/szurubooru/rest/middleware.py b/server/szurubooru/rest/middleware.py
index 18b6b465..2936abfb 100644
--- a/server/szurubooru/rest/middleware.py
+++ b/server/szurubooru/rest/middleware.py
@@ -1,6 +1,6 @@
-from typing import List, Callable
-from szurubooru.rest.context import Context
+from typing import Callable, List
+from szurubooru.rest.context import Context
pre_hooks = [] # type: List[Callable[[Context], None]]
post_hooks = [] # type: List[Callable[[Context], None]]
diff --git a/server/szurubooru/rest/routes.py b/server/szurubooru/rest/routes.py
index 93e124e7..b0946fb3 100644
--- a/server/szurubooru/rest/routes.py
+++ b/server/szurubooru/rest/routes.py
@@ -1,7 +1,7 @@
-from typing import Callable, Dict
from collections import defaultdict
-from szurubooru.rest.context import Context, Response
+from typing import Callable, Dict
+from szurubooru.rest.context import Context, Response
RouteHandler = Callable[[Context, Dict[str, str]], Response]
routes = defaultdict(dict) # type: Dict[str, Dict[str, RouteHandler]]
@@ -9,27 +9,31 @@ routes = defaultdict(dict) # type: Dict[str, Dict[str, RouteHandler]]
def get(url: str) -> Callable[[RouteHandler], RouteHandler]:
def wrapper(handler: RouteHandler) -> RouteHandler:
- routes[url]['GET'] = handler
+ routes[url]["GET"] = handler
return handler
+
return wrapper
def put(url: str) -> Callable[[RouteHandler], RouteHandler]:
def wrapper(handler: RouteHandler) -> RouteHandler:
- routes[url]['PUT'] = handler
+ routes[url]["PUT"] = handler
return handler
+
return wrapper
def post(url: str) -> Callable[[RouteHandler], RouteHandler]:
def wrapper(handler: RouteHandler) -> RouteHandler:
- routes[url]['POST'] = handler
+ routes[url]["POST"] = handler
return handler
+
return wrapper
def delete(url: str) -> Callable[[RouteHandler], RouteHandler]:
def wrapper(handler: RouteHandler) -> RouteHandler:
- routes[url]['DELETE'] = handler
+ routes[url]["DELETE"] = handler
return handler
+
return wrapper
diff --git a/server/szurubooru/search/__init__.py b/server/szurubooru/search/__init__.py
index 919475fe..6ad19207 100644
--- a/server/szurubooru/search/__init__.py
+++ b/server/szurubooru/search/__init__.py
@@ -1,2 +1,2 @@
-from szurubooru.search.executor import Executor
import szurubooru.search.configs
+from szurubooru.search.executor import Executor
diff --git a/server/szurubooru/search/configs/__init__.py b/server/szurubooru/search/configs/__init__.py
index c6a3ea49..c7218131 100644
--- a/server/szurubooru/search/configs/__init__.py
+++ b/server/szurubooru/search/configs/__init__.py
@@ -1,6 +1,6 @@
-from .user_search_config import UserSearchConfig
-from .tag_search_config import TagSearchConfig
-from .post_search_config import PostSearchConfig
-from .snapshot_search_config import SnapshotSearchConfig
from .comment_search_config import CommentSearchConfig
from .pool_search_config import PoolSearchConfig
+from .post_search_config import PostSearchConfig
+from .snapshot_search_config import SnapshotSearchConfig
+from .tag_search_config import TagSearchConfig
+from .user_search_config import UserSearchConfig
diff --git a/server/szurubooru/search/configs/base_search_config.py b/server/szurubooru/search/configs/base_search_config.py
index 0cb814d4..d60f3617 100644
--- a/server/szurubooru/search/configs/base_search_config.py
+++ b/server/szurubooru/search/configs/base_search_config.py
@@ -1,5 +1,6 @@
-from typing import Optional, Tuple, Dict, Callable
-from szurubooru.search import tokens, criteria
+from typing import Callable, Dict, Optional, Tuple
+
+from szurubooru.search import criteria, tokens
from szurubooru.search.query import SearchQuery
from szurubooru.search.typing import SaColumn, SaQuery
diff --git a/server/szurubooru/search/configs/comment_search_config.py b/server/szurubooru/search/configs/comment_search_config.py
index 8b154460..1145e517 100644
--- a/server/szurubooru/search/configs/comment_search_config.py
+++ b/server/szurubooru/search/configs/comment_search_config.py
@@ -1,10 +1,14 @@
-from typing import Tuple, Dict
+from typing import Dict, Tuple
+
import sqlalchemy as sa
+
from szurubooru import db, model
-from szurubooru.search.typing import SaColumn, SaQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.typing import SaColumn, SaQuery
class CommentSearchConfig(BaseSearchConfig):
@@ -27,36 +31,42 @@ class CommentSearchConfig(BaseSearchConfig):
@property
def named_filters(self) -> Dict[str, Filter]:
return {
- 'id': search_util.create_num_filter(model.Comment.comment_id),
- 'post': search_util.create_num_filter(model.Comment.post_id),
- 'user': search_util.create_str_filter(model.User.name),
- 'author': search_util.create_str_filter(model.User.name),
- 'text': search_util.create_str_filter(model.Comment.text),
- 'creation-date':
- search_util.create_date_filter(model.Comment.creation_time),
- 'creation-time':
- search_util.create_date_filter(model.Comment.creation_time),
- 'last-edit-date':
- search_util.create_date_filter(model.Comment.last_edit_time),
- 'last-edit-time':
- search_util.create_date_filter(model.Comment.last_edit_time),
- 'edit-date':
- search_util.create_date_filter(model.Comment.last_edit_time),
- 'edit-time':
- search_util.create_date_filter(model.Comment.last_edit_time),
+ "id": search_util.create_num_filter(model.Comment.comment_id),
+ "post": search_util.create_num_filter(model.Comment.post_id),
+ "user": search_util.create_str_filter(model.User.name),
+ "author": search_util.create_str_filter(model.User.name),
+ "text": search_util.create_str_filter(model.Comment.text),
+ "creation-date": search_util.create_date_filter(
+ model.Comment.creation_time
+ ),
+ "creation-time": search_util.create_date_filter(
+ model.Comment.creation_time
+ ),
+ "last-edit-date": search_util.create_date_filter(
+ model.Comment.last_edit_time
+ ),
+ "last-edit-time": search_util.create_date_filter(
+ model.Comment.last_edit_time
+ ),
+ "edit-date": search_util.create_date_filter(
+ model.Comment.last_edit_time
+ ),
+ "edit-time": search_util.create_date_filter(
+ model.Comment.last_edit_time
+ ),
}
@property
def sort_columns(self) -> Dict[str, Tuple[SaColumn, str]]:
return {
- 'random': (sa.sql.expression.func.random(), self.SORT_NONE),
- 'user': (model.User.name, self.SORT_ASC),
- 'author': (model.User.name, self.SORT_ASC),
- 'post': (model.Comment.post_id, self.SORT_DESC),
- 'creation-date': (model.Comment.creation_time, self.SORT_DESC),
- 'creation-time': (model.Comment.creation_time, self.SORT_DESC),
- 'last-edit-date': (model.Comment.last_edit_time, self.SORT_DESC),
- 'last-edit-time': (model.Comment.last_edit_time, self.SORT_DESC),
- 'edit-date': (model.Comment.last_edit_time, self.SORT_DESC),
- 'edit-time': (model.Comment.last_edit_time, self.SORT_DESC),
+ "random": (sa.sql.expression.func.random(), self.SORT_NONE),
+ "user": (model.User.name, self.SORT_ASC),
+ "author": (model.User.name, self.SORT_ASC),
+ "post": (model.Comment.post_id, self.SORT_DESC),
+ "creation-date": (model.Comment.creation_time, self.SORT_DESC),
+ "creation-time": (model.Comment.creation_time, self.SORT_DESC),
+ "last-edit-date": (model.Comment.last_edit_time, self.SORT_DESC),
+ "last-edit-time": (model.Comment.last_edit_time, self.SORT_DESC),
+ "edit-date": (model.Comment.last_edit_time, self.SORT_DESC),
+ "edit-time": (model.Comment.last_edit_time, self.SORT_DESC),
}
diff --git a/server/szurubooru/search/configs/pool_search_config.py b/server/szurubooru/search/configs/pool_search_config.py
index ddc325af..88b30a6e 100644
--- a/server/szurubooru/search/configs/pool_search_config.py
+++ b/server/szurubooru/search/configs/pool_search_config.py
@@ -1,24 +1,27 @@
-from typing import Tuple, Dict
+from typing import Dict, Tuple
+
import sqlalchemy as sa
+
from szurubooru import db, model
from szurubooru.func import util
-from szurubooru.search.typing import SaColumn, SaQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.typing import SaColumn, SaQuery
class PoolSearchConfig(BaseSearchConfig):
def create_filter_query(self, _disable_eager_loads: bool) -> SaQuery:
strategy = (
- sa.orm.lazyload
- if _disable_eager_loads
- else sa.orm.subqueryload)
+ sa.orm.lazyload if _disable_eager_loads else sa.orm.subqueryload
+ )
return (
db.session.query(model.Pool)
.join(model.PoolCategory)
- .options(
- strategy(model.Pool.names)))
+ .options(strategy(model.Pool.names))
+ )
def create_count_query(self, _disable_eager_loads: bool) -> SaQuery:
return db.session.query(model.Pool)
@@ -35,75 +38,74 @@ class PoolSearchConfig(BaseSearchConfig):
model.Pool.pool_id,
model.PoolName.pool_id,
model.PoolName.name,
- search_util.create_str_filter)
+ search_util.create_str_filter,
+ )
@property
def named_filters(self) -> Dict[str, Filter]:
- return util.unalias_dict([
- (
- ['name'],
- search_util.create_subquery_filter(
- model.Pool.pool_id,
- model.PoolName.pool_id,
- model.PoolName.name,
- search_util.create_str_filter)
- ),
-
- (
- ['category'],
- search_util.create_subquery_filter(
- model.Pool.category_id,
- model.PoolCategory.pool_category_id,
- model.PoolCategory.name,
- search_util.create_str_filter)
- ),
-
- (
- ['creation-date', 'creation-time'],
- search_util.create_date_filter(model.Pool.creation_time)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- search_util.create_date_filter(model.Pool.last_edit_time)
- ),
-
- (
- ['post-count'],
- search_util.create_num_filter(model.Pool.post_count)
- ),
- ])
+ return util.unalias_dict(
+ [
+ (
+ ["name"],
+ search_util.create_subquery_filter(
+ model.Pool.pool_id,
+ model.PoolName.pool_id,
+ model.PoolName.name,
+ search_util.create_str_filter,
+ ),
+ ),
+ (
+ ["category"],
+ search_util.create_subquery_filter(
+ model.Pool.category_id,
+ model.PoolCategory.pool_category_id,
+ model.PoolCategory.name,
+ search_util.create_str_filter,
+ ),
+ ),
+ (
+ ["creation-date", "creation-time"],
+ search_util.create_date_filter(model.Pool.creation_time),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ search_util.create_date_filter(model.Pool.last_edit_time),
+ ),
+ (
+ ["post-count"],
+ search_util.create_num_filter(model.Pool.post_count),
+ ),
+ ]
+ )
@property
def sort_columns(self) -> Dict[str, Tuple[SaColumn, str]]:
- return util.unalias_dict([
- (
- ['random'],
- (sa.sql.expression.func.random(), self.SORT_NONE)
- ),
-
- (
- ['name'],
- (model.Pool.first_name, self.SORT_ASC)
- ),
-
- (
- ['category'],
- (model.PoolCategory.name, self.SORT_ASC)
- ),
-
- (
- ['creation-date', 'creation-time'],
- (model.Pool.creation_time, self.SORT_DESC)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- (model.Pool.last_edit_time, self.SORT_DESC)
- ),
-
- (
- ['post-count'],
- (model.Pool.post_count, self.SORT_DESC)
- ),
- ])
+ return util.unalias_dict(
+ [
+ (
+ ["random"],
+ (sa.sql.expression.func.random(), self.SORT_NONE),
+ ),
+ (["name"], (model.Pool.first_name, self.SORT_ASC)),
+ (["category"], (model.PoolCategory.name, self.SORT_ASC)),
+ (
+ ["creation-date", "creation-time"],
+ (model.Pool.creation_time, self.SORT_DESC),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ (model.Pool.last_edit_time, self.SORT_DESC),
+ ),
+ (["post-count"], (model.Pool.post_count, self.SORT_DESC)),
+ ]
+ )
diff --git a/server/szurubooru/search/configs/post_search_config.py b/server/szurubooru/search/configs/post_search_config.py
index dcf02552..fc98e550 100644
--- a/server/szurubooru/search/configs/post_search_config.py
+++ b/server/szurubooru/search/configs/post_search_config.py
@@ -1,85 +1,91 @@
-from typing import Any, Optional, Tuple, Dict
+from typing import Any, Dict, Optional, Tuple
+
import sqlalchemy as sa
-from szurubooru import db, model, errors
+
+from szurubooru import db, errors, model
from szurubooru.func import util
from szurubooru.search import criteria, tokens
-from szurubooru.search.typing import SaColumn, SaQuery
-from szurubooru.search.query import SearchQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.query import SearchQuery
+from szurubooru.search.typing import SaColumn, SaQuery
def _type_transformer(value: str) -> str:
available_values = {
- 'image': model.Post.TYPE_IMAGE,
- 'animation': model.Post.TYPE_ANIMATION,
- 'animated': model.Post.TYPE_ANIMATION,
- 'anim': model.Post.TYPE_ANIMATION,
- 'gif': model.Post.TYPE_ANIMATION,
- 'video': model.Post.TYPE_VIDEO,
- 'webm': model.Post.TYPE_VIDEO,
- 'flash': model.Post.TYPE_FLASH,
- 'swf': model.Post.TYPE_FLASH,
+ "image": model.Post.TYPE_IMAGE,
+ "animation": model.Post.TYPE_ANIMATION,
+ "animated": model.Post.TYPE_ANIMATION,
+ "anim": model.Post.TYPE_ANIMATION,
+ "gif": model.Post.TYPE_ANIMATION,
+ "video": model.Post.TYPE_VIDEO,
+ "webm": model.Post.TYPE_VIDEO,
+ "flash": model.Post.TYPE_FLASH,
+ "swf": model.Post.TYPE_FLASH,
}
return search_util.enum_transformer(available_values, value)
def _safety_transformer(value: str) -> str:
available_values = {
- 'safe': model.Post.SAFETY_SAFE,
- 'sketchy': model.Post.SAFETY_SKETCHY,
- 'questionable': model.Post.SAFETY_SKETCHY,
- 'unsafe': model.Post.SAFETY_UNSAFE,
+ "safe": model.Post.SAFETY_SAFE,
+ "sketchy": model.Post.SAFETY_SKETCHY,
+ "questionable": model.Post.SAFETY_SKETCHY,
+ "unsafe": model.Post.SAFETY_UNSAFE,
}
return search_util.enum_transformer(available_values, value)
def _flag_transformer(value: str) -> str:
available_values = {
- 'loop': model.Post.FLAG_LOOP,
- 'sound': model.Post.FLAG_SOUND,
+ "loop": model.Post.FLAG_LOOP,
+ "sound": model.Post.FLAG_SOUND,
}
- return '%' + search_util.enum_transformer(available_values, value) + '%'
+ return "%" + search_util.enum_transformer(available_values, value) + "%"
def _source_transformer(value: str) -> str:
- return search_util.wildcard_transformer('*' + value + '*')
+ return search_util.wildcard_transformer("*" + value + "*")
def _create_score_filter(score: int) -> Filter:
def wrapper(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery,
+ criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
assert criterion
- if not getattr(criterion, 'internal', False):
+ if not getattr(criterion, "internal", False):
raise errors.SearchError(
- 'Votes cannot be seen publicly. Did you mean %r?'
- % 'special:liked')
+ "Votes cannot be seen publicly. Did you mean %r?"
+ % "special:liked"
+ )
user_alias = sa.orm.aliased(model.User)
score_alias = sa.orm.aliased(model.PostScore)
expr = score_alias.score == score
expr = expr & search_util.apply_str_criterion_to_column(
- user_alias.name, criterion)
+ user_alias.name, criterion
+ )
if negated:
expr = ~expr
ret = (
- query
- .join(score_alias, score_alias.post_id == model.Post.post_id)
+ query.join(score_alias, score_alias.post_id == model.Post.post_id)
.join(user_alias, user_alias.user_id == score_alias.user_id)
- .filter(expr))
+ .filter(expr)
+ )
return ret
+
return wrapper
def _user_filter(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery, criterion: Optional[criteria.BaseCriterion], negated: bool
+) -> SaQuery:
assert criterion
- if isinstance(criterion, criteria.PlainCriterion) \
- and not criterion.value:
+ if isinstance(criterion, criteria.PlainCriterion) and not criterion.value:
expr = model.Post.user_id == None # noqa: E711
if negated:
expr = ~expr
@@ -88,31 +94,32 @@ def _user_filter(
model.Post.user_id,
model.User.user_id,
model.User.name,
- search_util.create_str_filter)(query, criterion, negated)
+ search_util.create_str_filter,
+ )(query, criterion, negated)
def _note_filter(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery, criterion: Optional[criteria.BaseCriterion], negated: bool
+) -> SaQuery:
assert criterion
return search_util.create_subquery_filter(
model.Post.post_id,
model.PostNote.post_id,
model.PostNote.text,
- search_util.create_str_filter)(query, criterion, negated)
+ search_util.create_str_filter,
+ )(query, criterion, negated)
def _pool_filter(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery, criterion: Optional[criteria.BaseCriterion], negated: bool
+) -> SaQuery:
assert criterion
return search_util.create_subquery_filter(
model.Post.post_id,
model.PoolPost.post_id,
model.PoolPost.pool_id,
- search_util.create_num_filter)(query, criterion, negated)
+ search_util.create_num_filter,
+ )(query, criterion, negated)
class PostSearchConfig(BaseSearchConfig):
@@ -122,51 +129,52 @@ class PostSearchConfig(BaseSearchConfig):
def on_search_query_parsed(self, search_query: SearchQuery) -> SaQuery:
new_special_tokens = []
for token in search_query.special_tokens:
- if token.value in ('fav', 'liked', 'disliked'):
+ if token.value in ("fav", "liked", "disliked"):
assert self.user
- if self.user.rank == 'anonymous':
+ if self.user.rank == "anonymous":
raise errors.SearchError(
- 'Must be logged in to use this feature.')
+ "Must be logged in to use this feature."
+ )
criterion = criteria.PlainCriterion(
- original_text=self.user.name,
- value=self.user.name)
- setattr(criterion, 'internal', True)
+ original_text=self.user.name, value=self.user.name
+ )
+ setattr(criterion, "internal", True)
search_query.named_tokens.append(
tokens.NamedToken(
name=token.value,
criterion=criterion,
- negated=token.negated))
+ negated=token.negated,
+ )
+ )
else:
new_special_tokens.append(token)
search_query.special_tokens = new_special_tokens
def create_around_query(self) -> SaQuery:
- return db.session.query(model.Post).options(sa.orm.lazyload('*'))
+ return db.session.query(model.Post).options(sa.orm.lazyload("*"))
def create_filter_query(self, disable_eager_loads: bool) -> SaQuery:
strategy = (
- sa.orm.lazyload
- if disable_eager_loads
- else sa.orm.subqueryload)
- return (
- db.session.query(model.Post)
- .options(
- sa.orm.lazyload('*'),
- # use config optimized for official client
- # sa.orm.defer(model.Post.score),
- # sa.orm.defer(model.Post.favorite_count),
- # sa.orm.defer(model.Post.comment_count),
- sa.orm.defer(model.Post.last_favorite_time),
- sa.orm.defer(model.Post.feature_count),
- sa.orm.defer(model.Post.last_feature_time),
- sa.orm.defer(model.Post.last_comment_creation_time),
- sa.orm.defer(model.Post.last_comment_edit_time),
- sa.orm.defer(model.Post.note_count),
- sa.orm.defer(model.Post.tag_count),
- strategy(model.Post.tags).subqueryload(model.Tag.names),
- strategy(model.Post.tags).defer(model.Tag.post_count),
- strategy(model.Post.tags).lazyload(model.Tag.implications),
- strategy(model.Post.tags).lazyload(model.Tag.suggestions)))
+ sa.orm.lazyload if disable_eager_loads else sa.orm.subqueryload
+ )
+ return db.session.query(model.Post).options(
+ sa.orm.lazyload("*"),
+ # use config optimized for official client
+ # sa.orm.defer(model.Post.score),
+ # sa.orm.defer(model.Post.favorite_count),
+ # sa.orm.defer(model.Post.comment_count),
+ sa.orm.defer(model.Post.last_favorite_time),
+ sa.orm.defer(model.Post.feature_count),
+ sa.orm.defer(model.Post.last_feature_time),
+ sa.orm.defer(model.Post.last_comment_creation_time),
+ sa.orm.defer(model.Post.last_comment_edit_time),
+ sa.orm.defer(model.Post.note_count),
+ sa.orm.defer(model.Post.tag_count),
+ strategy(model.Post.tags).subqueryload(model.Tag.names),
+ strategy(model.Post.tags).defer(model.Tag.post_count),
+ strategy(model.Post.tags).lazyload(model.Tag.implications),
+ strategy(model.Post.tags).lazyload(model.Tag.suggestions),
+ )
def create_count_query(self, _disable_eager_loads: bool) -> SaQuery:
return db.session.query(model.Post)
@@ -185,309 +193,256 @@ class PostSearchConfig(BaseSearchConfig):
model.PostTag.post_id,
model.TagName.name,
search_util.create_str_filter,
- lambda subquery: subquery.join(model.Tag).join(model.TagName))
+ lambda subquery: subquery.join(model.Tag).join(model.TagName),
+ )
@property
def named_filters(self) -> Dict[str, Filter]:
- return util.unalias_dict([
- (
- ['id'],
- search_util.create_num_filter(model.Post.post_id)
- ),
-
- (
- ['tag'],
- search_util.create_subquery_filter(
- model.Post.post_id,
- model.PostTag.post_id,
- model.TagName.name,
- search_util.create_str_filter,
- lambda subquery:
- subquery.join(model.Tag).join(model.TagName))
- ),
-
- (
- ['score'],
- search_util.create_num_filter(model.Post.score)
- ),
-
- (
- ['uploader', 'upload', 'submit'],
- _user_filter
- ),
-
- (
- ['comment'],
- search_util.create_subquery_filter(
- model.Post.post_id,
- model.Comment.post_id,
- model.User.name,
- search_util.create_str_filter,
- lambda subquery: subquery.join(model.User))
- ),
-
- (
- ['fav'],
- search_util.create_subquery_filter(
- model.Post.post_id,
- model.PostFavorite.post_id,
- model.User.name,
- search_util.create_str_filter,
- lambda subquery: subquery.join(model.User))
- ),
-
- (
- ['liked'],
- _create_score_filter(1)
- ),
- (
- ['disliked'],
- _create_score_filter(-1)
- ),
-
- (
- ['source'],
- search_util.create_str_filter(
- model.Post.source, _source_transformer)
- ),
-
- (
- ['tag-count'],
- search_util.create_num_filter(model.Post.tag_count)
- ),
-
- (
- ['comment-count'],
- search_util.create_num_filter(model.Post.comment_count)
- ),
-
- (
- ['fav-count'],
- search_util.create_num_filter(model.Post.favorite_count)
- ),
-
- (
- ['note-count'],
- search_util.create_num_filter(model.Post.note_count)
- ),
-
- (
- ['relation-count'],
- search_util.create_num_filter(model.Post.relation_count)
- ),
-
- (
- ['feature-count'],
- search_util.create_num_filter(model.Post.feature_count)
- ),
-
- (
- ['type'],
- search_util.create_str_filter(
- model.Post.type, _type_transformer)
- ),
-
- (
- ['content-checksum'],
- search_util.create_str_filter(model.Post.checksum)
- ),
-
- (
- ['file-size'],
- search_util.create_num_filter(model.Post.file_size)
- ),
-
- (
- ['image-width', 'width'],
- search_util.create_num_filter(model.Post.canvas_width)
- ),
-
- (
- ['image-height', 'height'],
- search_util.create_num_filter(model.Post.canvas_height)
- ),
-
- (
- ['image-area', 'area'],
- search_util.create_num_filter(model.Post.canvas_area)
- ),
-
- (
- ['image-aspect-ratio', 'image-ar', 'aspect-ratio', 'ar'],
- search_util.create_num_filter(
- model.Post.canvas_aspect_ratio,
- transformer=search_util.float_transformer)
- ),
-
- (
- ['creation-date', 'creation-time', 'date', 'time'],
- search_util.create_date_filter(model.Post.creation_time)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- search_util.create_date_filter(model.Post.last_edit_time)
- ),
-
- (
- ['comment-date', 'comment-time'],
- search_util.create_date_filter(
- model.Post.last_comment_creation_time)
- ),
-
- (
- ['fav-date', 'fav-time'],
- search_util.create_date_filter(model.Post.last_favorite_time)
- ),
-
- (
- ['feature-date', 'feature-time'],
- search_util.create_date_filter(model.Post.last_feature_time)
- ),
-
- (
- ['safety', 'rating'],
- search_util.create_str_filter(
- model.Post.safety, _safety_transformer)
- ),
-
- (
- ['note-text'],
- _note_filter
- ),
-
- (
- ['flag'],
- search_util.create_str_filter(
- model.Post.flags_string, _flag_transformer)
- ),
-
- (
- ['pool'],
- _pool_filter
- ),
- ])
+ return util.unalias_dict(
+ [
+ (["id"], search_util.create_num_filter(model.Post.post_id)),
+ (
+ ["tag"],
+ search_util.create_subquery_filter(
+ model.Post.post_id,
+ model.PostTag.post_id,
+ model.TagName.name,
+ search_util.create_str_filter,
+ lambda subquery: subquery.join(model.Tag).join(
+ model.TagName
+ ),
+ ),
+ ),
+ (["score"], search_util.create_num_filter(model.Post.score)),
+ (["uploader", "upload", "submit"], _user_filter),
+ (
+ ["comment"],
+ search_util.create_subquery_filter(
+ model.Post.post_id,
+ model.Comment.post_id,
+ model.User.name,
+ search_util.create_str_filter,
+ lambda subquery: subquery.join(model.User),
+ ),
+ ),
+ (
+ ["fav"],
+ search_util.create_subquery_filter(
+ model.Post.post_id,
+ model.PostFavorite.post_id,
+ model.User.name,
+ search_util.create_str_filter,
+ lambda subquery: subquery.join(model.User),
+ ),
+ ),
+ (["liked"], _create_score_filter(1)),
+ (["disliked"], _create_score_filter(-1)),
+ (
+ ["source"],
+ search_util.create_str_filter(
+ model.Post.source, _source_transformer
+ ),
+ ),
+ (
+ ["tag-count"],
+ search_util.create_num_filter(model.Post.tag_count),
+ ),
+ (
+ ["comment-count"],
+ search_util.create_num_filter(model.Post.comment_count),
+ ),
+ (
+ ["fav-count"],
+ search_util.create_num_filter(model.Post.favorite_count),
+ ),
+ (
+ ["note-count"],
+ search_util.create_num_filter(model.Post.note_count),
+ ),
+ (
+ ["relation-count"],
+ search_util.create_num_filter(model.Post.relation_count),
+ ),
+ (
+ ["feature-count"],
+ search_util.create_num_filter(model.Post.feature_count),
+ ),
+ (
+ ["type"],
+ search_util.create_str_filter(
+ model.Post.type, _type_transformer
+ ),
+ ),
+ (
+ ["content-checksum"],
+ search_util.create_str_filter(model.Post.checksum),
+ ),
+ (
+ ["file-size"],
+ search_util.create_num_filter(model.Post.file_size),
+ ),
+ (
+ ["image-width", "width"],
+ search_util.create_num_filter(model.Post.canvas_width),
+ ),
+ (
+ ["image-height", "height"],
+ search_util.create_num_filter(model.Post.canvas_height),
+ ),
+ (
+ ["image-area", "area"],
+ search_util.create_num_filter(model.Post.canvas_area),
+ ),
+ (
+ ["image-aspect-ratio", "image-ar", "aspect-ratio", "ar"],
+ search_util.create_num_filter(
+ model.Post.canvas_aspect_ratio,
+ transformer=search_util.float_transformer,
+ ),
+ ),
+ (
+ ["creation-date", "creation-time", "date", "time"],
+ search_util.create_date_filter(model.Post.creation_time),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ search_util.create_date_filter(model.Post.last_edit_time),
+ ),
+ (
+ ["comment-date", "comment-time"],
+ search_util.create_date_filter(
+ model.Post.last_comment_creation_time
+ ),
+ ),
+ (
+ ["fav-date", "fav-time"],
+ search_util.create_date_filter(
+ model.Post.last_favorite_time
+ ),
+ ),
+ (
+ ["feature-date", "feature-time"],
+ search_util.create_date_filter(
+ model.Post.last_feature_time
+ ),
+ ),
+ (
+ ["safety", "rating"],
+ search_util.create_str_filter(
+ model.Post.safety, _safety_transformer
+ ),
+ ),
+ (["note-text"], _note_filter),
+ (
+ ["flag"],
+ search_util.create_str_filter(
+ model.Post.flags_string, _flag_transformer
+ ),
+ ),
+ (["pool"], _pool_filter),
+ ]
+ )
@property
def sort_columns(self) -> Dict[str, Tuple[SaColumn, str]]:
- return util.unalias_dict([
- (
- ['random'],
- (sa.sql.expression.func.random(), self.SORT_NONE)
- ),
-
- (
- ['id'],
- (model.Post.post_id, self.SORT_DESC)
- ),
-
- (
- ['score'],
- (model.Post.score, self.SORT_DESC)
- ),
-
- (
- ['tag-count'],
- (model.Post.tag_count, self.SORT_DESC)
- ),
-
- (
- ['comment-count'],
- (model.Post.comment_count, self.SORT_DESC)
- ),
-
- (
- ['fav-count'],
- (model.Post.favorite_count, self.SORT_DESC)
- ),
-
- (
- ['note-count'],
- (model.Post.note_count, self.SORT_DESC)
- ),
-
- (
- ['relation-count'],
- (model.Post.relation_count, self.SORT_DESC)
- ),
-
- (
- ['feature-count'],
- (model.Post.feature_count, self.SORT_DESC)
- ),
-
- (
- ['file-size'],
- (model.Post.file_size, self.SORT_DESC)
- ),
-
- (
- ['image-width', 'width'],
- (model.Post.canvas_width, self.SORT_DESC)
- ),
-
- (
- ['image-height', 'height'],
- (model.Post.canvas_height, self.SORT_DESC)
- ),
-
- (
- ['image-area', 'area'],
- (model.Post.canvas_area, self.SORT_DESC)
- ),
-
- (
- ['creation-date', 'creation-time', 'date', 'time'],
- (model.Post.creation_time, self.SORT_DESC)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- (model.Post.last_edit_time, self.SORT_DESC)
- ),
-
- (
- ['comment-date', 'comment-time'],
- (model.Post.last_comment_creation_time, self.SORT_DESC)
- ),
-
- (
- ['fav-date', 'fav-time'],
- (model.Post.last_favorite_time, self.SORT_DESC)
- ),
-
- (
- ['feature-date', 'feature-time'],
- (model.Post.last_feature_time, self.SORT_DESC)
- ),
- ])
+ return util.unalias_dict(
+ [
+ (
+ ["random"],
+ (sa.sql.expression.func.random(), self.SORT_NONE),
+ ),
+ (["id"], (model.Post.post_id, self.SORT_DESC)),
+ (["score"], (model.Post.score, self.SORT_DESC)),
+ (["tag-count"], (model.Post.tag_count, self.SORT_DESC)),
+ (
+ ["comment-count"],
+ (model.Post.comment_count, self.SORT_DESC),
+ ),
+ (["fav-count"], (model.Post.favorite_count, self.SORT_DESC)),
+ (["note-count"], (model.Post.note_count, self.SORT_DESC)),
+ (
+ ["relation-count"],
+ (model.Post.relation_count, self.SORT_DESC),
+ ),
+ (
+ ["feature-count"],
+ (model.Post.feature_count, self.SORT_DESC),
+ ),
+ (["file-size"], (model.Post.file_size, self.SORT_DESC)),
+ (
+ ["image-width", "width"],
+ (model.Post.canvas_width, self.SORT_DESC),
+ ),
+ (
+ ["image-height", "height"],
+ (model.Post.canvas_height, self.SORT_DESC),
+ ),
+ (
+ ["image-area", "area"],
+ (model.Post.canvas_area, self.SORT_DESC),
+ ),
+ (
+ ["creation-date", "creation-time", "date", "time"],
+ (model.Post.creation_time, self.SORT_DESC),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ (model.Post.last_edit_time, self.SORT_DESC),
+ ),
+ (
+ ["comment-date", "comment-time"],
+ (model.Post.last_comment_creation_time, self.SORT_DESC),
+ ),
+ (
+ ["fav-date", "fav-time"],
+ (model.Post.last_favorite_time, self.SORT_DESC),
+ ),
+ (
+ ["feature-date", "feature-time"],
+ (model.Post.last_feature_time, self.SORT_DESC),
+ ),
+ ]
+ )
@property
def special_filters(self) -> Dict[str, Filter]:
return {
# handled by parser
- 'fav': self.noop_filter,
- 'liked': self.noop_filter,
- 'disliked': self.noop_filter,
- 'tumbleweed': self.tumbleweed_filter,
+ "fav": self.noop_filter,
+ "liked": self.noop_filter,
+ "disliked": self.noop_filter,
+ "tumbleweed": self.tumbleweed_filter,
}
def noop_filter(
- self,
- query: SaQuery,
- _criterion: Optional[criteria.BaseCriterion],
- _negated: bool) -> SaQuery:
+ self,
+ query: SaQuery,
+ _criterion: Optional[criteria.BaseCriterion],
+ _negated: bool,
+ ) -> SaQuery:
return query
def tumbleweed_filter(
- self,
- query: SaQuery,
- _criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ self,
+ query: SaQuery,
+ _criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
expr = (
(model.Post.comment_count == 0)
& (model.Post.favorite_count == 0)
- & (model.Post.score == 0))
+ & (model.Post.score == 0)
+ )
if negated:
expr = ~expr
return query.filter(expr)
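For orientation while reading these search-config diffs: every named and special filter registered above is a callable of the same shape as noop_filter and tumbleweed_filter, taking the query, an optional criterion (None for special tokens, as the executor further down shows) and a negation flag, and returning the filtered query. A minimal sketch of a filter in that shape; the name untagged_filter and its predicate are illustrative assumptions, not something this patch adds:

from typing import Optional

from szurubooru import model
from szurubooru.search import criteria
from szurubooru.search.typing import SaQuery


def untagged_filter(
    query: SaQuery,
    _criterion: Optional[criteria.BaseCriterion],
    negated: bool,
) -> SaQuery:
    # hypothetical special filter: posts that carry no tags at all
    expr = model.Post.tag_count == 0
    if negated:
        expr = ~expr
    return query.filter(expr)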
diff --git a/server/szurubooru/search/configs/snapshot_search_config.py b/server/szurubooru/search/configs/snapshot_search_config.py
index 0fdb69d0..df722d2e 100644
--- a/server/szurubooru/search/configs/snapshot_search_config.py
+++ b/server/szurubooru/search/configs/snapshot_search_config.py
@@ -1,9 +1,12 @@
from typing import Dict
+
from szurubooru import db, model
-from szurubooru.search.typing import SaQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.typing import SaQuery
class SnapshotSearchConfig(BaseSearchConfig):
@@ -22,16 +25,18 @@ class SnapshotSearchConfig(BaseSearchConfig):
@property
def named_filters(self) -> Dict[str, Filter]:
return {
- 'type':
- search_util.create_str_filter(model.Snapshot.resource_type),
- 'id':
- search_util.create_str_filter(model.Snapshot.resource_name),
- 'date':
- search_util.create_date_filter(model.Snapshot.creation_time),
- 'time':
- search_util.create_date_filter(model.Snapshot.creation_time),
- 'operation':
- search_util.create_str_filter(model.Snapshot.operation),
- 'user':
- search_util.create_str_filter(model.User.name),
+ "type": search_util.create_str_filter(
+ model.Snapshot.resource_type
+ ),
+ "id": search_util.create_str_filter(model.Snapshot.resource_name),
+ "date": search_util.create_date_filter(
+ model.Snapshot.creation_time
+ ),
+ "time": search_util.create_date_filter(
+ model.Snapshot.creation_time
+ ),
+ "operation": search_util.create_str_filter(
+ model.Snapshot.operation
+ ),
+ "user": search_util.create_str_filter(model.User.name),
}
diff --git a/server/szurubooru/search/configs/tag_search_config.py b/server/szurubooru/search/configs/tag_search_config.py
index db3b4b2c..5d416035 100644
--- a/server/szurubooru/search/configs/tag_search_config.py
+++ b/server/szurubooru/search/configs/tag_search_config.py
@@ -1,19 +1,22 @@
-from typing import Tuple, Dict
+from typing import Dict, Tuple
+
import sqlalchemy as sa
+
from szurubooru import db, model
from szurubooru.func import util
-from szurubooru.search.typing import SaColumn, SaQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.typing import SaColumn, SaQuery
class TagSearchConfig(BaseSearchConfig):
def create_filter_query(self, _disable_eager_loads: bool) -> SaQuery:
strategy = (
- sa.orm.lazyload
- if _disable_eager_loads
- else sa.orm.subqueryload)
+ sa.orm.lazyload if _disable_eager_loads else sa.orm.subqueryload
+ )
return (
db.session.query(model.Tag)
.join(model.TagCategory)
@@ -24,7 +27,9 @@ class TagSearchConfig(BaseSearchConfig):
sa.orm.defer(model.Tag.post_count),
strategy(model.Tag.names),
strategy(model.Tag.suggestions).joinedload(model.Tag.names),
- strategy(model.Tag.implications).joinedload(model.Tag.names)))
+ strategy(model.Tag.implications).joinedload(model.Tag.names),
+ )
+ )
def create_count_query(self, _disable_eager_loads: bool) -> SaQuery:
return db.session.query(model.Tag)
@@ -41,95 +46,93 @@ class TagSearchConfig(BaseSearchConfig):
model.Tag.tag_id,
model.TagName.tag_id,
model.TagName.name,
- search_util.create_str_filter)
+ search_util.create_str_filter,
+ )
@property
def named_filters(self) -> Dict[str, Filter]:
- return util.unalias_dict([
- (
- ['name'],
- search_util.create_subquery_filter(
- model.Tag.tag_id,
- model.TagName.tag_id,
- model.TagName.name,
- search_util.create_str_filter)
- ),
-
- (
- ['category'],
- search_util.create_subquery_filter(
- model.Tag.category_id,
- model.TagCategory.tag_category_id,
- model.TagCategory.name,
- search_util.create_str_filter)
- ),
-
- (
- ['creation-date', 'creation-time'],
- search_util.create_date_filter(model.Tag.creation_time)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- search_util.create_date_filter(model.Tag.last_edit_time)
- ),
-
- (
- ['usage-count', 'post-count', 'usages'],
- search_util.create_num_filter(model.Tag.post_count)
- ),
-
- (
- ['suggestion-count'],
- search_util.create_num_filter(model.Tag.suggestion_count)
- ),
-
- (
- ['implication-count'],
- search_util.create_num_filter(model.Tag.implication_count)
- ),
- ])
+ return util.unalias_dict(
+ [
+ (
+ ["name"],
+ search_util.create_subquery_filter(
+ model.Tag.tag_id,
+ model.TagName.tag_id,
+ model.TagName.name,
+ search_util.create_str_filter,
+ ),
+ ),
+ (
+ ["category"],
+ search_util.create_subquery_filter(
+ model.Tag.category_id,
+ model.TagCategory.tag_category_id,
+ model.TagCategory.name,
+ search_util.create_str_filter,
+ ),
+ ),
+ (
+ ["creation-date", "creation-time"],
+ search_util.create_date_filter(model.Tag.creation_time),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ search_util.create_date_filter(model.Tag.last_edit_time),
+ ),
+ (
+ ["usage-count", "post-count", "usages"],
+ search_util.create_num_filter(model.Tag.post_count),
+ ),
+ (
+ ["suggestion-count"],
+ search_util.create_num_filter(model.Tag.suggestion_count),
+ ),
+ (
+ ["implication-count"],
+ search_util.create_num_filter(model.Tag.implication_count),
+ ),
+ ]
+ )
@property
def sort_columns(self) -> Dict[str, Tuple[SaColumn, str]]:
- return util.unalias_dict([
- (
- ['random'],
- (sa.sql.expression.func.random(), self.SORT_NONE)
- ),
-
- (
- ['name'],
- (model.Tag.first_name, self.SORT_ASC)
- ),
-
- (
- ['category'],
- (model.TagCategory.name, self.SORT_ASC)
- ),
-
- (
- ['creation-date', 'creation-time'],
- (model.Tag.creation_time, self.SORT_DESC)
- ),
-
- (
- ['last-edit-date', 'last-edit-time', 'edit-date', 'edit-time'],
- (model.Tag.last_edit_time, self.SORT_DESC)
- ),
-
- (
- ['usage-count', 'post-count', 'usages'],
- (model.Tag.post_count, self.SORT_DESC)
- ),
-
- (
- ['suggestion-count'],
- (model.Tag.suggestion_count, self.SORT_DESC)
- ),
-
- (
- ['implication-count'],
- (model.Tag.implication_count, self.SORT_DESC)
- ),
- ])
+ return util.unalias_dict(
+ [
+ (
+ ["random"],
+ (sa.sql.expression.func.random(), self.SORT_NONE),
+ ),
+ (["name"], (model.Tag.first_name, self.SORT_ASC)),
+ (["category"], (model.TagCategory.name, self.SORT_ASC)),
+ (
+ ["creation-date", "creation-time"],
+ (model.Tag.creation_time, self.SORT_DESC),
+ ),
+ (
+ [
+ "last-edit-date",
+ "last-edit-time",
+ "edit-date",
+ "edit-time",
+ ],
+ (model.Tag.last_edit_time, self.SORT_DESC),
+ ),
+ (
+ ["usage-count", "post-count", "usages"],
+ (model.Tag.post_count, self.SORT_DESC),
+ ),
+ (
+ ["suggestion-count"],
+ (model.Tag.suggestion_count, self.SORT_DESC),
+ ),
+ (
+ ["implication-count"],
+ (model.Tag.implication_count, self.SORT_DESC),
+ ),
+ ]
+ )
diff --git a/server/szurubooru/search/configs/user_search_config.py b/server/szurubooru/search/configs/user_search_config.py
index 64534009..bbf40342 100644
--- a/server/szurubooru/search/configs/user_search_config.py
+++ b/server/szurubooru/search/configs/user_search_config.py
@@ -1,10 +1,14 @@
-from typing import Tuple, Dict
+from typing import Dict, Tuple
+
import sqlalchemy as sa
+
from szurubooru import db, model
-from szurubooru.search.typing import SaColumn, SaQuery
from szurubooru.search.configs import util as search_util
from szurubooru.search.configs.base_search_config import (
- BaseSearchConfig, Filter)
+ BaseSearchConfig,
+ Filter,
+)
+from szurubooru.search.typing import SaColumn, SaQuery
class UserSearchConfig(BaseSearchConfig):
@@ -27,31 +31,36 @@ class UserSearchConfig(BaseSearchConfig):
@property
def named_filters(self) -> Dict[str, Filter]:
return {
- 'name':
- search_util.create_str_filter(model.User.name),
- 'creation-date':
- search_util.create_date_filter(model.User.creation_time),
- 'creation-time':
- search_util.create_date_filter(model.User.creation_time),
- 'last-login-date':
- search_util.create_date_filter(model.User.last_login_time),
- 'last-login-time':
- search_util.create_date_filter(model.User.last_login_time),
- 'login-date':
- search_util.create_date_filter(model.User.last_login_time),
- 'login-time':
- search_util.create_date_filter(model.User.last_login_time),
+ "name": search_util.create_str_filter(model.User.name),
+ "creation-date": search_util.create_date_filter(
+ model.User.creation_time
+ ),
+ "creation-time": search_util.create_date_filter(
+ model.User.creation_time
+ ),
+ "last-login-date": search_util.create_date_filter(
+ model.User.last_login_time
+ ),
+ "last-login-time": search_util.create_date_filter(
+ model.User.last_login_time
+ ),
+ "login-date": search_util.create_date_filter(
+ model.User.last_login_time
+ ),
+ "login-time": search_util.create_date_filter(
+ model.User.last_login_time
+ ),
}
@property
def sort_columns(self) -> Dict[str, Tuple[SaColumn, str]]:
return {
- 'random': (sa.sql.expression.func.random(), self.SORT_NONE),
- 'name': (model.User.name, self.SORT_ASC),
- 'creation-date': (model.User.creation_time, self.SORT_DESC),
- 'creation-time': (model.User.creation_time, self.SORT_DESC),
- 'last-login-date': (model.User.last_login_time, self.SORT_DESC),
- 'last-login-time': (model.User.last_login_time, self.SORT_DESC),
- 'login-date': (model.User.last_login_time, self.SORT_DESC),
- 'login-time': (model.User.last_login_time, self.SORT_DESC),
+ "random": (sa.sql.expression.func.random(), self.SORT_NONE),
+ "name": (model.User.name, self.SORT_ASC),
+ "creation-date": (model.User.creation_time, self.SORT_DESC),
+ "creation-time": (model.User.creation_time, self.SORT_DESC),
+ "last-login-date": (model.User.last_login_time, self.SORT_DESC),
+ "last-login-time": (model.User.last_login_time, self.SORT_DESC),
+ "login-date": (model.User.last_login_time, self.SORT_DESC),
+ "login-time": (model.User.last_login_time, self.SORT_DESC),
}
diff --git a/server/szurubooru/search/configs/util.py b/server/szurubooru/search/configs/util.py
index ee201a26..58e6ebe5 100644
--- a/server/szurubooru/search/configs/util.py
+++ b/server/szurubooru/search/configs/util.py
@@ -1,33 +1,36 @@
-from typing import Any, Optional, Union, Dict, Callable
+from typing import Any, Callable, Dict, Optional, Union
+
import sqlalchemy as sa
+
from szurubooru import db, errors
from szurubooru.func import util
from szurubooru.search import criteria
-from szurubooru.search.typing import SaColumn, SaQuery
from szurubooru.search.configs.base_search_config import Filter
-
+from szurubooru.search.typing import SaColumn, SaQuery
Number = Union[int, float]
-WILDCARD = '(--wildcard--)' # something unlikely to be used by the users
+WILDCARD = "(--wildcard--)" # something unlikely to be used by the users
def unescape(text: str, make_wildcards_special: bool = False) -> str:
- output = ''
+ output = ""
i = 0
while i < len(text):
- if text[i] == '\\':
+ if text[i] == "\\":
try:
char = text[i + 1]
i += 1
except IndexError:
raise errors.SearchError(
- 'Unterminated escape sequence (did you forget to escape '
- 'the ending backslash?)')
- if char not in '*\\:-.,':
+ "Unterminated escape sequence (did you forget to escape "
+ "the ending backslash?)"
+ )
+ if char not in "*\\:-.,":
raise errors.SearchError(
- 'Unknown escape sequence (did you forget to escape '
- 'the backslash?)')
- elif text[i] == '*' and make_wildcards_special:
+ "Unknown escape sequence (did you forget to escape "
+ "the backslash?)"
+ )
+ elif text[i] == "*" and make_wildcards_special:
char = WILDCARD
else:
char = text[i]
@@ -39,10 +42,11 @@ def unescape(text: str, make_wildcards_special: bool = False) -> str:
def wildcard_transformer(value: str) -> str:
return (
unescape(value, make_wildcards_special=True)
- .replace('\\', '\\\\')
- .replace('%', '\\%')
- .replace('_', '\\_')
- .replace(WILDCARD, '%'))
+ .replace("\\", "\\\\")
+ .replace("%", "\\%")
+ .replace("_", "\\_")
+ .replace(WILDCARD, "%")
+ )
def enum_transformer(available_values: Dict[str, Any], value: str) -> str:
@@ -50,8 +54,9 @@ def enum_transformer(available_values: Dict[str, Any], value: str) -> str:
return available_values[unescape(value.lower())]
except KeyError:
raise errors.SearchError(
- 'Invalid value: %r. Possible values: %r.' % (
- value, list(sorted(available_values.keys()))))
+ "Invalid value: %r. Possible values: %r."
+ % (value, list(sorted(available_values.keys())))
+ )
def integer_transformer(value: str) -> int:
@@ -59,7 +64,7 @@ def integer_transformer(value: str) -> int:
def float_transformer(value: str) -> float:
- for sep in list('/:'):
+ for sep in list("/:"):
if sep in value:
a, b = value.split(sep, 1)
return float(unescape(a)) / float(unescape(b))
@@ -67,9 +72,10 @@ def float_transformer(value: str) -> float:
def apply_num_criterion_to_column(
- column: Any,
- criterion: criteria.BaseCriterion,
- transformer: Callable[[str], Number] = integer_transformer) -> SaQuery:
+ column: Any,
+ criterion: criteria.BaseCriterion,
+ transformer: Callable[[str], Number] = integer_transformer,
+) -> SaQuery:
try:
if isinstance(criterion, criteria.PlainCriterion):
expr = column == transformer(criterion.value)
@@ -80,7 +86,8 @@ def apply_num_criterion_to_column(
if criterion.min_value and criterion.max_value:
expr = column.between(
transformer(criterion.min_value),
- transformer(criterion.max_value))
+ transformer(criterion.max_value),
+ )
elif criterion.min_value:
expr = column >= transformer(criterion.min_value)
elif criterion.max_value:
@@ -89,29 +96,33 @@ def apply_num_criterion_to_column(
assert False
except ValueError:
raise errors.SearchError(
- 'Criterion value %r must be a number.' % (criterion,))
+ "Criterion value %r must be a number." % (criterion,)
+ )
return expr
def create_num_filter(
- column: Any,
- transformer: Callable[[str], Number] = integer_transformer) -> SaQuery:
+ column: Any, transformer: Callable[[str], Number] = integer_transformer
+) -> SaQuery:
def wrapper(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery,
+ criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
assert criterion
expr = apply_num_criterion_to_column(column, criterion, transformer)
if negated:
expr = ~expr
return query.filter(expr)
+
return wrapper
def apply_str_criterion_to_column(
- column: SaColumn,
- criterion: criteria.BaseCriterion,
- transformer: Callable[[str], str] = wildcard_transformer) -> SaQuery:
+ column: SaColumn,
+ criterion: criteria.BaseCriterion,
+ transformer: Callable[[str], str] = wildcard_transformer,
+) -> SaQuery:
if isinstance(criterion, criteria.PlainCriterion):
expr = column.ilike(transformer(criterion.value))
elif isinstance(criterion, criteria.ArrayCriterion):
@@ -120,30 +131,34 @@ def apply_str_criterion_to_column(
expr = expr | column.ilike(transformer(value))
elif isinstance(criterion, criteria.RangedCriterion):
raise errors.SearchError(
- 'Ranged criterion is invalid in this context. '
- 'Did you forget to escape the dots?')
+ "Ranged criterion is invalid in this context. "
+ "Did you forget to escape the dots?"
+ )
else:
assert False
return expr
def create_str_filter(
- column: SaColumn,
- transformer: Callable[[str], str] = wildcard_transformer) -> Filter:
+ column: SaColumn, transformer: Callable[[str], str] = wildcard_transformer
+) -> Filter:
def wrapper(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery,
+ criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
assert criterion
expr = apply_str_criterion_to_column(column, criterion, transformer)
if negated:
expr = ~expr
return query.filter(expr)
+
return wrapper
def apply_date_criterion_to_column(
- column: SaQuery, criterion: criteria.BaseCriterion) -> SaQuery:
+ column: SaQuery, criterion: criteria.BaseCriterion
+) -> SaQuery:
if isinstance(criterion, criteria.PlainCriterion):
min_date, max_date = util.parse_time_range(criterion.value)
expr = column.between(min_date, max_date)
@@ -171,36 +186,40 @@ def apply_date_criterion_to_column(
def create_date_filter(column: SaColumn) -> Filter:
def wrapper(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery,
+ criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
assert criterion
expr = apply_date_criterion_to_column(column, criterion)
if negated:
expr = ~expr
return query.filter(expr)
+
return wrapper
def create_subquery_filter(
- left_id_column: SaColumn,
- right_id_column: SaColumn,
- filter_column: SaColumn,
- filter_factory: SaColumn,
- subquery_decorator: Callable[[SaQuery], None] = None) -> Filter:
+ left_id_column: SaColumn,
+ right_id_column: SaColumn,
+ filter_column: SaColumn,
+ filter_factory: SaColumn,
+ subquery_decorator: Callable[[SaQuery], None] = None,
+) -> Filter:
filter_func = filter_factory(filter_column)
def wrapper(
- query: SaQuery,
- criterion: Optional[criteria.BaseCriterion],
- negated: bool) -> SaQuery:
+ query: SaQuery,
+ criterion: Optional[criteria.BaseCriterion],
+ negated: bool,
+ ) -> SaQuery:
assert criterion
- subquery = db.session.query(right_id_column.label('foreign_id'))
+ subquery = db.session.query(right_id_column.label("foreign_id"))
if subquery_decorator:
subquery = subquery_decorator(subquery)
- subquery = subquery.options(sa.orm.lazyload('*'))
+ subquery = subquery.options(sa.orm.lazyload("*"))
subquery = filter_func(subquery, criterion, False)
- subquery = subquery.subquery('t')
+ subquery = subquery.subquery("t")
expression = left_id_column.in_(subquery)
if negated:
expression = ~expression
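A quick sanity check of the escaping rules reformatted above: unescape resolves backslash escapes and, via wildcard_transformer, a bare '*' becomes the SQL LIKE wildcard '%' while LIKE's own '%' and '_' are escaped. The expected values below follow directly from the code in this hunk; running them assumes the server package is importable:

from szurubooru.search.configs import util as search_util

# "*" turns into the LIKE wildcard "%", while "_" is escaped for LIKE:
assert search_util.wildcard_transformer("tag_*") == "tag\\_%"
# a backslash-escaped "*" is kept as a literal asterisk:
assert search_util.wildcard_transformer("\\*star") == "*star"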
diff --git a/server/szurubooru/search/criteria.py b/server/szurubooru/search/criteria.py
index 4512b0fa..633c6f2e 100644
--- a/server/szurubooru/search/criteria.py
+++ b/server/szurubooru/search/criteria.py
@@ -1,4 +1,5 @@
-from typing import Optional, List
+from typing import List, Optional
+
from szurubooru.search.typing import SaQuery
@@ -12,16 +13,17 @@ class BaseCriterion:
class RangedCriterion(BaseCriterion):
def __init__(
- self,
- original_text: str,
- min_value: Optional[str],
- max_value: Optional[str]) -> None:
+ self,
+ original_text: str,
+ min_value: Optional[str],
+ max_value: Optional[str],
+ ) -> None:
super().__init__(original_text)
self.min_value = min_value
self.max_value = max_value
def __hash__(self) -> int:
- return hash(('range', self.min_value, self.max_value))
+ return hash(("range", self.min_value, self.max_value))
class PlainCriterion(BaseCriterion):
@@ -39,4 +41,4 @@ class ArrayCriterion(BaseCriterion):
self.values = values
def __hash__(self) -> int:
- return hash(tuple(['array'] + self.values))
+ return hash(tuple(["array"] + self.values))
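These criterion classes are the parsed form of token values: a plain value, a 'low..high' range, or a comma-separated list, and each defines __hash__ so a whole SearchQuery can act as a cache key in the executor. A small sketch of constructing them directly; RangedCriterion's signature is shown in this hunk, while the exact constructor arguments of PlainCriterion and ArrayCriterion are assumptions inferred from the attributes used here:

from szurubooru.search import criteria

plain = criteria.PlainCriterion("safe", "safe")             # e.g. safety:safe
ranged = criteria.RangedCriterion("1..10", "1", "10")       # e.g. tag-count:1..10
array = criteria.ArrayCriterion("cat,dog", ["cat", "dog"])  # e.g. cat,dog

# RangedCriterion and ArrayCriterion define __hash__ above, so they can
# participate in the executor's cache key.
print(hash(ranged), hash(array))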
diff --git a/server/szurubooru/search/executor.py b/server/szurubooru/search/executor.py
index 10b34b1c..a5ef9625 100644
--- a/server/szurubooru/search/executor.py
+++ b/server/szurubooru/search/executor.py
@@ -1,11 +1,13 @@
-from typing import Union, Tuple, List, Dict, Callable
+from typing import Callable, Dict, List, Tuple, Union
+
import sqlalchemy as sa
-from szurubooru import db, model, errors, rest
+
+from szurubooru import db, errors, model, rest
from szurubooru.func import cache
-from szurubooru.search import tokens, parser
-from szurubooru.search.typing import SaQuery
-from szurubooru.search.query import SearchQuery
+from szurubooru.search import parser, tokens
from szurubooru.search.configs.base_search_config import BaseSearchConfig
+from szurubooru.search.query import SearchQuery
+from szurubooru.search.typing import SaQuery
def _format_dict_keys(source: Dict) -> List[str]:
@@ -25,61 +27,59 @@ def _get_order(order: str, default_order: str) -> Union[bool, str]:
class Executor:
- '''
+ """
Class for search parsing and execution. Handles plaintext parsing and
delegates sqlalchemy filter decoration to SearchConfig instances.
- '''
+ """
def __init__(self, search_config: BaseSearchConfig) -> None:
self.config = search_config
self.parser = parser.Parser()
def get_around(
- self,
- query_text: str,
- entity_id: int) -> Tuple[model.Base, model.Base]:
+ self, query_text: str, entity_id: int
+ ) -> Tuple[model.Base, model.Base]:
search_query = self.parser.parse(query_text)
self.config.on_search_query_parsed(search_query)
- filter_query = (
- self.config
- .create_around_query()
- .options(sa.orm.lazyload('*')))
+ filter_query = self.config.create_around_query().options(
+ sa.orm.lazyload("*")
+ )
filter_query = self._prepare_db_query(
- filter_query, search_query, False)
+ filter_query, search_query, False
+ )
prev_filter_query = (
- filter_query
- .filter(self.config.id_column > entity_id)
+ filter_query.filter(self.config.id_column > entity_id)
.order_by(None)
.order_by(sa.func.abs(self.config.id_column - entity_id).asc())
- .limit(1))
+ .limit(1)
+ )
next_filter_query = (
- filter_query
- .filter(self.config.id_column < entity_id)
+ filter_query.filter(self.config.id_column < entity_id)
.order_by(None)
.order_by(sa.func.abs(self.config.id_column - entity_id).asc())
- .limit(1))
+ .limit(1)
+ )
return (
prev_filter_query.one_or_none(),
- next_filter_query.one_or_none())
+ next_filter_query.one_or_none(),
+ )
def get_around_and_serialize(
self,
ctx: rest.Context,
entity_id: int,
- serializer: Callable[[model.Base], rest.Response]
+ serializer: Callable[[model.Base], rest.Response],
) -> rest.Response:
entities = self.get_around(
- ctx.get_param_as_string('query', default=''), entity_id)
+ ctx.get_param_as_string("query", default=""), entity_id
+ )
return {
- 'prev': serializer(entities[0]),
- 'next': serializer(entities[1]),
+ "prev": serializer(entities[0]),
+ "next": serializer(entities[1]),
}
def execute(
- self,
- query_text: str,
- offset: int,
- limit: int
+ self, query_text: str, offset: int, limit: int
) -> Tuple[int, List[model.Base]]:
search_query = self.parser.parse(query_text)
self.config.on_search_query_parsed(search_query)
@@ -90,7 +90,7 @@ class Executor:
disable_eager_loads = False
for token in search_query.sort_tokens:
- if token.name == 'random':
+ if token.name == "random":
disable_eager_loads = True
key = (id(self.config), hash(search_query), offset, limit)
@@ -98,22 +98,16 @@ class Executor:
return cache.get(key)
filter_query = self.config.create_filter_query(disable_eager_loads)
- filter_query = filter_query.options(sa.orm.lazyload('*'))
+ filter_query = filter_query.options(sa.orm.lazyload("*"))
filter_query = self._prepare_db_query(filter_query, search_query, True)
- entities = (
- filter_query
- .offset(offset)
- .limit(limit)
- .all())
+ entities = filter_query.offset(offset).limit(limit).all()
count_query = self.config.create_count_query(disable_eager_loads)
- count_query = count_query.options(sa.orm.lazyload('*'))
+ count_query = count_query.options(sa.orm.lazyload("*"))
count_query = self._prepare_db_query(count_query, search_query, False)
- count_statement = (
- count_query
- .statement
- .with_only_columns([sa.func.count()])
- .order_by(None))
+ count_statement = count_query.statement.with_only_columns(
+ [sa.func.count()]
+ ).order_by(None)
count = db.session.execute(count_statement).scalar()
ret = (count, entities)
@@ -123,61 +117,73 @@ class Executor:
def execute_and_serialize(
self,
ctx: rest.Context,
- serializer: Callable[[model.Base], rest.Response]
+ serializer: Callable[[model.Base], rest.Response],
) -> rest.Response:
- query = ctx.get_param_as_string('query', default='')
- offset = ctx.get_param_as_int('offset', default=0, min=0)
- limit = ctx.get_param_as_int('limit', default=100, min=1, max=100)
+ query = ctx.get_param_as_string("query", default="")
+ offset = ctx.get_param_as_int("offset", default=0, min=0)
+ limit = ctx.get_param_as_int("limit", default=100, min=1, max=100)
count, entities = self.execute(query, offset, limit)
return {
- 'query': query,
- 'offset': offset,
- 'limit': limit,
- 'total': count,
- 'results': list([serializer(entity) for entity in entities]),
+ "query": query,
+ "offset": offset,
+ "limit": limit,
+ "total": count,
+ "results": list([serializer(entity) for entity in entities]),
}
def _prepare_db_query(
- self,
- db_query: SaQuery,
- search_query: SearchQuery,
- use_sort: bool) -> SaQuery:
+ self, db_query: SaQuery, search_query: SearchQuery, use_sort: bool
+ ) -> SaQuery:
for anon_token in search_query.anonymous_tokens:
if not self.config.anonymous_filter:
raise errors.SearchError(
- 'Anonymous tokens are not valid in this context.')
+ "Anonymous tokens are not valid in this context."
+ )
db_query = self.config.anonymous_filter(
- db_query, anon_token.criterion, anon_token.negated)
+ db_query, anon_token.criterion, anon_token.negated
+ )
for named_token in search_query.named_tokens:
if named_token.name not in self.config.named_filters:
raise errors.SearchError(
- 'Unknown named token: %r. Available named tokens: %r.' % (
+ "Unknown named token: %r. Available named tokens: %r."
+ % (
named_token.name,
- _format_dict_keys(self.config.named_filters)))
+ _format_dict_keys(self.config.named_filters),
+ )
+ )
db_query = self.config.named_filters[named_token.name](
- db_query, named_token.criterion, named_token.negated)
+ db_query, named_token.criterion, named_token.negated
+ )
for sp_token in search_query.special_tokens:
if sp_token.value not in self.config.special_filters:
raise errors.SearchError(
- 'Unknown special token: %r. '
- 'Available special tokens: %r.' % (
+ "Unknown special token: %r. "
+ "Available special tokens: %r."
+ % (
sp_token.value,
- _format_dict_keys(self.config.special_filters)))
+ _format_dict_keys(self.config.special_filters),
+ )
+ )
db_query = self.config.special_filters[sp_token.value](
- db_query, None, sp_token.negated)
+ db_query, None, sp_token.negated
+ )
if use_sort:
for sort_token in search_query.sort_tokens:
if sort_token.name not in self.config.sort_columns:
raise errors.SearchError(
- 'Unknown sort token: %r. '
- 'Available sort tokens: %r.' % (
+ "Unknown sort token: %r. "
+ "Available sort tokens: %r."
+ % (
sort_token.name,
- _format_dict_keys(self.config.sort_columns)))
- column, default_order = (
- self.config.sort_columns[sort_token.name])
+ _format_dict_keys(self.config.sort_columns),
+ )
+ )
+ column, default_order = self.config.sort_columns[
+ sort_token.name
+ ]
order = _get_order(sort_token.order, default_order)
if order == sort_token.SORT_ASC:
db_query = db_query.order_by(column.asc())
diff --git a/server/szurubooru/search/parser.py b/server/szurubooru/search/parser.py
index 79cf968e..d5e0f2f8 100644
--- a/server/szurubooru/search/parser.py
+++ b/server/szurubooru/search/parser.py
@@ -1,21 +1,23 @@
import re
+
from szurubooru import errors
from szurubooru.search import criteria, tokens
-from szurubooru.search.query import SearchQuery
from szurubooru.search.configs import util
+from szurubooru.search.query import SearchQuery
def _create_criterion(
- original_value: str, value: str) -> criteria.BaseCriterion:
-    if re.search(r'(?<!\\)\.(?<!\\)\.', value):
+    original_value: str, value: str
+) -> criteria.BaseCriterion:
+    if re.search(r"(?<!\\)\.(?<!\\)\.", value):
 def _parse_anonymous(value: str, negated: bool) -> tokens.AnonymousToken:
def _parse_named(key: str, value: str, negated: bool) -> tokens.NamedToken:
original_value = value
- if key.endswith('-min'):
+ if key.endswith("-min"):
key = key[:-4]
- value += '..'
- elif key.endswith('-max'):
+ value += ".."
+ elif key.endswith("-max"):
key = key[:-4]
- value = '..' + value
+ value = ".." + value
criterion = _create_criterion(original_value, value)
return tokens.NamedToken(key, criterion, negated)
@@ -42,32 +44,27 @@ def _parse_special(value: str, negated: bool) -> tokens.SpecialToken:
def _parse_sort(value: str, negated: bool) -> tokens.SortToken:
- if value.count(',') == 0:
+ if value.count(",") == 0:
order_str = None
- elif value.count(',') == 1:
- value, order_str = value.split(',')
+ elif value.count(",") == 1:
+ value, order_str = value.split(",")
else:
- raise errors.SearchError('Too many commas in sort style token.')
+ raise errors.SearchError("Too many commas in sort style token.")
try:
order = {
- 'asc': tokens.SortToken.SORT_ASC,
- 'desc': tokens.SortToken.SORT_DESC,
- '': tokens.SortToken.SORT_DEFAULT,
+ "asc": tokens.SortToken.SORT_ASC,
+ "desc": tokens.SortToken.SORT_DESC,
+ "": tokens.SortToken.SORT_DEFAULT,
None: tokens.SortToken.SORT_DEFAULT,
}[order_str]
except KeyError:
- raise errors.SearchError(
- 'Unknown search direction: %r.' % order_str)
+ raise errors.SearchError("Unknown search direction: %r." % order_str)
if negated:
order = {
- tokens.SortToken.SORT_ASC:
- tokens.SortToken.SORT_DESC,
- tokens.SortToken.SORT_DESC:
- tokens.SortToken.SORT_ASC,
- tokens.SortToken.SORT_DEFAULT:
- tokens.SortToken.SORT_NEGATED_DEFAULT,
- tokens.SortToken.SORT_NEGATED_DEFAULT:
- tokens.SortToken.SORT_DEFAULT,
+ tokens.SortToken.SORT_ASC: tokens.SortToken.SORT_DESC,
+ tokens.SortToken.SORT_DESC: tokens.SortToken.SORT_ASC,
+ tokens.SortToken.SORT_DEFAULT: tokens.SortToken.SORT_NEGATED_DEFAULT, # noqa: E501
+ tokens.SortToken.SORT_NEGATED_DEFAULT: tokens.SortToken.SORT_DEFAULT, # noqa: E501
}[order]
return tokens.SortToken(value, order)
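The two mappings above fully determine how a sort token's direction is resolved: an explicit ',asc' or ',desc' suffix wins, otherwise the field's default order applies, and a leading '-' on the token flips whichever was chosen. A few concrete cases, calling the module-private helper directly purely for illustration:

from szurubooru.search import tokens
from szurubooru.search.parser import _parse_sort

ST = tokens.SortToken
assert _parse_sort("score", False).order == ST.SORT_DEFAULT
assert _parse_sort("score,asc", False).order == ST.SORT_ASC
assert _parse_sort("score", True).order == ST.SORT_NEGATED_DEFAULT
assert _parse_sort("score,desc", True).order == ST.SORT_ASC  # negation flips desc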
@@ -75,29 +72,27 @@ def _parse_sort(value: str, negated: bool) -> tokens.SortToken:
class Parser:
def parse(self, query_text: str) -> SearchQuery:
query = SearchQuery()
- for chunk in re.split(r'\s+', (query_text or '').lower()):
+ for chunk in re.split(r"\s+", (query_text or "").lower()):
if not chunk:
continue
negated = False
- if chunk[0] == '-':
+ if chunk[0] == "-":
chunk = chunk[1:]
negated = True
if not chunk:
- raise errors.SearchError('Empty negated token.')
-            match = re.match(r'^(.*?)(?<!\\):(.*)$', chunk)
+            match = re.match(r"^(.*?)(?<!\\):(.*)$", chunk)
diff --git a/server/szurubooru/search/query.py b/server/szurubooru/search/query.py
--- a/server/szurubooru/search/query.py
+++ b/server/szurubooru/search/query.py
@@ -10,8 +11,11 @@ class SearchQuery:
self.sort_tokens = [] # type: List[tokens.SortToken]
def __hash__(self) -> int:
- return hash((
- tuple(self.anonymous_tokens),
- tuple(self.named_tokens),
- tuple(self.special_tokens),
- tuple(self.sort_tokens)))
+ return hash(
+ (
+ tuple(self.anonymous_tokens),
+ tuple(self.named_tokens),
+ tuple(self.special_tokens),
+ tuple(self.sort_tokens),
+ )
+ )
diff --git a/server/szurubooru/search/tokens.py b/server/szurubooru/search/tokens.py
index 0cd7fd7d..9f4eeedd 100644
--- a/server/szurubooru/search/tokens.py
+++ b/server/szurubooru/search/tokens.py
@@ -12,7 +12,8 @@ class AnonymousToken:
class NamedToken(AnonymousToken):
def __init__(
- self, name: str, criterion: BaseCriterion, negated: bool) -> None:
+ self, name: str, criterion: BaseCriterion, negated: bool
+ ) -> None:
super().__init__(criterion, negated)
self.name = name
@@ -21,11 +22,11 @@ class NamedToken(AnonymousToken):
class SortToken:
- SORT_DESC = 'desc'
- SORT_ASC = 'asc'
- SORT_NONE = ''
- SORT_DEFAULT = 'default'
- SORT_NEGATED_DEFAULT = 'negated default'
+ SORT_DESC = "desc"
+ SORT_ASC = "asc"
+ SORT_NONE = ""
+ SORT_DEFAULT = "default"
+ SORT_NEGATED_DEFAULT = "negated default"
def __init__(self, name: str, order: str) -> None:
self.name = name
diff --git a/server/szurubooru/search/typing.py b/server/szurubooru/search/typing.py
index ebb1b30d..686c2cb6 100644
--- a/server/szurubooru/search/typing.py
+++ b/server/szurubooru/search/typing.py
@@ -1,6 +1,5 @@
from typing import Any, Callable
-
SaColumn = Any
SaQuery = Any
SaQueryFactory = Callable[[], SaQuery]
diff --git a/server/szurubooru/tests/api/test_comment_creating.py b/server/szurubooru/tests/api/test_comment_creating.py
index ad243661..3f9479b1 100644
--- a/server/szurubooru/tests/api/test_comment_creating.py
+++ b/server/szurubooru/tests/api/test_comment_creating.py
@@ -1,70 +1,76 @@
from datetime import datetime
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import comments, posts
@pytest.fixture(autouse=True)
def inject_config(config_injector):
config_injector(
- {'privileges': {'comments:create': model.User.RANK_REGULAR}})
+ {"privileges": {"comments:create": model.User.RANK_REGULAR}}
+ )
def test_creating_comment(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
post = post_factory()
user = user_factory(rank=model.User.RANK_REGULAR)
db.session.add_all([post, user])
db.session.flush()
- with patch('szurubooru.func.comments.serialize_comment'), \
- fake_datetime('1997-01-01'):
- comments.serialize_comment.return_value = 'serialized comment'
+ with patch("szurubooru.func.comments.serialize_comment"), fake_datetime(
+ "1997-01-01"
+ ):
+ comments.serialize_comment.return_value = "serialized comment"
result = api.comment_api.create_comment(
context_factory(
- params={'text': 'input', 'postId': post.post_id},
- user=user))
- assert result == 'serialized comment'
+ params={"text": "input", "postId": post.post_id}, user=user
+ )
+ )
+ assert result == "serialized comment"
comment = db.session.query(model.Comment).one()
- assert comment.text == 'input'
+ assert comment.text == "input"
assert comment.creation_time == datetime(1997, 1, 1)
assert comment.last_edit_time is None
assert comment.user and comment.user.user_id == user.user_id
assert comment.post and comment.post.post_id == post.post_id
-@pytest.mark.parametrize('params', [
- {'text': None},
- {'text': ''},
- {'text': [None]},
- {'text': ['']},
-])
+@pytest.mark.parametrize(
+ "params", [{"text": None}, {"text": ""}, {"text": [None]}, {"text": [""]},]
+)
def test_trying_to_pass_invalid_params(
- user_factory, post_factory, context_factory, params):
+ user_factory, post_factory, context_factory, params
+):
post = post_factory()
user = user_factory(rank=model.User.RANK_REGULAR)
db.session.add_all([post, user])
db.session.flush()
- real_params = {'text': 'input', 'postId': post.post_id}
+ real_params = {"text": "input", "postId": post.post_id}
for key, value in params.items():
real_params[key] = value
with pytest.raises(errors.ValidationError):
api.comment_api.create_comment(
- context_factory(params=real_params, user=user))
+ context_factory(params=real_params, user=user)
+ )
-@pytest.mark.parametrize('field', ['text', 'postId'])
+@pytest.mark.parametrize("field", ["text", "postId"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'text': 'input',
- 'postId': 1,
+ "text": "input",
+ "postId": 1,
}
del params[field]
with pytest.raises(errors.ValidationError):
api.comment_api.create_comment(
context_factory(
- params={},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={}, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_comment_non_existing(user_factory, context_factory):
@@ -73,13 +79,14 @@ def test_trying_to_comment_non_existing(user_factory, context_factory):
db.session.flush()
with pytest.raises(posts.PostNotFoundError):
api.comment_api.create_comment(
- context_factory(
- params={'text': 'bad', 'postId': 5}, user=user))
+ context_factory(params={"text": "bad", "postId": 5}, user=user)
+ )
def test_trying_to_create_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.comment_api.create_comment(
context_factory(
- params={},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={}, user=user_factory(rank=model.User.RANK_ANONYMOUS)
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_comment_deleting.py b/server/szurubooru/tests/api/test_comment_deleting.py
index e1d1baa0..71df1ff8 100644
--- a/server/szurubooru/tests/api/test_comment_deleting.py
+++ b/server/szurubooru/tests/api/test_comment_deleting.py
@@ -1,16 +1,19 @@
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import comments
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'comments:delete:own': model.User.RANK_REGULAR,
- 'comments:delete:any': model.User.RANK_MODERATOR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "comments:delete:own": model.User.RANK_REGULAR,
+ "comments:delete:any": model.User.RANK_MODERATOR,
+ },
+ }
+ )
def test_deleting_own_comment(user_factory, comment_factory, context_factory):
@@ -19,27 +22,31 @@ def test_deleting_own_comment(user_factory, comment_factory, context_factory):
db.session.add(comment)
db.session.commit()
result = api.comment_api.delete_comment(
- context_factory(params={'version': 1}, user=user),
- {'comment_id': comment.comment_id})
+ context_factory(params={"version": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
assert result == {}
assert db.session.query(model.Comment).count() == 0
def test_deleting_someones_else_comment(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user1 = user_factory(rank=model.User.RANK_REGULAR)
user2 = user_factory(rank=model.User.RANK_MODERATOR)
comment = comment_factory(user=user1)
db.session.add(comment)
db.session.commit()
api.comment_api.delete_comment(
- context_factory(params={'version': 1}, user=user2),
- {'comment_id': comment.comment_id})
+ context_factory(params={"version": 1}, user=user2),
+ {"comment_id": comment.comment_id},
+ )
assert db.session.query(model.Comment).count() == 0
def test_trying_to_delete_someones_else_comment_without_privileges(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user1 = user_factory(rank=model.User.RANK_REGULAR)
user2 = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user1)
@@ -47,8 +54,9 @@ def test_trying_to_delete_someones_else_comment_without_privileges(
db.session.commit()
with pytest.raises(errors.AuthError):
api.comment_api.delete_comment(
- context_factory(params={'version': 1}, user=user2),
- {'comment_id': comment.comment_id})
+ context_factory(params={"version": 1}, user=user2),
+ {"comment_id": comment.comment_id},
+ )
assert db.session.query(model.Comment).count() == 1
@@ -56,6 +64,8 @@ def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(comments.CommentNotFoundError):
api.comment_api.delete_comment(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'comment_id': 1})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"comment_id": 1},
+ )
diff --git a/server/szurubooru/tests/api/test_comment_rating.py b/server/szurubooru/tests/api/test_comment_rating.py
index aae5e241..efb4bc23 100644
--- a/server/szurubooru/tests/api/test_comment_rating.py
+++ b/server/szurubooru/tests/api/test_comment_rating.py
@@ -1,116 +1,134 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import comments
@pytest.fixture(autouse=True)
def inject_config(config_injector):
config_injector(
- {'privileges': {'comments:score': model.User.RANK_REGULAR}})
+ {"privileges": {"comments:score": model.User.RANK_REGULAR}}
+ )
def test_simple_rating(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'), \
- fake_datetime('1997-12-01'):
- comments.serialize_comment.return_value = 'serialized comment'
+ with patch("szurubooru.func.comments.serialize_comment"), fake_datetime(
+ "1997-12-01"
+ ):
+ comments.serialize_comment.return_value = "serialized comment"
result = api.comment_api.set_comment_score(
- context_factory(params={'score': 1}, user=user),
- {'comment_id': comment.comment_id})
- assert result == 'serialized comment'
+ context_factory(params={"score": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
+ assert result == "serialized comment"
assert db.session.query(model.CommentScore).count() == 1
assert comment is not None
assert comment.score == 1
def test_updating_rating(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.comments.serialize_comment"):
+ with fake_datetime("1997-12-01"):
api.comment_api.set_comment_score(
- context_factory(params={'score': 1}, user=user),
- {'comment_id': comment.comment_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
+ with fake_datetime("1997-12-02"):
api.comment_api.set_comment_score(
- context_factory(params={'score': -1}, user=user),
- {'comment_id': comment.comment_id})
+ context_factory(params={"score": -1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
comment = db.session.query(model.Comment).one()
assert db.session.query(model.CommentScore).count() == 1
assert comment.score == -1
def test_updating_rating_to_zero(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.comments.serialize_comment"):
+ with fake_datetime("1997-12-01"):
api.comment_api.set_comment_score(
- context_factory(params={'score': 1}, user=user),
- {'comment_id': comment.comment_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
+ with fake_datetime("1997-12-02"):
api.comment_api.set_comment_score(
- context_factory(params={'score': 0}, user=user),
- {'comment_id': comment.comment_id})
+ context_factory(params={"score": 0}, user=user),
+ {"comment_id": comment.comment_id},
+ )
comment = db.session.query(model.Comment).one()
assert db.session.query(model.CommentScore).count() == 0
assert comment.score == 0
def test_deleting_rating(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.comments.serialize_comment"):
+ with fake_datetime("1997-12-01"):
api.comment_api.set_comment_score(
- context_factory(params={'score': 1}, user=user),
- {'comment_id': comment.comment_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
+ with fake_datetime("1997-12-02"):
api.comment_api.delete_comment_score(
- context_factory(user=user),
- {'comment_id': comment.comment_id})
+ context_factory(user=user), {"comment_id": comment.comment_id}
+ )
comment = db.session.query(model.Comment).one()
assert db.session.query(model.CommentScore).count() == 0
assert comment.score == 0
def test_ratings_from_multiple_users(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user1 = user_factory(rank=model.User.RANK_REGULAR)
user2 = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory()
db.session.add_all([user1, user2, comment])
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.comments.serialize_comment"):
+ with fake_datetime("1997-12-01"):
api.comment_api.set_comment_score(
- context_factory(params={'score': 1}, user=user1),
- {'comment_id': comment.comment_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user1),
+ {"comment_id": comment.comment_id},
+ )
+ with fake_datetime("1997-12-02"):
api.comment_api.set_comment_score(
- context_factory(params={'score': -1}, user=user2),
- {'comment_id': comment.comment_id})
+ context_factory(params={"score": -1}, user=user2),
+ {"comment_id": comment.comment_id},
+ )
comment = db.session.query(model.Comment).one()
assert db.session.query(model.CommentScore).count() == 2
assert comment.score == 0
def test_trying_to_omit_mandatory_field(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user = user_factory()
comment = comment_factory(user=user)
db.session.add(comment)
@@ -118,26 +136,32 @@ def test_trying_to_omit_mandatory_field(
with pytest.raises(errors.ValidationError):
api.comment_api.set_comment_score(
context_factory(params={}, user=user),
- {'comment_id': comment.comment_id})
+ {"comment_id": comment.comment_id},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(comments.CommentNotFoundError):
api.comment_api.set_comment_score(
context_factory(
- params={'score': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'comment_id': 5})
+ params={"score": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"comment_id": 5},
+ )
def test_trying_to_rate_without_privileges(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
comment = comment_factory()
db.session.add(comment)
db.session.commit()
with pytest.raises(errors.AuthError):
api.comment_api.set_comment_score(
context_factory(
- params={'score': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'comment_id': comment.comment_id})
+ params={"score": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"comment_id": comment.comment_id},
+ )
diff --git a/server/szurubooru/tests/api/test_comment_retrieving.py b/server/szurubooru/tests/api/test_comment_retrieving.py
index 5c846bbf..404af76f 100644
--- a/server/szurubooru/tests/api/test_comment_retrieving.py
+++ b/server/szurubooru/tests/api/test_comment_retrieving.py
@@ -1,72 +1,83 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import comments
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'comments:list': model.User.RANK_REGULAR,
- 'comments:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "comments:list": model.User.RANK_REGULAR,
+ "comments:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(user_factory, comment_factory, context_factory):
- comment1 = comment_factory(text='text 1')
- comment2 = comment_factory(text='text 2')
+ comment1 = comment_factory(text="text 1")
+ comment2 = comment_factory(text="text 2")
db.session.add_all([comment1, comment2])
db.session.flush()
- with patch('szurubooru.func.comments.serialize_comment'):
- comments.serialize_comment.return_value = 'serialized comment'
+ with patch("szurubooru.func.comments.serialize_comment"):
+ comments.serialize_comment.return_value = "serialized comment"
result = api.comment_api.get_comments(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert result == {
- 'query': '',
- 'offset': 0,
- 'limit': 100,
- 'total': 2,
- 'results': ['serialized comment', 'serialized comment'],
+ "query": "",
+ "offset": 0,
+ "limit": 100,
+ "total": 2,
+ "results": ["serialized comment", "serialized comment"],
}
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.comment_api.get_comments(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_retrieving_single(user_factory, comment_factory, context_factory):
- comment = comment_factory(text='dummy text')
+ comment = comment_factory(text="dummy text")
db.session.add(comment)
db.session.flush()
- with patch('szurubooru.func.comments.serialize_comment'):
- comments.serialize_comment.return_value = 'serialized comment'
+ with patch("szurubooru.func.comments.serialize_comment"):
+ comments.serialize_comment.return_value = "serialized comment"
result = api.comment_api.get_comment(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'comment_id': comment.comment_id})
- assert result == 'serialized comment'
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"comment_id": comment.comment_id},
+ )
+ assert result == "serialized comment"
def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(comments.CommentNotFoundError):
api.comment_api.get_comment(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'comment_id': 5})
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"comment_id": 5},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.comment_api.get_comment(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'comment_id': 5})
+ {"comment_id": 5},
+ )
diff --git a/server/szurubooru/tests/api/test_comment_updating.py b/server/szurubooru/tests/api/test_comment_updating.py
index 761b1ce0..e7c4fbe0 100644
--- a/server/szurubooru/tests/api/test_comment_updating.py
+++ b/server/szurubooru/tests/api/test_comment_updating.py
@@ -1,84 +1,97 @@
from datetime import datetime
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import comments
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'comments:edit:own': model.User.RANK_REGULAR,
- 'comments:edit:any': model.User.RANK_MODERATOR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "comments:edit:own": model.User.RANK_REGULAR,
+ "comments:edit:any": model.User.RANK_MODERATOR,
+ },
+ }
+ )
def test_simple_updating(
- user_factory, comment_factory, context_factory, fake_datetime):
+ user_factory, comment_factory, context_factory, fake_datetime
+):
user = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'), \
- fake_datetime('1997-12-01'):
- comments.serialize_comment.return_value = 'serialized comment'
+ with patch("szurubooru.func.comments.serialize_comment"), fake_datetime(
+ "1997-12-01"
+ ):
+ comments.serialize_comment.return_value = "serialized comment"
result = api.comment_api.update_comment(
context_factory(
- params={'text': 'new text', 'version': 1}, user=user),
- {'comment_id': comment.comment_id})
- assert result == 'serialized comment'
+ params={"text": "new text", "version": 1}, user=user
+ ),
+ {"comment_id": comment.comment_id},
+ )
+ assert result == "serialized comment"
assert comment.last_edit_time == datetime(1997, 12, 1)
-@pytest.mark.parametrize('params,expected_exception', [
- ({'text': None}, comments.EmptyCommentTextError),
- ({'text': ''}, comments.EmptyCommentTextError),
- ({'text': []}, comments.EmptyCommentTextError),
- ({'text': [None]}, errors.ValidationError),
- ({'text': ['']}, comments.EmptyCommentTextError),
-])
+@pytest.mark.parametrize(
+ "params,expected_exception",
+ [
+ ({"text": None}, comments.EmptyCommentTextError),
+ ({"text": ""}, comments.EmptyCommentTextError),
+ ({"text": []}, comments.EmptyCommentTextError),
+ ({"text": [None]}, errors.ValidationError),
+ ({"text": [""]}, comments.EmptyCommentTextError),
+ ],
+)
def test_trying_to_pass_invalid_params(
- user_factory,
- comment_factory,
- context_factory,
- params,
- expected_exception):
+ user_factory, comment_factory, context_factory, params, expected_exception
+):
user = user_factory()
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
with pytest.raises(expected_exception):
api.comment_api.update_comment(
- context_factory(
- params={**params, **{'version': 1}}, user=user),
- {'comment_id': comment.comment_id})
+ context_factory(params={**params, **{"version": 1}}, user=user),
+ {"comment_id": comment.comment_id},
+ )
def test_trying_to_omit_mandatory_field(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user = user_factory()
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
with pytest.raises(errors.ValidationError):
api.comment_api.update_comment(
- context_factory(params={'version': 1}, user=user),
- {'comment_id': comment.comment_id})
+ context_factory(params={"version": 1}, user=user),
+ {"comment_id": comment.comment_id},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(comments.CommentNotFoundError):
api.comment_api.update_comment(
context_factory(
- params={'text': 'new text'},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'comment_id': 5})
+ params={"text": "new text"},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"comment_id": 5},
+ )
def test_trying_to_update_someones_comment_without_privileges(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user = user_factory(rank=model.User.RANK_REGULAR)
user2 = user_factory(rank=model.User.RANK_REGULAR)
comment = comment_factory(user=user)
@@ -87,19 +100,24 @@ def test_trying_to_update_someones_comment_without_privileges(
with pytest.raises(errors.AuthError):
api.comment_api.update_comment(
context_factory(
- params={'text': 'new text', 'version': 1}, user=user2),
- {'comment_id': comment.comment_id})
+ params={"text": "new text", "version": 1}, user=user2
+ ),
+ {"comment_id": comment.comment_id},
+ )
def test_updating_someones_comment_with_privileges(
- user_factory, comment_factory, context_factory):
+ user_factory, comment_factory, context_factory
+):
user = user_factory(rank=model.User.RANK_REGULAR)
user2 = user_factory(rank=model.User.RANK_MODERATOR)
comment = comment_factory(user=user)
db.session.add(comment)
db.session.commit()
- with patch('szurubooru.func.comments.serialize_comment'):
+ with patch("szurubooru.func.comments.serialize_comment"):
api.comment_api.update_comment(
context_factory(
- params={'text': 'new text', 'version': 1}, user=user2),
- {'comment_id': comment.comment_id})
+ params={"text": "new text", "version": 1}, user=user2
+ ),
+ {"comment_id": comment.comment_id},
+ )
diff --git a/server/szurubooru/tests/api/test_info.py b/server/szurubooru/tests/api/test_info.py
index cd157273..57bf7a30 100644
--- a/server/szurubooru/tests/api/test_info.py
+++ b/server/szurubooru/tests/api/test_info.py
@@ -1,88 +1,94 @@
from datetime import datetime
+
from szurubooru import api, db, model
def test_info_api(
- tmpdir, config_injector, context_factory, post_factory, user_factory,
- fake_datetime):
- directory = tmpdir.mkdir('data')
- directory.join('test.txt').write('abc')
+ tmpdir,
+ config_injector,
+ context_factory,
+ post_factory,
+ user_factory,
+ fake_datetime,
+):
+ directory = tmpdir.mkdir("data")
+ directory.join("test.txt").write("abc")
auth_user = user_factory(rank=model.User.RANK_REGULAR)
anon_user = user_factory(rank=model.User.RANK_ANONYMOUS)
- config_injector({
- 'name': 'test installation',
- 'contact_email': 'test@example.com',
- 'enable_safety': True,
- 'data_dir': str(directory),
- 'user_name_regex': '1',
- 'password_regex': '2',
- 'tag_name_regex': '3',
- 'tag_category_name_regex': '4',
- 'default_rank': '5',
- 'privileges': {
- 'test_key1': 'test_value1',
- 'test_key2': 'test_value2',
- 'posts:view:featured': 'regular',
- },
- 'smtp': {
- 'host': 'example.com',
+ config_injector(
+ {
+ "name": "test installation",
+ "contact_email": "test@example.com",
+ "enable_safety": True,
+ "data_dir": str(directory),
+ "user_name_regex": "1",
+ "password_regex": "2",
+ "tag_name_regex": "3",
+ "tag_category_name_regex": "4",
+ "default_rank": "5",
+ "privileges": {
+ "test_key1": "test_value1",
+ "test_key2": "test_value2",
+ "posts:view:featured": "regular",
+ },
+ "smtp": {"host": "example.com",},
}
- })
+ )
db.session.add_all([post_factory(), post_factory()])
db.session.flush()
expected_config_key = {
- 'name': 'test installation',
- 'contactEmail': 'test@example.com',
- 'enableSafety': True,
- 'userNameRegex': '1',
- 'passwordRegex': '2',
- 'tagNameRegex': '3',
- 'tagCategoryNameRegex': '4',
- 'defaultUserRank': '5',
- 'privileges': {
- 'testKey1': 'test_value1',
- 'testKey2': 'test_value2',
- 'posts:view:featured': 'regular',
+ "name": "test installation",
+ "contactEmail": "test@example.com",
+ "enableSafety": True,
+ "userNameRegex": "1",
+ "passwordRegex": "2",
+ "tagNameRegex": "3",
+ "tagCategoryNameRegex": "4",
+ "defaultUserRank": "5",
+ "privileges": {
+ "testKey1": "test_value1",
+ "testKey2": "test_value2",
+ "posts:view:featured": "regular",
},
- 'canSendMails': True
+ "canSendMails": True,
}
- with fake_datetime('2016-01-01 13:00'):
+ with fake_datetime("2016-01-01 13:00"):
assert api.info_api.get_info(context_factory(user=auth_user)) == {
- 'postCount': 2,
- 'diskUsage': 3,
- 'featuredPost': None,
- 'featuringTime': None,
- 'featuringUser': None,
- 'serverTime': datetime(2016, 1, 1, 13, 0),
- 'config': expected_config_key,
+ "postCount": 2,
+ "diskUsage": 3,
+ "featuredPost": None,
+ "featuringTime": None,
+ "featuringUser": None,
+ "serverTime": datetime(2016, 1, 1, 13, 0),
+ "config": expected_config_key,
}
- directory.join('test2.txt').write('abc')
- with fake_datetime('2016-01-03 12:59'):
+ directory.join("test2.txt").write("abc")
+ with fake_datetime("2016-01-03 12:59"):
assert api.info_api.get_info(context_factory(user=auth_user)) == {
- 'postCount': 2,
- 'diskUsage': 3, # still 3 - it's cached
- 'featuredPost': None,
- 'featuringTime': None,
- 'featuringUser': None,
- 'serverTime': datetime(2016, 1, 3, 12, 59),
- 'config': expected_config_key,
+ "postCount": 2,
+ "diskUsage": 3, # still 3 - it's cached
+ "featuredPost": None,
+ "featuringTime": None,
+ "featuringUser": None,
+ "serverTime": datetime(2016, 1, 3, 12, 59),
+ "config": expected_config_key,
}
- with fake_datetime('2016-01-03 13:01'):
+ with fake_datetime("2016-01-03 13:01"):
assert api.info_api.get_info(context_factory(user=auth_user)) == {
- 'postCount': 2,
- 'diskUsage': 6, # cache expired
- 'featuredPost': None,
- 'featuringTime': None,
- 'featuringUser': None,
- 'serverTime': datetime(2016, 1, 3, 13, 1),
- 'config': expected_config_key,
+ "postCount": 2,
+ "diskUsage": 6, # cache expired
+ "featuredPost": None,
+ "featuringTime": None,
+ "featuringUser": None,
+ "serverTime": datetime(2016, 1, 3, 13, 1),
+ "config": expected_config_key,
}
- with fake_datetime('2016-01-03 13:01'):
+ with fake_datetime("2016-01-03 13:01"):
assert api.info_api.get_info(context_factory(user=anon_user)) == {
- 'postCount': 2,
- 'diskUsage': 6, # cache expired
- 'serverTime': datetime(2016, 1, 3, 13, 1),
- 'config': expected_config_key,
+ "postCount": 2,
+ "diskUsage": 6, # cache expired
+ "serverTime": datetime(2016, 1, 3, 13, 1),
+ "config": expected_config_key,
}
diff --git a/server/szurubooru/tests/api/test_password_reset.py b/server/szurubooru/tests/api/test_password_reset.py
index 29917a07..28e4c124 100644
--- a/server/szurubooru/tests/api/test_password_reset.py
+++ b/server/szurubooru/tests/api/test_password_reset.py
@@ -1,87 +1,112 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import auth, mailer
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'secret': 'x',
- 'domain': 'http://example.com',
- 'name': 'Test instance',
- 'smtp': {
- 'from': 'noreply@example.com',
- },
- })
+ config_injector(
+ {
+ "secret": "x",
+ "domain": "http://example.com",
+ "name": "Test instance",
+ "smtp": {"from": "noreply@example.com",},
+ }
+ )
def test_reset_sending_email(context_factory, user_factory):
- db.session.add(user_factory(
- name='u1', rank=model.User.RANK_REGULAR, email='user@example.com'))
+ db.session.add(
+ user_factory(
+ name="u1", rank=model.User.RANK_REGULAR, email="user@example.com"
+ )
+ )
db.session.flush()
- for initiating_user in ['u1', 'user@example.com']:
- with patch('szurubooru.func.mailer.send_mail'):
- assert api.password_reset_api.start_password_reset(
- context_factory(), {'user_name': initiating_user}) == {}
+ for initiating_user in ["u1", "user@example.com"]:
+ with patch("szurubooru.func.mailer.send_mail"):
+ assert (
+ api.password_reset_api.start_password_reset(
+ context_factory(), {"user_name": initiating_user}
+ )
+ == {}
+ )
mailer.send_mail.assert_called_once_with(
- 'noreply@example.com',
- 'user@example.com',
- 'Password reset for Test instance',
- 'You (or someone else) requested to reset your password ' +
- 'on Test instance.\nIf you wish to proceed, click this l' +
- 'ink: http://example.com/password-reset/u1:4ac0be176fb36' +
- '4f13ee6b634c43220e2\nOtherwise, please ignore this email.')
+ "noreply@example.com",
+ "user@example.com",
+ "Password reset for Test instance",
+ "You (or someone else) requested to reset your password "
+ + "on Test instance.\nIf you wish to proceed, click this l"
+ + "ink: http://example.com/password-reset/u1:4ac0be176fb36"
+ + "4f13ee6b634c43220e2\nOtherwise, please ignore this email.",
+ )
def test_trying_to_reset_non_existing(context_factory):
with pytest.raises(errors.NotFoundError):
api.password_reset_api.start_password_reset(
- context_factory(), {'user_name': 'u1'})
+ context_factory(), {"user_name": "u1"}
+ )
def test_trying_to_reset_without_email(context_factory, user_factory):
db.session.add(
- user_factory(name='u1', rank=model.User.RANK_REGULAR, email=None))
+ user_factory(name="u1", rank=model.User.RANK_REGULAR, email=None)
+ )
db.session.flush()
with pytest.raises(errors.ValidationError):
api.password_reset_api.start_password_reset(
- context_factory(), {'user_name': 'u1'})
+ context_factory(), {"user_name": "u1"}
+ )
def test_confirming_with_good_token(context_factory, user_factory):
user = user_factory(
- name='u1', rank=model.User.RANK_REGULAR, email='user@example.com')
+ name="u1", rank=model.User.RANK_REGULAR, email="user@example.com"
+ )
old_hash = user.password_hash
db.session.add(user)
db.session.flush()
context = context_factory(
- params={'token': '4ac0be176fb364f13ee6b634c43220e2'})
+ params={"token": "4ac0be176fb364f13ee6b634c43220e2"}
+ )
result = api.password_reset_api.finish_password_reset(
- context, {'user_name': 'u1'})
+ context, {"user_name": "u1"}
+ )
assert user.password_hash != old_hash
- assert auth.is_valid_password(user, result['password']) is True
+ assert auth.is_valid_password(user, result["password"]) is True
def test_trying_to_confirm_non_existing(context_factory):
with pytest.raises(errors.NotFoundError):
api.password_reset_api.finish_password_reset(
- context_factory(), {'user_name': 'u1'})
+ context_factory(), {"user_name": "u1"}
+ )
def test_trying_to_confirm_without_token(context_factory, user_factory):
- db.session.add(user_factory(
- name='u1', rank=model.User.RANK_REGULAR, email='user@example.com'))
+ db.session.add(
+ user_factory(
+ name="u1", rank=model.User.RANK_REGULAR, email="user@example.com"
+ )
+ )
db.session.flush()
with pytest.raises(errors.ValidationError):
api.password_reset_api.finish_password_reset(
- context_factory(params={}), {'user_name': 'u1'})
+ context_factory(params={}), {"user_name": "u1"}
+ )
def test_trying_to_confirm_with_bad_token(context_factory, user_factory):
- db.session.add(user_factory(
- name='u1', rank=model.User.RANK_REGULAR, email='user@example.com'))
+ db.session.add(
+ user_factory(
+ name="u1", rank=model.User.RANK_REGULAR, email="user@example.com"
+ )
+ )
db.session.flush()
with pytest.raises(errors.ValidationError):
api.password_reset_api.finish_password_reset(
- context_factory(params={'token': 'bad'}), {'user_name': 'u1'})
+ context_factory(params={"token": "bad"}), {"user_name": "u1"}
+ )
diff --git a/server/szurubooru/tests/api/test_pool_category_creating.py b/server/szurubooru/tests/api/test_pool_category_creating.py
index 5e932a63..c13c1ec6 100644
--- a/server/szurubooru/tests/api/test_pool_category_creating.py
+++ b/server/szurubooru/tests/api/test_pool_category_creating.py
@@ -1,6 +1,8 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pool_categories, snapshots
@@ -10,51 +12,60 @@ def _update_category_name(category, name):
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {'pool_categories:create': model.User.RANK_REGULAR},
- })
+ config_injector(
+ {"privileges": {"pool_categories:create": model.User.RANK_REGULAR},}
+ )
def test_creating_category(
- pool_category_factory, user_factory, context_factory):
+ pool_category_factory, user_factory, context_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = pool_category_factory(name='meta')
+ category = pool_category_factory(name="meta")
db.session.add(category)
- with patch('szurubooru.func.pool_categories.create_category'), \
- patch('szurubooru.func.pool_categories.serialize_category'), \
- patch('szurubooru.func.pool_categories.update_category_name'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.pool_categories.create_category"), patch(
+ "szurubooru.func.pool_categories.serialize_category"
+ ), patch("szurubooru.func.pool_categories.update_category_name"), patch(
+ "szurubooru.func.snapshots.create"
+ ):
pool_categories.create_category.return_value = category
- pool_categories.update_category_name.side_effect = \
+ pool_categories.update_category_name.side_effect = (
_update_category_name
- pool_categories.serialize_category.return_value = 'serialized category'
+ )
+ pool_categories.serialize_category.return_value = "serialized category"
result = api.pool_category_api.create_pool_category(
context_factory(
- params={'name': 'meta', 'color': 'black'}, user=auth_user))
- assert result == 'serialized category'
+ params={"name": "meta", "color": "black"}, user=auth_user
+ )
+ )
+ assert result == "serialized category"
pool_categories.create_category.assert_called_once_with(
- 'meta', 'black')
+ "meta", "black"
+ )
snapshots.create.assert_called_once_with(category, auth_user)
-@pytest.mark.parametrize('field', ['name', 'color'])
+@pytest.mark.parametrize("field", ["name", "color"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'name': 'meta',
- 'color': 'black',
+ "name": "meta",
+ "color": "black",
}
del params[field]
with pytest.raises(errors.ValidationError):
api.pool_category_api.create_pool_category(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_create_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.pool_category_api.create_pool_category(
context_factory(
- params={'name': 'meta', 'color': 'black'},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"name": "meta", "color": "black"},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_pool_category_deleting.py b/server/szurubooru/tests/api/test_pool_category_deleting.py
index 72853ac8..cdbdcfeb 100644
--- a/server/szurubooru/tests/api/test_pool_category_deleting.py
+++ b/server/szurubooru/tests/api/test_pool_category_deleting.py
@@ -1,76 +1,89 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pool_categories, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {'pool_categories:delete': model.User.RANK_REGULAR},
- })
+ config_injector(
+ {"privileges": {"pool_categories:delete": model.User.RANK_REGULAR},}
+ )
def test_deleting(user_factory, pool_category_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = pool_category_factory(name='category')
- db.session.add(pool_category_factory(name='root'))
+ category = pool_category_factory(name="category")
+ db.session.add(pool_category_factory(name="root"))
db.session.add(category)
db.session.flush()
- with patch('szurubooru.func.snapshots.delete'):
+ with patch("szurubooru.func.snapshots.delete"):
result = api.pool_category_api.delete_pool_category(
- context_factory(params={'version': 1}, user=auth_user),
- {'category_name': 'category'})
+ context_factory(params={"version": 1}, user=auth_user),
+ {"category_name": "category"},
+ )
assert result == {}
assert db.session.query(model.PoolCategory).count() == 1
- assert db.session.query(model.PoolCategory).one().name == 'root'
+ assert db.session.query(model.PoolCategory).one().name == "root"
snapshots.delete.assert_called_once_with(category, auth_user)
def test_trying_to_delete_used(
- user_factory, pool_category_factory, pool_factory, context_factory):
- category = pool_category_factory(name='category')
+ user_factory, pool_category_factory, pool_factory, context_factory
+):
+ category = pool_category_factory(name="category")
db.session.add(category)
db.session.flush()
- pool = pool_factory(names=['pool'], category=category)
+ pool = pool_factory(names=["pool"], category=category)
db.session.add(pool)
db.session.commit()
with pytest.raises(pool_categories.PoolCategoryIsInUseError):
api.pool_category_api.delete_pool_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'category'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "category"},
+ )
assert db.session.query(model.PoolCategory).count() == 1
def test_trying_to_delete_last(
- user_factory, pool_category_factory, context_factory):
- db.session.add(pool_category_factory(name='root'))
+ user_factory, pool_category_factory, context_factory
+):
+ db.session.add(pool_category_factory(name="root"))
db.session.commit()
with pytest.raises(pool_categories.PoolCategoryIsInUseError):
api.pool_category_api.delete_pool_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'root'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "root"},
+ )
def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(pool_categories.PoolCategoryNotFoundError):
api.pool_category_api.delete_pool_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'bad'})
+ {"category_name": "bad"},
+ )
def test_trying_to_delete_without_privileges(
- user_factory, pool_category_factory, context_factory):
- db.session.add(pool_category_factory(name='category'))
+ user_factory, pool_category_factory, context_factory
+):
+ db.session.add(pool_category_factory(name="category"))
db.session.commit()
with pytest.raises(errors.AuthError):
api.pool_category_api.delete_pool_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': 'category'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"category_name": "category"},
+ )
assert db.session.query(model.PoolCategory).count() == 1
diff --git a/server/szurubooru/tests/api/test_pool_category_retrieving.py b/server/szurubooru/tests/api/test_pool_category_retrieving.py
index 4a467c0f..ea837c4d 100644
--- a/server/szurubooru/tests/api/test_pool_category_retrieving.py
+++ b/server/szurubooru/tests/api/test_pool_category_retrieving.py
@@ -1,43 +1,49 @@
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pool_categories
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'pool_categories:list': model.User.RANK_REGULAR,
- 'pool_categories:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "pool_categories:list": model.User.RANK_REGULAR,
+ "pool_categories:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(
- user_factory, pool_category_factory, context_factory):
- db.session.add_all([
- pool_category_factory(name='c1'),
- pool_category_factory(name='c2'),
- ])
+ user_factory, pool_category_factory, context_factory
+):
+ db.session.add_all(
+ [pool_category_factory(name="c1"), pool_category_factory(name="c2"),]
+ )
db.session.flush()
result = api.pool_category_api.get_pool_categories(
- context_factory(user=user_factory(rank=model.User.RANK_REGULAR)))
- assert [cat['name'] for cat in result['results']] == ['c1', 'c2']
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR))
+ )
+ assert [cat["name"] for cat in result["results"]] == ["c1", "c2"]
def test_retrieving_single(
- user_factory, pool_category_factory, context_factory):
- db.session.add(pool_category_factory(name='cat'))
+ user_factory, pool_category_factory, context_factory
+):
+ db.session.add(pool_category_factory(name="cat"))
db.session.flush()
result = api.pool_category_api.get_pool_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'cat'})
+ {"category_name": "cat"},
+ )
assert result == {
- 'name': 'cat',
- 'color': 'dummy',
- 'usages': 0,
- 'default': False,
- 'version': 1,
+ "name": "cat",
+ "color": "dummy",
+ "usages": 0,
+ "default": False,
+ "version": 1,
}
@@ -45,12 +51,15 @@ def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(pool_categories.PoolCategoryNotFoundError):
api.pool_category_api.get_pool_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': '-'})
+ {"category_name": "-"},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.pool_category_api.get_pool_category(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': '-'})
+ {"category_name": "-"},
+ )
diff --git a/server/szurubooru/tests/api/test_pool_category_updating.py b/server/szurubooru/tests/api/test_pool_category_updating.py
index 028c5209..d934e603 100644
--- a/server/szurubooru/tests/api/test_pool_category_updating.py
+++ b/server/szurubooru/tests/api/test_pool_category_updating.py
@@ -1,6 +1,8 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pool_categories, snapshots
@@ -10,101 +12,117 @@ def _update_category_name(category, name):
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'pool_categories:edit:name': model.User.RANK_REGULAR,
- 'pool_categories:edit:color': model.User.RANK_REGULAR,
- 'pool_categories:set_default': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "pool_categories:edit:name": model.User.RANK_REGULAR,
+ "pool_categories:edit:color": model.User.RANK_REGULAR,
+ "pool_categories:set_default": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_simple_updating(user_factory, pool_category_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = pool_category_factory(name='name', color='black')
+ category = pool_category_factory(name="name", color="black")
db.session.add(category)
db.session.flush()
- with patch('szurubooru.func.pool_categories.serialize_category'), \
- patch('szurubooru.func.pool_categories.update_category_name'), \
- patch('szurubooru.func.pool_categories.update_category_color'), \
- patch('szurubooru.func.snapshots.modify'):
- pool_categories.update_category_name.side_effect = \
+ with patch("szurubooru.func.pool_categories.serialize_category"), patch(
+ "szurubooru.func.pool_categories.update_category_name"
+ ), patch("szurubooru.func.pool_categories.update_category_color"), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
+ pool_categories.update_category_name.side_effect = (
_update_category_name
- pool_categories.serialize_category.return_value = 'serialized category'
+ )
+ pool_categories.serialize_category.return_value = "serialized category"
result = api.pool_category_api.update_pool_category(
context_factory(
- params={'name': 'changed', 'color': 'white', 'version': 1},
- user=auth_user),
- {'category_name': 'name'})
- assert result == 'serialized category'
+ params={"name": "changed", "color": "white", "version": 1},
+ user=auth_user,
+ ),
+ {"category_name": "name"},
+ )
+ assert result == "serialized category"
pool_categories.update_category_name.assert_called_once_with(
- category, 'changed')
+ category, "changed"
+ )
pool_categories.update_category_color.assert_called_once_with(
- category, 'white')
+ category, "white"
+ )
snapshots.modify.assert_called_once_with(category, auth_user)
-@pytest.mark.parametrize('field', ['name', 'color'])
+@pytest.mark.parametrize("field", ["name", "color"])
def test_omitting_optional_field(
- user_factory, pool_category_factory, context_factory, field):
- db.session.add(pool_category_factory(name='name', color='black'))
+ user_factory, pool_category_factory, context_factory, field
+):
+ db.session.add(pool_category_factory(name="name", color="black"))
db.session.commit()
params = {
- 'name': 'changed',
- 'color': 'white',
+ "name": "changed",
+ "color": "white",
}
del params[field]
- with patch('szurubooru.func.pool_categories.serialize_category'), \
- patch('szurubooru.func.pool_categories.update_category_name'):
+ with patch("szurubooru.func.pool_categories.serialize_category"), patch(
+ "szurubooru.func.pool_categories.update_category_name"
+ ):
api.pool_category_api.update_pool_category(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'name'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "name"},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(pool_categories.PoolCategoryNotFoundError):
api.pool_category_api.update_pool_category(
context_factory(
- params={'name': ['dummy']},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'bad'})
+ params={"name": ["dummy"]},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "bad"},
+ )
-@pytest.mark.parametrize('params', [
- {'name': 'whatever'},
- {'color': 'whatever'},
-])
+@pytest.mark.parametrize(
+ "params", [{"name": "whatever"}, {"color": "whatever"},]
+)
def test_trying_to_update_without_privileges(
- user_factory, pool_category_factory, context_factory, params):
- db.session.add(pool_category_factory(name='dummy'))
+ user_factory, pool_category_factory, context_factory, params
+):
+ db.session.add(pool_category_factory(name="dummy"))
db.session.commit()
with pytest.raises(errors.AuthError):
api.pool_category_api.update_pool_category(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': 'dummy'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"category_name": "dummy"},
+ )
def test_set_as_default(user_factory, pool_category_factory, context_factory):
- category = pool_category_factory(name='name', color='black')
+ category = pool_category_factory(name="name", color="black")
db.session.add(category)
db.session.commit()
- with patch('szurubooru.func.pool_categories.serialize_category'), \
- patch('szurubooru.func.pool_categories.set_default_category'):
- pool_categories.update_category_name.side_effect = \
+ with patch("szurubooru.func.pool_categories.serialize_category"), patch(
+ "szurubooru.func.pool_categories.set_default_category"
+ ):
+ pool_categories.update_category_name.side_effect = (
_update_category_name
- pool_categories.serialize_category.return_value = 'serialized category'
+ )
+ pool_categories.serialize_category.return_value = "serialized category"
result = api.pool_category_api.set_pool_category_as_default(
context_factory(
- params={
- 'name': 'changed',
- 'color': 'white',
- 'version': 1,
- },
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'name'})
- assert result == 'serialized category'
+ params={"name": "changed", "color": "white", "version": 1,},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "name"},
+ )
+ assert result == "serialized category"
pool_categories.set_default_category.assert_called_once_with(category)
diff --git a/server/szurubooru/tests/api/test_pool_creating.py b/server/szurubooru/tests/api/test_pool_creating.py
index 5eeb2e29..a9f1473f 100644
--- a/server/szurubooru/tests/api/test_pool_creating.py
+++ b/server/szurubooru/tests/api/test_pool_creating.py
@@ -1,82 +1,95 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, model, errors
+
+from szurubooru import api, errors, model
from szurubooru.func import pools, posts, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'pools:create': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"pools:create": model.User.RANK_REGULAR}})
def test_creating_simple_pools(pool_factory, user_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
pool = pool_factory()
- with patch('szurubooru.func.pools.create_pool'), \
- patch('szurubooru.func.pools.get_or_create_pools_by_names'), \
- patch('szurubooru.func.pools.serialize_pool'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.pools.create_pool"), patch(
+ "szurubooru.func.pools.get_or_create_pools_by_names"
+ ), patch("szurubooru.func.pools.serialize_pool"), patch(
+ "szurubooru.func.snapshots.create"
+ ):
posts.get_posts_by_ids.return_value = ([], [])
pools.create_pool.return_value = pool
- pools.serialize_pool.return_value = 'serialized pool'
+ pools.serialize_pool.return_value = "serialized pool"
result = api.pool_api.create_pool(
context_factory(
params={
- 'names': ['pool1', 'pool2'],
- 'category': 'default',
- 'description': 'desc',
- 'posts': [1, 2],
+ "names": ["pool1", "pool2"],
+ "category": "default",
+ "description": "desc",
+ "posts": [1, 2],
},
- user=auth_user))
- assert result == 'serialized pool'
+ user=auth_user,
+ )
+ )
+ assert result == "serialized pool"
pools.create_pool.assert_called_once_with(
- ['pool1', 'pool2'], 'default', [1, 2])
+ ["pool1", "pool2"], "default", [1, 2]
+ )
snapshots.create.assert_called_once_with(pool, auth_user)
-@pytest.mark.parametrize('field', ['names', 'category'])
+@pytest.mark.parametrize("field", ["names", "category"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'names': ['pool1', 'pool2'],
- 'category': 'default',
- 'description': 'desc',
- 'posts': [],
+ "names": ["pool1", "pool2"],
+ "category": "default",
+ "description": "desc",
+ "posts": [],
}
del params[field]
with pytest.raises(errors.ValidationError):
api.pool_api.create_pool(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
-@pytest.mark.parametrize('field', ['description', 'posts'])
+@pytest.mark.parametrize("field", ["description", "posts"])
def test_omitting_optional_field(
- pool_factory, user_factory, context_factory, field):
+ pool_factory, user_factory, context_factory, field
+):
params = {
- 'names': ['pool1', 'pool2'],
- 'category': 'default',
- 'description': 'desc',
- 'posts': [],
+ "names": ["pool1", "pool2"],
+ "category": "default",
+ "description": "desc",
+ "posts": [],
}
del params[field]
- with patch('szurubooru.func.pools.create_pool'), \
- patch('szurubooru.func.pools.serialize_pool'):
+ with patch("szurubooru.func.pools.create_pool"), patch(
+ "szurubooru.func.pools.serialize_pool"
+ ):
pools.create_pool.return_value = pool_factory()
api.pool_api.create_pool(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_create_pool_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.pool_api.create_pool(
context_factory(
params={
- 'names': ['pool'],
- 'category': 'default',
- 'posts': [],
+ "names": ["pool"],
+ "category": "default",
+ "posts": [],
},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_pool_deleting.py b/server/szurubooru/tests/api/test_pool_deleting.py
index e29656d3..387aca10 100644
--- a/server/szurubooru/tests/api/test_pool_deleting.py
+++ b/server/szurubooru/tests/api/test_pool_deleting.py
@@ -1,12 +1,14 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pools, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'pools:delete': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"pools:delete": model.User.RANK_REGULAR}})
def test_deleting(user_factory, pool_factory, context_factory):
@@ -14,17 +16,19 @@ def test_deleting(user_factory, pool_factory, context_factory):
pool = pool_factory(id=1)
db.session.add(pool)
db.session.commit()
- with patch('szurubooru.func.snapshots.delete'):
+ with patch("szurubooru.func.snapshots.delete"):
result = api.pool_api.delete_pool(
- context_factory(params={'version': 1}, user=auth_user),
- {'pool_id': 1})
+ context_factory(params={"version": 1}, user=auth_user),
+ {"pool_id": 1},
+ )
assert result == {}
assert db.session.query(model.Pool).count() == 0
snapshots.delete.assert_called_once_with(pool, auth_user)
def test_deleting_used(
- user_factory, pool_factory, context_factory, post_factory):
+ user_factory, pool_factory, context_factory, post_factory
+):
pool = pool_factory(id=1)
post = post_factory(id=1)
pool.posts.append(post)
@@ -32,9 +36,11 @@ def test_deleting_used(
db.session.commit()
api.pool_api.delete_pool(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 1})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"pool_id": 1},
+ )
db.session.refresh(post)
assert db.session.query(model.Pool).count() == 0
assert db.session.query(model.PoolPost).count() == 0
@@ -45,17 +51,21 @@ def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(pools.PoolNotFoundError):
api.pool_api.delete_pool(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 9999})
+ {"pool_id": 9999},
+ )
def test_trying_to_delete_without_privileges(
- user_factory, pool_factory, context_factory):
+ user_factory, pool_factory, context_factory
+):
db.session.add(pool_factory(id=1))
db.session.commit()
with pytest.raises(errors.AuthError):
api.pool_api.delete_pool(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'pool_id': 1})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"pool_id": 1},
+ )
assert db.session.query(model.Pool).count() == 1
diff --git a/server/szurubooru/tests/api/test_pool_merging.py b/server/szurubooru/tests/api/test_pool_merging.py
index dc462d2d..d0e55325 100644
--- a/server/szurubooru/tests/api/test_pool_merging.py
+++ b/server/szurubooru/tests/api/test_pool_merging.py
@@ -1,12 +1,14 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pools, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'pools:merge': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"pools:merge": model.User.RANK_REGULAR}})
def test_merging(user_factory, pool_factory, context_factory, post_factory):
@@ -23,76 +25,88 @@ def test_merging(user_factory, pool_factory, context_factory, post_factory):
db.session.commit()
assert source_pool.post_count == 1
assert target_pool.post_count == 0
- with patch('szurubooru.func.pools.serialize_pool'), \
- patch('szurubooru.func.pools.merge_pools'), \
- patch('szurubooru.func.snapshots.merge'):
+ with patch("szurubooru.func.pools.serialize_pool"), patch(
+ "szurubooru.func.pools.merge_pools"
+ ), patch("szurubooru.func.snapshots.merge"):
api.pool_api.merge_pools(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 1,
- 'mergeTo': 2,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": 1,
+ "mergeTo": 2,
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
pools.merge_pools.called_once_with(source_pool, target_pool)
snapshots.merge.assert_called_once_with(
- source_pool, target_pool, auth_user)
+ source_pool, target_pool, auth_user
+ )
@pytest.mark.parametrize(
- 'field', ['remove', 'mergeTo', 'removeVersion', 'mergeToVersion'])
+ "field", ["remove", "mergeTo", "removeVersion", "mergeToVersion"]
+)
def test_trying_to_omit_mandatory_field(
- user_factory, pool_factory, context_factory, field):
- db.session.add_all([
- pool_factory(id=1),
- pool_factory(id=2),
- ])
+ user_factory, pool_factory, context_factory, field
+):
+ db.session.add_all(
+ [pool_factory(id=1), pool_factory(id=2),]
+ )
db.session.commit()
params = {
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 1,
- 'mergeTo': 2,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": 1,
+ "mergeTo": 2,
}
del params[field]
with pytest.raises(errors.ValidationError):
api.pool_api.merge_pools(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_merge_non_existing(
- user_factory, pool_factory, context_factory):
+ user_factory, pool_factory, context_factory
+):
db.session.add(pool_factory(id=1))
db.session.commit()
with pytest.raises(pools.PoolNotFoundError):
api.pool_api.merge_pools(
context_factory(
- params={'remove': 1, 'mergeTo': 9999},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": 1, "mergeTo": 9999},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
with pytest.raises(pools.PoolNotFoundError):
api.pool_api.merge_pools(
context_factory(
- params={'remove': 9999, 'mergeTo': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": 9999, "mergeTo": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
def test_trying_to_merge_without_privileges(
- user_factory, pool_factory, context_factory):
- db.session.add_all([
- pool_factory(id=1),
- pool_factory(id=2),
- ])
+ user_factory, pool_factory, context_factory
+):
+ db.session.add_all(
+ [pool_factory(id=1), pool_factory(id=2),]
+ )
db.session.commit()
with pytest.raises(errors.AuthError):
api.pool_api.merge_pools(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 1,
- 'mergeTo': 2,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": 1,
+ "mergeTo": 2,
},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_pool_retrieving.py b/server/szurubooru/tests/api/test_pool_retrieving.py
index e48565a0..688dfa7d 100644
--- a/server/szurubooru/tests/api/test_pool_retrieving.py
+++ b/server/szurubooru/tests/api/test_pool_retrieving.py
@@ -1,17 +1,21 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pools
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'pools:list': model.User.RANK_REGULAR,
- 'pools:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "pools:list": model.User.RANK_REGULAR,
+ "pools:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(user_factory, pool_factory, context_factory):
@@ -19,54 +23,60 @@ def test_retrieving_multiple(user_factory, pool_factory, context_factory):
pool2 = pool_factory(id=2)
db.session.add_all([pool2, pool1])
db.session.flush()
- with patch('szurubooru.func.pools.serialize_pool'):
- pools.serialize_pool.return_value = 'serialized pool'
+ with patch("szurubooru.func.pools.serialize_pool"):
+ pools.serialize_pool.return_value = "serialized pool"
result = api.pool_api.get_pools(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert result == {
- 'query': '',
- 'offset': 0,
- 'limit': 100,
- 'total': 2,
- 'results': ['serialized pool', 'serialized pool'],
+ "query": "",
+ "offset": 0,
+ "limit": 100,
+ "total": 2,
+ "results": ["serialized pool", "serialized pool"],
}
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.pool_api.get_pools(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_retrieving_single(user_factory, pool_factory, context_factory):
db.session.add(pool_factory(id=1))
db.session.flush()
- with patch('szurubooru.func.pools.serialize_pool'):
- pools.serialize_pool.return_value = 'serialized pool'
+ with patch("szurubooru.func.pools.serialize_pool"):
+ pools.serialize_pool.return_value = "serialized pool"
result = api.pool_api.get_pool(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 1})
- assert result == 'serialized pool'
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"pool_id": 1},
+ )
+ assert result == "serialized pool"
def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(pools.PoolNotFoundError):
api.pool_api.get_pool(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 1})
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"pool_id": 1},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.pool_api.get_pool(
- context_factory(
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'pool_id': 1})
+ context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
+ {"pool_id": 1},
+ )
diff --git a/server/szurubooru/tests/api/test_pool_updating.py b/server/szurubooru/tests/api/test_pool_updating.py
index bd6b71c1..aa2c09a0 100644
--- a/server/szurubooru/tests/api/test_pool_updating.py
+++ b/server/szurubooru/tests/api/test_pool_updating.py
@@ -1,131 +1,150 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import pools, posts, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'pools:create': model.User.RANK_REGULAR,
- 'pools:edit:names': model.User.RANK_REGULAR,
- 'pools:edit:category': model.User.RANK_REGULAR,
- 'pools:edit:description': model.User.RANK_REGULAR,
- 'pools:edit:posts': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "pools:create": model.User.RANK_REGULAR,
+ "pools:edit:names": model.User.RANK_REGULAR,
+ "pools:edit:category": model.User.RANK_REGULAR,
+ "pools:edit:description": model.User.RANK_REGULAR,
+ "pools:edit:posts": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_simple_updating(user_factory, pool_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- pool = pool_factory(id=1, names=['pool1', 'pool2'])
+ pool = pool_factory(id=1, names=["pool1", "pool2"])
db.session.add(pool)
db.session.commit()
- with patch('szurubooru.func.pools.create_pool'), \
- patch('szurubooru.func.posts.get_posts_by_ids'), \
- patch('szurubooru.func.pools.update_pool_names'), \
- patch('szurubooru.func.pools.update_pool_category_name'), \
- patch('szurubooru.func.pools.update_pool_description'), \
- patch('szurubooru.func.pools.update_pool_posts'), \
- patch('szurubooru.func.pools.serialize_pool'), \
- patch('szurubooru.func.snapshots.modify'):
+ with patch("szurubooru.func.pools.create_pool"), patch(
+ "szurubooru.func.posts.get_posts_by_ids"
+ ), patch("szurubooru.func.pools.update_pool_names"), patch(
+ "szurubooru.func.pools.update_pool_category_name"
+ ), patch(
+ "szurubooru.func.pools.update_pool_description"
+ ), patch(
+ "szurubooru.func.pools.update_pool_posts"
+ ), patch(
+ "szurubooru.func.pools.serialize_pool"
+ ), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
posts.get_posts_by_ids.return_value = ([], [])
- pools.serialize_pool.return_value = 'serialized pool'
+ pools.serialize_pool.return_value = "serialized pool"
result = api.pool_api.update_pool(
context_factory(
params={
- 'version': 1,
- 'names': ['pool3'],
- 'category': 'series',
- 'description': 'desc',
- 'posts': [1, 2]
+ "version": 1,
+ "names": ["pool3"],
+ "category": "series",
+ "description": "desc",
+ "posts": [1, 2],
},
- user=auth_user),
- {'pool_id': 1})
- assert result == 'serialized pool'
+ user=auth_user,
+ ),
+ {"pool_id": 1},
+ )
+ assert result == "serialized pool"
pools.create_pool.assert_not_called()
- pools.update_pool_names.assert_called_once_with(pool, ['pool3'])
- pools.update_pool_category_name.assert_called_once_with(pool, 'series')
- pools.update_pool_description.assert_called_once_with(pool, 'desc')
+ pools.update_pool_names.assert_called_once_with(pool, ["pool3"])
+ pools.update_pool_category_name.assert_called_once_with(pool, "series")
+ pools.update_pool_description.assert_called_once_with(pool, "desc")
pools.update_pool_posts.assert_called_once_with(pool, [1, 2])
pools.serialize_pool.assert_called_once_with(pool, options=[])
snapshots.modify.assert_called_once_with(pool, auth_user)
@pytest.mark.parametrize(
- 'field', [
- 'names',
- 'category',
- 'description',
- 'posts',
- ])
+ "field", ["names", "category", "description", "posts",]
+)
def test_omitting_optional_field(
- user_factory, pool_factory, context_factory, field):
+ user_factory, pool_factory, context_factory, field
+):
db.session.add(pool_factory(id=1))
db.session.commit()
params = {
- 'names': ['pool1', 'pool2'],
- 'category': 'default',
- 'description': 'desc',
- 'posts': [],
+ "names": ["pool1", "pool2"],
+ "category": "default",
+ "description": "desc",
+ "posts": [],
}
del params[field]
- with patch('szurubooru.func.pools.create_pool'), \
- patch('szurubooru.func.pools.update_pool_names'), \
- patch('szurubooru.func.pools.update_pool_category_name'), \
- patch('szurubooru.func.pools.serialize_pool'):
+ with patch("szurubooru.func.pools.create_pool"), patch(
+ "szurubooru.func.pools.update_pool_names"
+ ), patch("szurubooru.func.pools.update_pool_category_name"), patch(
+ "szurubooru.func.pools.serialize_pool"
+ ):
api.pool_api.update_pool(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 1})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"pool_id": 1},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(pools.PoolNotFoundError):
api.pool_api.update_pool(
context_factory(
- params={'names': ['dummy']},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 9999})
+ params={"names": ["dummy"]},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"pool_id": 9999},
+ )
-@pytest.mark.parametrize('params', [
- {'names': ['whatever']},
- {'category': 'whatever'},
- {'posts': [1]},
-])
+@pytest.mark.parametrize(
+ "params",
+ [{"names": ["whatever"]}, {"category": "whatever"}, {"posts": [1]},],
+)
def test_trying_to_update_without_privileges(
- user_factory, pool_factory, context_factory, params):
+ user_factory, pool_factory, context_factory, params
+):
db.session.add(pool_factory(id=1))
db.session.commit()
with pytest.raises(errors.AuthError):
api.pool_api.update_pool(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'pool_id': 1})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"pool_id": 1},
+ )
def test_trying_to_create_pools_without_privileges(
- config_injector, context_factory, pool_factory, user_factory):
+ config_injector, context_factory, pool_factory, user_factory
+):
pool = pool_factory(id=1)
db.session.add(pool)
db.session.commit()
config_injector(
{
- 'privileges': {
- 'pools:create': model.User.RANK_ADMINISTRATOR,
- 'pools:edit:posts': model.User.RANK_REGULAR,
+ "privileges": {
+ "pools:create": model.User.RANK_ADMINISTRATOR,
+ "pools:edit:posts": model.User.RANK_REGULAR,
},
- 'delete_source_files': False,
- })
- with patch('szurubooru.func.posts.get_posts_by_ids'):
- posts.get_posts_by_ids.return_value = ([], ['new-post'])
+ "delete_source_files": False,
+ }
+ )
+ with patch("szurubooru.func.posts.get_posts_by_ids"):
+ posts.get_posts_by_ids.return_value = ([], ["new-post"])
with pytest.raises(errors.AuthError):
api.pool_api.create_pool(
context_factory(
- params={'posts': [1, 2], 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'pool_id': 1})
+ params={"posts": [1, 2], "version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"pool_id": 1},
+ )
diff --git a/server/szurubooru/tests/api/test_post_creating.py b/server/szurubooru/tests/api/test_post_creating.py
index dc4d51c7..4e05cf11 100644
--- a/server/szurubooru/tests/api/test_post_creating.py
+++ b/server/szurubooru/tests/api/test_post_creating.py
@@ -1,67 +1,79 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import posts, tags, snapshots, net
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import net, posts, snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'posts:create:anonymous': model.User.RANK_REGULAR,
- 'posts:create:identified': model.User.RANK_REGULAR,
- 'tags:create': model.User.RANK_REGULAR,
- 'uploads:use_downloader': model.User.RANK_REGULAR,
- },
- 'allow_broken_uploads': False,
- })
+ config_injector(
+ {
+ "privileges": {
+ "posts:create:anonymous": model.User.RANK_REGULAR,
+ "posts:create:identified": model.User.RANK_REGULAR,
+ "tags:create": model.User.RANK_REGULAR,
+ "uploads:use_downloader": model.User.RANK_REGULAR,
+ },
+ "allow_broken_uploads": False,
+ }
+ )
-def test_creating_minimal_posts(
- context_factory, post_factory, user_factory):
+def test_creating_minimal_posts(context_factory, post_factory, user_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_safety'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.posts.update_post_relations'), \
- patch('szurubooru.func.posts.update_post_notes'), \
- patch('szurubooru.func.posts.update_post_flags'), \
- patch('szurubooru.func.posts.update_post_thumbnail'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_safety"
+ ), patch("szurubooru.func.posts.update_post_source"), patch(
+ "szurubooru.func.posts.update_post_relations"
+ ), patch(
+ "szurubooru.func.posts.update_post_notes"
+ ), patch(
+ "szurubooru.func.posts.update_post_flags"
+ ), patch(
+ "szurubooru.func.posts.update_post_thumbnail"
+ ), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch(
+ "szurubooru.func.snapshots.create"
+ ):
posts.create_post.return_value = (post, [])
- posts.serialize_post.return_value = 'serialized post'
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.create_post(
context_factory(
- params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- },
+ params={"safety": "safe", "tags": ["tag1", "tag2"],},
files={
- 'content': 'post-content',
- 'thumbnail': 'post-thumbnail',
+ "content": "post-content",
+ "thumbnail": "post-thumbnail",
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
- assert result == 'serialized post'
+ assert result == "serialized post"
posts.create_post.assert_called_once_with(
- 'post-content', ['tag1', 'tag2'], auth_user)
+ "post-content", ["tag1", "tag2"], auth_user
+ )
posts.update_post_thumbnail.assert_called_once_with(
- post, 'post-thumbnail')
- posts.update_post_safety.assert_called_once_with(post, 'safe')
- posts.update_post_source.assert_called_once_with(post, '')
+ post, "post-thumbnail"
+ )
+ posts.update_post_safety.assert_called_once_with(post, "safe")
+ posts.update_post_source.assert_called_once_with(post, "")
posts.update_post_relations.assert_called_once_with(post, [])
posts.update_post_notes.assert_called_once_with(post, [])
posts.update_post_flags.assert_called_once_with(post, [])
posts.update_post_thumbnail.assert_called_once_with(
- post, 'post-thumbnail')
+ post, "post-thumbnail"
+ )
posts.serialize_post.assert_called_once_with(
- post, auth_user, options=[])
+ post, auth_user, options=[]
+ )
snapshots.create.assert_called_once_with(post, auth_user)
@@ -71,241 +83,289 @@ def test_creating_full_posts(context_factory, post_factory, user_factory):
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_safety'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.posts.update_post_relations'), \
- patch('szurubooru.func.posts.update_post_notes'), \
- patch('szurubooru.func.posts.update_post_flags'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_safety"
+ ), patch("szurubooru.func.posts.update_post_source"), patch(
+ "szurubooru.func.posts.update_post_relations"
+ ), patch(
+ "szurubooru.func.posts.update_post_notes"
+ ), patch(
+ "szurubooru.func.posts.update_post_flags"
+ ), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch(
+ "szurubooru.func.snapshots.create"
+ ):
posts.create_post.return_value = (post, [])
- posts.serialize_post.return_value = 'serialized post'
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.create_post(
context_factory(
params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'relations': [1, 2],
- 'source': 'source',
- 'notes': ['note1', 'note2'],
- 'flags': ['flag1', 'flag2'],
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "relations": [1, 2],
+ "source": "source",
+ "notes": ["note1", "note2"],
+ "flags": ["flag1", "flag2"],
},
- files={
- 'content': 'post-content',
- },
- user=auth_user))
+ files={"content": "post-content",},
+ user=auth_user,
+ )
+ )
- assert result == 'serialized post'
+ assert result == "serialized post"
posts.create_post.assert_called_once_with(
- 'post-content', ['tag1', 'tag2'], auth_user)
- posts.update_post_safety.assert_called_once_with(post, 'safe')
- posts.update_post_source.assert_called_once_with(post, 'source')
+ "post-content", ["tag1", "tag2"], auth_user
+ )
+ posts.update_post_safety.assert_called_once_with(post, "safe")
+ posts.update_post_source.assert_called_once_with(post, "source")
posts.update_post_relations.assert_called_once_with(post, [1, 2])
posts.update_post_notes.assert_called_once_with(
- post, ['note1', 'note2'])
+ post, ["note1", "note2"]
+ )
posts.update_post_flags.assert_called_once_with(
- post, ['flag1', 'flag2'])
+ post, ["flag1", "flag2"]
+ )
posts.serialize_post.assert_called_once_with(
- post, auth_user, options=[])
+ post, auth_user, options=[]
+ )
snapshots.create.assert_called_once_with(post, auth_user)
def test_anonymous_uploads(
- config_injector, context_factory, post_factory, user_factory):
+ config_injector, context_factory, post_factory, user_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_source'):
- config_injector({
- 'privileges': {
- 'posts:create:anonymous': model.User.RANK_REGULAR,
- 'uploads:use_downloader': model.User.RANK_POWER,
- },
- })
+ with patch("szurubooru.func.posts.serialize_post"), patch(
+ "szurubooru.func.posts.create_post"
+ ), patch("szurubooru.func.posts.update_post_source"):
+ config_injector(
+ {
+ "privileges": {
+ "posts:create:anonymous": model.User.RANK_REGULAR,
+ "uploads:use_downloader": model.User.RANK_POWER,
+ },
+ }
+ )
posts.create_post.return_value = [post, []]
api.post_api.create_post(
context_factory(
params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'anonymous': 'True',
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "anonymous": "True",
},
- files={
- 'content': 'post-content',
- },
- user=auth_user))
+ files={"content": "post-content",},
+ user=auth_user,
+ )
+ )
posts.create_post.assert_called_once_with(
- 'post-content', ['tag1', 'tag2'], None)
+ "post-content", ["tag1", "tag2"], None
+ )
def test_creating_from_url_saves_source(
- config_injector, context_factory, post_factory, user_factory):
+ config_injector, context_factory, post_factory, user_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.net.download'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_source'):
- config_injector({
- 'privileges': {
- 'posts:create:identified': model.User.RANK_REGULAR,
- 'uploads:use_downloader': model.User.RANK_POWER,
- },
- })
- net.download.return_value = b'content'
+ with patch("szurubooru.func.net.download"), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_source"
+ ):
+ config_injector(
+ {
+ "privileges": {
+ "posts:create:identified": model.User.RANK_REGULAR,
+ "uploads:use_downloader": model.User.RANK_POWER,
+ },
+ }
+ )
+ net.download.return_value = b"content"
posts.create_post.return_value = [post, []]
api.post_api.create_post(
context_factory(
params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'contentUrl': 'example.com',
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "contentUrl": "example.com",
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
net.download.assert_called_once_with(
- 'example.com', use_video_downloader=False)
+ "example.com", use_video_downloader=False
+ )
posts.create_post.assert_called_once_with(
- b'content', ['tag1', 'tag2'], auth_user)
- posts.update_post_source.assert_called_once_with(post, 'example.com')
+ b"content", ["tag1", "tag2"], auth_user
+ )
+ posts.update_post_source.assert_called_once_with(post, "example.com")
def test_creating_from_url_with_source_specified(
- config_injector, context_factory, post_factory, user_factory):
+ config_injector, context_factory, post_factory, user_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.net.download'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_source'):
- config_injector({
- 'privileges': {
- 'posts:create:identified': model.User.RANK_REGULAR,
- 'uploads:use_downloader': model.User.RANK_REGULAR,
- },
- })
- net.download.return_value = b'content'
+ with patch("szurubooru.func.net.download"), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_source"
+ ):
+ config_injector(
+ {
+ "privileges": {
+ "posts:create:identified": model.User.RANK_REGULAR,
+ "uploads:use_downloader": model.User.RANK_REGULAR,
+ },
+ }
+ )
+ net.download.return_value = b"content"
posts.create_post.return_value = [post, []]
api.post_api.create_post(
context_factory(
params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'contentUrl': 'example.com',
- 'source': 'example2.com',
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "contentUrl": "example.com",
+ "source": "example2.com",
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
net.download.assert_called_once_with(
- 'example.com', use_video_downloader=True)
+ "example.com", use_video_downloader=True
+ )
posts.create_post.assert_called_once_with(
- b'content', ['tag1', 'tag2'], auth_user)
- posts.update_post_source.assert_called_once_with(post, 'example2.com')
+ b"content", ["tag1", "tag2"], auth_user
+ )
+ posts.update_post_source.assert_called_once_with(post, "example2.com")
-@pytest.mark.parametrize('field', ['safety'])
+@pytest.mark.parametrize("field", ["safety"])
def test_trying_to_omit_mandatory_field(context_factory, user_factory, field):
params = {
- 'safety': 'safe',
+ "safety": "safe",
}
del params[field]
with pytest.raises(errors.MissingRequiredParameterError):
api.post_api.create_post(
context_factory(
params=params,
- files={'content': '...'},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ files={"content": "..."},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
@pytest.mark.parametrize(
- 'field', ['tags', 'relations', 'source', 'notes', 'flags'])
+ "field", ["tags", "relations", "source", "notes", "flags"]
+)
def test_omitting_optional_field(
- field, context_factory, post_factory, user_factory):
+ field, context_factory, post_factory, user_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
params = {
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'relations': [1, 2],
- 'source': 'source',
- 'notes': ['note1', 'note2'],
- 'flags': ['flag1', 'flag2'],
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "relations": [1, 2],
+ "source": "source",
+ "notes": ["note1", "note2"],
+ "flags": ["flag1", "flag2"],
}
del params[field]
- with patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_safety'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.posts.update_post_relations'), \
- patch('szurubooru.func.posts.update_post_notes'), \
- patch('szurubooru.func.posts.update_post_flags'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_safety"
+ ), patch("szurubooru.func.posts.update_post_source"), patch(
+ "szurubooru.func.posts.update_post_relations"
+ ), patch(
+ "szurubooru.func.posts.update_post_notes"
+ ), patch(
+ "szurubooru.func.posts.update_post_flags"
+ ), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch(
+ "szurubooru.func.snapshots.create"
+ ):
posts.create_post.return_value = (post, [])
- posts.serialize_post.return_value = 'serialized post'
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.create_post(
context_factory(
params=params,
- files={'content': 'post-content'},
- user=auth_user))
- assert result == 'serialized post'
+ files={"content": "post-content"},
+ user=auth_user,
+ )
+ )
+ assert result == "serialized post"
def test_errors_not_spending_ids(
- config_injector, tmpdir, context_factory, read_asset, user_factory):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'data_url': 'example.com',
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'privileges': {
- 'posts:create:identified': model.User.RANK_REGULAR,
- 'uploads:use_downloader': model.User.RANK_POWER,
- },
- 'secret': 'test',
- })
+ config_injector, tmpdir, context_factory, read_asset, user_factory
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "data_url": "example.com",
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "privileges": {
+ "posts:create:identified": model.User.RANK_REGULAR,
+ "uploads:use_downloader": model.User.RANK_POWER,
+ },
+ "secret": "test",
+ }
+ )
auth_user = user_factory(rank=model.User.RANK_REGULAR)
# successful request
- with patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.update_post_tags'):
+ with patch("szurubooru.func.posts.serialize_post"), patch(
+ "szurubooru.func.posts.update_post_tags"
+ ):
posts.serialize_post.side_effect = lambda post, *_, **__: post.post_id
post1_id = api.post_api.create_post(
context_factory(
- params={'safety': 'safe', 'tags': []},
- files={'content': read_asset('png.png')},
- user=auth_user))
+ params={"safety": "safe", "tags": []},
+ files={"content": read_asset("png.png")},
+ user=auth_user,
+ )
+ )
    # erroneous request (duplicate post)
with pytest.raises(posts.PostAlreadyUploadedError):
api.post_api.create_post(
context_factory(
- params={'safety': 'safe', 'tags': []},
- files={'content': read_asset('png.png')},
- user=auth_user))
+ params={"safety": "safe", "tags": []},
+ files={"content": read_asset("png.png")},
+ user=auth_user,
+ )
+ )
# successful request
- with patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.update_post_tags'):
+ with patch("szurubooru.func.posts.serialize_post"), patch(
+ "szurubooru.func.posts.update_post_tags"
+ ):
posts.serialize_post.side_effect = lambda post, *_, **__: post.post_id
post2_id = api.post_api.create_post(
context_factory(
- params={'safety': 'safe', 'tags': []},
- files={'content': read_asset('jpeg.jpg')},
- user=auth_user))
+ params={"safety": "safe", "tags": []},
+ files={"content": read_asset("jpeg.jpg")},
+ user=auth_user,
+ )
+ )
assert post1_id > 0
assert post2_id > 0
@@ -316,42 +376,45 @@ def test_trying_to_omit_content(context_factory, user_factory):
with pytest.raises(errors.MissingRequiredFileError):
api.post_api.create_post(
context_factory(
- params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- },
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"safety": "safe", "tags": ["tag1", "tag2"],},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
def test_trying_to_create_post_without_privileges(
- context_factory, user_factory):
+ context_factory, user_factory
+):
with pytest.raises(errors.AuthError):
- api.post_api.create_post(context_factory(
- params='whatever',
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ api.post_api.create_post(
+ context_factory(
+ params="whatever",
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_trying_to_create_tags_without_privileges(
- config_injector, context_factory, user_factory):
- config_injector({
- 'privileges': {
- 'posts:create:anonymous': model.User.RANK_REGULAR,
- 'posts:create:identified': model.User.RANK_REGULAR,
- 'tags:create': model.User.RANK_ADMINISTRATOR,
- 'uploads:use_downloader': model.User.RANK_POWER,
- },
- })
- with pytest.raises(errors.AuthError), \
- patch('szurubooru.func.posts.update_post_content'), \
- patch('szurubooru.func.posts.update_post_tags'):
- posts.update_post_tags.return_value = ['new-tag']
+ config_injector, context_factory, user_factory
+):
+ config_injector(
+ {
+ "privileges": {
+ "posts:create:anonymous": model.User.RANK_REGULAR,
+ "posts:create:identified": model.User.RANK_REGULAR,
+ "tags:create": model.User.RANK_ADMINISTRATOR,
+ "uploads:use_downloader": model.User.RANK_POWER,
+ },
+ }
+ )
+ with pytest.raises(errors.AuthError), patch(
+ "szurubooru.func.posts.update_post_content"
+ ), patch("szurubooru.func.posts.update_post_tags"):
+ posts.update_post_tags.return_value = ["new-tag"]
api.post_api.create_post(
context_factory(
- params={
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- },
- files={
- 'content': posts.EMPTY_PIXEL,
- },
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"safety": "safe", "tags": ["tag1", "tag2"],},
+ files={"content": posts.EMPTY_PIXEL,},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_post_deleting.py b/server/szurubooru/tests/api/test_post_deleting.py
index bb5f9ced..fd2cb758 100644
--- a/server/szurubooru/tests/api/test_post_deleting.py
+++ b/server/szurubooru/tests/api/test_post_deleting.py
@@ -1,19 +1,21 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'secret': 'secret',
- 'data_dir': '',
- 'delete_source_files': False,
- 'privileges': {
- 'posts:delete': model.User.RANK_REGULAR
+ config_injector(
+ {
+ "secret": "secret",
+ "data_dir": "",
+ "delete_source_files": False,
+ "privileges": {"posts:delete": model.User.RANK_REGULAR},
}
- })
+ )
def test_deleting(user_factory, post_factory, context_factory):
@@ -21,10 +23,11 @@ def test_deleting(user_factory, post_factory, context_factory):
post = post_factory(id=1)
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.snapshots.delete'):
+ with patch("szurubooru.func.snapshots.delete"):
result = api.post_api.delete_post(
- context_factory(params={'version': 1}, user=auth_user),
- {'post_id': 1})
+ context_factory(params={"version": 1}, user=auth_user),
+ {"post_id": 1},
+ )
assert result == {}
assert db.session.query(model.Post).count() == 0
snapshots.delete.assert_called_once_with(post, auth_user)
@@ -34,15 +37,18 @@ def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.delete_post(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': 999})
+ {"post_id": 999},
+ )
def test_trying_to_delete_without_privileges(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
db.session.add(post_factory(id=1))
db.session.commit()
with pytest.raises(errors.AuthError):
api.post_api.delete_post(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'post_id': 1})
+ {"post_id": 1},
+ )
assert db.session.query(model.Post).count() == 1
diff --git a/server/szurubooru/tests/api/test_post_favoriting.py b/server/szurubooru/tests/api/test_post_favoriting.py
index ce91a028..dc92bd97 100644
--- a/server/szurubooru/tests/api/test_post_favoriting.py
+++ b/server/szurubooru/tests/api/test_post_favoriting.py
@@ -1,29 +1,34 @@
from datetime import datetime
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts
@pytest.fixture(autouse=True)
def inject_config(config_injector):
config_injector(
- {'privileges': {'posts:favorite': model.User.RANK_REGULAR}})
+ {"privileges": {"posts:favorite": model.User.RANK_REGULAR}}
+ )
def test_adding_to_favorites(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
post = post_factory()
db.session.add(post)
db.session.commit()
assert post.score == 0
- with patch('szurubooru.func.posts.serialize_post'), \
- fake_datetime('1997-12-01'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.serialize_post"), fake_datetime(
+ "1997-12-01"
+ ):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.add_post_to_favorites(
- context_factory(user=user_factory()),
- {'post_id': post.post_id})
- assert result == 'serialized post'
+ context_factory(user=user_factory()), {"post_id": post.post_id}
+ )
+ assert result == "serialized post"
post = db.session.query(model.Post).one()
assert db.session.query(model.PostFavorite).count() == 1
assert post is not None
@@ -32,22 +37,23 @@ def test_adding_to_favorites(
def test_removing_from_favorites(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
assert post.score == 0
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.add_post_to_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
+ context_factory(user=user), {"post_id": post.post_id}
+ )
assert post.score == 1
- with fake_datetime('1997-12-02'):
+ with fake_datetime("1997-12-02"):
api.post_api.delete_post_from_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
+ context_factory(user=user), {"post_id": post.post_id}
+ )
post = db.session.query(model.Post).one()
assert post.score == 1
assert db.session.query(model.PostFavorite).count() == 0
@@ -55,65 +61,68 @@ def test_removing_from_favorites(
def test_favoriting_twice(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.add_post_to_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(user=user), {"post_id": post.post_id}
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.add_post_to_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
+ context_factory(user=user), {"post_id": post.post_id}
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostFavorite).count() == 1
assert post.favorite_count == 1
def test_removing_twice(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.add_post_to_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(user=user), {"post_id": post.post_id}
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.delete_post_from_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(user=user), {"post_id": post.post_id}
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.delete_post_from_favorites(
- context_factory(user=user),
- {'post_id': post.post_id})
+ context_factory(user=user), {"post_id": post.post_id}
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostFavorite).count() == 0
assert post.favorite_count == 0
def test_favorites_from_multiple_users(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user1 = user_factory()
user2 = user_factory()
post = post_factory()
db.session.add_all([user1, user2, post])
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.add_post_to_favorites(
- context_factory(user=user1),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(user=user1), {"post_id": post.post_id}
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.add_post_to_favorites(
- context_factory(user=user2),
- {'post_id': post.post_id})
+ context_factory(user=user2), {"post_id": post.post_id}
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostFavorite).count() == 2
assert post.favorite_count == 2
@@ -123,16 +132,18 @@ def test_favorites_from_multiple_users(
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.add_post_to_favorites(
- context_factory(user=user_factory()),
- {'post_id': 5})
+ context_factory(user=user_factory()), {"post_id": 5}
+ )
def test_trying_to_rate_without_privileges(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
post = post_factory()
db.session.add(post)
db.session.commit()
with pytest.raises(errors.AuthError):
api.post_api.add_post_to_favorites(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'post_id': post.post_id})
+ {"post_id": post.post_id},
+ )
diff --git a/server/szurubooru/tests/api/test_post_featuring.py b/server/szurubooru/tests/api/test_post_featuring.py
index 6e9e7569..e1f8cef8 100644
--- a/server/szurubooru/tests/api/test_post_featuring.py
+++ b/server/szurubooru/tests/api/test_post_featuring.py
@@ -1,18 +1,22 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'posts:feature': model.User.RANK_REGULAR,
- 'posts:view': model.User.RANK_REGULAR,
- 'posts:view:featured': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "posts:feature": model.User.RANK_REGULAR,
+ "posts:view": model.User.RANK_REGULAR,
+ "posts:view:featured": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_featuring(user_factory, post_factory, context_factory):
@@ -21,64 +25,76 @@ def test_featuring(user_factory, post_factory, context_factory):
db.session.add(post)
db.session.flush()
assert not posts.get_post_by_id(1).is_featured
- with patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.snapshots.modify'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.serialize_post"), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.set_featured_post(
- context_factory(params={'id': 1}, user=auth_user))
- assert result == 'serialized post'
+ context_factory(params={"id": 1}, user=auth_user)
+ )
+ assert result == "serialized post"
assert posts.try_get_featured_post() is not None
assert posts.try_get_featured_post().post_id == 1
assert posts.get_post_by_id(1).is_featured
result = api.post_api.get_featured_post(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)))
- assert result == 'serialized post'
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR))
+ )
+ assert result == "serialized post"
snapshots.modify.assert_called_once_with(post, auth_user)
def test_trying_to_omit_required_parameter(user_factory, context_factory):
with pytest.raises(errors.MissingRequiredParameterError):
api.post_api.set_featured_post(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR))
+ )
def test_trying_to_feature_the_same_post_twice(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
db.session.add(post_factory(id=1))
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
+ with patch("szurubooru.func.posts.serialize_post"):
api.post_api.set_featured_post(
context_factory(
- params={'id': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"id": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
with pytest.raises(posts.PostAlreadyFeaturedError):
api.post_api.set_featured_post(
context_factory(
- params={'id': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"id": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
def test_featuring_one_post_after_another(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
db.session.add(post_factory(id=1))
db.session.add(post_factory(id=2))
db.session.commit()
assert posts.try_get_featured_post() is None
assert not posts.get_post_by_id(1).is_featured
assert not posts.get_post_by_id(2).is_featured
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997"):
api.post_api.set_featured_post(
context_factory(
- params={'id': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
- with fake_datetime('1998'):
+ params={"id": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
+ with fake_datetime("1998"):
api.post_api.set_featured_post(
context_factory(
- params={'id': 2},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"id": 2},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert posts.try_get_featured_post() is not None
assert posts.try_get_featured_post().post_id == 2
assert not posts.get_post_by_id(1).is_featured
@@ -89,18 +105,21 @@ def test_trying_to_feature_non_existing(user_factory, context_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.set_featured_post(
context_factory(
- params={'id': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"id": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
-def test_trying_to_retrieve_without_privileges(
- user_factory, context_factory):
+def test_trying_to_retrieve_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.post_api.get_featured_post(
- context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS))
+ )
def test_trying_to_feature_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.post_api.set_featured_post(
- context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS))
+ )
diff --git a/server/szurubooru/tests/api/test_post_merging.py b/server/szurubooru/tests/api/test_post_merging.py
index eb8464f8..cdcb0af0 100644
--- a/server/szurubooru/tests/api/test_post_merging.py
+++ b/server/szurubooru/tests/api/test_post_merging.py
@@ -1,12 +1,14 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts, snapshots
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'posts:merge': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"posts:merge": model.User.RANK_REGULAR}})
def test_merging(user_factory, context_factory, post_factory):
@@ -15,66 +17,78 @@ def test_merging(user_factory, context_factory, post_factory):
target_post = post_factory()
db.session.add_all([source_post, target_post])
db.session.flush()
- with patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.merge_posts'), \
- patch('szurubooru.func.snapshots.merge'):
+ with patch("szurubooru.func.posts.serialize_post"), patch(
+ "szurubooru.func.posts.merge_posts"
+ ), patch("szurubooru.func.snapshots.merge"):
api.post_api.merge_posts(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': source_post.post_id,
- 'mergeTo': target_post.post_id,
- 'replaceContent': False,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": source_post.post_id,
+ "mergeTo": target_post.post_id,
+ "replaceContent": False,
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
posts.merge_posts.called_once_with(source_post, target_post)
snapshots.merge.assert_called_once_with(
- source_post, target_post, auth_user)
+ source_post, target_post, auth_user
+ )
@pytest.mark.parametrize(
- 'field', ['remove', 'mergeTo', 'removeVersion', 'mergeToVersion'])
+ "field", ["remove", "mergeTo", "removeVersion", "mergeToVersion"]
+)
def test_trying_to_omit_mandatory_field(
- user_factory, post_factory, context_factory, field):
+ user_factory, post_factory, context_factory, field
+):
source_post = post_factory()
target_post = post_factory()
db.session.add_all([source_post, target_post])
db.session.commit()
params = {
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': source_post.post_id,
- 'mergeTo': target_post.post_id,
- 'replaceContent': False,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": source_post.post_id,
+ "mergeTo": target_post.post_id,
+ "replaceContent": False,
}
del params[field]
with pytest.raises(errors.ValidationError):
api.post_api.merge_posts(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_merge_non_existing(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
post = post_factory()
db.session.add(post)
db.session.commit()
with pytest.raises(posts.PostNotFoundError):
api.post_api.merge_posts(
context_factory(
- params={'remove': post.post_id, 'mergeTo': 999},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": post.post_id, "mergeTo": 999},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
with pytest.raises(posts.PostNotFoundError):
api.post_api.merge_posts(
context_factory(
- params={'remove': 999, 'mergeTo': post.post_id},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": 999, "mergeTo": post.post_id},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
def test_trying_to_merge_without_privileges(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
source_post = post_factory()
target_post = post_factory()
db.session.add_all([source_post, target_post])
@@ -83,10 +97,12 @@ def test_trying_to_merge_without_privileges(
api.post_api.merge_posts(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': source_post.post_id,
- 'mergeTo': target_post.post_id,
- 'replaceContent': False,
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": source_post.post_id,
+ "mergeTo": target_post.post_id,
+ "replaceContent": False,
},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_post_rating.py b/server/szurubooru/tests/api/test_post_rating.py
index 0fca2f56..2db17db8 100644
--- a/server/szurubooru/tests/api/test_post_rating.py
+++ b/server/szurubooru/tests/api/test_post_rating.py
@@ -1,27 +1,31 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'posts:score': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"posts:score": model.User.RANK_REGULAR}})
def test_simple_rating(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'), \
- fake_datetime('1997-12-01'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.serialize_post"), fake_datetime(
+ "1997-12-01"
+ ):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.set_post_score(
- context_factory(
- params={'score': 1}, user=user_factory()),
- {'post_id': post.post_id})
- assert result == 'serialized post'
+ context_factory(params={"score": 1}, user=user_factory()),
+ {"post_id": post.post_id},
+ )
+ assert result == "serialized post"
post = db.session.query(model.Post).one()
assert db.session.query(model.PostScore).count() == 1
assert post is not None
@@ -29,112 +33,129 @@ def test_simple_rating(
def test_updating_rating(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.set_post_score(
- context_factory(params={'score': 1}, user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"post_id": post.post_id},
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.set_post_score(
- context_factory(params={'score': -1}, user=user),
- {'post_id': post.post_id})
+ context_factory(params={"score": -1}, user=user),
+ {"post_id": post.post_id},
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostScore).count() == 1
assert post.score == -1
def test_updating_rating_to_zero(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.set_post_score(
- context_factory(params={'score': 1}, user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"post_id": post.post_id},
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.set_post_score(
- context_factory(params={'score': 0}, user=user),
- {'post_id': post.post_id})
+ context_factory(params={"score": 0}, user=user),
+ {"post_id": post.post_id},
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostScore).count() == 0
assert post.score == 0
def test_deleting_rating(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user = user_factory()
post = post_factory()
db.session.add(post)
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.set_post_score(
- context_factory(params={'score': 1}, user=user),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user),
+ {"post_id": post.post_id},
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.delete_post_score(
- context_factory(user=user),
- {'post_id': post.post_id})
+ context_factory(user=user), {"post_id": post.post_id}
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostScore).count() == 0
assert post.score == 0
def test_ratings_from_multiple_users(
- user_factory, post_factory, context_factory, fake_datetime):
+ user_factory, post_factory, context_factory, fake_datetime
+):
user1 = user_factory()
user2 = user_factory()
post = post_factory()
db.session.add_all([user1, user2, post])
db.session.commit()
- with patch('szurubooru.func.posts.serialize_post'):
- with fake_datetime('1997-12-01'):
+ with patch("szurubooru.func.posts.serialize_post"):
+ with fake_datetime("1997-12-01"):
api.post_api.set_post_score(
- context_factory(params={'score': 1}, user=user1),
- {'post_id': post.post_id})
- with fake_datetime('1997-12-02'):
+ context_factory(params={"score": 1}, user=user1),
+ {"post_id": post.post_id},
+ )
+ with fake_datetime("1997-12-02"):
api.post_api.set_post_score(
- context_factory(params={'score': -1}, user=user2),
- {'post_id': post.post_id})
+ context_factory(params={"score": -1}, user=user2),
+ {"post_id": post.post_id},
+ )
post = db.session.query(model.Post).one()
assert db.session.query(model.PostScore).count() == 2
assert post.score == 0
def test_trying_to_omit_mandatory_field(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
post = post_factory()
db.session.add(post)
db.session.commit()
with pytest.raises(errors.ValidationError):
api.post_api.set_post_score(
context_factory(params={}, user=user_factory()),
- {'post_id': post.post_id})
+ {"post_id": post.post_id},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.set_post_score(
- context_factory(params={'score': 1}, user=user_factory()),
- {'post_id': 5})
+ context_factory(params={"score": 1}, user=user_factory()),
+ {"post_id": 5},
+ )
def test_trying_to_rate_without_privileges(
- user_factory, post_factory, context_factory):
+ user_factory, post_factory, context_factory
+):
post = post_factory()
db.session.add(post)
db.session.commit()
with pytest.raises(errors.AuthError):
api.post_api.set_post_score(
context_factory(
- params={'score': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'post_id': post.post_id})
+ params={"score": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"post_id": post.post_id},
+ )
diff --git a/server/szurubooru/tests/api/test_post_retrieving.py b/server/szurubooru/tests/api/test_post_retrieving.py
index 7270547e..8f2c371b 100644
--- a/server/szurubooru/tests/api/test_post_retrieving.py
+++ b/server/szurubooru/tests/api/test_post_retrieving.py
@@ -1,18 +1,22 @@
from datetime import datetime
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import posts
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'posts:list': model.User.RANK_REGULAR,
- 'posts:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "posts:list": model.User.RANK_REGULAR,
+ "posts:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(user_factory, post_factory, context_factory):
@@ -20,18 +24,20 @@ def test_retrieving_multiple(user_factory, post_factory, context_factory):
post2 = post_factory(id=2)
db.session.add_all([post1, post2])
db.session.flush()
- with patch('szurubooru.func.posts.serialize_post'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.serialize_post"):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.get_posts(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert result == {
- 'query': '',
- 'offset': 0,
- 'limit': 100,
- 'total': 2,
- 'results': ['serialized post', 'serialized post'],
+ "query": "",
+ "offset": 0,
+ "limit": 100,
+ "total": 2,
+ "results": ["serialized post", "serialized post"],
}
@@ -39,68 +45,81 @@ def test_using_special_tokens(user_factory, post_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post1 = post_factory(id=1)
post2 = post_factory(id=2)
- post1.favorited_by = [model.PostFavorite(
- user=auth_user, time=datetime.utcnow())]
+ post1.favorited_by = [
+ model.PostFavorite(user=auth_user, time=datetime.utcnow())
+ ]
db.session.add_all([post1, post2, auth_user])
db.session.flush()
- with patch('szurubooru.func.posts.serialize_post'):
- posts.serialize_post.side_effect = lambda post, *_args, **_kwargs: \
- 'serialized post %d' % post.post_id
+ with patch("szurubooru.func.posts.serialize_post"):
+ posts.serialize_post.side_effect = (
+ lambda post, *_args, **_kwargs: "serialized post %d" % post.post_id
+ )
result = api.post_api.get_posts(
context_factory(
- params={'query': 'special:fav', 'offset': 0},
- user=auth_user))
+ params={"query": "special:fav", "offset": 0}, user=auth_user
+ )
+ )
assert result == {
- 'query': 'special:fav',
- 'offset': 0,
- 'limit': 100,
- 'total': 1,
- 'results': ['serialized post 1'],
+ "query": "special:fav",
+ "offset": 0,
+ "limit": 100,
+ "total": 1,
+ "results": ["serialized post 1"],
}
def test_trying_to_use_special_tokens_without_logging_in(
- user_factory, context_factory, config_injector):
- config_injector({
- 'privileges': {'posts:list': 'anonymous'},
- })
+ user_factory, context_factory, config_injector
+):
+ config_injector(
+ {"privileges": {"posts:list": "anonymous"},}
+ )
with pytest.raises(errors.SearchError):
api.post_api.get_posts(
context_factory(
- params={'query': 'special:fav', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "special:fav", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.post_api.get_posts(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_retrieving_single(user_factory, post_factory, context_factory):
db.session.add(post_factory(id=1))
db.session.flush()
- with patch('szurubooru.func.posts.serialize_post'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.serialize_post"):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.get_post(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': 1})
- assert result == 'serialized post'
+ {"post_id": 1},
+ )
+ assert result == "serialized post"
def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.get_post(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': 999})
+ {"post_id": 999},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.post_api.get_post(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'post_id': 999})
+ {"post_id": 999},
+ )
diff --git a/server/szurubooru/tests/api/test_post_updating.py b/server/szurubooru/tests/api/test_post_updating.py
index 887f54fb..e4a606d2 100644
--- a/server/szurubooru/tests/api/test_post_updating.py
+++ b/server/szurubooru/tests/api/test_post_updating.py
@@ -1,177 +1,222 @@
from datetime import datetime
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import posts, tags, snapshots, net
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import net, posts, snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'posts:edit:tags': model.User.RANK_REGULAR,
- 'posts:edit:content': model.User.RANK_REGULAR,
- 'posts:edit:safety': model.User.RANK_REGULAR,
- 'posts:edit:source': model.User.RANK_REGULAR,
- 'posts:edit:relations': model.User.RANK_REGULAR,
- 'posts:edit:notes': model.User.RANK_REGULAR,
- 'posts:edit:flags': model.User.RANK_REGULAR,
- 'posts:edit:thumbnail': model.User.RANK_REGULAR,
- 'tags:create': model.User.RANK_MODERATOR,
- 'uploads:use_downloader': model.User.RANK_REGULAR,
- },
- 'allow_broken_uploads': False,
- })
+ config_injector(
+ {
+ "privileges": {
+ "posts:edit:tags": model.User.RANK_REGULAR,
+ "posts:edit:content": model.User.RANK_REGULAR,
+ "posts:edit:safety": model.User.RANK_REGULAR,
+ "posts:edit:source": model.User.RANK_REGULAR,
+ "posts:edit:relations": model.User.RANK_REGULAR,
+ "posts:edit:notes": model.User.RANK_REGULAR,
+ "posts:edit:flags": model.User.RANK_REGULAR,
+ "posts:edit:thumbnail": model.User.RANK_REGULAR,
+ "tags:create": model.User.RANK_MODERATOR,
+ "uploads:use_downloader": model.User.RANK_REGULAR,
+ },
+ "allow_broken_uploads": False,
+ }
+ )
def test_post_updating(
- context_factory, post_factory, user_factory, fake_datetime):
+ context_factory, post_factory, user_factory, fake_datetime
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.posts.create_post'), \
- patch('szurubooru.func.posts.update_post_tags'), \
- patch('szurubooru.func.posts.update_post_content'), \
- patch('szurubooru.func.posts.update_post_thumbnail'), \
- patch('szurubooru.func.posts.update_post_safety'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.posts.update_post_relations'), \
- patch('szurubooru.func.posts.update_post_notes'), \
- patch('szurubooru.func.posts.update_post_flags'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.snapshots.modify'), \
- fake_datetime('1997-01-01'):
- posts.serialize_post.return_value = 'serialized post'
+ with patch("szurubooru.func.posts.create_post"), patch(
+ "szurubooru.func.posts.update_post_tags"
+ ), patch("szurubooru.func.posts.update_post_content"), patch(
+ "szurubooru.func.posts.update_post_thumbnail"
+ ), patch(
+ "szurubooru.func.posts.update_post_safety"
+ ), patch(
+ "szurubooru.func.posts.update_post_source"
+ ), patch(
+ "szurubooru.func.posts.update_post_relations"
+ ), patch(
+ "szurubooru.func.posts.update_post_notes"
+ ), patch(
+ "szurubooru.func.posts.update_post_flags"
+ ), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch(
+ "szurubooru.func.snapshots.modify"
+ ), fake_datetime(
+ "1997-01-01"
+ ):
+ posts.serialize_post.return_value = "serialized post"
result = api.post_api.update_post(
context_factory(
params={
- 'version': 1,
- 'safety': 'safe',
- 'tags': ['tag1', 'tag2'],
- 'relations': [1, 2],
- 'source': 'source',
- 'notes': ['note1', 'note2'],
- 'flags': ['flag1', 'flag2'],
+ "version": 1,
+ "safety": "safe",
+ "tags": ["tag1", "tag2"],
+ "relations": [1, 2],
+ "source": "source",
+ "notes": ["note1", "note2"],
+ "flags": ["flag1", "flag2"],
},
files={
- 'content': 'post-content',
- 'thumbnail': 'post-thumbnail',
+ "content": "post-content",
+ "thumbnail": "post-thumbnail",
},
- user=auth_user),
- {'post_id': post.post_id})
+ user=auth_user,
+ ),
+ {"post_id": post.post_id},
+ )
- assert result == 'serialized post'
+ assert result == "serialized post"
posts.create_post.assert_not_called()
- posts.update_post_tags.assert_called_once_with(post, ['tag1', 'tag2'])
- posts.update_post_content.assert_called_once_with(post, 'post-content')
+ posts.update_post_tags.assert_called_once_with(post, ["tag1", "tag2"])
+ posts.update_post_content.assert_called_once_with(post, "post-content")
posts.update_post_thumbnail.assert_called_once_with(
- post, 'post-thumbnail')
- posts.update_post_safety.assert_called_once_with(post, 'safe')
- posts.update_post_source.assert_called_once_with(post, 'source')
+ post, "post-thumbnail"
+ )
+ posts.update_post_safety.assert_called_once_with(post, "safe")
+ posts.update_post_source.assert_called_once_with(post, "source")
posts.update_post_relations.assert_called_once_with(post, [1, 2])
posts.update_post_notes.assert_called_once_with(
- post, ['note1', 'note2'])
+ post, ["note1", "note2"]
+ )
posts.update_post_flags.assert_called_once_with(
- post, ['flag1', 'flag2'])
+ post, ["flag1", "flag2"]
+ )
posts.serialize_post.assert_called_once_with(
- post, auth_user, options=[])
+ post, auth_user, options=[]
+ )
snapshots.modify.assert_called_once_with(post, auth_user)
assert post.last_edit_time == datetime(1997, 1, 1)
def test_uploading_from_url_saves_source(
- context_factory, post_factory, user_factory):
+ context_factory, post_factory, user_factory
+):
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.net.download'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.update_post_content'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.snapshots.modify'):
- net.download.return_value = b'content'
+ with patch("szurubooru.func.net.download"), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch("szurubooru.func.posts.update_post_content"), patch(
+ "szurubooru.func.posts.update_post_source"
+ ), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
+ net.download.return_value = b"content"
api.post_api.update_post(
context_factory(
- params={'contentUrl': 'example.com', 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': post.post_id})
+ params={"contentUrl": "example.com", "version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"post_id": post.post_id},
+ )
net.download.assert_called_once_with(
- 'example.com', use_video_downloader=True)
- posts.update_post_content.assert_called_once_with(post, b'content')
- posts.update_post_source.assert_called_once_with(post, 'example.com')
+ "example.com", use_video_downloader=True
+ )
+ posts.update_post_content.assert_called_once_with(post, b"content")
+ posts.update_post_source.assert_called_once_with(post, "example.com")
def test_uploading_from_url_with_source_specified(
- context_factory, post_factory, user_factory):
+ context_factory, post_factory, user_factory
+):
post = post_factory()
db.session.add(post)
db.session.flush()
- with patch('szurubooru.func.net.download'), \
- patch('szurubooru.func.posts.serialize_post'), \
- patch('szurubooru.func.posts.update_post_content'), \
- patch('szurubooru.func.posts.update_post_source'), \
- patch('szurubooru.func.snapshots.modify'):
- net.download.return_value = b'content'
+ with patch("szurubooru.func.net.download"), patch(
+ "szurubooru.func.posts.serialize_post"
+ ), patch("szurubooru.func.posts.update_post_content"), patch(
+ "szurubooru.func.posts.update_post_source"
+ ), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
+ net.download.return_value = b"content"
api.post_api.update_post(
context_factory(
params={
- 'contentUrl': 'example.com',
- 'source': 'example2.com',
- 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': post.post_id})
+ "contentUrl": "example.com",
+ "source": "example2.com",
+ "version": 1,
+ },
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"post_id": post.post_id},
+ )
net.download.assert_called_once_with(
- 'example.com', use_video_downloader=True)
- posts.update_post_content.assert_called_once_with(post, b'content')
- posts.update_post_source.assert_called_once_with(post, 'example2.com')
+ "example.com", use_video_downloader=True
+ )
+ posts.update_post_content.assert_called_once_with(post, b"content")
+ posts.update_post_source.assert_called_once_with(post, "example2.com")
def test_trying_to_update_non_existing(context_factory, user_factory):
with pytest.raises(posts.PostNotFoundError):
api.post_api.update_post(
context_factory(
- params='whatever',
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': 1})
+ params="whatever",
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"post_id": 1},
+ )
-@pytest.mark.parametrize('files,params', [
- ({}, {'tags': '...'}),
- ({}, {'safety': '...'}),
- ({}, {'source': '...'}),
- ({}, {'relations': '...'}),
- ({}, {'notes': '...'}),
- ({}, {'flags': '...'}),
- ({'content': '...'}, {}),
- ({'thumbnail': '...'}, {}),
-])
+@pytest.mark.parametrize(
+ "files,params",
+ [
+ ({}, {"tags": "..."}),
+ ({}, {"safety": "..."}),
+ ({}, {"source": "..."}),
+ ({}, {"relations": "..."}),
+ ({}, {"notes": "..."}),
+ ({}, {"flags": "..."}),
+ ({"content": "..."}, {}),
+ ({"thumbnail": "..."}, {}),
+ ],
+)
def test_trying_to_update_field_without_privileges(
- context_factory, post_factory, user_factory, files, params):
+ context_factory, post_factory, user_factory, files, params
+):
post = post_factory()
db.session.add(post)
db.session.flush()
with pytest.raises(errors.AuthError):
api.post_api.update_post(
context_factory(
- params={**params, **{'version': 1}},
+ params={**params, **{"version": 1}},
files=files,
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'post_id': post.post_id})
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"post_id": post.post_id},
+ )
def test_trying_to_create_tags_without_privileges(
- context_factory, post_factory, user_factory):
+ context_factory, post_factory, user_factory
+):
post = post_factory()
db.session.add(post)
db.session.flush()
- with pytest.raises(errors.AuthError), \
- patch('szurubooru.func.posts.update_post_tags'):
- posts.update_post_tags.return_value = ['new-tag']
+ with pytest.raises(errors.AuthError), patch(
+ "szurubooru.func.posts.update_post_tags"
+ ):
+ posts.update_post_tags.return_value = ["new-tag"]
api.post_api.update_post(
context_factory(
- params={'tags': ['tag1', 'tag2'], 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'post_id': post.post_id})
+ params={"tags": ["tag1", "tag2"], "version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"post_id": post.post_id},
+ )
diff --git a/server/szurubooru/tests/api/test_snapshot_retrieving.py b/server/szurubooru/tests/api/test_snapshot_retrieving.py
index 41f0bebc..ec24e820 100644
--- a/server/szurubooru/tests/api/test_snapshot_retrieving.py
+++ b/server/szurubooru/tests/api/test_snapshot_retrieving.py
@@ -1,24 +1,26 @@
from datetime import datetime
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
def snapshot_factory():
snapshot = model.Snapshot()
snapshot.creation_time = datetime(1999, 1, 1)
- snapshot.resource_type = 'dummy'
+ snapshot.resource_type = "dummy"
snapshot.resource_pkey = 1
- snapshot.resource_name = 'dummy'
- snapshot.operation = 'added'
- snapshot.data = '{}'
+ snapshot.resource_name = "dummy"
+ snapshot.operation = "added"
+ snapshot.data = "{}"
return snapshot
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {'snapshots:list': model.User.RANK_REGULAR},
- })
+ config_injector(
+ {"privileges": {"snapshots:list": model.User.RANK_REGULAR},}
+ )
def test_retrieving_multiple(user_factory, context_factory):
@@ -28,19 +30,24 @@ def test_retrieving_multiple(user_factory, context_factory):
db.session.flush()
result = api.snapshot_api.get_snapshots(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_REGULAR)))
- assert result['query'] == ''
- assert result['offset'] == 0
- assert result['limit'] == 100
- assert result['total'] == 2
- assert len(result['results']) == 2
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
+ assert result["query"] == ""
+ assert result["offset"] == 0
+ assert result["limit"] == 100
+ assert result["total"] == 2
+ assert len(result["results"]) == 2
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.snapshot_api.get_snapshots(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_tag_category_creating.py b/server/szurubooru/tests/api/test_tag_category_creating.py
index 47e8405c..78f30682 100644
--- a/server/szurubooru/tests/api/test_tag_category_creating.py
+++ b/server/szurubooru/tests/api/test_tag_category_creating.py
@@ -1,7 +1,9 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tag_categories, tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tag_categories, tags
def _update_category_name(category, name):
@@ -10,49 +12,56 @@ def _update_category_name(category, name):
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {'tag_categories:create': model.User.RANK_REGULAR},
- })
+ config_injector(
+ {"privileges": {"tag_categories:create": model.User.RANK_REGULAR},}
+ )
def test_creating_category(
- tag_category_factory, user_factory, context_factory):
+ tag_category_factory, user_factory, context_factory
+):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = tag_category_factory(name='meta')
+ category = tag_category_factory(name="meta")
db.session.add(category)
- with patch('szurubooru.func.tag_categories.create_category'), \
- patch('szurubooru.func.tag_categories.serialize_category'), \
- patch('szurubooru.func.tag_categories.update_category_name'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.tag_categories.create_category"), patch(
+ "szurubooru.func.tag_categories.serialize_category"
+ ), patch("szurubooru.func.tag_categories.update_category_name"), patch(
+ "szurubooru.func.snapshots.create"
+ ):
tag_categories.create_category.return_value = category
tag_categories.update_category_name.side_effect = _update_category_name
- tag_categories.serialize_category.return_value = 'serialized category'
+ tag_categories.serialize_category.return_value = "serialized category"
result = api.tag_category_api.create_tag_category(
context_factory(
- params={'name': 'meta', 'color': 'black'}, user=auth_user))
- assert result == 'serialized category'
- tag_categories.create_category.assert_called_once_with('meta', 'black')
+ params={"name": "meta", "color": "black"}, user=auth_user
+ )
+ )
+ assert result == "serialized category"
+ tag_categories.create_category.assert_called_once_with("meta", "black")
snapshots.create.assert_called_once_with(category, auth_user)
-@pytest.mark.parametrize('field', ['name', 'color'])
+@pytest.mark.parametrize("field", ["name", "color"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'name': 'meta',
- 'color': 'black',
+ "name": "meta",
+ "color": "black",
}
del params[field]
with pytest.raises(errors.ValidationError):
api.tag_category_api.create_tag_category(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_create_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.tag_category_api.create_tag_category(
context_factory(
- params={'name': 'meta', 'color': 'black'},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"name": "meta", "color": "black"},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_tag_category_deleting.py b/server/szurubooru/tests/api/test_tag_category_deleting.py
index 2bee5137..2e6be642 100644
--- a/server/szurubooru/tests/api/test_tag_category_deleting.py
+++ b/server/szurubooru/tests/api/test_tag_category_deleting.py
@@ -1,76 +1,89 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tag_categories, tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tag_categories, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {'tag_categories:delete': model.User.RANK_REGULAR},
- })
+ config_injector(
+ {"privileges": {"tag_categories:delete": model.User.RANK_REGULAR},}
+ )
def test_deleting(user_factory, tag_category_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = tag_category_factory(name='category')
- db.session.add(tag_category_factory(name='root'))
+ category = tag_category_factory(name="category")
+ db.session.add(tag_category_factory(name="root"))
db.session.add(category)
db.session.flush()
- with patch('szurubooru.func.snapshots.delete'):
+ with patch("szurubooru.func.snapshots.delete"):
result = api.tag_category_api.delete_tag_category(
- context_factory(params={'version': 1}, user=auth_user),
- {'category_name': 'category'})
+ context_factory(params={"version": 1}, user=auth_user),
+ {"category_name": "category"},
+ )
assert result == {}
assert db.session.query(model.TagCategory).count() == 1
- assert db.session.query(model.TagCategory).one().name == 'root'
+ assert db.session.query(model.TagCategory).one().name == "root"
snapshots.delete.assert_called_once_with(category, auth_user)
def test_trying_to_delete_used(
- user_factory, tag_category_factory, tag_factory, context_factory):
- category = tag_category_factory(name='category')
+ user_factory, tag_category_factory, tag_factory, context_factory
+):
+ category = tag_category_factory(name="category")
db.session.add(category)
db.session.flush()
- tag = tag_factory(names=['tag'], category=category)
+ tag = tag_factory(names=["tag"], category=category)
db.session.add(tag)
db.session.commit()
with pytest.raises(tag_categories.TagCategoryIsInUseError):
api.tag_category_api.delete_tag_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'category'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "category"},
+ )
assert db.session.query(model.TagCategory).count() == 1
def test_trying_to_delete_last(
- user_factory, tag_category_factory, context_factory):
- db.session.add(tag_category_factory(name='root'))
+ user_factory, tag_category_factory, context_factory
+):
+ db.session.add(tag_category_factory(name="root"))
db.session.commit()
with pytest.raises(tag_categories.TagCategoryIsInUseError):
api.tag_category_api.delete_tag_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'root'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "root"},
+ )
def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(tag_categories.TagCategoryNotFoundError):
api.tag_category_api.delete_tag_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'bad'})
+ {"category_name": "bad"},
+ )
def test_trying_to_delete_without_privileges(
- user_factory, tag_category_factory, context_factory):
- db.session.add(tag_category_factory(name='category'))
+ user_factory, tag_category_factory, context_factory
+):
+ db.session.add(tag_category_factory(name="category"))
db.session.commit()
with pytest.raises(errors.AuthError):
api.tag_category_api.delete_tag_category(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': 'category'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"category_name": "category"},
+ )
assert db.session.query(model.TagCategory).count() == 1
diff --git a/server/szurubooru/tests/api/test_tag_category_retrieving.py b/server/szurubooru/tests/api/test_tag_category_retrieving.py
index 0b98d743..9c2c6fbd 100644
--- a/server/szurubooru/tests/api/test_tag_category_retrieving.py
+++ b/server/szurubooru/tests/api/test_tag_category_retrieving.py
@@ -1,43 +1,49 @@
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import tag_categories
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'tag_categories:list': model.User.RANK_REGULAR,
- 'tag_categories:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "tag_categories:list": model.User.RANK_REGULAR,
+ "tag_categories:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(
- user_factory, tag_category_factory, context_factory):
- db.session.add_all([
- tag_category_factory(name='c1'),
- tag_category_factory(name='c2'),
- ])
+ user_factory, tag_category_factory, context_factory
+):
+ db.session.add_all(
+ [tag_category_factory(name="c1"), tag_category_factory(name="c2"),]
+ )
db.session.flush()
result = api.tag_category_api.get_tag_categories(
- context_factory(user=user_factory(rank=model.User.RANK_REGULAR)))
- assert [cat['name'] for cat in result['results']] == ['c1', 'c2']
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR))
+ )
+ assert [cat["name"] for cat in result["results"]] == ["c1", "c2"]
def test_retrieving_single(
- user_factory, tag_category_factory, context_factory):
- db.session.add(tag_category_factory(name='cat'))
+ user_factory, tag_category_factory, context_factory
+):
+ db.session.add(tag_category_factory(name="cat"))
db.session.flush()
result = api.tag_category_api.get_tag_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'cat'})
+ {"category_name": "cat"},
+ )
assert result == {
- 'name': 'cat',
- 'color': 'dummy',
- 'usages': 0,
- 'default': False,
- 'version': 1,
+ "name": "cat",
+ "color": "dummy",
+ "usages": 0,
+ "default": False,
+ "version": 1,
}
@@ -45,12 +51,15 @@ def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(tag_categories.TagCategoryNotFoundError):
api.tag_category_api.get_tag_category(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': '-'})
+ {"category_name": "-"},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.tag_category_api.get_tag_category(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': '-'})
+ {"category_name": "-"},
+ )
diff --git a/server/szurubooru/tests/api/test_tag_category_updating.py b/server/szurubooru/tests/api/test_tag_category_updating.py
index 24a9f6ed..0d593900 100644
--- a/server/szurubooru/tests/api/test_tag_category_updating.py
+++ b/server/szurubooru/tests/api/test_tag_category_updating.py
@@ -1,7 +1,9 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tag_categories, tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tag_categories, tags
def _update_category_name(category, name):
@@ -10,99 +12,113 @@ def _update_category_name(category, name):
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'tag_categories:edit:name': model.User.RANK_REGULAR,
- 'tag_categories:edit:color': model.User.RANK_REGULAR,
- 'tag_categories:set_default': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "tag_categories:edit:name": model.User.RANK_REGULAR,
+ "tag_categories:edit:color": model.User.RANK_REGULAR,
+ "tag_categories:set_default": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_simple_updating(user_factory, tag_category_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- category = tag_category_factory(name='name', color='black')
+ category = tag_category_factory(name="name", color="black")
db.session.add(category)
db.session.flush()
- with patch('szurubooru.func.tag_categories.serialize_category'), \
- patch('szurubooru.func.tag_categories.update_category_name'), \
- patch('szurubooru.func.tag_categories.update_category_color'), \
- patch('szurubooru.func.snapshots.modify'):
+ with patch("szurubooru.func.tag_categories.serialize_category"), patch(
+ "szurubooru.func.tag_categories.update_category_name"
+ ), patch("szurubooru.func.tag_categories.update_category_color"), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
tag_categories.update_category_name.side_effect = _update_category_name
- tag_categories.serialize_category.return_value = 'serialized category'
+ tag_categories.serialize_category.return_value = "serialized category"
result = api.tag_category_api.update_tag_category(
context_factory(
- params={'name': 'changed', 'color': 'white', 'version': 1},
- user=auth_user),
- {'category_name': 'name'})
- assert result == 'serialized category'
+ params={"name": "changed", "color": "white", "version": 1},
+ user=auth_user,
+ ),
+ {"category_name": "name"},
+ )
+ assert result == "serialized category"
tag_categories.update_category_name.assert_called_once_with(
- category, 'changed')
+ category, "changed"
+ )
tag_categories.update_category_color.assert_called_once_with(
- category, 'white')
+ category, "white"
+ )
snapshots.modify.assert_called_once_with(category, auth_user)
-@pytest.mark.parametrize('field', ['name', 'color'])
+@pytest.mark.parametrize("field", ["name", "color"])
def test_omitting_optional_field(
- user_factory, tag_category_factory, context_factory, field):
- db.session.add(tag_category_factory(name='name', color='black'))
+ user_factory, tag_category_factory, context_factory, field
+):
+ db.session.add(tag_category_factory(name="name", color="black"))
db.session.commit()
params = {
- 'name': 'changed',
- 'color': 'white',
+ "name": "changed",
+ "color": "white",
}
del params[field]
- with patch('szurubooru.func.tag_categories.serialize_category'), \
- patch('szurubooru.func.tag_categories.update_category_name'):
+ with patch("szurubooru.func.tag_categories.serialize_category"), patch(
+ "szurubooru.func.tag_categories.update_category_name"
+ ):
api.tag_category_api.update_tag_category(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'name'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "name"},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(tag_categories.TagCategoryNotFoundError):
api.tag_category_api.update_tag_category(
context_factory(
- params={'name': ['dummy']},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'bad'})
+ params={"name": ["dummy"]},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "bad"},
+ )
-@pytest.mark.parametrize('params', [
- {'name': 'whatever'},
- {'color': 'whatever'},
-])
+@pytest.mark.parametrize(
+ "params", [{"name": "whatever"}, {"color": "whatever"},]
+)
def test_trying_to_update_without_privileges(
- user_factory, tag_category_factory, context_factory, params):
- db.session.add(tag_category_factory(name='dummy'))
+ user_factory, tag_category_factory, context_factory, params
+):
+ db.session.add(tag_category_factory(name="dummy"))
db.session.commit()
with pytest.raises(errors.AuthError):
api.tag_category_api.update_tag_category(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'category_name': 'dummy'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"category_name": "dummy"},
+ )
def test_set_as_default(user_factory, tag_category_factory, context_factory):
- category = tag_category_factory(name='name', color='black')
+ category = tag_category_factory(name="name", color="black")
db.session.add(category)
db.session.commit()
- with patch('szurubooru.func.tag_categories.serialize_category'), \
- patch('szurubooru.func.tag_categories.set_default_category'):
+ with patch("szurubooru.func.tag_categories.serialize_category"), patch(
+ "szurubooru.func.tag_categories.set_default_category"
+ ):
tag_categories.update_category_name.side_effect = _update_category_name
- tag_categories.serialize_category.return_value = 'serialized category'
+ tag_categories.serialize_category.return_value = "serialized category"
result = api.tag_category_api.set_tag_category_as_default(
context_factory(
- params={
- 'name': 'changed',
- 'color': 'white',
- 'version': 1,
- },
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'category_name': 'name'})
- assert result == 'serialized category'
+ params={"name": "changed", "color": "white", "version": 1,},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"category_name": "name"},
+ )
+ assert result == "serialized category"
tag_categories.set_default_category.assert_called_once_with(category)
diff --git a/server/szurubooru/tests/api/test_tag_creating.py b/server/szurubooru/tests/api/test_tag_creating.py
index 4cee7101..648b3178 100644
--- a/server/szurubooru/tests/api/test_tag_creating.py
+++ b/server/szurubooru/tests/api/test_tag_creating.py
@@ -1,84 +1,97 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, model, errors
-from szurubooru.func import tags, snapshots
+
+from szurubooru import api, errors, model
+from szurubooru.func import snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'tags:create': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"tags:create": model.User.RANK_REGULAR}})
def test_creating_simple_tags(tag_factory, user_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
tag = tag_factory()
- with patch('szurubooru.func.tags.create_tag'), \
- patch('szurubooru.func.tags.get_or_create_tags_by_names'), \
- patch('szurubooru.func.tags.serialize_tag'), \
- patch('szurubooru.func.snapshots.create'):
+ with patch("szurubooru.func.tags.create_tag"), patch(
+ "szurubooru.func.tags.get_or_create_tags_by_names"
+ ), patch("szurubooru.func.tags.serialize_tag"), patch(
+ "szurubooru.func.snapshots.create"
+ ):
tags.get_or_create_tags_by_names.return_value = ([], [])
tags.create_tag.return_value = tag
- tags.serialize_tag.return_value = 'serialized tag'
+ tags.serialize_tag.return_value = "serialized tag"
result = api.tag_api.create_tag(
context_factory(
params={
- 'names': ['tag1', 'tag2'],
- 'category': 'meta',
- 'description': 'desc',
- 'suggestions': ['sug1', 'sug2'],
- 'implications': ['imp1', 'imp2'],
+ "names": ["tag1", "tag2"],
+ "category": "meta",
+ "description": "desc",
+ "suggestions": ["sug1", "sug2"],
+ "implications": ["imp1", "imp2"],
},
- user=auth_user))
- assert result == 'serialized tag'
+ user=auth_user,
+ )
+ )
+ assert result == "serialized tag"
tags.create_tag.assert_called_once_with(
- ['tag1', 'tag2'], 'meta', ['sug1', 'sug2'], ['imp1', 'imp2'])
+ ["tag1", "tag2"], "meta", ["sug1", "sug2"], ["imp1", "imp2"]
+ )
snapshots.create.assert_called_once_with(tag, auth_user)
-@pytest.mark.parametrize('field', ['names', 'category'])
+@pytest.mark.parametrize("field", ["names", "category"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'names': ['tag1', 'tag2'],
- 'category': 'meta',
- 'suggestions': [],
- 'implications': [],
+ "names": ["tag1", "tag2"],
+ "category": "meta",
+ "suggestions": [],
+ "implications": [],
}
del params[field]
with pytest.raises(errors.ValidationError):
api.tag_api.create_tag(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
-@pytest.mark.parametrize('field', ['implications', 'suggestions'])
+@pytest.mark.parametrize("field", ["implications", "suggestions"])
def test_omitting_optional_field(
- tag_factory, user_factory, context_factory, field):
+ tag_factory, user_factory, context_factory, field
+):
params = {
- 'names': ['tag1', 'tag2'],
- 'category': 'meta',
- 'suggestions': [],
- 'implications': [],
+ "names": ["tag1", "tag2"],
+ "category": "meta",
+ "suggestions": [],
+ "implications": [],
}
del params[field]
- with patch('szurubooru.func.tags.create_tag'), \
- patch('szurubooru.func.tags.serialize_tag'):
+ with patch("szurubooru.func.tags.create_tag"), patch(
+ "szurubooru.func.tags.serialize_tag"
+ ):
tags.create_tag.return_value = tag_factory()
api.tag_api.create_tag(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_create_tag_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.tag_api.create_tag(
context_factory(
params={
- 'names': ['tag'],
- 'category': 'meta',
- 'suggestions': ['tag'],
- 'implications': [],
+ "names": ["tag"],
+ "category": "meta",
+ "suggestions": ["tag"],
+ "implications": [],
},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_tag_deleting.py b/server/szurubooru/tests/api/test_tag_deleting.py
index a0367f22..b45ef556 100644
--- a/server/szurubooru/tests/api/test_tag_deleting.py
+++ b/server/szurubooru/tests/api/test_tag_deleting.py
@@ -1,40 +1,46 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'tags:delete': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"tags:delete": model.User.RANK_REGULAR}})
def test_deleting(user_factory, tag_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- tag = tag_factory(names=['tag'])
+ tag = tag_factory(names=["tag"])
db.session.add(tag)
db.session.commit()
- with patch('szurubooru.func.snapshots.delete'):
+ with patch("szurubooru.func.snapshots.delete"):
result = api.tag_api.delete_tag(
- context_factory(params={'version': 1}, user=auth_user),
- {'tag_name': 'tag'})
+ context_factory(params={"version": 1}, user=auth_user),
+ {"tag_name": "tag"},
+ )
assert result == {}
assert db.session.query(model.Tag).count() == 0
snapshots.delete.assert_called_once_with(tag, auth_user)
def test_deleting_used(
- user_factory, tag_factory, context_factory, post_factory):
- tag = tag_factory(names=['tag'])
+ user_factory, tag_factory, context_factory, post_factory
+):
+ tag = tag_factory(names=["tag"])
post = post_factory()
post.tags.append(tag)
db.session.add_all([tag, post])
db.session.commit()
api.tag_api.delete_tag(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"tag_name": "tag"},
+ )
db.session.refresh(post)
assert db.session.query(model.Tag).count() == 0
assert post.tags == []
@@ -44,17 +50,21 @@ def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(tags.TagNotFoundError):
api.tag_api.delete_tag(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'bad'})
+ {"tag_name": "bad"},
+ )
def test_trying_to_delete_without_privileges(
- user_factory, tag_factory, context_factory):
- db.session.add(tag_factory(names=['tag']))
+ user_factory, tag_factory, context_factory
+):
+ db.session.add(tag_factory(names=["tag"]))
db.session.commit()
with pytest.raises(errors.AuthError):
api.tag_api.delete_tag(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'tag_name': 'tag'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"tag_name": "tag"},
+ )
assert db.session.query(model.Tag).count() == 1
diff --git a/server/szurubooru/tests/api/test_tag_merging.py b/server/szurubooru/tests/api/test_tag_merging.py
index 671e2e45..be7e887b 100644
--- a/server/szurubooru/tests/api/test_tag_merging.py
+++ b/server/szurubooru/tests/api/test_tag_merging.py
@@ -1,18 +1,20 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'tags:merge': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"tags:merge": model.User.RANK_REGULAR}})
def test_merging(user_factory, tag_factory, context_factory, post_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
db.session.add_all([source_tag, target_tag])
db.session.flush()
assert source_tag.post_count == 0
@@ -23,76 +25,88 @@ def test_merging(user_factory, tag_factory, context_factory, post_factory):
db.session.commit()
assert source_tag.post_count == 1
assert target_tag.post_count == 0
- with patch('szurubooru.func.tags.serialize_tag'), \
- patch('szurubooru.func.tags.merge_tags'), \
- patch('szurubooru.func.snapshots.merge'):
+ with patch("szurubooru.func.tags.serialize_tag"), patch(
+ "szurubooru.func.tags.merge_tags"
+ ), patch("szurubooru.func.snapshots.merge"):
api.tag_api.merge_tags(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 'source',
- 'mergeTo': 'target',
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": "source",
+ "mergeTo": "target",
},
- user=auth_user))
+ user=auth_user,
+ )
+ )
tags.merge_tags.called_once_with(source_tag, target_tag)
snapshots.merge.assert_called_once_with(
- source_tag, target_tag, auth_user)
+ source_tag, target_tag, auth_user
+ )
@pytest.mark.parametrize(
- 'field', ['remove', 'mergeTo', 'removeVersion', 'mergeToVersion'])
+ "field", ["remove", "mergeTo", "removeVersion", "mergeToVersion"]
+)
def test_trying_to_omit_mandatory_field(
- user_factory, tag_factory, context_factory, field):
- db.session.add_all([
- tag_factory(names=['source']),
- tag_factory(names=['target']),
- ])
+ user_factory, tag_factory, context_factory, field
+):
+ db.session.add_all(
+ [tag_factory(names=["source"]), tag_factory(names=["target"]),]
+ )
db.session.commit()
params = {
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 'source',
- 'mergeTo': 'target',
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": "source",
+ "mergeTo": "target",
}
del params[field]
with pytest.raises(errors.ValidationError):
api.tag_api.merge_tags(
context_factory(
- params=params,
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params=params, user=user_factory(rank=model.User.RANK_REGULAR)
+ )
+ )
def test_trying_to_merge_non_existing(
- user_factory, tag_factory, context_factory):
- db.session.add(tag_factory(names=['good']))
+ user_factory, tag_factory, context_factory
+):
+ db.session.add(tag_factory(names=["good"]))
db.session.commit()
with pytest.raises(tags.TagNotFoundError):
api.tag_api.merge_tags(
context_factory(
- params={'remove': 'good', 'mergeTo': 'bad'},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": "good", "mergeTo": "bad"},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
with pytest.raises(tags.TagNotFoundError):
api.tag_api.merge_tags(
context_factory(
- params={'remove': 'bad', 'mergeTo': 'good'},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"remove": "bad", "mergeTo": "good"},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
def test_trying_to_merge_without_privileges(
- user_factory, tag_factory, context_factory):
- db.session.add_all([
- tag_factory(names=['source']),
- tag_factory(names=['target']),
- ])
+ user_factory, tag_factory, context_factory
+):
+ db.session.add_all(
+ [tag_factory(names=["source"]), tag_factory(names=["target"]),]
+ )
db.session.commit()
with pytest.raises(errors.AuthError):
api.tag_api.merge_tags(
context_factory(
params={
- 'removeVersion': 1,
- 'mergeToVersion': 1,
- 'remove': 'source',
- 'mergeTo': 'target',
+ "removeVersion": 1,
+ "mergeToVersion": 1,
+ "remove": "source",
+ "mergeTo": "target",
},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_tag_retrieving.py b/server/szurubooru/tests/api/test_tag_retrieving.py
index c720b6ed..8b51de51 100644
--- a/server/szurubooru/tests/api/test_tag_retrieving.py
+++ b/server/szurubooru/tests/api/test_tag_retrieving.py
@@ -1,72 +1,82 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'tags:list': model.User.RANK_REGULAR,
- 'tags:view': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "tags:list": model.User.RANK_REGULAR,
+ "tags:view": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_retrieving_multiple(user_factory, tag_factory, context_factory):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([tag2, tag1])
db.session.flush()
- with patch('szurubooru.func.tags.serialize_tag'):
- tags.serialize_tag.return_value = 'serialized tag'
+ with patch("szurubooru.func.tags.serialize_tag"):
+ tags.serialize_tag.return_value = "serialized tag"
result = api.tag_api.get_tags(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert result == {
- 'query': '',
- 'offset': 0,
- 'limit': 100,
- 'total': 2,
- 'results': ['serialized tag', 'serialized tag'],
+ "query": "",
+ "offset": 0,
+ "limit": 100,
+ "total": 2,
+ "results": ["serialized tag", "serialized tag"],
}
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.tag_api.get_tags(
context_factory(
- params={'query': '', 'offset': 0},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "offset": 0},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_retrieving_single(user_factory, tag_factory, context_factory):
- db.session.add(tag_factory(names=['tag']))
+ db.session.add(tag_factory(names=["tag"]))
db.session.flush()
- with patch('szurubooru.func.tags.serialize_tag'):
- tags.serialize_tag.return_value = 'serialized tag'
+ with patch("szurubooru.func.tags.serialize_tag"):
+ tags.serialize_tag.return_value = "serialized tag"
result = api.tag_api.get_tag(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
- assert result == 'serialized tag'
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"tag_name": "tag"},
+ )
+ assert result == "serialized tag"
def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
with pytest.raises(tags.TagNotFoundError):
api.tag_api.get_tag(
- context_factory(
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': '-'})
+ context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
+ {"tag_name": "-"},
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.tag_api.get_tag(
- context_factory(
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'tag_name': '-'})
+ context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
+ {"tag_name": "-"},
+ )
diff --git a/server/szurubooru/tests/api/test_tag_siblings_retrieving.py b/server/szurubooru/tests/api/test_tag_siblings_retrieving.py
index fc2f5aaa..26f66668 100644
--- a/server/szurubooru/tests/api/test_tag_siblings_retrieving.py
+++ b/server/szurubooru/tests/api/test_tag_siblings_retrieving.py
@@ -1,38 +1,38 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'tags:view': model.User.RANK_REGULAR}})
+ config_injector({"privileges": {"tags:view": model.User.RANK_REGULAR}})
def test_get_tag_siblings(user_factory, tag_factory, context_factory):
- db.session.add(tag_factory(names=['tag']))
+ db.session.add(tag_factory(names=["tag"]))
db.session.flush()
- with patch('szurubooru.func.tags.serialize_tag'), \
- patch('szurubooru.func.tags.get_tag_siblings'):
- tags.serialize_tag.side_effect = lambda tag, *args, **kwargs: \
- 'serialized tag %s' % tag.names[0].name
+ with patch("szurubooru.func.tags.serialize_tag"), patch(
+ "szurubooru.func.tags.get_tag_siblings"
+ ):
+ tags.serialize_tag.side_effect = (
+ lambda tag, *args, **kwargs: "serialized tag %s"
+ % tag.names[0].name
+ )
tags.get_tag_siblings.return_value = [
- (tag_factory(names=['sib1']), 1),
- (tag_factory(names=['sib2']), 3),
+ (tag_factory(names=["sib1"]), 1),
+ (tag_factory(names=["sib2"]), 3),
]
result = api.tag_api.get_tag_siblings(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
+ {"tag_name": "tag"},
+ )
assert result == {
- 'results': [
- {
- 'tag': 'serialized tag sib1',
- 'occurrences': 1,
- },
- {
- 'tag': 'serialized tag sib2',
- 'occurrences': 3,
- },
+ "results": [
+ {"tag": "serialized tag sib1", "occurrences": 1,},
+ {"tag": "serialized tag sib2", "occurrences": 3,},
],
}
@@ -41,11 +41,13 @@ def test_trying_to_retrieve_non_existing(user_factory, context_factory):
with pytest.raises(tags.TagNotFoundError):
api.tag_api.get_tag_siblings(
context_factory(user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': '-'})
+ {"tag_name": "-"},
+ )
def test_trying_to_retrieve_without_privileges(user_factory, context_factory):
with pytest.raises(errors.AuthError):
api.tag_api.get_tag_siblings(
context_factory(user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'tag_name': '-'})
+ {"tag_name": "-"},
+ )
diff --git a/server/szurubooru/tests/api/test_tag_updating.py b/server/szurubooru/tests/api/test_tag_updating.py
index 0b8aa703..e2b0e00a 100644
--- a/server/szurubooru/tests/api/test_tag_updating.py
+++ b/server/szurubooru/tests/api/test_tag_updating.py
@@ -1,144 +1,175 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
-from szurubooru.func import tags, snapshots
+
+from szurubooru import api, db, errors, model
+from szurubooru.func import snapshots, tags
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'tags:create': model.User.RANK_REGULAR,
- 'tags:edit:names': model.User.RANK_REGULAR,
- 'tags:edit:category': model.User.RANK_REGULAR,
- 'tags:edit:description': model.User.RANK_REGULAR,
- 'tags:edit:suggestions': model.User.RANK_REGULAR,
- 'tags:edit:implications': model.User.RANK_REGULAR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "tags:create": model.User.RANK_REGULAR,
+ "tags:edit:names": model.User.RANK_REGULAR,
+ "tags:edit:category": model.User.RANK_REGULAR,
+ "tags:edit:description": model.User.RANK_REGULAR,
+ "tags:edit:suggestions": model.User.RANK_REGULAR,
+ "tags:edit:implications": model.User.RANK_REGULAR,
+ },
+ }
+ )
def test_simple_updating(user_factory, tag_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
- tag = tag_factory(names=['tag1', 'tag2'])
+ tag = tag_factory(names=["tag1", "tag2"])
db.session.add(tag)
db.session.commit()
- with patch('szurubooru.func.tags.create_tag'), \
- patch('szurubooru.func.tags.get_or_create_tags_by_names'), \
- patch('szurubooru.func.tags.update_tag_names'), \
- patch('szurubooru.func.tags.update_tag_category_name'), \
- patch('szurubooru.func.tags.update_tag_description'), \
- patch('szurubooru.func.tags.update_tag_suggestions'), \
- patch('szurubooru.func.tags.update_tag_implications'), \
- patch('szurubooru.func.tags.serialize_tag'), \
- patch('szurubooru.func.snapshots.modify'):
+ with patch("szurubooru.func.tags.create_tag"), patch(
+ "szurubooru.func.tags.get_or_create_tags_by_names"
+ ), patch("szurubooru.func.tags.update_tag_names"), patch(
+ "szurubooru.func.tags.update_tag_category_name"
+ ), patch(
+ "szurubooru.func.tags.update_tag_description"
+ ), patch(
+ "szurubooru.func.tags.update_tag_suggestions"
+ ), patch(
+ "szurubooru.func.tags.update_tag_implications"
+ ), patch(
+ "szurubooru.func.tags.serialize_tag"
+ ), patch(
+ "szurubooru.func.snapshots.modify"
+ ):
tags.get_or_create_tags_by_names.return_value = ([], [])
- tags.serialize_tag.return_value = 'serialized tag'
+ tags.serialize_tag.return_value = "serialized tag"
result = api.tag_api.update_tag(
context_factory(
params={
- 'version': 1,
- 'names': ['tag3'],
- 'category': 'character',
- 'description': 'desc',
- 'suggestions': ['sug1', 'sug2'],
- 'implications': ['imp1', 'imp2'],
+ "version": 1,
+ "names": ["tag3"],
+ "category": "character",
+ "description": "desc",
+ "suggestions": ["sug1", "sug2"],
+ "implications": ["imp1", "imp2"],
},
- user=auth_user),
- {'tag_name': 'tag1'})
- assert result == 'serialized tag'
+ user=auth_user,
+ ),
+ {"tag_name": "tag1"},
+ )
+ assert result == "serialized tag"
tags.create_tag.assert_not_called()
- tags.update_tag_names.assert_called_once_with(tag, ['tag3'])
- tags.update_tag_category_name.assert_called_once_with(tag, 'character')
- tags.update_tag_description.assert_called_once_with(tag, 'desc')
+ tags.update_tag_names.assert_called_once_with(tag, ["tag3"])
+ tags.update_tag_category_name.assert_called_once_with(tag, "character")
+ tags.update_tag_description.assert_called_once_with(tag, "desc")
tags.update_tag_suggestions.assert_called_once_with(
- tag, ['sug1', 'sug2'])
+ tag, ["sug1", "sug2"]
+ )
tags.update_tag_implications.assert_called_once_with(
- tag, ['imp1', 'imp2'])
+ tag, ["imp1", "imp2"]
+ )
tags.serialize_tag.assert_called_once_with(tag, options=[])
snapshots.modify.assert_called_once_with(tag, auth_user)
@pytest.mark.parametrize(
- 'field', [
- 'names',
- 'category',
- 'description',
- 'implications',
- 'suggestions',
- ])
+ "field",
+ ["names", "category", "description", "implications", "suggestions",],
+)
def test_omitting_optional_field(
- user_factory, tag_factory, context_factory, field):
- db.session.add(tag_factory(names=['tag']))
+ user_factory, tag_factory, context_factory, field
+):
+ db.session.add(tag_factory(names=["tag"]))
db.session.commit()
params = {
- 'names': ['tag1', 'tag2'],
- 'category': 'meta',
- 'description': 'desc',
- 'suggestions': [],
- 'implications': [],
+ "names": ["tag1", "tag2"],
+ "category": "meta",
+ "description": "desc",
+ "suggestions": [],
+ "implications": [],
}
del params[field]
- with patch('szurubooru.func.tags.create_tag'), \
- patch('szurubooru.func.tags.update_tag_names'), \
- patch('szurubooru.func.tags.update_tag_category_name'), \
- patch('szurubooru.func.tags.serialize_tag'):
+ with patch("szurubooru.func.tags.create_tag"), patch(
+ "szurubooru.func.tags.update_tag_names"
+ ), patch("szurubooru.func.tags.update_tag_category_name"), patch(
+ "szurubooru.func.tags.serialize_tag"
+ ):
api.tag_api.update_tag(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"tag_name": "tag"},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
with pytest.raises(tags.TagNotFoundError):
api.tag_api.update_tag(
context_factory(
- params={'names': ['dummy']},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag1'})
+ params={"names": ["dummy"]},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"tag_name": "tag1"},
+ )
-@pytest.mark.parametrize('params', [
- {'names': 'whatever'},
- {'category': 'whatever'},
- {'suggestions': ['whatever']},
- {'implications': ['whatever']},
-])
+@pytest.mark.parametrize(
+ "params",
+ [
+ {"names": "whatever"},
+ {"category": "whatever"},
+ {"suggestions": ["whatever"]},
+ {"implications": ["whatever"]},
+ ],
+)
def test_trying_to_update_without_privileges(
- user_factory, tag_factory, context_factory, params):
- db.session.add(tag_factory(names=['tag']))
+ user_factory, tag_factory, context_factory, params
+):
+ db.session.add(tag_factory(names=["tag"]))
db.session.commit()
with pytest.raises(errors.AuthError):
api.tag_api.update_tag(
context_factory(
- params={**params, **{'version': 1}},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)),
- {'tag_name': 'tag'})
+ params={**params, **{"version": 1}},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ ),
+ {"tag_name": "tag"},
+ )
def test_trying_to_create_tags_without_privileges(
- config_injector, context_factory, tag_factory, user_factory):
- tag = tag_factory(names=['tag'])
+ config_injector, context_factory, tag_factory, user_factory
+):
+ tag = tag_factory(names=["tag"])
db.session.add(tag)
db.session.commit()
- config_injector({'privileges': {
- 'tags:create': model.User.RANK_ADMINISTRATOR,
- 'tags:edit:suggestions': model.User.RANK_REGULAR,
- 'tags:edit:implications': model.User.RANK_REGULAR,
- }})
- with patch('szurubooru.func.tags.get_or_create_tags_by_names'):
- tags.get_or_create_tags_by_names.return_value = ([], ['new-tag'])
+ config_injector(
+ {
+ "privileges": {
+ "tags:create": model.User.RANK_ADMINISTRATOR,
+ "tags:edit:suggestions": model.User.RANK_REGULAR,
+ "tags:edit:implications": model.User.RANK_REGULAR,
+ }
+ }
+ )
+ with patch("szurubooru.func.tags.get_or_create_tags_by_names"):
+ tags.get_or_create_tags_by_names.return_value = ([], ["new-tag"])
with pytest.raises(errors.AuthError):
api.tag_api.update_tag(
context_factory(
- params={'suggestions': ['tag1', 'tag2'], 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
+ params={"suggestions": ["tag1", "tag2"], "version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"tag_name": "tag"},
+ )
db.session.rollback()
with pytest.raises(errors.AuthError):
api.tag_api.update_tag(
context_factory(
- params={'implications': ['tag1', 'tag2'], 'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'tag_name': 'tag'})
+ params={"implications": ["tag1", "tag2"], "version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"tag_name": "tag"},
+ )
diff --git a/server/szurubooru/tests/api/test_user_creating.py b/server/szurubooru/tests/api/test_user_creating.py
index 699bfefb..d55e1f7f 100644
--- a/server/szurubooru/tests/api/test_user_creating.py
+++ b/server/szurubooru/tests/api/test_user_creating.py
@@ -1,87 +1,104 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, model, errors
+
+from szurubooru import api, errors, model
from szurubooru.func import users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'users:create:self': 'regular'}})
+ config_injector({"privileges": {"users:create:self": "regular"}})
def test_creating_user(user_factory, context_factory, fake_datetime):
user = user_factory()
- with patch('szurubooru.func.users.create_user'), \
- patch('szurubooru.func.users.update_user_name'), \
- patch('szurubooru.func.users.update_user_password'), \
- patch('szurubooru.func.users.update_user_email'), \
- patch('szurubooru.func.users.update_user_rank'), \
- patch('szurubooru.func.users.update_user_avatar'), \
- patch('szurubooru.func.users.serialize_user'), \
- fake_datetime('1969-02-12'):
- users.serialize_user.return_value = 'serialized user'
+ with patch("szurubooru.func.users.create_user"), patch(
+ "szurubooru.func.users.update_user_name"
+ ), patch("szurubooru.func.users.update_user_password"), patch(
+ "szurubooru.func.users.update_user_email"
+ ), patch(
+ "szurubooru.func.users.update_user_rank"
+ ), patch(
+ "szurubooru.func.users.update_user_avatar"
+ ), patch(
+ "szurubooru.func.users.serialize_user"
+ ), fake_datetime(
+ "1969-02-12"
+ ):
+ users.serialize_user.return_value = "serialized user"
users.create_user.return_value = user
result = api.user_api.create_user(
context_factory(
params={
- 'name': 'chewie1',
- 'email': 'asd@asd.asd',
- 'password': 'oks',
- 'rank': 'moderator',
- 'avatarStyle': 'manual',
+ "name": "chewie1",
+ "email": "asd@asd.asd",
+ "password": "oks",
+ "rank": "moderator",
+ "avatarStyle": "manual",
},
- files={'avatar': b'...'},
- user=user_factory(rank=model.User.RANK_REGULAR)))
- assert result == 'serialized user'
+ files={"avatar": b"..."},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
+ assert result == "serialized user"
users.create_user.assert_called_once_with(
- 'chewie1', 'oks', 'asd@asd.asd')
+ "chewie1", "oks", "asd@asd.asd"
+ )
assert not users.update_user_name.called
assert not users.update_user_password.called
assert not users.update_user_email.called
- users.update_user_rank.called_once_with(user, 'moderator')
- users.update_user_avatar.called_once_with(user, 'manual', b'...')
+    users.update_user_rank.assert_called_once_with(user, "moderator")
+    users.update_user_avatar.assert_called_once_with(user, "manual", b"...")
-@pytest.mark.parametrize('field', ['name', 'password'])
+@pytest.mark.parametrize("field", ["name", "password"])
def test_trying_to_omit_mandatory_field(user_factory, context_factory, field):
params = {
- 'name': 'chewie',
- 'email': 'asd@asd.asd',
- 'password': 'oks',
+ "name": "chewie",
+ "email": "asd@asd.asd",
+ "password": "oks",
}
user = user_factory()
auth_user = user_factory(rank=model.User.RANK_REGULAR)
del params[field]
- with patch('szurubooru.func.users.create_user'), \
- pytest.raises(errors.MissingRequiredParameterError):
+ with patch("szurubooru.func.users.create_user"), pytest.raises(
+ errors.MissingRequiredParameterError
+ ):
users.create_user.return_value = user
api.user_api.create_user(
- context_factory(params=params, user=auth_user))
+ context_factory(params=params, user=auth_user)
+ )
-@pytest.mark.parametrize('field', ['rank', 'email', 'avatarStyle'])
+@pytest.mark.parametrize("field", ["rank", "email", "avatarStyle"])
def test_omitting_optional_field(user_factory, context_factory, field):
params = {
- 'name': 'chewie',
- 'email': 'asd@asd.asd',
- 'password': 'oks',
- 'rank': 'moderator',
- 'avatarStyle': 'gravatar',
+ "name": "chewie",
+ "email": "asd@asd.asd",
+ "password": "oks",
+ "rank": "moderator",
+ "avatarStyle": "gravatar",
}
del params[field]
user = user_factory()
auth_user = user_factory(rank=model.User.RANK_MODERATOR)
- with patch('szurubooru.func.users.create_user'), \
- patch('szurubooru.func.users.update_user_avatar'), \
- patch('szurubooru.func.users.serialize_user'):
+ with patch("szurubooru.func.users.create_user"), patch(
+ "szurubooru.func.users.update_user_avatar"
+ ), patch("szurubooru.func.users.serialize_user"):
users.create_user.return_value = user
api.user_api.create_user(
- context_factory(params=params, user=auth_user))
+ context_factory(params=params, user=auth_user)
+ )
def test_trying_to_create_user_without_privileges(
- context_factory, user_factory):
+ context_factory, user_factory
+):
with pytest.raises(errors.AuthError):
- api.user_api.create_user(context_factory(
- params='whatever',
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ api.user_api.create_user(
+ context_factory(
+ params="whatever",
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
diff --git a/server/szurubooru/tests/api/test_user_deleting.py b/server/szurubooru/tests/api/test_user_deleting.py
index 2bd53e2b..6ab3f1d7 100644
--- a/server/szurubooru/tests/api/test_user_deleting.py
+++ b/server/szurubooru/tests/api/test_user_deleting.py
@@ -1,50 +1,55 @@
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'users:delete:self': model.User.RANK_REGULAR,
- 'users:delete:any': model.User.RANK_MODERATOR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "users:delete:self": model.User.RANK_REGULAR,
+ "users:delete:any": model.User.RANK_MODERATOR,
+ },
+ }
+ )
def test_deleting_oneself(user_factory, context_factory):
- user = user_factory(name='u', rank=model.User.RANK_REGULAR)
+ user = user_factory(name="u", rank=model.User.RANK_REGULAR)
db.session.add(user)
db.session.commit()
result = api.user_api.delete_user(
- context_factory(
- params={'version': 1}, user=user), {'user_name': 'u'})
+ context_factory(params={"version": 1}, user=user), {"user_name": "u"}
+ )
assert result == {}
assert db.session.query(model.User).count() == 0
def test_deleting_someone_else(user_factory, context_factory):
- user1 = user_factory(name='u1', rank=model.User.RANK_REGULAR)
- user2 = user_factory(name='u2', rank=model.User.RANK_MODERATOR)
+ user1 = user_factory(name="u1", rank=model.User.RANK_REGULAR)
+ user2 = user_factory(name="u2", rank=model.User.RANK_MODERATOR)
db.session.add_all([user1, user2])
db.session.commit()
api.user_api.delete_user(
- context_factory(
- params={'version': 1}, user=user2), {'user_name': 'u1'})
+ context_factory(params={"version": 1}, user=user2), {"user_name": "u1"}
+ )
assert db.session.query(model.User).count() == 1
def test_trying_to_delete_someone_else_without_privileges(
- user_factory, context_factory):
- user1 = user_factory(name='u1', rank=model.User.RANK_REGULAR)
- user2 = user_factory(name='u2', rank=model.User.RANK_REGULAR)
+ user_factory, context_factory
+):
+ user1 = user_factory(name="u1", rank=model.User.RANK_REGULAR)
+ user2 = user_factory(name="u2", rank=model.User.RANK_REGULAR)
db.session.add_all([user1, user2])
db.session.commit()
with pytest.raises(errors.AuthError):
api.user_api.delete_user(
- context_factory(
- params={'version': 1}, user=user2), {'user_name': 'u1'})
+ context_factory(params={"version": 1}, user=user2),
+ {"user_name": "u1"},
+ )
assert db.session.query(model.User).count() == 2
@@ -52,6 +57,8 @@ def test_trying_to_delete_non_existing(user_factory, context_factory):
with pytest.raises(users.UserNotFoundError):
api.user_api.delete_user(
context_factory(
- params={'version': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)),
- {'user_name': 'bad'})
+ params={"version": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ ),
+ {"user_name": "bad"},
+ )
diff --git a/server/szurubooru/tests/api/test_user_retrieving.py b/server/szurubooru/tests/api/test_user_retrieving.py
index 2e797e87..b18b4d55 100644
--- a/server/szurubooru/tests/api/test_user_retrieving.py
+++ b/server/szurubooru/tests/api/test_user_retrieving.py
@@ -1,73 +1,86 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'users:list': model.User.RANK_REGULAR,
- 'users:view': model.User.RANK_REGULAR,
- 'users:edit:any:email': model.User.RANK_MODERATOR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "users:list": model.User.RANK_REGULAR,
+ "users:view": model.User.RANK_REGULAR,
+ "users:edit:any:email": model.User.RANK_MODERATOR,
+ },
+ }
+ )
def test_retrieving_multiple(user_factory, context_factory):
- user1 = user_factory(name='u1', rank=model.User.RANK_MODERATOR)
- user2 = user_factory(name='u2', rank=model.User.RANK_MODERATOR)
+ user1 = user_factory(name="u1", rank=model.User.RANK_MODERATOR)
+ user2 = user_factory(name="u2", rank=model.User.RANK_MODERATOR)
db.session.add_all([user1, user2])
db.session.flush()
- with patch('szurubooru.func.users.serialize_user'):
- users.serialize_user.return_value = 'serialized user'
+ with patch("szurubooru.func.users.serialize_user"):
+ users.serialize_user.return_value = "serialized user"
result = api.user_api.get_users(
context_factory(
- params={'query': '', 'page': 1},
- user=user_factory(rank=model.User.RANK_REGULAR)))
+ params={"query": "", "page": 1},
+ user=user_factory(rank=model.User.RANK_REGULAR),
+ )
+ )
assert result == {
- 'query': '',
- 'offset': 0,
- 'limit': 100,
- 'total': 2,
- 'results': ['serialized user', 'serialized user'],
+ "query": "",
+ "offset": 0,
+ "limit": 100,
+ "total": 2,
+ "results": ["serialized user", "serialized user"],
}
def test_trying_to_retrieve_multiple_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
with pytest.raises(errors.AuthError):
api.user_api.get_users(
context_factory(
- params={'query': '', 'page': 1},
- user=user_factory(rank=model.User.RANK_ANONYMOUS)))
+ params={"query": "", "page": 1},
+ user=user_factory(rank=model.User.RANK_ANONYMOUS),
+ )
+ )
def test_retrieving_single(user_factory, context_factory):
- user = user_factory(name='u1', rank=model.User.RANK_REGULAR)
+ user = user_factory(name="u1", rank=model.User.RANK_REGULAR)
auth_user = user_factory(rank=model.User.RANK_REGULAR)
db.session.add(user)
db.session.flush()
- with patch('szurubooru.func.users.serialize_user'):
- users.serialize_user.return_value = 'serialized user'
+ with patch("szurubooru.func.users.serialize_user"):
+ users.serialize_user.return_value = "serialized user"
result = api.user_api.get_user(
- context_factory(user=auth_user), {'user_name': 'u1'})
- assert result == 'serialized user'
+ context_factory(user=auth_user), {"user_name": "u1"}
+ )
+ assert result == "serialized user"
def test_trying_to_retrieve_single_non_existing(user_factory, context_factory):
auth_user = user_factory(rank=model.User.RANK_REGULAR)
with pytest.raises(users.UserNotFoundError):
api.user_api.get_user(
- context_factory(user=auth_user), {'user_name': '-'})
+ context_factory(user=auth_user), {"user_name": "-"}
+ )
def test_trying_to_retrieve_single_without_privileges(
- user_factory, context_factory):
+ user_factory, context_factory
+):
auth_user = user_factory(rank=model.User.RANK_ANONYMOUS)
- db.session.add(user_factory(name='u1', rank=model.User.RANK_REGULAR))
+ db.session.add(user_factory(name="u1", rank=model.User.RANK_REGULAR))
db.session.flush()
with pytest.raises(errors.AuthError):
api.user_api.get_user(
- context_factory(user=auth_user), {'user_name': 'u1'})
+ context_factory(user=auth_user), {"user_name": "u1"}
+ )
diff --git a/server/szurubooru/tests/api/test_user_token_creating.py b/server/szurubooru/tests/api/test_user_token_creating.py
index f550f63f..bce41ec2 100644
--- a/server/szurubooru/tests/api/test_user_token_creating.py
+++ b/server/szurubooru/tests/api/test_user_token_creating.py
@@ -1,29 +1,33 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import api
from szurubooru.func import user_tokens, users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'user_tokens:create:self': 'regular'}})
+ config_injector({"privileges": {"user_tokens:create:self": "regular"}})
def test_creating_user_token(
- user_token_factory, context_factory, fake_datetime):
+ user_token_factory, context_factory, fake_datetime
+):
user_token = user_token_factory()
- with patch('szurubooru.func.user_tokens.create_user_token'), \
- patch('szurubooru.func.user_tokens.serialize_user_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- fake_datetime('1969-02-12'):
+ with patch("szurubooru.func.user_tokens.create_user_token"), patch(
+ "szurubooru.func.user_tokens.serialize_user_token"
+ ), patch("szurubooru.func.users.get_user_by_name"), fake_datetime(
+ "1969-02-12"
+ ):
users.get_user_by_name.return_value = user_token.user
- user_tokens.serialize_user_token.return_value = 'serialized user token'
+ user_tokens.serialize_user_token.return_value = "serialized user token"
user_tokens.create_user_token.return_value = user_token
result = api.user_token_api.create_user_token(
context_factory(user=user_token.user),
- {
- 'user_name': user_token.user.name
- })
- assert result == 'serialized user token'
+ {"user_name": user_token.user.name},
+ )
+ assert result == "serialized user token"
user_tokens.create_user_token.assert_called_once_with(
- user_token.user, True)
+ user_token.user, True
+ )
diff --git a/server/szurubooru/tests/api/test_user_token_deleting.py b/server/szurubooru/tests/api/test_user_token_deleting.py
index 85341522..19f93fad 100644
--- a/server/szurubooru/tests/api/test_user_token_deleting.py
+++ b/server/szurubooru/tests/api/test_user_token_deleting.py
@@ -1,30 +1,35 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import api, db
from szurubooru.func import user_tokens, users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'user_tokens:delete:self': 'regular'}})
+ config_injector({"privileges": {"user_tokens:delete:self": "regular"}})
def test_deleting_user_token(
- user_token_factory, context_factory, fake_datetime):
+ user_token_factory, context_factory, fake_datetime
+):
user_token = user_token_factory()
db.session.add(user_token)
db.session.commit()
- with patch('szurubooru.func.user_tokens.get_by_user_and_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- fake_datetime('1969-02-12'):
+ with patch("szurubooru.func.user_tokens.get_by_user_and_token"), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ), fake_datetime("1969-02-12"):
users.get_user_by_name.return_value = user_token.user
user_tokens.get_by_user_and_token.return_value = user_token
result = api.user_token_api.delete_user_token(
context_factory(user=user_token.user),
{
- 'user_name': user_token.user.name,
- 'user_token': user_token.token
- })
+ "user_name": user_token.user.name,
+ "user_token": user_token.token,
+ },
+ )
assert result == {}
user_tokens.get_by_user_and_token.assert_called_once_with(
- user_token.user, user_token.token)
+ user_token.user, user_token.token
+ )
diff --git a/server/szurubooru/tests/api/test_user_token_retrieving.py b/server/szurubooru/tests/api/test_user_token_retrieving.py
index 01b25342..e3351b45 100644
--- a/server/szurubooru/tests/api/test_user_token_retrieving.py
+++ b/server/szurubooru/tests/api/test_user_token_retrieving.py
@@ -1,31 +1,37 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import api
from szurubooru.func import user_tokens, users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'user_tokens:list:self': 'regular'}})
+ config_injector({"privileges": {"user_tokens:list:self": "regular"}})
def test_retrieving_user_tokens(
- user_token_factory, context_factory, fake_datetime):
+ user_token_factory, context_factory, fake_datetime
+):
user_token1 = user_token_factory()
user_token2 = user_token_factory(user=user_token1.user)
user_token3 = user_token_factory(user=user_token1.user)
- with patch('szurubooru.func.user_tokens.get_user_tokens'), \
- patch('szurubooru.func.user_tokens.serialize_user_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- fake_datetime('1969-02-12'):
+ with patch("szurubooru.func.user_tokens.get_user_tokens"), patch(
+ "szurubooru.func.user_tokens.serialize_user_token"
+ ), patch("szurubooru.func.users.get_user_by_name"), fake_datetime(
+ "1969-02-12"
+ ):
users.get_user_by_name.return_value = user_token1.user
- user_tokens.serialize_user_token.return_value = 'serialized user token'
- user_tokens.get_user_tokens.return_value = [user_token1, user_token2,
- user_token3]
+ user_tokens.serialize_user_token.return_value = "serialized user token"
+ user_tokens.get_user_tokens.return_value = [
+ user_token1,
+ user_token2,
+ user_token3,
+ ]
result = api.user_token_api.get_user_tokens(
context_factory(user=user_token1.user),
- {
- 'user_name': user_token1.user.name
- })
- assert result == {'results': ['serialized user token'] * 3}
+ {"user_name": user_token1.user.name},
+ )
+ assert result == {"results": ["serialized user token"] * 3}
user_tokens.get_user_tokens.assert_called_once_with(user_token1.user)
diff --git a/server/szurubooru/tests/api/test_user_token_updating.py b/server/szurubooru/tests/api/test_user_token_updating.py
index bf725a35..6bc5e11d 100644
--- a/server/szurubooru/tests/api/test_user_token_updating.py
+++ b/server/szurubooru/tests/api/test_user_token_updating.py
@@ -1,42 +1,49 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import api, db
from szurubooru.func import user_tokens, users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'privileges': {'user_tokens:edit:self': 'regular'}})
+ config_injector({"privileges": {"user_tokens:edit:self": "regular"}})
def test_edit_user_token(user_token_factory, context_factory, fake_datetime):
user_token = user_token_factory()
db.session.add(user_token)
db.session.commit()
- with patch('szurubooru.func.user_tokens.get_by_user_and_token'), \
- patch('szurubooru.func.user_tokens.update_user_token_enabled'), \
- patch('szurubooru.func.user_tokens.update_user_token_edit_time'), \
- patch('szurubooru.func.user_tokens.serialize_user_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- fake_datetime('1969-02-12'):
+ with patch("szurubooru.func.user_tokens.get_by_user_and_token"), patch(
+ "szurubooru.func.user_tokens.update_user_token_enabled"
+ ), patch("szurubooru.func.user_tokens.update_user_token_edit_time"), patch(
+ "szurubooru.func.user_tokens.serialize_user_token"
+ ), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ), fake_datetime(
+ "1969-02-12"
+ ):
users.get_user_by_name.return_value = user_token.user
- user_tokens.serialize_user_token.return_value = 'serialized user token'
+ user_tokens.serialize_user_token.return_value = "serialized user token"
user_tokens.get_by_user_and_token.return_value = user_token
result = api.user_token_api.update_user_token(
context_factory(
- params={
- 'version': user_token.version,
- 'enabled': False,
- },
- user=user_token.user),
+ params={"version": user_token.version, "enabled": False,},
+ user=user_token.user,
+ ),
{
- 'user_name': user_token.user.name,
- 'user_token': user_token.token
- })
- assert result == 'serialized user token'
+ "user_name": user_token.user.name,
+ "user_token": user_token.token,
+ },
+ )
+ assert result == "serialized user token"
user_tokens.get_by_user_and_token.assert_called_once_with(
- user_token.user, user_token.token)
+ user_token.user, user_token.token
+ )
user_tokens.update_user_token_enabled.assert_called_once_with(
- user_token, False)
+ user_token, False
+ )
user_tokens.update_user_token_edit_time.assert_called_once_with(
- user_token)
+ user_token
+ )
diff --git a/server/szurubooru/tests/api/test_user_updating.py b/server/szurubooru/tests/api/test_user_updating.py
index af750493..08ccc788 100644
--- a/server/szurubooru/tests/api/test_user_updating.py
+++ b/server/szurubooru/tests/api/test_user_updating.py
@@ -1,125 +1,147 @@
from unittest.mock import patch
+
import pytest
-from szurubooru import api, db, model, errors
+
+from szurubooru import api, db, errors, model
from szurubooru.func import users
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'privileges': {
- 'users:edit:self:name': model.User.RANK_REGULAR,
- 'users:edit:self:pass': model.User.RANK_REGULAR,
- 'users:edit:self:email': model.User.RANK_REGULAR,
- 'users:edit:self:rank': model.User.RANK_MODERATOR,
- 'users:edit:self:avatar': model.User.RANK_MODERATOR,
- 'users:edit:any:name': model.User.RANK_MODERATOR,
- 'users:edit:any:pass': model.User.RANK_MODERATOR,
- 'users:edit:any:email': model.User.RANK_MODERATOR,
- 'users:edit:any:rank': model.User.RANK_ADMINISTRATOR,
- 'users:edit:any:avatar': model.User.RANK_ADMINISTRATOR,
- },
- })
+ config_injector(
+ {
+ "privileges": {
+ "users:edit:self:name": model.User.RANK_REGULAR,
+ "users:edit:self:pass": model.User.RANK_REGULAR,
+ "users:edit:self:email": model.User.RANK_REGULAR,
+ "users:edit:self:rank": model.User.RANK_MODERATOR,
+ "users:edit:self:avatar": model.User.RANK_MODERATOR,
+ "users:edit:any:name": model.User.RANK_MODERATOR,
+ "users:edit:any:pass": model.User.RANK_MODERATOR,
+ "users:edit:any:email": model.User.RANK_MODERATOR,
+ "users:edit:any:rank": model.User.RANK_ADMINISTRATOR,
+ "users:edit:any:avatar": model.User.RANK_ADMINISTRATOR,
+ },
+ }
+ )
def test_updating_user(context_factory, user_factory):
- user = user_factory(name='u1', rank=model.User.RANK_ADMINISTRATOR)
+ user = user_factory(name="u1", rank=model.User.RANK_ADMINISTRATOR)
auth_user = user_factory(rank=model.User.RANK_ADMINISTRATOR)
db.session.add(user)
db.session.flush()
- with patch('szurubooru.func.users.create_user'), \
- patch('szurubooru.func.users.update_user_name'), \
- patch('szurubooru.func.users.update_user_password'), \
- patch('szurubooru.func.users.update_user_email'), \
- patch('szurubooru.func.users.update_user_rank'), \
- patch('szurubooru.func.users.update_user_avatar'), \
- patch('szurubooru.func.users.serialize_user'):
- users.serialize_user.return_value = 'serialized user'
+ with patch("szurubooru.func.users.create_user"), patch(
+ "szurubooru.func.users.update_user_name"
+ ), patch("szurubooru.func.users.update_user_password"), patch(
+ "szurubooru.func.users.update_user_email"
+ ), patch(
+ "szurubooru.func.users.update_user_rank"
+ ), patch(
+ "szurubooru.func.users.update_user_avatar"
+ ), patch(
+ "szurubooru.func.users.serialize_user"
+ ):
+ users.serialize_user.return_value = "serialized user"
result = api.user_api.update_user(
context_factory(
params={
- 'version': 1,
- 'name': 'chewie',
- 'email': 'asd@asd.asd',
- 'password': 'oks',
- 'rank': 'moderator',
- 'avatarStyle': 'manual',
+ "version": 1,
+ "name": "chewie",
+ "email": "asd@asd.asd",
+ "password": "oks",
+ "rank": "moderator",
+ "avatarStyle": "manual",
},
- files={
- 'avatar': b'...',
- },
- user=auth_user),
- {'user_name': 'u1'})
+ files={"avatar": b"...",},
+ user=auth_user,
+ ),
+ {"user_name": "u1"},
+ )
- assert result == 'serialized user'
+ assert result == "serialized user"
users.create_user.assert_not_called()
- users.update_user_name.assert_called_once_with(user, 'chewie')
- users.update_user_password.assert_called_once_with(user, 'oks')
- users.update_user_email.assert_called_once_with(user, 'asd@asd.asd')
+ users.update_user_name.assert_called_once_with(user, "chewie")
+ users.update_user_password.assert_called_once_with(user, "oks")
+ users.update_user_email.assert_called_once_with(user, "asd@asd.asd")
users.update_user_rank.assert_called_once_with(
- user, 'moderator', auth_user)
+ user, "moderator", auth_user
+ )
users.update_user_avatar.assert_called_once_with(
- user, 'manual', b'...')
+ user, "manual", b"..."
+ )
users.serialize_user.assert_called_once_with(
- user, auth_user, options=[])
+ user, auth_user, options=[]
+ )
@pytest.mark.parametrize(
- 'field', ['name', 'email', 'password', 'rank', 'avatarStyle'])
+ "field", ["name", "email", "password", "rank", "avatarStyle"]
+)
def test_omitting_optional_field(user_factory, context_factory, field):
- user = user_factory(name='u1', rank=model.User.RANK_ADMINISTRATOR)
+ user = user_factory(name="u1", rank=model.User.RANK_ADMINISTRATOR)
db.session.add(user)
db.session.flush()
params = {
- 'name': 'chewie',
- 'email': 'asd@asd.asd',
- 'password': 'oks',
- 'rank': 'moderator',
- 'avatarStyle': 'gravatar',
+ "name": "chewie",
+ "email": "asd@asd.asd",
+ "password": "oks",
+ "rank": "moderator",
+ "avatarStyle": "gravatar",
}
del params[field]
- with patch('szurubooru.func.users.create_user'), \
- patch('szurubooru.func.users.update_user_name'), \
- patch('szurubooru.func.users.update_user_password'), \
- patch('szurubooru.func.users.update_user_email'), \
- patch('szurubooru.func.users.update_user_rank'), \
- patch('szurubooru.func.users.update_user_avatar'), \
- patch('szurubooru.func.users.serialize_user'):
+ with patch("szurubooru.func.users.create_user"), patch(
+ "szurubooru.func.users.update_user_name"
+ ), patch("szurubooru.func.users.update_user_password"), patch(
+ "szurubooru.func.users.update_user_email"
+ ), patch(
+ "szurubooru.func.users.update_user_rank"
+ ), patch(
+ "szurubooru.func.users.update_user_avatar"
+ ), patch(
+ "szurubooru.func.users.serialize_user"
+ ):
api.user_api.update_user(
context_factory(
- params={**params, **{'version': 1}},
- files={'avatar': b'...'},
- user=user),
- {'user_name': 'u1'})
+ params={**params, **{"version": 1}},
+ files={"avatar": b"..."},
+ user=user,
+ ),
+ {"user_name": "u1"},
+ )
def test_trying_to_update_non_existing(user_factory, context_factory):
- user = user_factory(name='u1', rank=model.User.RANK_ADMINISTRATOR)
+ user = user_factory(name="u1", rank=model.User.RANK_ADMINISTRATOR)
db.session.add(user)
db.session.flush()
with pytest.raises(users.UserNotFoundError):
api.user_api.update_user(
- context_factory(user=user), {'user_name': 'u2'})
+ context_factory(user=user), {"user_name": "u2"}
+ )
-@pytest.mark.parametrize('params', [
- {'name': 'whatever'},
- {'email': 'whatever'},
- {'rank': 'whatever'},
- {'password': 'whatever'},
- {'avatarStyle': 'whatever'},
-])
+@pytest.mark.parametrize(
+ "params",
+ [
+ {"name": "whatever"},
+ {"email": "whatever"},
+ {"rank": "whatever"},
+ {"password": "whatever"},
+ {"avatarStyle": "whatever"},
+ ],
+)
def test_trying_to_update_field_without_privileges(
- user_factory, context_factory, params):
- user1 = user_factory(name='u1', rank=model.User.RANK_REGULAR)
- user2 = user_factory(name='u2', rank=model.User.RANK_REGULAR)
+ user_factory, context_factory, params
+):
+ user1 = user_factory(name="u1", rank=model.User.RANK_REGULAR)
+ user2 = user_factory(name="u2", rank=model.User.RANK_REGULAR)
db.session.add_all([user1, user2])
db.session.flush()
with pytest.raises(errors.AuthError):
api.user_api.update_user(
- context_factory(
- params={**params, **{'version': 1}},
- user=user1),
- {'user_name': user2.name})
+ context_factory(params={**params, **{"version": 1}}, user=user1),
+ {"user_name": user2.name},
+ )
diff --git a/server/szurubooru/tests/conftest.py b/server/szurubooru/tests/conftest.py
index c776601d..6e127bcd 100644
--- a/server/szurubooru/tests/conftest.py
+++ b/server/szurubooru/tests/conftest.py
@@ -2,17 +2,19 @@ import contextlib
import os
import random
import string
-from unittest.mock import patch
from datetime import datetime
-import pytest
+from unittest.mock import patch
+
import freezegun
+import pytest
import sqlalchemy as sa
+
from szurubooru import config, db, model, rest
def get_unique_name():
alphabet = string.ascii_letters + string.digits
- return ''.join(random.choice(alphabet) for _ in range(8))
+ return "".join(random.choice(alphabet) for _ in range(8))
@pytest.fixture
@@ -23,21 +25,24 @@ def fake_datetime():
freezer.start()
yield
freezer.stop()
+
return injector
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def query_logger(pytestconfig):
if pytestconfig.option.verbose > 0:
import logging
import coloredlogs
+
coloredlogs.install(
- fmt='[%(asctime)-15s] %(name)s %(message)s', isatty=True)
+ fmt="[%(asctime)-15s] %(name)s %(message)s", isatty=True
+ )
logging.basicConfig()
- logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+ logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
-@pytest.yield_fixture(scope='function', autouse=True)
+@pytest.yield_fixture(scope="function", autouse=True)
def session(query_logger, postgresql_db):
db.session = postgresql_db.session
postgresql_db.create_table(*model.Base.metadata.sorted_tables)
@@ -51,15 +56,17 @@ def session(query_logger, postgresql_db):
def context_factory(session):
def factory(params=None, files=None, user=None, headers=None):
ctx = rest.Context(
- env={'HTTP_ORIGIN': 'http://example.com'},
+ env={"HTTP_ORIGIN": "http://example.com"},
method=None,
url=None,
headers=headers or {},
params=params or {},
- files=files or {})
+ files=files or {},
+ )
ctx.session = session
ctx.user = user or model.User()
return ctx
+
return factory
@@ -67,58 +74,64 @@ def context_factory(session):
def config_injector():
def injector(new_config_content):
config.config = new_config_content
+
return injector
@pytest.fixture
def user_factory():
def factory(
- name=None,
- rank=model.User.RANK_REGULAR,
- email='dummy',
- password_salt=None,
- password_hash=None):
+ name=None,
+ rank=model.User.RANK_REGULAR,
+ email="dummy",
+ password_salt=None,
+ password_hash=None,
+ ):
user = model.User()
user.name = name or get_unique_name()
- user.password_salt = password_salt or 'dummy'
- user.password_hash = password_hash or 'dummy'
+ user.password_salt = password_salt or "dummy"
+ user.password_hash = password_hash or "dummy"
user.email = email
user.rank = rank
user.creation_time = datetime(1997, 1, 1)
user.avatar_style = model.User.AVATAR_GRAVATAR
return user
+
return factory
@pytest.fixture
def user_token_factory(user_factory):
def factory(
- user=None,
- token=None,
- expiration_time=None,
- enabled=None,
- creation_time=None):
+ user=None,
+ token=None,
+ expiration_time=None,
+ enabled=None,
+ creation_time=None,
+ ):
if user is None:
user = user_factory()
db.session.add(user)
user_token = model.UserToken()
user_token.user = user
- user_token.token = token or 'dummy'
+ user_token.token = token or "dummy"
user_token.expiration_time = expiration_time
user_token.enabled = enabled if enabled is not None else True
user_token.creation_time = creation_time or datetime(1997, 1, 1)
return user_token
+
return factory
@pytest.fixture
def tag_category_factory():
- def factory(name=None, color='dummy', default=False):
+ def factory(name=None, color="dummy", default=False):
category = model.TagCategory()
category.name = name or get_unique_name()
category.color = color
category.default = default
return category
+
return factory
@@ -135,31 +148,34 @@ def tag_factory():
tag.category = category
tag.creation_time = datetime(1996, 1, 1)
return tag
+
return factory
@pytest.fixture
def post_factory():
def factory(
- id=None,
- safety=model.Post.SAFETY_SAFE,
- type=model.Post.TYPE_IMAGE,
- checksum='...'):
+ id=None,
+ safety=model.Post.SAFETY_SAFE,
+ type=model.Post.TYPE_IMAGE,
+ checksum="...",
+ ):
post = model.Post()
post.post_id = id
post.safety = safety
post.type = type
post.checksum = checksum
post.flags = []
- post.mime_type = 'application/octet-stream'
+ post.mime_type = "application/octet-stream"
post.creation_time = datetime(1996, 1, 1)
return post
+
return factory
@pytest.fixture
def comment_factory(user_factory, post_factory):
- def factory(user=None, post=None, text='dummy', time=None):
+ def factory(user=None, post=None, text="dummy", time=None):
if not user:
user = user_factory()
db.session.add(user)
@@ -172,6 +188,7 @@ def comment_factory(user_factory, post_factory):
comment.text = text
comment.creation_time = time or datetime(1996, 1, 1)
return comment
+
return factory
@@ -183,7 +200,9 @@ def post_score_factory(user_factory, post_factory):
if post is None:
post = post_factory()
return model.PostScore(
- post=post, user=user, score=score, time=datetime(1999, 1, 1))
+ post=post, user=user, score=score, time=datetime(1999, 1, 1)
+ )
+
return factory
@@ -195,25 +214,29 @@ def post_favorite_factory(user_factory, post_factory):
if post is None:
post = post_factory()
return model.PostFavorite(
- post=post, user=user, time=datetime(1999, 1, 1))
+ post=post, user=user, time=datetime(1999, 1, 1)
+ )
+
return factory
@pytest.fixture
def pool_category_factory():
- def factory(name=None, color='dummy', default=False):
+ def factory(name=None, color="dummy", default=False):
category = model.PoolCategory()
category.name = name or get_unique_name()
category.color = color
category.default = default
return category
+
return factory
@pytest.fixture
def pool_factory():
def factory(
- id=None, names=None, description=None, category=None, time=None):
+ id=None, names=None, description=None, category=None, time=None
+ ):
if not category:
category = model.PoolCategory(get_unique_name())
db.session.add(category)
@@ -226,6 +249,7 @@ def pool_factory():
pool.category = category
pool.creation_time = time or datetime(1996, 1, 1)
return pool
+
return factory
@@ -243,13 +267,15 @@ def pool_post_factory(pool_factory, post_factory):
pool_post.post = post
pool_post.order = order or 0
return pool_post
+
return factory
@pytest.fixture
def read_asset():
def get(path):
- path = os.path.join(os.path.dirname(__file__), 'assets', path)
- with open(path, 'rb') as handle:
+ path = os.path.join(os.path.dirname(__file__), "assets", path)
+ with open(path, "rb") as handle:
return handle.read()
+
return get
diff --git a/server/szurubooru/tests/func/test_auth.py b/server/szurubooru/tests/func/test_auth.py
index 6dc79bb5..2141d3c2 100644
--- a/server/szurubooru/tests/func/test_auth.py
+++ b/server/szurubooru/tests/func/test_auth.py
@@ -1,41 +1,44 @@
from datetime import datetime, timedelta
+
import pytest
+
from szurubooru.func import auth
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({'secret': 'testSecret'})
+ config_injector({"secret": "testSecret"})
def test_get_password_hash():
- salt, password = ('testSalt', 'pass')
+ salt, password = ("testSalt", "pass")
result, revision = auth.get_password_hash(salt, password)
assert result
assert revision == 3
hash_parts = list(
- filter(lambda e: e is not None and e != '', result.split('$')))
+ filter(lambda e: e is not None and e != "", result.split("$"))
+ )
assert len(hash_parts) == 5
- assert hash_parts[0] == 'argon2id'
+ assert hash_parts[0] == "argon2id"
def test_get_sha256_legacy_password_hash():
- salt, password = ('testSalt', 'pass')
+ salt, password = ("testSalt", "pass")
result, revision = auth.get_sha256_legacy_password_hash(salt, password)
- hash = '2031ac9631353ac9303719a7f808a24f79aa1d71712c98523e4bb4cce579428a'
+ hash = "2031ac9631353ac9303719a7f808a24f79aa1d71712c98523e4bb4cce579428a"
assert result == hash
assert revision == 2
def test_get_sha1_legacy_password_hash():
- salt, password = ('testSalt', 'pass')
+ salt, password = ("testSalt", "pass")
result, revision = auth.get_sha1_legacy_password_hash(salt, password)
- assert result == '1eb1f953d9be303a1b54627e903e6124cfb1245b'
+ assert result == "1eb1f953d9be303a1b54627e903e6124cfb1245b"
assert revision == 1
def test_is_valid_password_auto_upgrades_user_password_hash(user_factory):
- salt, password = ('testSalt', 'pass')
+ salt, password = ("testSalt", "pass")
hash, revision = auth.get_sha256_legacy_password_hash(salt, password)
user = user_factory(password_salt=salt, password_hash=hash)
result = auth.is_valid_password(user, password)
@@ -50,7 +53,7 @@ def test_is_valid_token(user_token_factory):
def test_expired_token_is_invalid(user_token_factory):
- past_expiration = (datetime.utcnow() - timedelta(minutes=30))
+ past_expiration = datetime.utcnow() - timedelta(minutes=30)
user_token = user_token_factory(expiration_time=past_expiration)
assert not auth.is_valid_token(user_token)
diff --git a/server/szurubooru/tests/func/test_comments.py b/server/szurubooru/tests/func/test_comments.py
index f1e5d0f1..ef5ca4f3 100644
--- a/server/szurubooru/tests/func/test_comments.py
+++ b/server/szurubooru/tests/func/test_comments.py
@@ -1,34 +1,36 @@
-from unittest.mock import patch
from datetime import datetime
+from unittest.mock import patch
+
import pytest
+
from szurubooru import db
from szurubooru.func import comments, users
def test_serialize_user(user_factory, comment_factory):
- with patch('szurubooru.func.users.get_avatar_url'):
- users.get_avatar_url.return_value = 'https://example.com/avatar.png'
- comment = comment_factory(user=user_factory(name='dummy'))
+ with patch("szurubooru.func.users.get_avatar_url"):
+ users.get_avatar_url.return_value = "https://example.com/avatar.png"
+ comment = comment_factory(user=user_factory(name="dummy"))
comment.comment_id = 77
comment.creation_time = datetime(1997, 1, 1)
comment.last_edit_time = datetime(1998, 1, 1)
- comment.text = 'text'
+ comment.text = "text"
db.session.add(comment)
db.session.flush()
auth_user = user_factory()
assert comments.serialize_comment(comment, auth_user) == {
- 'id': comment.comment_id,
- 'postId': comment.post.post_id,
- 'creationTime': datetime(1997, 1, 1, 0, 0),
- 'lastEditTime': datetime(1998, 1, 1, 0, 0),
- 'score': 0,
- 'ownScore': 0,
- 'text': 'text',
- 'user': {
- 'name': 'dummy',
- 'avatarUrl': 'https://example.com/avatar.png',
+ "id": comment.comment_id,
+ "postId": comment.post.post_id,
+ "creationTime": datetime(1997, 1, 1, 0, 0),
+ "lastEditTime": datetime(1998, 1, 1, 0, 0),
+ "score": 0,
+ "ownScore": 0,
+ "text": "text",
+ "user": {
+ "name": "dummy",
+ "avatarUrl": "https://example.com/avatar.png",
},
- 'version': 1,
+ "version": 1,
}
@@ -53,13 +55,14 @@ def test_create_comment(user_factory, post_factory, fake_datetime):
user = user_factory()
post = post_factory()
db.session.add_all([user, post])
- with patch('szurubooru.func.comments.update_comment_text'), \
- fake_datetime('1997-01-01'):
- comment = comments.create_comment(user, post, 'text')
+ with patch("szurubooru.func.comments.update_comment_text"), fake_datetime(
+ "1997-01-01"
+ ):
+ comment = comments.create_comment(user, post, "text")
assert comment.creation_time == datetime(1997, 1, 1)
assert comment.user == user
assert comment.post == post
- comments.update_comment_text.assert_called_once_with(comment, 'text')
+ comments.update_comment_text.assert_called_once_with(comment, "text")
def test_update_comment_text_with_emptry_string(comment_factory):
@@ -70,5 +73,5 @@ def test_update_comment_text_with_emptry_string(comment_factory):
def test_update_comment_text(comment_factory):
comment = comment_factory()
- comments.update_comment_text(comment, 'text')
- assert comment.text == 'text'
+ comments.update_comment_text(comment, "text")
+ assert comment.text == "text"
diff --git a/server/szurubooru/tests/func/test_diff.py b/server/szurubooru/tests/func/test_diff.py
index 0134a3fd..5ea57809 100644
--- a/server/szurubooru/tests/func/test_diff.py
+++ b/server/szurubooru/tests/func/test_diff.py
@@ -1,275 +1,222 @@
import pytest
+
from szurubooru.func import diff
-@pytest.mark.parametrize('old,new,expected', [
- (
- [], [], None,
- ),
-
- (
- [],
- ['added'],
- {'type': 'list change', 'added': ['added'], 'removed': []},
- ),
-
- (
- ['removed'],
- [],
- {'type': 'list change', 'added': [], 'removed': ['removed']},
- ),
-
- (
- ['untouched'],
- ['untouched'],
- None,
- ),
-
- (
- ['untouched'],
- ['untouched', 'added'],
- {'type': 'list change', 'added': ['added'], 'removed': []},
- ),
-
- (
- ['untouched', 'removed'],
- ['untouched'],
- {'type': 'list change', 'added': [], 'removed': ['removed']},
- ),
-])
+@pytest.mark.parametrize(
+ "old,new,expected",
+ [
+ ([], [], None,),
+ (
+ [],
+ ["added"],
+ {"type": "list change", "added": ["added"], "removed": []},
+ ),
+ (
+ ["removed"],
+ [],
+ {"type": "list change", "added": [], "removed": ["removed"]},
+ ),
+ (["untouched"], ["untouched"], None,),
+ (
+ ["untouched"],
+ ["untouched", "added"],
+ {"type": "list change", "added": ["added"], "removed": []},
+ ),
+ (
+ ["untouched", "removed"],
+ ["untouched"],
+ {"type": "list change", "added": [], "removed": ["removed"]},
+ ),
+ ],
+)
def test_get_list_diff(old, new, expected):
assert diff.get_list_diff(old, new) == expected
-@pytest.mark.parametrize('old,new,expected', [
- (
- {}, {}, None,
- ),
-
- (
- {'removed key': 'removed value'},
- {},
- {
- 'type': 'object change',
- 'value':
+@pytest.mark.parametrize(
+ "old,new,expected",
+ [
+ ({}, {}, None,),
+ (
+ {"removed key": "removed value"},
+ {},
{
- 'removed key':
- {
- 'type': 'deleted property',
- 'value': 'removed value',
+ "type": "object change",
+ "value": {
+ "removed key": {
+ "type": "deleted property",
+ "value": "removed value",
+ },
},
},
- },
- ),
-
- (
- {},
- {'added key': 'added value'},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {},
+ {"added key": "added value"},
{
- 'added key':
- {
- 'type': 'added property',
- 'value': 'added value',
+ "type": "object change",
+ "value": {
+ "added key": {
+ "type": "added property",
+ "value": "added value",
+ },
},
},
- },
- ),
-
- (
- {'key': 'old value'},
- {'key': 'new value'},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": "old value"},
+ {"key": "new value"},
{
- 'key':
- {
- 'type': 'primitive change',
- 'old-value': 'old value',
- 'new-value': 'new value',
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "primitive change",
+ "old-value": "old value",
+ "new-value": "new value",
+ },
},
},
- },
- ),
-
- (
- {'key': 'untouched'},
- {'key': 'untouched'},
- None,
- ),
-
- (
- {'key': 'untouched', 'removed key': 'removed value'},
- {'key': 'untouched'},
- {
- 'type': 'object change',
- 'value':
+ ),
+ ({"key": "untouched"}, {"key": "untouched"}, None,),
+ (
+ {"key": "untouched", "removed key": "removed value"},
+ {"key": "untouched"},
{
- 'removed key':
- {
- 'type': 'deleted property',
- 'value': 'removed value',
+ "type": "object change",
+ "value": {
+ "removed key": {
+ "type": "deleted property",
+ "value": "removed value",
+ },
},
},
- },
- ),
-
- (
- {'key': 'untouched'},
- {'key': 'untouched', 'added key': 'added value'},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": "untouched"},
+ {"key": "untouched", "added key": "added value"},
{
- 'added key':
- {
- 'type': 'added property',
- 'value': 'added value',
+ "type": "object change",
+ "value": {
+ "added key": {
+ "type": "added property",
+ "value": "added value",
+ },
},
},
- },
- ),
-
- (
- {'key': 'untouched', 'changed key': 'old value'},
- {'key': 'untouched', 'changed key': 'new value'},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": "untouched", "changed key": "old value"},
+ {"key": "untouched", "changed key": "new value"},
{
- 'changed key':
- {
- 'type': 'primitive change',
- 'old-value': 'old value',
- 'new-value': 'new value',
+ "type": "object change",
+ "value": {
+ "changed key": {
+ "type": "primitive change",
+ "old-value": "old value",
+ "new-value": "new value",
+ },
},
},
- },
- ),
-
- (
- {'key': {'subkey': 'old value'}},
- {'key': {'subkey': 'new value'}},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": {"subkey": "old value"}},
+ {"key": {"subkey": "new value"}},
{
- 'key':
- {
- 'type': 'object change',
- 'value':
- {
- 'subkey':
- {
- 'type': 'primitive change',
- 'old-value': 'old value',
- 'new-value': 'new value',
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "object change",
+ "value": {
+ "subkey": {
+ "type": "primitive change",
+ "old-value": "old value",
+ "new-value": "new value",
+ },
},
},
},
},
- },
- ),
-
- (
- {'key': {}},
- {'key': {'subkey': 'removed value'}},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": {}},
+ {"key": {"subkey": "removed value"}},
{
- 'key':
- {
- 'type': 'object change',
- 'value':
- {
- 'subkey':
- {
- 'type': 'added property',
- 'value': 'removed value',
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "object change",
+ "value": {
+ "subkey": {
+ "type": "added property",
+ "value": "removed value",
+ },
},
},
},
},
- },
- ),
-
- (
- {'key': {'subkey': 'removed value'}},
- {'key': {}},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": {"subkey": "removed value"}},
+ {"key": {}},
{
- 'key':
- {
- 'type': 'object change',
- 'value':
- {
- 'subkey':
- {
- 'type': 'deleted property',
- 'value': 'removed value',
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "object change",
+ "value": {
+ "subkey": {
+ "type": "deleted property",
+ "value": "removed value",
+ },
},
},
},
},
- },
- ),
-
- (
- {'key': ['old value']},
- {'key': ['new value']},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": ["old value"]},
+ {"key": ["new value"]},
{
- 'key':
- {
- 'type': 'list change',
- 'added': ['new value'],
- 'removed': ['old value'],
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "list change",
+ "added": ["new value"],
+ "removed": ["old value"],
+ },
},
},
- },
- ),
-
- (
- {'key': []},
- {'key': ['new value']},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": []},
+ {"key": ["new value"]},
{
- 'key':
- {
- 'type': 'list change',
- 'added': ['new value'],
- 'removed': [],
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "list change",
+ "added": ["new value"],
+ "removed": [],
+ },
},
},
- },
- ),
-
- (
- {'key': ['removed value']},
- {'key': []},
- {
- 'type': 'object change',
- 'value':
+ ),
+ (
+ {"key": ["removed value"]},
+ {"key": []},
{
- 'key':
- {
- 'type': 'list change',
- 'added': [],
- 'removed': ['removed value'],
+ "type": "object change",
+ "value": {
+ "key": {
+ "type": "list change",
+ "added": [],
+ "removed": ["removed value"],
+ },
},
},
- },
- ),
-])
+ ),
+ ],
+)
def test_get_dict_diff(old, new, expected):
assert diff.get_dict_diff(old, new) == expected
diff --git a/server/szurubooru/tests/func/test_image_hash.py b/server/szurubooru/tests/func/test_image_hash.py
index 13bfae83..e7028b6d 100644
--- a/server/szurubooru/tests/func/test_image_hash.py
+++ b/server/szurubooru/tests/func/test_image_hash.py
@@ -1,16 +1,19 @@
import pytest
-from szurubooru.func import image_hash
from numpy import array_equal
+from szurubooru.func import image_hash
+
def test_signature_functions(read_asset, config_injector):
- sig1 = image_hash.generate_signature(read_asset('jpeg.jpg'))
- sig2 = image_hash.generate_signature(read_asset('jpeg-similar.jpg'))
+ sig1 = image_hash.generate_signature(read_asset("jpeg.jpg"))
+ sig2 = image_hash.generate_signature(read_asset("jpeg-similar.jpg"))
sig1_repacked = image_hash.unpack_signature(
- image_hash.pack_signature(sig1))
+ image_hash.pack_signature(sig1)
+ )
sig2_repacked = image_hash.unpack_signature(
- image_hash.pack_signature(sig2))
+ image_hash.pack_signature(sig2)
+ )
assert array_equal(sig1, sig1_repacked)
assert array_equal(sig2, sig2_repacked)
diff --git a/server/szurubooru/tests/func/test_mime.py b/server/szurubooru/tests/func/test_mime.py
index 821ed20b..4eb9b161 100644
--- a/server/szurubooru/tests/func/test_mime.py
+++ b/server/szurubooru/tests/func/test_mime.py
@@ -1,78 +1,94 @@
import pytest
+
from szurubooru.func import mime
-@pytest.mark.parametrize('input_path,expected_mime_type', [
- ('mp4.mp4', 'video/mp4'),
- ('webm.webm', 'video/webm'),
- ('flash.swf', 'application/x-shockwave-flash'),
- ('png.png', 'image/png'),
- ('jpeg.jpg', 'image/jpeg'),
- ('gif.gif', 'image/gif'),
- ('webp.webp', 'image/webp'),
- ('text.txt', 'application/octet-stream'),
-])
+@pytest.mark.parametrize(
+ "input_path,expected_mime_type",
+ [
+ ("mp4.mp4", "video/mp4"),
+ ("webm.webm", "video/webm"),
+ ("flash.swf", "application/x-shockwave-flash"),
+ ("png.png", "image/png"),
+ ("jpeg.jpg", "image/jpeg"),
+ ("gif.gif", "image/gif"),
+ ("webp.webp", "image/webp"),
+ ("text.txt", "application/octet-stream"),
+ ],
+)
def test_get_mime_type(read_asset, input_path, expected_mime_type):
assert mime.get_mime_type(read_asset(input_path)) == expected_mime_type
def test_get_mime_type_for_empty_file():
- assert mime.get_mime_type(b'') == 'application/octet-stream'
+ assert mime.get_mime_type(b"") == "application/octet-stream"
-@pytest.mark.parametrize('mime_type,expected_extension', [
- ('video/mp4', 'mp4'),
- ('video/webm', 'webm'),
- ('application/x-shockwave-flash', 'swf'),
- ('image/png', 'png'),
- ('image/jpeg', 'jpg'),
- ('image/gif', 'gif'),
- ('image/webp', 'webp'),
- ('application/octet-stream', 'dat'),
-])
+@pytest.mark.parametrize(
+ "mime_type,expected_extension",
+ [
+ ("video/mp4", "mp4"),
+ ("video/webm", "webm"),
+ ("application/x-shockwave-flash", "swf"),
+ ("image/png", "png"),
+ ("image/jpeg", "jpg"),
+ ("image/gif", "gif"),
+ ("image/webp", "webp"),
+ ("application/octet-stream", "dat"),
+ ],
+)
def test_get_extension(mime_type, expected_extension):
assert mime.get_extension(mime_type) == expected_extension
-@pytest.mark.parametrize('input_mime_type,expected_state', [
- ('application/x-shockwave-flash', True),
- ('APPLICATION/X-SHOCKWAVE-FLASH', True),
- ('application/x-shockwave', False),
-])
+@pytest.mark.parametrize(
+ "input_mime_type,expected_state",
+ [
+ ("application/x-shockwave-flash", True),
+ ("APPLICATION/X-SHOCKWAVE-FLASH", True),
+ ("application/x-shockwave", False),
+ ],
+)
def test_is_flash(input_mime_type, expected_state):
assert mime.is_flash(input_mime_type) == expected_state
-@pytest.mark.parametrize('input_mime_type,expected_state', [
- ('video/webm', True),
- ('VIDEO/WEBM', True),
- ('video/mp4', True),
- ('VIDEO/MP4', True),
- ('video/anything_else', False),
- ('application/ogg', True),
- ('not a video', False),
-])
+@pytest.mark.parametrize(
+ "input_mime_type,expected_state",
+ [
+ ("video/webm", True),
+ ("VIDEO/WEBM", True),
+ ("video/mp4", True),
+ ("VIDEO/MP4", True),
+ ("video/anything_else", False),
+ ("application/ogg", True),
+ ("not a video", False),
+ ],
+)
def test_is_video(input_mime_type, expected_state):
assert mime.is_video(input_mime_type) == expected_state
-@pytest.mark.parametrize('input_mime_type,expected_state', [
- ('image/gif', True),
- ('image/png', True),
- ('image/jpeg', True),
- ('IMAGE/GIF', True),
- ('IMAGE/PNG', True),
- ('IMAGE/JPEG', True),
- ('image/anything_else', False),
- ('not an image', False),
-])
+@pytest.mark.parametrize(
+ "input_mime_type,expected_state",
+ [
+ ("image/gif", True),
+ ("image/png", True),
+ ("image/jpeg", True),
+ ("IMAGE/GIF", True),
+ ("IMAGE/PNG", True),
+ ("IMAGE/JPEG", True),
+ ("image/anything_else", False),
+ ("not an image", False),
+ ],
+)
def test_is_image(input_mime_type, expected_state):
assert mime.is_image(input_mime_type) == expected_state
-@pytest.mark.parametrize('input_path,expected_state', [
- ('gif.gif', False),
- ('gif-animated.gif', True),
-])
+@pytest.mark.parametrize(
+ "input_path,expected_state",
+ [("gif.gif", False), ("gif-animated.gif", True),],
+)
def test_is_animated_gif(read_asset, input_path, expected_state):
assert mime.is_animated_gif(read_asset(input_path)) == expected_state
diff --git a/server/szurubooru/tests/func/test_net.py b/server/szurubooru/tests/func/test_net.py
index 7fe14b62..58e16287 100644
--- a/server/szurubooru/tests/func/test_net.py
+++ b/server/szurubooru/tests/func/test_net.py
@@ -1,4 +1,5 @@
import pytest
+
from szurubooru import errors
from szurubooru.func import net
from szurubooru.func.util import get_sha1
@@ -6,82 +7,95 @@ from szurubooru.func.util import get_sha1
@pytest.fixture(autouse=True)
def inject_config(tmpdir, config_injector):
- config_injector({
- 'user_agent': None,
- 'max_dl_filesize': 1.0E+6,
- 'data_dir': str(tmpdir.mkdir('data')),
- })
+ config_injector(
+ {
+ "user_agent": None,
+ "max_dl_filesize": 1.0e6,
+ "data_dir": str(tmpdir.mkdir("data")),
+ }
+ )
def test_download():
- url = 'http://info.cern.ch/hypertext/WWW/TheProject.html'
+ url = "http://info.cern.ch/hypertext/WWW/TheProject.html"
expected_content = (
- b'\nThe World Wide Web project \n\n \n\nWorld Wide Web The WorldWideWeb' +
- b' (W3) is a wide-area\nhypermedia' +
- b'A> information retrieval\ninitiative aiming to give universal\na' +
- b'ccess to a large universe of documents.\nEverything there is ' +
- b'online about\nW3 is linked directly or indirectly\nto this docum' +
- b'ent, including an executive\nsum' +
- b'mary of the project, Mailing lists \n, Policy , November\'s W3 news ,\nFrequently Ask' +
-        b'ed Questions .\n\nWhat\'s out there? \n Pointers to the\nworld\'s ' +
- b'online information, subjects \n, W3 servers , etc.\n Help \n on the browser you are using\n Software Products \n A list of W' +
- b'3 project\ncomponents and their current state.\n(e.g. Line Mode ,X11 Viola , NeXTStep \n, Servers , Tools , Mail robot ,<' +
- b'A\nNAME=52 HREF="Status.html#57">\nLibrary )\nTechnical \n Details of protocols' +
- b', formats,\nprogram internals etc\n Bibliography \n Paper documentation\non W3 a' +
- b'nd references.\n People \n<' +
- b'DD> A list of some people involved\nin the project.\nHistory \n A summary of the hist' +
- b'ory\nof the project.\n How ca' +
- b'n I help ?\n If you would like\nto support the web..\nGetting code \n Getti' +
- b'ng the code by\nanonymous FTP , etc.\n\n\n')
+ b'\nThe World Wide Web project \n\n \n\nWorld Wide Web The WorldWideWeb'
+ + b' (W3) is a wide-area\nhypermedia'
+ + b"A> information retrieval\ninitiative aiming to give universal\na"
+ + b"ccess to a large universe of documents.\nEverything there is "
+ + b"online about\nW3 is linked directly or indirectly\nto this docum"
+ + b'ent, including an executive\nsum'
+ + b'mary of the project, Mailing lists \n, Policy , November\'s W3 news ,\nFrequently Ask'
+        + b"ed Questions .\n\nWhat's out there? \n Pointers to the\nworld's "
+ + b'online information, subjects \n, W3 servers , etc.\n Help \n on the browser you are using\n Software Products \n A list of W'
+ + b"3 project\ncomponents and their current state.\n(e.g. Line Mode ,X11 Viola , NeXTStep \n, Servers , Tools , Mail robot ,<'
+ + b'A\nNAME=52 HREF="Status.html#57">\nLibrary )\n Technical \n Details of protocols'
+ + b', formats,\nprogram internals etc\n Bibliography \n Paper documentation\non W3 a'
+ + b'nd references.\n People \n<'
+ + b"DD> A list of some people involved\nin the project.\nHistory \n A summary of the hist'
+ + b'ory\nof the project.\n How ca'
+ + b"n I help ?\n If you would like\nto support the web..\nGetting code \n Getti'
+ + b'ng the code by\nanonymous FTP , etc.\n\n\n'
+ )
actual_content = net.download(url)
assert actual_content == expected_content
-@pytest.mark.parametrize('url', [
- 'https://samples.ffmpeg.org/MPEG-4/video.mp4',
-])
+@pytest.mark.parametrize(
+ "url", ["https://samples.ffmpeg.org/MPEG-4/video.mp4",]
+)
def test_too_large_download(url):
- pytest.xfail('Download limit not implemented yet')
+ pytest.xfail("Download limit not implemented yet")
with pytest.raises(errors.ProcessingError):
net.download(url)
-@pytest.mark.parametrize('url,expected_sha1', [
- ('https://www.youtube.com/watch?v=C0DPdy98e4c',
- '365af1c8f59c6865e1a84c6e13e3e25ff89e0ba1'),
- ('https://gfycat.com/immaterialchillyiberianmole',
- '953000e81d7bd1da95ce264f872e7b6c4a6484be'),
-])
+@pytest.mark.parametrize(
+ "url,expected_sha1",
+ [
+ (
+ "https://www.youtube.com/watch?v=C0DPdy98e4c",
+ "365af1c8f59c6865e1a84c6e13e3e25ff89e0ba1",
+ ),
+ (
+ "https://gfycat.com/immaterialchillyiberianmole",
+ "953000e81d7bd1da95ce264f872e7b6c4a6484be",
+ ),
+ ],
+)
def test_video_download(url, expected_sha1):
actual_content = net.download(url, use_video_downloader=True)
assert get_sha1(actual_content) == expected_sha1
-@pytest.mark.parametrize('url', [
- 'https://samples.ffmpeg.org/flac/short.flac', # not a video
- 'https://www.youtube.com/watch?v=dQw4w9WgXcQ', # video too large
-])
+@pytest.mark.parametrize(
+ "url",
+ [
+ "https://samples.ffmpeg.org/flac/short.flac", # not a video
+ "https://www.youtube.com/watch?v=dQw4w9WgXcQ", # video too large
+ ],
+)
def test_failed_video_download(url):
with pytest.raises(errors.ThirdPartyError):
net.download(url, use_video_downloader=True)
diff --git a/server/szurubooru/tests/func/test_posts.py b/server/szurubooru/tests/func/test_posts.py
index ed008792..acedf6f3 100644
--- a/server/szurubooru/tests/func/test_posts.py
+++ b/server/szurubooru/tests/func/test_posts.py
@@ -1,40 +1,58 @@
+import os
from datetime import datetime
from unittest.mock import patch
-import os
+
import pytest
+
from szurubooru import db, model
from szurubooru.func import (
- posts, users, comments, tags, images, files, util, image_hash)
+ comments,
+ files,
+ image_hash,
+ images,
+ posts,
+ tags,
+ users,
+ util,
+)
-@pytest.mark.parametrize('input_mime_type,expected_url', [
- ('image/jpeg', 'http://example.com/posts/1_244c8840887984c4.jpg'),
- ('image/gif', 'http://example.com/posts/1_244c8840887984c4.gif'),
- ('totally/unknown', 'http://example.com/posts/1_244c8840887984c4.dat'),
-])
+@pytest.mark.parametrize(
+ "input_mime_type,expected_url",
+ [
+ ("image/jpeg", "http://example.com/posts/1_244c8840887984c4.jpg"),
+ ("image/gif", "http://example.com/posts/1_244c8840887984c4.gif"),
+ ("totally/unknown", "http://example.com/posts/1_244c8840887984c4.dat"),
+ ],
+)
def test_get_post_url(input_mime_type, expected_url, config_injector):
- config_injector({'data_url': 'http://example.com/', 'secret': 'test'})
+ config_injector({"data_url": "http://example.com/", "secret": "test"})
post = model.Post()
post.post_id = 1
post.mime_type = input_mime_type
assert posts.get_post_content_url(post) == expected_url
-@pytest.mark.parametrize('input_mime_type', ['image/jpeg', 'image/gif'])
+@pytest.mark.parametrize("input_mime_type", ["image/jpeg", "image/gif"])
def test_get_post_thumbnail_url(input_mime_type, config_injector):
- config_injector({'data_url': 'http://example.com/', 'secret': 'test'})
+ config_injector({"data_url": "http://example.com/", "secret": "test"})
post = model.Post()
post.post_id = 1
post.mime_type = input_mime_type
- assert posts.get_post_thumbnail_url(post) \
- == 'http://example.com/generated-thumbnails/1_244c8840887984c4.jpg'
+ assert (
+ posts.get_post_thumbnail_url(post)
+ == "http://example.com/generated-thumbnails/1_244c8840887984c4.jpg"
+ )
-@pytest.mark.parametrize('input_mime_type,expected_path', [
- ('image/jpeg', 'posts/1_244c8840887984c4.jpg'),
- ('image/gif', 'posts/1_244c8840887984c4.gif'),
- ('totally/unknown', 'posts/1_244c8840887984c4.dat'),
-])
+@pytest.mark.parametrize(
+ "input_mime_type,expected_path",
+ [
+ ("image/jpeg", "posts/1_244c8840887984c4.jpg"),
+ ("image/gif", "posts/1_244c8840887984c4.gif"),
+ ("totally/unknown", "posts/1_244c8840887984c4.dat"),
+ ],
+)
def test_get_post_content_path(input_mime_type, expected_path):
post = model.Post()
post.post_id = 1
@@ -42,31 +60,35 @@ def test_get_post_content_path(input_mime_type, expected_path):
assert posts.get_post_content_path(post) == expected_path
-@pytest.mark.parametrize('input_mime_type', ['image/jpeg', 'image/gif'])
+@pytest.mark.parametrize("input_mime_type", ["image/jpeg", "image/gif"])
def test_get_post_thumbnail_path(input_mime_type):
post = model.Post()
post.post_id = 1
post.mime_type = input_mime_type
- assert posts.get_post_thumbnail_path(post) \
- == 'generated-thumbnails/1_244c8840887984c4.jpg'
+ assert (
+ posts.get_post_thumbnail_path(post)
+ == "generated-thumbnails/1_244c8840887984c4.jpg"
+ )
-@pytest.mark.parametrize('input_mime_type', ['image/jpeg', 'image/gif'])
+@pytest.mark.parametrize("input_mime_type", ["image/jpeg", "image/gif"])
def test_get_post_thumbnail_backup_path(input_mime_type):
post = model.Post()
post.post_id = 1
post.mime_type = input_mime_type
- assert posts.get_post_thumbnail_backup_path(post) \
- == 'posts/custom-thumbnails/1_244c8840887984c4.dat'
+ assert (
+ posts.get_post_thumbnail_backup_path(post)
+ == "posts/custom-thumbnails/1_244c8840887984c4.dat"
+ )
def test_serialize_note():
note = model.PostNote()
note.polygon = [[0, 1], [1, 1], [1, 0], [0, 0]]
- note.text = '...'
+ note.text = "..."
assert posts.serialize_note(note) == {
- 'polygon': [[0, 1], [1, 1], [1, 0], [0, 0]],
- 'text': '...'
+ "polygon": [[0, 1], [1, 1], [1, 0], [0, 0]],
+ "text": "...",
}
@@ -75,94 +97,110 @@ def test_serialize_post_when_empty():
def test_serialize_post(
- user_factory,
- comment_factory,
- tag_factory,
- tag_category_factory,
- pool_factory,
- pool_category_factory,
- config_injector):
- config_injector({'data_url': 'http://example.com/', 'secret': 'test'})
- with patch('szurubooru.func.comments.serialize_comment'), \
- patch('szurubooru.func.users.serialize_micro_user'), \
- patch('szurubooru.func.posts.files.has'):
+ user_factory,
+ comment_factory,
+ tag_factory,
+ tag_category_factory,
+ pool_factory,
+ pool_category_factory,
+ config_injector,
+):
+ config_injector({"data_url": "http://example.com/", "secret": "test"})
+ with patch("szurubooru.func.comments.serialize_comment"), patch(
+ "szurubooru.func.users.serialize_micro_user"
+ ), patch("szurubooru.func.posts.files.has"):
files.has.return_value = True
- users.serialize_micro_user.side_effect \
- = lambda user, auth_user: user.name
- comments.serialize_comment.side_effect \
- = lambda comment, auth_user: comment.user.name
+ users.serialize_micro_user.side_effect = (
+ lambda user, auth_user: user.name
+ )
+ comments.serialize_comment.side_effect = (
+ lambda comment, auth_user: comment.user.name
+ )
- auth_user = user_factory(name='auth user')
+ auth_user = user_factory(name="auth user")
post = model.Post()
post.post_id = 1
post.creation_time = datetime(1997, 1, 1)
post.last_edit_time = datetime(1998, 1, 1)
post.tags = [
tag_factory(
- names=['tag1', 'tag2'],
- category=tag_category_factory('test-cat1')),
+ names=["tag1", "tag2"],
+ category=tag_category_factory("test-cat1"),
+ ),
tag_factory(
- names=['tag3'],
- category=tag_category_factory('test-cat2'))
+ names=["tag3"], category=tag_category_factory("test-cat2")
+ ),
]
post.safety = model.Post.SAFETY_SAFE
- post.source = '4gag'
+ post.source = "4gag"
post.type = model.Post.TYPE_IMAGE
- post.checksum = 'deadbeef'
- post.mime_type = 'image/jpeg'
+ post.checksum = "deadbeef"
+ post.mime_type = "image/jpeg"
post.file_size = 100
- post.user = user_factory(name='post author')
+ post.user = user_factory(name="post author")
post.canvas_width = 200
post.canvas_height = 300
- post.flags = ['loop']
+ post.flags = ["loop"]
db.session.add(post)
db.session.flush()
- db.session.add_all([
- comment_factory(
- user=user_factory(name='commenter1'),
- post=post,
- time=datetime(1999, 1, 1)),
- comment_factory(
- user=user_factory(name='commenter2'),
- post=post,
- time=datetime(1999, 1, 2)),
- model.PostFavorite(
- post=post,
- user=user_factory(name='fav1'),
- time=datetime(1800, 1, 1)),
- model.PostFeature(
- post=post,
- user=user_factory(),
- time=datetime(1999, 1, 1)),
- model.PostScore(
- post=post,
- user=auth_user,
- score=-1,
- time=datetime(1800, 1, 1)),
- model.PostScore(
- post=post,
- user=user_factory(),
- score=1,
- time=datetime(1800, 1, 1)),
- model.PostScore(
- post=post,
- user=user_factory(),
- score=1,
- time=datetime(1800, 1, 1))])
+ db.session.add_all(
+ [
+ comment_factory(
+ user=user_factory(name="commenter1"),
+ post=post,
+ time=datetime(1999, 1, 1),
+ ),
+ comment_factory(
+ user=user_factory(name="commenter2"),
+ post=post,
+ time=datetime(1999, 1, 2),
+ ),
+ model.PostFavorite(
+ post=post,
+ user=user_factory(name="fav1"),
+ time=datetime(1800, 1, 1),
+ ),
+ model.PostFeature(
+ post=post, user=user_factory(), time=datetime(1999, 1, 1)
+ ),
+ model.PostScore(
+ post=post,
+ user=auth_user,
+ score=-1,
+ time=datetime(1800, 1, 1),
+ ),
+ model.PostScore(
+ post=post,
+ user=user_factory(),
+ score=1,
+ time=datetime(1800, 1, 1),
+ ),
+ model.PostScore(
+ post=post,
+ user=user_factory(),
+ score=1,
+ time=datetime(1800, 1, 1),
+ ),
+ ]
+ )
db.session.flush()
- pool1 = pool_factory(id=1,
- names=['pool1', 'pool2'],
- description='desc',
- category=pool_category_factory('test-cat1'))
+ pool1 = pool_factory(
+ id=1,
+ names=["pool1", "pool2"],
+ description="desc",
+ category=pool_category_factory("test-cat1"),
+ )
pool1.last_edit_time = datetime(1998, 1, 1)
pool1.posts.append(post)
- pool2 = pool_factory(id=2,
- names=['pool3'],
- description='desc2',
- category=pool_category_factory('test-cat2'))
+ pool2 = pool_factory(
+ id=2,
+ names=["pool3"],
+ description="desc2",
+ category=pool_category_factory("test-cat2"),
+ )
pool2.last_edit_time = datetime(1998, 1, 1)
pool2.posts.append(post)
@@ -170,105 +208,100 @@ def test_serialize_post(
db.session.flush()
result = posts.serialize_post(post, auth_user)
- result['tags'].sort(key=lambda tag: tag['names'][0])
+ result["tags"].sort(key=lambda tag: tag["names"][0])
assert result == {
- 'id': 1,
- 'version': 1,
- 'creationTime': datetime(1997, 1, 1),
- 'lastEditTime': datetime(1998, 1, 1),
- 'safety': 'safe',
- 'source': '4gag',
- 'type': 'image',
- 'checksum': 'deadbeef',
- 'fileSize': 100,
- 'canvasWidth': 200,
- 'canvasHeight': 300,
- 'contentUrl': 'http://example.com/posts/1_244c8840887984c4.jpg',
- 'thumbnailUrl':
- 'http://example.com/'
- 'generated-thumbnails/1_244c8840887984c4.jpg',
- 'flags': ['loop'],
- 'tags': [
+ "id": 1,
+ "version": 1,
+ "creationTime": datetime(1997, 1, 1),
+ "lastEditTime": datetime(1998, 1, 1),
+ "safety": "safe",
+ "source": "4gag",
+ "type": "image",
+ "checksum": "deadbeef",
+ "fileSize": 100,
+ "canvasWidth": 200,
+ "canvasHeight": 300,
+ "contentUrl": "http://example.com/posts/1_244c8840887984c4.jpg",
+ "thumbnailUrl": "http://example.com/"
+ "generated-thumbnails/1_244c8840887984c4.jpg",
+ "flags": ["loop"],
+ "tags": [
{
- 'names': ['tag1', 'tag2'],
- 'category': 'test-cat1', 'usages': 1,
- },
- {
- 'names': ['tag3'],
- 'category': 'test-cat2',
- 'usages': 1,
+ "names": ["tag1", "tag2"],
+ "category": "test-cat1",
+ "usages": 1,
},
+ {"names": ["tag3"], "category": "test-cat2", "usages": 1,},
],
- 'relations': [],
- 'notes': [],
- 'pools': [
+ "relations": [],
+ "notes": [],
+ "pools": [
{
- 'id': 1,
- 'names': ['pool1', 'pool2'],
- 'description': 'desc',
- 'category': 'test-cat1',
- 'postCount': 1,
- 'posts': [
+ "id": 1,
+ "names": ["pool1", "pool2"],
+ "description": "desc",
+ "category": "test-cat1",
+ "postCount": 1,
+ "posts": [
{
- 'id': 1,
- 'thumbnailUrl':
- 'http://example.com/'
- 'generated-thumbnails/1_244c8840887984c4.jpg',
+ "id": 1,
+ "thumbnailUrl": "http://example.com/"
+ "generated-thumbnails/1_244c8840887984c4.jpg",
}
],
- 'version': 1,
- 'creationTime': datetime(1996, 1, 1),
- 'lastEditTime': datetime(1998, 1, 1),
+ "version": 1,
+ "creationTime": datetime(1996, 1, 1),
+ "lastEditTime": datetime(1998, 1, 1),
},
{
- 'id': 2,
- 'names': ['pool3'],
- 'description': 'desc2',
- 'category': 'test-cat2',
- 'postCount': 1,
- 'posts': [
+ "id": 2,
+ "names": ["pool3"],
+ "description": "desc2",
+ "category": "test-cat2",
+ "postCount": 1,
+ "posts": [
{
- 'id': 1,
- 'thumbnailUrl':
- 'http://example.com/'
- 'generated-thumbnails/1_244c8840887984c4.jpg',
+ "id": 1,
+ "thumbnailUrl": "http://example.com/"
+ "generated-thumbnails/1_244c8840887984c4.jpg",
}
],
- 'version': 1,
- 'creationTime': datetime(1996, 1, 1),
- 'lastEditTime': datetime(1998, 1, 1),
- }
+ "version": 1,
+ "creationTime": datetime(1996, 1, 1),
+ "lastEditTime": datetime(1998, 1, 1),
+ },
],
- 'user': 'post author',
- 'score': 1,
- 'ownFavorite': False,
- 'ownScore': -1,
- 'tagCount': 2,
- 'favoriteCount': 1,
- 'commentCount': 2,
- 'noteCount': 0,
- 'featureCount': 1,
- 'relationCount': 0,
- 'lastFeatureTime': datetime(1999, 1, 1),
- 'favoritedBy': ['fav1'],
- 'hasCustomThumbnail': True,
- 'mimeType': 'image/jpeg',
- 'comments': ['commenter1', 'commenter2'],
+ "user": "post author",
+ "score": 1,
+ "ownFavorite": False,
+ "ownScore": -1,
+ "tagCount": 2,
+ "favoriteCount": 1,
+ "commentCount": 2,
+ "noteCount": 0,
+ "featureCount": 1,
+ "relationCount": 0,
+ "lastFeatureTime": datetime(1999, 1, 1),
+ "favoritedBy": ["fav1"],
+ "hasCustomThumbnail": True,
+ "mimeType": "image/jpeg",
+ "comments": ["commenter1", "commenter2"],
}
def test_serialize_micro_post(post_factory, user_factory):
- with patch('szurubooru.func.posts.get_post_thumbnail_url'):
- posts.get_post_thumbnail_url.return_value \
- = 'https://example.com/thumb.png'
+ with patch("szurubooru.func.posts.get_post_thumbnail_url"):
+ posts.get_post_thumbnail_url.return_value = (
+ "https://example.com/thumb.png"
+ )
auth_user = user_factory()
post = post_factory()
db.session.add(post)
db.session.flush()
assert posts.serialize_micro_post(post, auth_user) == {
- 'id': post.post_id,
- 'thumbnailUrl': 'https://example.com/thumb.png',
+ "id": post.post_id,
+ "thumbnailUrl": "https://example.com/thumb.png",
}
@@ -299,22 +332,25 @@ def test_get_post_by_id(post_factory):
def test_create_post(user_factory, fake_datetime):
- with patch('szurubooru.func.posts.update_post_content'), \
- patch('szurubooru.func.posts.update_post_tags'), \
- fake_datetime('1997-01-01'):
+ with patch("szurubooru.func.posts.update_post_content"), patch(
+ "szurubooru.func.posts.update_post_tags"
+ ), fake_datetime("1997-01-01"):
auth_user = user_factory()
- post, _new_tags = posts.create_post('content', ['tag'], auth_user)
+ post, _new_tags = posts.create_post("content", ["tag"], auth_user)
assert post.creation_time == datetime(1997, 1, 1)
assert post.last_edit_time is None
- posts.update_post_tags.assert_called_once_with(post, ['tag'])
- posts.update_post_content.assert_called_once_with(post, 'content')
+ posts.update_post_tags.assert_called_once_with(post, ["tag"])
+ posts.update_post_content.assert_called_once_with(post, "content")
-@pytest.mark.parametrize('input_safety,expected_safety', [
- ('safe', model.Post.SAFETY_SAFE),
- ('sketchy', model.Post.SAFETY_SKETCHY),
- ('unsafe', model.Post.SAFETY_UNSAFE),
-])
+@pytest.mark.parametrize(
+ "input_safety,expected_safety",
+ [
+ ("safe", model.Post.SAFETY_SAFE),
+ ("sketchy", model.Post.SAFETY_SKETCHY),
+ ("unsafe", model.Post.SAFETY_UNSAFE),
+ ],
+)
def test_update_post_safety(input_safety, expected_safety):
post = model.Post()
posts.update_post_safety(post, input_safety)
@@ -324,96 +360,104 @@ def test_update_post_safety(input_safety, expected_safety):
def test_update_post_safety_with_invalid_string():
post = model.Post()
with pytest.raises(posts.InvalidPostSafetyError):
- posts.update_post_safety(post, 'bad')
+ posts.update_post_safety(post, "bad")
def test_update_post_source():
post = model.Post()
- posts.update_post_source(post, 'x')
- assert post.source == 'x'
+ posts.update_post_source(post, "x")
+ assert post.source == "x"
def test_update_post_source_with_too_long_string():
post = model.Post()
with pytest.raises(posts.InvalidPostSourceError):
- posts.update_post_source(post, 'x' * 3000)
+ posts.update_post_source(post, "x" * 3000)
@pytest.mark.parametrize(
- 'is_existing,input_file,expected_mime_type,expected_type,output_file_name',
+ "is_existing,input_file,expected_mime_type,expected_type,output_file_name",
[
(
True,
- 'png.png',
- 'image/png',
+ "png.png",
+ "image/png",
model.Post.TYPE_IMAGE,
- '1_244c8840887984c4.png',
+ "1_244c8840887984c4.png",
),
(
False,
- 'png.png',
- 'image/png',
+ "png.png",
+ "image/png",
model.Post.TYPE_IMAGE,
- '1_244c8840887984c4.png',
+ "1_244c8840887984c4.png",
),
(
False,
- 'jpeg.jpg',
- 'image/jpeg',
+ "jpeg.jpg",
+ "image/jpeg",
model.Post.TYPE_IMAGE,
- '1_244c8840887984c4.jpg',
+ "1_244c8840887984c4.jpg",
),
(
False,
- 'gif.gif',
- 'image/gif',
+ "gif.gif",
+ "image/gif",
model.Post.TYPE_IMAGE,
- '1_244c8840887984c4.gif',
+ "1_244c8840887984c4.gif",
),
(
False,
- 'gif-animated.gif',
- 'image/gif',
+ "gif-animated.gif",
+ "image/gif",
model.Post.TYPE_ANIMATION,
- '1_244c8840887984c4.gif',
+ "1_244c8840887984c4.gif",
),
(
False,
- 'webm.webm',
- 'video/webm',
+ "webm.webm",
+ "video/webm",
model.Post.TYPE_VIDEO,
- '1_244c8840887984c4.webm',
+ "1_244c8840887984c4.webm",
),
(
False,
- 'mp4.mp4',
- 'video/mp4',
+ "mp4.mp4",
+ "video/mp4",
model.Post.TYPE_VIDEO,
- '1_244c8840887984c4.mp4',
+ "1_244c8840887984c4.mp4",
),
(
False,
- 'flash.swf',
- 'application/x-shockwave-flash',
+ "flash.swf",
+ "application/x-shockwave-flash",
model.Post.TYPE_FLASH,
- '1_244c8840887984c4.swf',
+ "1_244c8840887984c4.swf",
),
- ])
+ ],
+)
def test_update_post_content_for_new_post(
- tmpdir, config_injector, post_factory, read_asset, is_existing,
- input_file, expected_mime_type, expected_type, output_file_name):
- with patch('szurubooru.func.util.get_sha1'):
- util.get_sha1.return_value = 'crc'
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
- output_file_path = '{}/data/posts/{}'.format(tmpdir, output_file_name)
+ tmpdir,
+ config_injector,
+ post_factory,
+ read_asset,
+ is_existing,
+ input_file,
+ expected_mime_type,
+ expected_type,
+ output_file_name,
+):
+ with patch("szurubooru.func.util.get_sha1"):
+ util.get_sha1.return_value = "crc"
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
+ output_file_path = "{}/data/posts/{}".format(tmpdir, output_file_name)
post = post_factory(id=1)
db.session.add(post)
if is_existing:
@@ -426,7 +470,7 @@ def test_update_post_content_for_new_post(
db.session.flush()
assert post.mime_type == expected_mime_type
assert post.type == expected_type
- assert post.checksum == 'crc'
+ assert post.checksum == "crc"
assert os.path.exists(output_file_path)
if post.type in (model.Post.TYPE_IMAGE, model.Post.TYPE_ANIMATION):
assert db.session.query(model.PostSignature).count() == 1
@@ -435,130 +479,134 @@ def test_update_post_content_for_new_post(
def test_update_post_content_to_existing_content(
- tmpdir, config_injector, post_factory, read_asset):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'data_url': 'example.com',
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
+ tmpdir, config_injector, post_factory, read_asset
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "data_url": "example.com",
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
post = post_factory()
another_post = post_factory()
db.session.add_all([post, another_post])
- posts.update_post_content(post, read_asset('png.png'))
+ posts.update_post_content(post, read_asset("png.png"))
db.session.flush()
with pytest.raises(posts.PostAlreadyUploadedError):
- posts.update_post_content(another_post, read_asset('png.png'))
+ posts.update_post_content(another_post, read_asset("png.png"))
-@pytest.mark.parametrize('allow_broken_uploads', [True, False])
+@pytest.mark.parametrize("allow_broken_uploads", [True, False])
def test_update_post_content_with_broken_content(
- tmpdir, config_injector, post_factory, read_asset,
- allow_broken_uploads):
+ tmpdir, config_injector, post_factory, read_asset, allow_broken_uploads
+):
# the rationale behind this behavior is to salvage user upload even if the
# server software thinks it's broken. chances are the server is wrong,
# especially about flash movies.
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': allow_broken_uploads,
- })
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": allow_broken_uploads,
+ }
+ )
post = post_factory()
another_post = post_factory()
db.session.add_all([post, another_post])
if allow_broken_uploads:
- posts.update_post_content(post, read_asset('png-broken.png'))
+ posts.update_post_content(post, read_asset("png-broken.png"))
db.session.flush()
assert post.canvas_width is None
assert post.canvas_height is None
else:
with pytest.raises(posts.InvalidPostContentError):
- posts.update_post_content(post, read_asset('png-broken.png'))
+ posts.update_post_content(post, read_asset("png-broken.png"))
db.session.flush()
-@pytest.mark.parametrize('input_content', [None, b'not a media file'])
+@pytest.mark.parametrize("input_content", [None, b"not a media file"])
def test_update_post_content_with_invalid_content(
- config_injector, input_content):
- config_injector({
- 'allow_broken_uploads': True,
- })
+ config_injector, input_content
+):
+ config_injector(
+ {"allow_broken_uploads": True,}
+ )
post = model.Post()
with pytest.raises(posts.InvalidPostContentError):
posts.update_post_content(post, input_content)
-@pytest.mark.parametrize('is_existing', (True, False))
+@pytest.mark.parametrize("is_existing", (True, False))
def test_update_post_thumbnail_to_new_one(
- tmpdir, config_injector, read_asset, post_factory, is_existing):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
+ tmpdir, config_injector, read_asset, post_factory, is_existing
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
post = post_factory(id=1)
db.session.add(post)
if is_existing:
db.session.flush()
assert post.post_id
generated_path = (
- '{}/data/generated-thumbnails/1_244c8840887984c4.jpg'
- .format(tmpdir))
+ "{}/data/generated-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.jpg"
+ )
source_path = (
- '{}/data/posts/custom-thumbnails/1_244c8840887984c4.dat'
- .format(tmpdir))
+ "{}/data/posts/custom-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.dat"
+ )
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
- posts.update_post_content(post, read_asset('png.png'))
- posts.update_post_thumbnail(post, read_asset('jpeg.jpg'))
+ posts.update_post_content(post, read_asset("png.png"))
+ posts.update_post_thumbnail(post, read_asset("jpeg.jpg"))
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
db.session.flush()
assert os.path.exists(generated_path)
assert os.path.exists(source_path)
- with open(source_path, 'rb') as handle:
- assert handle.read() == read_asset('jpeg.jpg')
+ with open(source_path, "rb") as handle:
+ assert handle.read() == read_asset("jpeg.jpg")
-@pytest.mark.parametrize('is_existing', (True, False))
+@pytest.mark.parametrize("is_existing", (True, False))
def test_update_post_thumbnail_to_default(
- tmpdir, config_injector, read_asset, post_factory, is_existing):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
+ tmpdir, config_injector, read_asset, post_factory, is_existing
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
post = post_factory(id=1)
db.session.add(post)
if is_existing:
db.session.flush()
assert post.post_id
generated_path = (
- '{}/data/generated-thumbnails/1_244c8840887984c4.jpg'
- .format(tmpdir))
+ "{}/data/generated-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.jpg"
+ )
source_path = (
- '{}/data/posts/custom-thumbnails/1_244c8840887984c4.dat'
- .format(tmpdir))
+ "{}/data/posts/custom-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.dat"
+ )
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
- posts.update_post_content(post, read_asset('png.png'))
- posts.update_post_thumbnail(post, read_asset('jpeg.jpg'))
+ posts.update_post_content(post, read_asset("png.png"))
+ posts.update_post_thumbnail(post, read_asset("jpeg.jpg"))
posts.update_post_thumbnail(post, None)
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
@@ -567,82 +615,88 @@ def test_update_post_thumbnail_to_default(
assert not os.path.exists(source_path)
-@pytest.mark.parametrize('is_existing', (True, False))
+@pytest.mark.parametrize("is_existing", (True, False))
def test_update_post_thumbnail_with_broken_thumbnail(
- tmpdir, config_injector, read_asset, post_factory, is_existing):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
+ tmpdir, config_injector, read_asset, post_factory, is_existing
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
post = post_factory(id=1)
db.session.add(post)
if is_existing:
db.session.flush()
assert post.post_id
generated_path = (
- '{}/data/generated-thumbnails/1_244c8840887984c4.jpg'
- .format(tmpdir))
+ "{}/data/generated-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.jpg"
+ )
source_path = (
- '{}/data/posts/custom-thumbnails/1_244c8840887984c4.dat'
- .format(tmpdir))
+ "{}/data/posts/custom-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.dat"
+ )
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
- posts.update_post_content(post, read_asset('png.png'))
- posts.update_post_thumbnail(post, read_asset('png-broken.png'))
+ posts.update_post_content(post, read_asset("png.png"))
+ posts.update_post_thumbnail(post, read_asset("png-broken.png"))
assert not os.path.exists(generated_path)
assert not os.path.exists(source_path)
db.session.flush()
assert os.path.exists(generated_path)
assert os.path.exists(source_path)
- with open(source_path, 'rb') as handle:
- assert handle.read() == read_asset('png-broken.png')
- with open(generated_path, 'rb') as handle:
+ with open(source_path, "rb") as handle:
+ assert handle.read() == read_asset("png-broken.png")
+ with open(generated_path, "rb") as handle:
image = images.Image(handle.read())
assert image.width == 1
assert image.height == 1
def test_update_post_content_leaving_custom_thumbnail(
- tmpdir, config_injector, read_asset, post_factory):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- 'allow_broken_uploads': False,
- })
+ tmpdir, config_injector, read_asset, post_factory
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ "allow_broken_uploads": False,
+ }
+ )
post = post_factory(id=1)
db.session.add(post)
- posts.update_post_content(post, read_asset('png.png'))
- posts.update_post_thumbnail(post, read_asset('jpeg.jpg'))
- posts.update_post_content(post, read_asset('png.png'))
+ posts.update_post_content(post, read_asset("png.png"))
+ posts.update_post_thumbnail(post, read_asset("jpeg.jpg"))
+ posts.update_post_content(post, read_asset("png.png"))
db.session.flush()
generated_path = (
- '{}/data/generated-thumbnails/1_244c8840887984c4.jpg'
- .format(tmpdir))
+ "{}/data/generated-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.jpg"
+ )
source_path = (
- '{}/data/posts/custom-thumbnails/1_244c8840887984c4.dat'
- .format(tmpdir))
+ "{}/data/posts/custom-thumbnails/".format(tmpdir)
+ + "1_244c8840887984c4.dat"
+ )
assert os.path.exists(source_path)
assert os.path.exists(generated_path)
def test_update_post_tags(tag_factory):
post = model.Post()
- with patch('szurubooru.func.tags.get_or_create_tags_by_names'):
- tags.get_or_create_tags_by_names.side_effect = lambda tag_names: \
- ([tag_factory(names=[name]) for name in tag_names], [])
- posts.update_post_tags(post, ['tag1', 'tag2'])
+ with patch("szurubooru.func.tags.get_or_create_tags_by_names"):
+ tags.get_or_create_tags_by_names.side_effect = lambda tag_names: (
+ [tag_factory(names=[name]) for name in tag_names],
+ [],
+ )
+ posts.update_post_tags(post, ["tag1", "tag2"])
assert len(post.tags) == 2
- assert post.tags[0].names[0].name == 'tag1'
- assert post.tags[1].names[0].name == 'tag2'
+ assert post.tags[0].names[0].name == "tag1"
+ assert post.tags[1].names[0].name == "tag2"
def test_update_post_relations(post_factory):
@@ -654,7 +708,9 @@ def test_update_post_relations(post_factory):
posts.update_post_relations(post, [relation1.post_id, relation2.post_id])
assert len(post.relations) == 2
assert sorted(r.post_id for r in post.relations) == [
- relation1.post_id, relation2.post_id]
+ relation1.post_id,
+ relation2.post_id,
+ ]
def test_update_post_relations_bidirectionality(post_factory):
@@ -689,35 +745,44 @@ def test_update_post_notes():
posts.update_post_notes(
post,
[
- {'polygon': [[0, 0], [0, 1], [1, 0], [0, 0]], 'text': 'text1'},
- {'polygon': [[0, 0], [0, 1], [1, 0], [0, 0]], 'text': 'text2'},
- ])
+ {"polygon": [[0, 0], [0, 1], [1, 0], [0, 0]], "text": "text1"},
+ {"polygon": [[0, 0], [0, 1], [1, 0], [0, 0]], "text": "text2"},
+ ],
+ )
assert len(post.notes) == 2
assert post.notes[0].polygon == [[0, 0], [0, 1], [1, 0], [0, 0]]
- assert post.notes[0].text == 'text1'
+ assert post.notes[0].text == "text1"
assert post.notes[1].polygon == [[0, 0], [0, 1], [1, 0], [0, 0]]
- assert post.notes[1].text == 'text2'
+ assert post.notes[1].text == "text2"
-@pytest.mark.parametrize('input', [
- [{'text': '...'}],
- [{'polygon': None, 'text': '...'}],
- [{'polygon': 'trash', 'text': '...'}],
- [{'polygon': ['trash', 'trash', 'trash'], 'text': '...'}],
- [{'polygon': {2: 'trash', 3: 'trash', 4: 'trash'}, 'text': '...'}],
- [{'polygon': [[0, 0]], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], None], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], 'surprise'], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], {2: 'trash', 3: 'trash'}], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], 5], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], [0, 2]], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], [0, '...']], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], [0, 0, 0]], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], [0]], 'text': '...'}],
- [{'polygon': [[0, 0], [0, 0], [0, 1]], 'text': ''}],
- [{'polygon': [[0, 0], [0, 0], [0, 1]], 'text': None}],
- [{'polygon': [[0, 0], [0, 0], [0, 1]]}],
-])
+@pytest.mark.parametrize(
+ "input",
+ [
+ [{"text": "..."}],
+ [{"polygon": None, "text": "..."}],
+ [{"polygon": "trash", "text": "..."}],
+ [{"polygon": ["trash", "trash", "trash"], "text": "..."}],
+ [{"polygon": {2: "trash", 3: "trash", 4: "trash"}, "text": "..."}],
+ [{"polygon": [[0, 0]], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], None], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], "surprise"], "text": "..."}],
+ [
+ {
+ "polygon": [[0, 0], [0, 0], {2: "trash", 3: "trash"}],
+ "text": "...",
+ }
+ ],
+ [{"polygon": [[0, 0], [0, 0], 5], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], [0, 2]], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], [0, "..."]], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], [0, 0, 0]], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], [0]], "text": "..."}],
+ [{"polygon": [[0, 0], [0, 0], [0, 1]], "text": ""}],
+ [{"polygon": [[0, 0], [0, 0], [0, 1]], "text": None}],
+ [{"polygon": [[0, 0], [0, 0], [0, 1]]}],
+ ],
+)
def test_update_post_notes_with_invalid_content(input):
post = model.Post()
with pytest.raises(posts.InvalidPostNoteError):
@@ -726,14 +791,14 @@ def test_update_post_notes_with_invalid_content(input):
def test_update_post_flags():
post = model.Post()
- posts.update_post_flags(post, ['loop'])
- assert post.flags == ['loop']
+ posts.update_post_flags(post, ["loop"])
+ assert post.flags == ["loop"]
def test_update_post_flags_with_invalid_content():
post = model.Post()
with pytest.raises(posts.InvalidPostFlagError):
- posts.update_post_flags(post, ['invalid'])
+ posts.update_post_flags(post, ["invalid"])
def test_feature_post(post_factory, user_factory):
@@ -749,7 +814,7 @@ def test_feature_post(post_factory, user_factory):
def test_delete(post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
post = post_factory()
db.session.add(post)
db.session.flush()
@@ -760,7 +825,7 @@ def test_delete(post_factory, config_injector):
def test_merge_posts_deletes_source_post(post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
db.session.add_all([source_post, target_post])
@@ -773,7 +838,7 @@ def test_merge_posts_deletes_source_post(post_factory, config_injector):
def test_merge_posts_with_itself(post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
source_post = post_factory()
db.session.add(source_post)
db.session.flush()
@@ -782,7 +847,7 @@ def test_merge_posts_with_itself(post_factory, config_injector):
def test_merge_posts_moves_tags(post_factory, tag_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
tag = tag_factory()
@@ -798,8 +863,9 @@ def test_merge_posts_moves_tags(post_factory, tag_factory, config_injector):
def test_merge_posts_doesnt_duplicate_tags(
- post_factory, tag_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, tag_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
tag = tag_factory()
@@ -815,8 +881,9 @@ def test_merge_posts_doesnt_duplicate_tags(
def test_merge_posts_moves_comments(
- post_factory, comment_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, comment_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
comment = comment_factory(post=source_post)
@@ -831,8 +898,9 @@ def test_merge_posts_moves_comments(
def test_merge_posts_moves_scores(
- post_factory, post_score_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, post_score_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
score = post_score_factory(post=source_post, score=1)
@@ -847,8 +915,9 @@ def test_merge_posts_moves_scores(
def test_merge_posts_doesnt_duplicate_scores(
- post_factory, user_factory, post_score_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, user_factory, post_score_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
user = user_factory()
@@ -865,8 +934,9 @@ def test_merge_posts_doesnt_duplicate_scores(
def test_merge_posts_moves_favorites(
- post_factory, post_favorite_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, post_favorite_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
favorite = post_favorite_factory(post=source_post)
@@ -881,8 +951,9 @@ def test_merge_posts_moves_favorites(
def test_merge_posts_doesnt_duplicate_favorites(
- post_factory, user_factory, post_favorite_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, user_factory, post_favorite_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
user = user_factory()
@@ -899,7 +970,7 @@ def test_merge_posts_doesnt_duplicate_favorites(
def test_merge_posts_moves_child_relations(post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
related_post = post_factory()
@@ -915,8 +986,9 @@ def test_merge_posts_moves_child_relations(post_factory, config_injector):
def test_merge_posts_doesnt_duplicate_child_relations(
- post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
related_post = post_factory()
@@ -933,7 +1005,7 @@ def test_merge_posts_doesnt_duplicate_child_relations(
def test_merge_posts_moves_parent_relations(post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
related_post = post_factory()
@@ -951,8 +1023,9 @@ def test_merge_posts_moves_parent_relations(post_factory, config_injector):
def test_merge_posts_doesnt_duplicate_parent_relations(
- post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
related_post = post_factory()
@@ -970,8 +1043,9 @@ def test_merge_posts_doesnt_duplicate_parent_relations(
def test_merge_posts_doesnt_create_relation_loop_for_children(
- post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
source_post.relations = [target_post]
@@ -986,8 +1060,9 @@ def test_merge_posts_doesnt_create_relation_loop_for_children(
def test_merge_posts_doesnt_create_relation_loop_for_parents(
- post_factory, config_injector):
- config_injector({'delete_source_files': False})
+ post_factory, config_injector
+):
+ config_injector({"delete_source_files": False})
source_post = post_factory()
target_post = post_factory()
target_post.relations = [source_post]
@@ -1002,30 +1077,33 @@ def test_merge_posts_doesnt_create_relation_loop_for_parents(
def test_merge_posts_replaces_content(
- post_factory, config_injector, tmpdir, read_asset):
- config_injector({
- 'data_dir': str(tmpdir.mkdir('data')),
- 'data_url': 'example.com',
- 'delete_source_files': False,
- 'thumbnails': {
- 'post_width': 300,
- 'post_height': 300,
- },
- 'secret': 'test',
- })
+ post_factory, config_injector, tmpdir, read_asset
+):
+ config_injector(
+ {
+ "data_dir": str(tmpdir.mkdir("data")),
+ "data_url": "example.com",
+ "delete_source_files": False,
+ "thumbnails": {"post_width": 300, "post_height": 300,},
+ "secret": "test",
+ }
+ )
source_post = post_factory(id=1)
target_post = post_factory(id=2)
- content = read_asset('png.png')
+ content = read_asset("png.png")
db.session.add_all([source_post, target_post])
db.session.commit()
posts.update_post_content(source_post, content)
db.session.flush()
- source_path = (
- os.path.join('{}/data/posts/1_244c8840887984c4.png'.format(tmpdir)))
- target_path1 = (
- os.path.join('{}/data/posts/2_49caeb3ec1643406.png'.format(tmpdir)))
- target_path2 = (
- os.path.join('{}/data/posts/2_49caeb3ec1643406.dat'.format(tmpdir)))
+ source_path = os.path.join(
+ "{}/data/posts/1_244c8840887984c4.png".format(tmpdir)
+ )
+ target_path1 = os.path.join(
+ "{}/data/posts/2_49caeb3ec1643406.png".format(tmpdir)
+ )
+ target_path2 = os.path.join(
+ "{}/data/posts/2_49caeb3ec1643406.dat".format(tmpdir)
+ )
assert os.path.exists(source_path)
assert not os.path.exists(target_path1)
assert not os.path.exists(target_path2)
@@ -1040,16 +1118,16 @@ def test_merge_posts_replaces_content(
def test_search_by_image(post_factory, config_injector, read_asset):
- config_injector({'allow_broken_uploads': False})
+ config_injector({"allow_broken_uploads": False})
post = post_factory()
- posts.generate_post_signature(post, read_asset('jpeg.jpg'))
+ posts.generate_post_signature(post, read_asset("jpeg.jpg"))
db.session.flush()
- result1 = posts.search_by_image(read_asset('jpeg-similar.jpg'))
+ result1 = posts.search_by_image(read_asset("jpeg-similar.jpg"))
assert len(result1) == 1
result1_distance, result1_post = result1[0]
assert abs(result1_distance - 0.19713075553164386) < 1e-8
assert result1_post.post_id == post.post_id
- result2 = posts.search_by_image(read_asset('png.png'))
+ result2 = posts.search_by_image(read_asset("png.png"))
assert not result2
diff --git a/server/szurubooru/tests/func/test_snapshots.py b/server/szurubooru/tests/func/test_snapshots.py
index 053cf982..b4dda1cc 100644
--- a/server/szurubooru/tests/func/test_snapshots.py
+++ b/server/szurubooru/tests/func/test_snapshots.py
@@ -1,55 +1,57 @@
-from unittest.mock import patch
from datetime import datetime
+from unittest.mock import patch
+
import pytest
+
from szurubooru import db, model
from szurubooru.func import snapshots, users
def test_get_tag_category_snapshot(tag_category_factory):
- category = tag_category_factory(name='name', color='color')
+ category = tag_category_factory(name="name", color="color")
assert snapshots.get_tag_category_snapshot(category) == {
- 'name': 'name',
- 'color': 'color',
- 'default': False,
+ "name": "name",
+ "color": "color",
+ "default": False,
}
category.default = True
assert snapshots.get_tag_category_snapshot(category) == {
- 'name': 'name',
- 'color': 'color',
- 'default': True,
+ "name": "name",
+ "color": "color",
+ "default": True,
}
def test_get_tag_snapshot(tag_factory, tag_category_factory):
- category = tag_category_factory(name='dummy')
- tag = tag_factory(names=['main_name', 'alias'], category=category)
+ category = tag_category_factory(name="dummy")
+ tag = tag_factory(names=["main_name", "alias"], category=category)
assert snapshots.get_tag_snapshot(tag) == {
- 'names': ['main_name', 'alias'],
- 'category': 'dummy',
- 'suggestions': [],
- 'implications': [],
+ "names": ["main_name", "alias"],
+ "category": "dummy",
+ "suggestions": [],
+ "implications": [],
}
- tag = tag_factory(names=['main_name', 'alias'], category=category)
- imp1 = tag_factory(names=['imp1_main_name', 'imp1_alias'])
- imp2 = tag_factory(names=['imp2_main_name', 'imp2_alias'])
- sug1 = tag_factory(names=['sug1_main_name', 'sug1_alias'])
- sug2 = tag_factory(names=['sug2_main_name', 'sug2_alias'])
+ tag = tag_factory(names=["main_name", "alias"], category=category)
+ imp1 = tag_factory(names=["imp1_main_name", "imp1_alias"])
+ imp2 = tag_factory(names=["imp2_main_name", "imp2_alias"])
+ sug1 = tag_factory(names=["sug1_main_name", "sug1_alias"])
+ sug2 = tag_factory(names=["sug2_main_name", "sug2_alias"])
db.session.add_all([imp1, imp2, sug1, sug2])
tag.implications = [imp1, imp2]
tag.suggestions = [sug1, sug2]
db.session.flush()
assert snapshots.get_tag_snapshot(tag) == {
- 'names': ['main_name', 'alias'],
- 'category': 'dummy',
- 'implications': ['imp1_main_name', 'imp2_main_name'],
- 'suggestions': ['sug1_main_name', 'sug2_main_name'],
+ "names": ["main_name", "alias"],
+ "category": "dummy",
+ "implications": ["imp1_main_name", "imp2_main_name"],
+ "suggestions": ["sug1_main_name", "sug2_main_name"],
}
def test_get_post_snapshot(post_factory, user_factory, tag_factory):
- user = user_factory(name='dummy-user')
- tag1 = tag_factory(names=['dummy-tag1'])
- tag2 = tag_factory(names=['dummy-tag2'])
+ user = user_factory(name="dummy-user")
+ tag1 = tag_factory(names=["dummy-tag1"])
+ tag2 = tag_factory(names=["dummy-tag2"])
post = post_factory(id=1)
related_post1 = post_factory(id=2)
related_post2 = post_factory(id=3)
@@ -72,13 +74,13 @@ def test_get_post_snapshot(post_factory, user_factory, tag_factory):
note = model.PostNote()
note.post = post
note.polygon = [(1, 1), (200, 1), (200, 200), (1, 200)]
- note.text = 'some text'
+ note.text = "some text"
db.session.add_all([score])
db.session.flush()
post.user = user
- post.checksum = 'deadbeef'
- post.source = 'example.com'
+ post.checksum = "deadbeef"
+ post.source = "example.com"
post.tags.append(tag1)
post.tags.append(tag2)
post.relations.append(related_post1)
@@ -89,17 +91,19 @@ def test_get_post_snapshot(post_factory, user_factory, tag_factory):
post.notes.append(note)
assert snapshots.get_post_snapshot(post) == {
- 'checksum': 'deadbeef',
- 'featured': True,
- 'flags': [],
- 'notes': [{
- 'polygon': [[1, 1], [200, 1], [200, 200], [1, 200]],
- 'text': 'some text',
- }],
- 'relations': [2, 3],
- 'safety': 'safe',
- 'source': 'example.com',
- 'tags': ['dummy-tag1', 'dummy-tag2'],
+ "checksum": "deadbeef",
+ "featured": True,
+ "flags": [],
+ "notes": [
+ {
+ "polygon": [[1, 1], [200, 1], [200, 200], [1, 200]],
+ "text": "some text",
+ }
+ ],
+ "relations": [2, 3],
+ "safety": "safe",
+ "source": "example.com",
+ "tags": ["dummy-tag1", "dummy-tag2"],
}
@@ -107,75 +111,78 @@ def test_serialize_snapshot(user_factory):
auth_user = user_factory()
snapshot = model.Snapshot()
snapshot.operation = snapshot.OPERATION_CREATED
- snapshot.resource_type = 'type'
- snapshot.resource_name = 'id'
- snapshot.user = user_factory(name='issuer')
- snapshot.data = {'complex': list('object')}
+ snapshot.resource_type = "type"
+ snapshot.resource_name = "id"
+ snapshot.user = user_factory(name="issuer")
+ snapshot.data = {"complex": list("object")}
snapshot.creation_time = datetime(1997, 1, 1)
- with patch('szurubooru.func.users.serialize_micro_user'):
- users.serialize_micro_user.return_value = 'mocked'
+ with patch("szurubooru.func.users.serialize_micro_user"):
+ users.serialize_micro_user.return_value = "mocked"
assert snapshots.serialize_snapshot(snapshot, auth_user) == {
- 'operation': 'created',
- 'type': 'type',
- 'id': 'id',
- 'user': 'mocked',
- 'data': {'complex': list('object')},
- 'time': datetime(1997, 1, 1),
+ "operation": "created",
+ "type": "type",
+ "id": "id",
+ "user": "mocked",
+ "data": {"complex": list("object")},
+ "time": datetime(1997, 1, 1),
}
def test_create(tag_factory, user_factory):
- tag = tag_factory(names=['dummy'])
+ tag = tag_factory(names=["dummy"])
db.session.add(tag)
db.session.flush()
- with patch('szurubooru.func.snapshots.get_tag_snapshot'):
- snapshots.get_tag_snapshot.return_value = 'mocked'
+ with patch("szurubooru.func.snapshots.get_tag_snapshot"):
+ snapshots.get_tag_snapshot.return_value = "mocked"
snapshots.create(tag, user_factory())
db.session.flush()
results = db.session.query(model.Snapshot).all()
assert len(results) == 1
assert results[0].operation == model.Snapshot.OPERATION_CREATED
- assert results[0].data == 'mocked'
+ assert results[0].data == "mocked"
def test_modify_saves_non_empty_diffs(post_factory, user_factory):
- if 'sqlite' in db.session.get_bind().driver:
+ if "sqlite" in db.session.get_bind().driver:
pytest.xfail(
- 'SQLite doesn\'t support transaction isolation, '
- 'which is required to retrieve original entity')
+ "SQLite doesn't support transaction isolation, "
+ "which is required to retrieve original entity"
+ )
post = post_factory()
- post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text='old')]
+ post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="old")]
user = user_factory()
db.session.add_all([post, user])
db.session.commit()
- post.source = 'new source'
- post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text='new')]
+ post.source = "new source"
+ post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="new")]
db.session.flush()
snapshots.modify(post, user)
db.session.flush()
results = db.session.query(model.Snapshot).all()
assert len(results) == 1
assert results[0].data == {
- 'type': 'object change',
- 'value': {
- 'source': {
- 'type': 'primitive change',
- 'old-value': None,
- 'new-value': 'new source',
+ "type": "object change",
+ "value": {
+ "source": {
+ "type": "primitive change",
+ "old-value": None,
+ "new-value": "new source",
},
- 'notes': {
- 'type': 'list change',
- 'removed': [
- {'polygon': [[0, 0], [0, 1], [1, 1]], 'text': 'old'}],
- 'added': [
- {'polygon': [[0, 0], [0, 1], [1, 1]], 'text': 'new'}],
+ "notes": {
+ "type": "list change",
+ "removed": [
+ {"polygon": [[0, 0], [0, 1], [1, 1]], "text": "old"}
+ ],
+ "added": [
+ {"polygon": [[0, 0], [0, 1], [1, 1]], "text": "new"}
+ ],
},
},
}
def test_modify_doesnt_save_empty_diffs(tag_factory, user_factory):
- tag = tag_factory(names=['dummy'])
+ tag = tag_factory(names=["dummy"])
user = user_factory()
db.session.add_all([tag, user])
db.session.commit()
@@ -185,26 +192,26 @@ def test_modify_doesnt_save_empty_diffs(tag_factory, user_factory):
def test_delete(tag_factory, user_factory):
- tag = tag_factory(names=['dummy'])
+ tag = tag_factory(names=["dummy"])
db.session.add(tag)
db.session.flush()
- with patch('szurubooru.func.snapshots.get_tag_snapshot'):
- snapshots.get_tag_snapshot.return_value = 'mocked'
+ with patch("szurubooru.func.snapshots.get_tag_snapshot"):
+ snapshots.get_tag_snapshot.return_value = "mocked"
snapshots.delete(tag, user_factory())
db.session.flush()
results = db.session.query(model.Snapshot).all()
assert len(results) == 1
assert results[0].operation == model.Snapshot.OPERATION_DELETED
- assert results[0].data == 'mocked'
+ assert results[0].data == "mocked"
def test_merge(tag_factory, user_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
db.session.add_all([source_tag, target_tag])
db.session.flush()
snapshots.merge(source_tag, target_tag, user_factory())
db.session.flush()
result = db.session.query(model.Snapshot).one()
assert result.operation == model.Snapshot.OPERATION_MERGED
- assert result.data == ['tag', 'target']
+ assert result.data == ["tag", "target"]
diff --git a/server/szurubooru/tests/func/test_tag_categories.py b/server/szurubooru/tests/func/test_tag_categories.py
index d8867034..143cc49f 100644
--- a/server/szurubooru/tests/func/test_tag_categories.py
+++ b/server/szurubooru/tests/func/test_tag_categories.py
@@ -1,7 +1,9 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import db, model
-from szurubooru.func import tag_categories, cache
+from szurubooru.func import cache, tag_categories
@pytest.fixture(autouse=True)
@@ -14,7 +16,7 @@ def test_serialize_category_when_empty():
def test_serialize_category(tag_category_factory, tag_factory):
- category = tag_category_factory(name='name', color='color')
+ category = tag_category_factory(name="name", color="color")
category.category_id = 1
category.default = True
tag1 = tag_factory(category=category)
@@ -23,36 +25,42 @@ def test_serialize_category(tag_category_factory, tag_factory):
db.session.flush()
result = tag_categories.serialize_category(category)
assert result == {
- 'name': 'name',
- 'color': 'color',
- 'default': True,
- 'version': 1,
- 'usages': 2,
+ "name": "name",
+ "color": "color",
+ "default": True,
+ "version": 1,
+ "usages": 2,
}
def test_create_category_when_first():
- with patch('szurubooru.func.tag_categories.update_category_name'), \
- patch('szurubooru.func.tag_categories.update_category_color'):
- category = tag_categories.create_category('name', 'color')
+ with patch("szurubooru.func.tag_categories.update_category_name"), patch(
+ "szurubooru.func.tag_categories.update_category_color"
+ ):
+ category = tag_categories.create_category("name", "color")
assert category.default
- tag_categories.update_category_name \
- .assert_called_once_with(category, 'name')
- tag_categories.update_category_color \
- .assert_called_once_with(category, 'color')
+ tag_categories.update_category_name.assert_called_once_with(
+ category, "name"
+ )
+ tag_categories.update_category_color.assert_called_once_with(
+ category, "color"
+ )
def test_create_category_when_subsequent(tag_category_factory):
db.session.add(tag_category_factory())
db.session.flush()
- with patch('szurubooru.func.tag_categories.update_category_name'), \
- patch('szurubooru.func.tag_categories.update_category_color'):
- category = tag_categories.create_category('name', 'color')
+ with patch("szurubooru.func.tag_categories.update_category_name"), patch(
+ "szurubooru.func.tag_categories.update_category_color"
+ ):
+ category = tag_categories.create_category("name", "color")
assert not category.default
- tag_categories.update_category_name \
- .assert_called_once_with(category, 'name')
- tag_categories.update_category_color \
- .assert_called_once_with(category, 'color')
+ tag_categories.update_category_name.assert_called_once_with(
+ category, "name"
+ )
+ tag_categories.update_category_color.assert_called_once_with(
+ category, "color"
+ )
def test_update_category_name_with_empty_string(tag_category_factory):
@@ -62,38 +70,42 @@ def test_update_category_name_with_empty_string(tag_category_factory):
def test_update_category_name_with_invalid_name(
- config_injector, tag_category_factory):
- config_injector({'tag_category_name_regex': '^[a-z]+$'})
+ config_injector, tag_category_factory
+):
+ config_injector({"tag_category_name_regex": "^[a-z]+$"})
category = tag_category_factory()
with pytest.raises(tag_categories.InvalidTagCategoryNameError):
- tag_categories.update_category_name(category, '0')
+ tag_categories.update_category_name(category, "0")
def test_update_category_name_with_too_long_string(
- config_injector, tag_category_factory):
- config_injector({'tag_category_name_regex': '^[a-z]+$'})
+ config_injector, tag_category_factory
+):
+ config_injector({"tag_category_name_regex": "^[a-z]+$"})
category = tag_category_factory()
with pytest.raises(tag_categories.InvalidTagCategoryNameError):
- tag_categories.update_category_name(category, 'a' * 3000)
+ tag_categories.update_category_name(category, "a" * 3000)
def test_update_category_name_reusing_other_name(
- config_injector, tag_category_factory):
- config_injector({'tag_category_name_regex': '.*'})
- db.session.add(tag_category_factory(name='name'))
+ config_injector, tag_category_factory
+):
+ config_injector({"tag_category_name_regex": ".*"})
+ db.session.add(tag_category_factory(name="name"))
db.session.flush()
category = tag_category_factory()
with pytest.raises(tag_categories.TagCategoryAlreadyExistsError):
- tag_categories.update_category_name(category, 'name')
+ tag_categories.update_category_name(category, "name")
with pytest.raises(tag_categories.TagCategoryAlreadyExistsError):
- tag_categories.update_category_name(category, 'NAME')
+ tag_categories.update_category_name(category, "NAME")
def test_update_category_name_reusing_own_name(
- config_injector, tag_category_factory):
- config_injector({'tag_category_name_regex': '.*'})
- for name in ['name', 'NAME']:
- category = tag_category_factory(name='name')
+ config_injector, tag_category_factory
+):
+ config_injector({"tag_category_name_regex": ".*"})
+ for name in ["name", "NAME"]:
+ category = tag_category_factory(name="name")
db.session.add(category)
db.session.flush()
tag_categories.update_category_name(category, name)
@@ -110,16 +122,16 @@ def test_update_category_color_with_empty_string(tag_category_factory):
def test_update_category_color_with_too_long_string(tag_category_factory):
category = tag_category_factory()
with pytest.raises(tag_categories.InvalidTagCategoryColorError):
- tag_categories.update_category_color(category, 'a' * 3000)
+ tag_categories.update_category_color(category, "a" * 3000)
def test_update_category_color_with_invalid_string(tag_category_factory):
category = tag_category_factory()
with pytest.raises(tag_categories.InvalidTagCategoryColorError):
- tag_categories.update_category_color(category, 'NOPE')
+ tag_categories.update_category_color(category, "NOPE")
-@pytest.mark.parametrize('attempt', ['#aaaaaa', '#012345', '012345', 'red'])
+@pytest.mark.parametrize("attempt", ["#aaaaaa", "#012345", "012345", "red"])
def test_update_category_color(attempt, tag_category_factory):
category = tag_category_factory()
tag_categories.update_category_color(category, attempt)
@@ -127,35 +139,35 @@ def test_update_category_color(attempt, tag_category_factory):
def test_try_get_category_by_name(tag_category_factory):
- category = tag_category_factory(name='test')
+ category = tag_category_factory(name="test")
db.session.add(category)
db.session.flush()
- assert tag_categories.try_get_category_by_name('test') == category
- assert tag_categories.try_get_category_by_name('TEST') == category
- assert tag_categories.try_get_category_by_name('-') is None
+ assert tag_categories.try_get_category_by_name("test") == category
+ assert tag_categories.try_get_category_by_name("TEST") == category
+ assert tag_categories.try_get_category_by_name("-") is None
def test_get_category_by_name(tag_category_factory):
- category = tag_category_factory(name='test')
+ category = tag_category_factory(name="test")
db.session.add(category)
db.session.flush()
- assert tag_categories.get_category_by_name('test') == category
- assert tag_categories.get_category_by_name('TEST') == category
+ assert tag_categories.get_category_by_name("test") == category
+ assert tag_categories.get_category_by_name("TEST") == category
with pytest.raises(tag_categories.TagCategoryNotFoundError):
- tag_categories.get_category_by_name('-')
+ tag_categories.get_category_by_name("-")
def test_get_all_category_names(tag_category_factory):
- category1 = tag_category_factory(name='cat1')
- category2 = tag_category_factory(name='cat2')
+ category1 = tag_category_factory(name="cat1")
+ category2 = tag_category_factory(name="cat2")
db.session.add_all([category2, category1])
db.session.flush()
- assert tag_categories.get_all_category_names() == ['cat1', 'cat2']
+ assert tag_categories.get_all_category_names() == ["cat1", "cat2"]
def test_get_all_categories(tag_category_factory):
- category1 = tag_category_factory(name='cat1')
- category2 = tag_category_factory(name='cat2')
+ category1 = tag_category_factory(name="cat1")
+ category2 = tag_category_factory(name="cat2")
db.session.add_all([category2, category1])
db.session.flush()
assert tag_categories.get_all_categories() == [category1, category2]
@@ -211,12 +223,12 @@ def test_get_default_category_name_caching(tag_category_factory):
def test_get_default_category():
- with patch('szurubooru.func.tag_categories.try_get_default_category'):
+ with patch("szurubooru.func.tag_categories.try_get_default_category"):
tag_categories.try_get_default_category.return_value = None
with pytest.raises(tag_categories.TagCategoryNotFoundError):
tag_categories.get_default_category()
- tag_categories.try_get_default_category.return_value = 'mocked'
- assert tag_categories.get_default_category() == 'mocked'
+ tag_categories.try_get_default_category.return_value = "mocked"
+ assert tag_categories.get_default_category() == "mocked"
def test_set_default_category_with_previous_default(tag_category_factory):
@@ -257,9 +269,9 @@ def test_delete_category_with_usages(tag_category_factory, tag_factory):
def test_delete_category(tag_category_factory):
db.session.add(tag_category_factory())
- category = tag_category_factory(name='target')
+ category = tag_category_factory(name="target")
db.session.add(category)
db.session.flush()
tag_categories.delete_category(category)
db.session.flush()
- assert tag_categories.try_get_category_by_name('target') is None
+ assert tag_categories.try_get_category_by_name("target") is None
diff --git a/server/szurubooru/tests/func/test_tags.py b/server/szurubooru/tests/func/test_tags.py
index 673e37f8..ac8963c7 100644
--- a/server/szurubooru/tests/func/test_tags.py
+++ b/server/szurubooru/tests/func/test_tags.py
@@ -1,10 +1,12 @@
-import os
import json
-from unittest.mock import patch
+import os
from datetime import datetime
+from unittest.mock import patch
+
import pytest
+
from szurubooru import db, model
-from szurubooru.func import tags, tag_categories, cache
+from szurubooru.func import cache, tag_categories, tags
@pytest.fixture(autouse=True)
@@ -14,18 +16,35 @@ def purge_cache():
def _assert_tag_siblings(result, expected_names_and_occurrences):
actual_names_and_occurences = [
- (tag.names[0].name, occurrences) for tag, occurrences in result]
+ (tag.names[0].name, occurrences) for tag, occurrences in result
+ ]
assert actual_names_and_occurences == expected_names_and_occurrences
-@pytest.mark.parametrize('input,expected_tag_names', [
- ([('a', 'a', True), ('b', 'b', False), ('c', 'c', False)], list('bca')),
- ([('c', 'a', True), ('b', 'b', False), ('a', 'c', False)], list('bac')),
- ([('a', 'c', True), ('b', 'b', False), ('c', 'a', False)], list('cba')),
- ([('a', 'c', False), ('b', 'b', False), ('c', 'a', True)], list('bac')),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ (
+ [("a", "a", True), ("b", "b", False), ("c", "c", False)],
+ list("bca"),
+ ),
+ (
+ [("c", "a", True), ("b", "b", False), ("a", "c", False)],
+ list("bac"),
+ ),
+ (
+ [("a", "c", True), ("b", "b", False), ("c", "a", False)],
+ list("cba"),
+ ),
+ (
+ [("a", "c", False), ("b", "b", False), ("c", "a", True)],
+ list("bac"),
+ ),
+ ],
+)
def test_sort_tags(
- input, expected_tag_names, tag_factory, tag_category_factory):
+ input, expected_tag_names, tag_factory, tag_category_factory
+):
db_tags = []
for tag in input:
tag_name, category_name, category_is_default = tag
@@ -33,7 +52,10 @@ def test_sort_tags(
tag_factory(
names=[tag_name],
category=tag_category_factory(
- name=category_name, default=category_is_default)))
+ name=category_name, default=category_is_default
+ ),
+ )
+ )
db.session.add_all(db_tags)
db.session.flush()
actual_tag_names = [tag.names[0].name for tag in tags.sort_tags(db_tags)]
@@ -45,17 +67,17 @@ def test_serialize_tag_when_empty():
def test_serialize_tag(post_factory, tag_factory, tag_category_factory):
- cat = tag_category_factory(name='cat')
- tag = tag_factory(names=['tag1', 'tag2'], category=cat)
+ cat = tag_category_factory(name="cat")
+ tag = tag_factory(names=["tag1", "tag2"], category=cat)
# tag.tag_id = 1
- tag.description = 'description'
+ tag.description = "description"
tag.suggestions = [
- tag_factory(names=['sug1'], category=cat),
- tag_factory(names=['sug2'], category=cat),
+ tag_factory(names=["sug1"], category=cat),
+ tag_factory(names=["sug2"], category=cat),
]
tag.implications = [
- tag_factory(names=['impl1'], category=cat),
- tag_factory(names=['impl2'], category=cat),
+ tag_factory(names=["impl1"], category=cat),
+ tag_factory(names=["impl2"], category=cat),
]
tag.last_edit_time = datetime(1998, 1, 1)
@@ -67,36 +89,39 @@ def test_serialize_tag(post_factory, tag_factory, tag_category_factory):
db.session.flush()
result = tags.serialize_tag(tag)
- result['suggestions'].sort(key=lambda relation: relation['names'][0])
- result['implications'].sort(key=lambda relation: relation['names'][0])
+ result["suggestions"].sort(key=lambda relation: relation["names"][0])
+ result["implications"].sort(key=lambda relation: relation["names"][0])
assert result == {
- 'names': ['tag1', 'tag2'],
- 'version': 1,
- 'category': 'cat',
- 'creationTime': datetime(1996, 1, 1, 0, 0),
- 'lastEditTime': datetime(1998, 1, 1, 0, 0),
- 'description': 'description',
- 'suggestions': [
- {'names': ['sug1'], 'category': 'cat', 'usages': 0},
- {'names': ['sug2'], 'category': 'cat', 'usages': 0},
+ "names": ["tag1", "tag2"],
+ "version": 1,
+ "category": "cat",
+ "creationTime": datetime(1996, 1, 1, 0, 0),
+ "lastEditTime": datetime(1998, 1, 1, 0, 0),
+ "description": "description",
+ "suggestions": [
+ {"names": ["sug1"], "category": "cat", "usages": 0},
+ {"names": ["sug2"], "category": "cat", "usages": 0},
],
- 'implications': [
- {'names': ['impl1'], 'category': 'cat', 'usages': 0},
- {'names': ['impl2'], 'category': 'cat', 'usages': 0},
+ "implications": [
+ {"names": ["impl1"], "category": "cat", "usages": 0},
+ {"names": ["impl2"], "category": "cat", "usages": 0},
],
- 'usages': 2,
+ "usages": 2,
}
-@pytest.mark.parametrize('name_to_search,expected_to_find', [
- ('name', True),
- ('NAME', True),
- ('alias', True),
- ('ALIAS', True),
- ('-', False),
-])
+@pytest.mark.parametrize(
+ "name_to_search,expected_to_find",
+ [
+ ("name", True),
+ ("NAME", True),
+ ("alias", True),
+ ("ALIAS", True),
+ ("-", False),
+ ],
+)
def test_try_get_tag_by_name(name_to_search, expected_to_find, tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
+ tag = tag_factory(names=["name", "ALIAS"])
db.session.add(tag)
db.session.flush()
if expected_to_find:
@@ -105,15 +130,18 @@ def test_try_get_tag_by_name(name_to_search, expected_to_find, tag_factory):
assert tags.try_get_tag_by_name(name_to_search) is None
-@pytest.mark.parametrize('name_to_search,expected_to_find', [
- ('name', True),
- ('NAME', True),
- ('alias', True),
- ('ALIAS', True),
- ('-', False),
-])
+@pytest.mark.parametrize(
+ "name_to_search,expected_to_find",
+ [
+ ("name", True),
+ ("NAME", True),
+ ("alias", True),
+ ("ALIAS", True),
+ ("-", False),
+ ],
+)
def test_get_tag_by_name(name_to_search, expected_to_find, tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
+ tag = tag_factory(names=["name", "ALIAS"])
db.session.add(tag)
db.session.flush()
if expected_to_find:
@@ -123,25 +151,28 @@ def test_get_tag_by_name(name_to_search, expected_to_find, tag_factory):
tags.get_tag_by_name(name_to_search)
-@pytest.mark.parametrize('names,expected_indexes', [
- ([], []),
- (['name1'], [0]),
- (['NAME1'], [0]),
- (['alias1'], [0]),
- (['ALIAS1'], [0]),
- (['name2'], [1]),
- (['name1', 'name1'], [0]),
- (['name1', 'NAME1'], [0]),
- (['name1', 'alias1'], [0]),
- (['name1', 'alias2'], [0, 1]),
- (['NAME1', 'alias2'], [0, 1]),
- (['name1', 'ALIAS2'], [0, 1]),
- (['name2', 'alias1'], [0, 1]),
-])
+@pytest.mark.parametrize(
+ "names,expected_indexes",
+ [
+ ([], []),
+ (["name1"], [0]),
+ (["NAME1"], [0]),
+ (["alias1"], [0]),
+ (["ALIAS1"], [0]),
+ (["name2"], [1]),
+ (["name1", "name1"], [0]),
+ (["name1", "NAME1"], [0]),
+ (["name1", "alias1"], [0]),
+ (["name1", "alias2"], [0, 1]),
+ (["NAME1", "alias2"], [0, 1]),
+ (["name1", "ALIAS2"], [0, 1]),
+ (["name2", "alias1"], [0, 1]),
+ ],
+)
def test_get_tag_by_names(names, expected_indexes, tag_factory):
input_tags = [
- tag_factory(names=['name1', 'ALIAS1']),
- tag_factory(names=['name2', 'ALIAS2']),
+ tag_factory(names=["name1", "ALIAS1"]),
+ tag_factory(names=["name2", "ALIAS2"]),
]
db.session.add_all(input_tags)
db.session.flush()
@@ -151,49 +182,52 @@ def test_get_tag_by_names(names, expected_indexes, tag_factory):
@pytest.mark.parametrize(
- 'names,expected_indexes,expected_created_names', [
+ "names,expected_indexes,expected_created_names",
+ [
([], [], []),
- (['name1'], [0], []),
- (['NAME1'], [0], []),
- (['alias1'], [0], []),
- (['ALIAS1'], [0], []),
- (['name2'], [1], []),
- (['name1', 'name1'], [0], []),
- (['name1', 'NAME1'], [0], []),
- (['name1', 'alias1'], [0], []),
- (['name1', 'alias2'], [0, 1], []),
- (['NAME1', 'alias2'], [0, 1], []),
- (['name1', 'ALIAS2'], [0, 1], []),
- (['name2', 'alias1'], [0, 1], []),
- (['new'], [], ['new']),
- (['new', 'name1'], [0], ['new']),
- (['new', 'NAME1'], [0], ['new']),
- (['new', 'alias1'], [0], ['new']),
- (['new', 'ALIAS1'], [0], ['new']),
- (['new', 'name2'], [1], ['new']),
- (['new', 'name1', 'name1'], [0], ['new']),
- (['new', 'name1', 'NAME1'], [0], ['new']),
- (['new', 'name1', 'alias1'], [0], ['new']),
- (['new', 'name1', 'alias2'], [0, 1], ['new']),
- (['new', 'NAME1', 'alias2'], [0, 1], ['new']),
- (['new', 'name1', 'ALIAS2'], [0, 1], ['new']),
- (['new', 'name2', 'alias1'], [0, 1], ['new']),
- (['new', 'new'], [], ['new']),
- (['new', 'NEW'], [], ['new']),
- (['new', 'new2'], [], ['new', 'new2']),
- ])
+ (["name1"], [0], []),
+ (["NAME1"], [0], []),
+ (["alias1"], [0], []),
+ (["ALIAS1"], [0], []),
+ (["name2"], [1], []),
+ (["name1", "name1"], [0], []),
+ (["name1", "NAME1"], [0], []),
+ (["name1", "alias1"], [0], []),
+ (["name1", "alias2"], [0, 1], []),
+ (["NAME1", "alias2"], [0, 1], []),
+ (["name1", "ALIAS2"], [0, 1], []),
+ (["name2", "alias1"], [0, 1], []),
+ (["new"], [], ["new"]),
+ (["new", "name1"], [0], ["new"]),
+ (["new", "NAME1"], [0], ["new"]),
+ (["new", "alias1"], [0], ["new"]),
+ (["new", "ALIAS1"], [0], ["new"]),
+ (["new", "name2"], [1], ["new"]),
+ (["new", "name1", "name1"], [0], ["new"]),
+ (["new", "name1", "NAME1"], [0], ["new"]),
+ (["new", "name1", "alias1"], [0], ["new"]),
+ (["new", "name1", "alias2"], [0, 1], ["new"]),
+ (["new", "NAME1", "alias2"], [0, 1], ["new"]),
+ (["new", "name1", "ALIAS2"], [0, 1], ["new"]),
+ (["new", "name2", "alias1"], [0, 1], ["new"]),
+ (["new", "new"], [], ["new"]),
+ (["new", "NEW"], [], ["new"]),
+ (["new", "new2"], [], ["new", "new2"]),
+ ],
+)
def test_get_or_create_tags_by_names(
- names,
- expected_indexes,
- expected_created_names,
- tag_factory,
- tag_category_factory,
- config_injector):
- config_injector({'tag_name_regex': '.*'})
+ names,
+ expected_indexes,
+ expected_created_names,
+ tag_factory,
+ tag_category_factory,
+ config_injector,
+):
+ config_injector({"tag_name_regex": ".*"})
category = tag_category_factory()
input_tags = [
- tag_factory(names=['name1', 'ALIAS1'], category=category),
- tag_factory(names=['name2', 'ALIAS2'], category=category),
+ tag_factory(names=["name1", "ALIAS1"], category=category),
+ tag_factory(names=["name2", "ALIAS2"], category=category),
]
db.session.add_all(input_tags)
db.session.flush()
@@ -206,14 +240,14 @@ def test_get_or_create_tags_by_names(
def test_get_tag_siblings_for_unused(tag_factory):
- tag = tag_factory(names=['tag'])
+ tag = tag_factory(names=["tag"])
db.session.add(tag)
db.session.flush()
_assert_tag_siblings(tags.get_tag_siblings(tag), [])
def test_get_tag_siblings_for_used_alone(tag_factory, post_factory):
- tag = tag_factory(names=['tag'])
+ tag = tag_factory(names=["tag"])
post = post_factory()
post.tags = [tag]
db.session.add_all([post, tag])
@@ -222,20 +256,20 @@ def test_get_tag_siblings_for_used_alone(tag_factory, post_factory):
def test_get_tag_siblings_for_used_with_others(tag_factory, post_factory):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
post = post_factory()
post.tags = [tag1, tag2]
db.session.add_all([post, tag1, tag2])
db.session.flush()
- _assert_tag_siblings(tags.get_tag_siblings(tag1), [('t2', 1)])
- _assert_tag_siblings(tags.get_tag_siblings(tag2), [('t1', 1)])
+ _assert_tag_siblings(tags.get_tag_siblings(tag1), [("t2", 1)])
+ _assert_tag_siblings(tags.get_tag_siblings(tag2), [("t1", 1)])
def test_get_tag_siblings_used_for_multiple_others(tag_factory, post_factory):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
- tag3 = tag_factory(names=['t3'])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
+ tag3 = tag_factory(names=["t3"])
post1 = post_factory()
post2 = post_factory()
post3 = post_factory()
@@ -246,16 +280,16 @@ def test_get_tag_siblings_used_for_multiple_others(tag_factory, post_factory):
post4.tags = [tag2]
db.session.add_all([post1, post2, post3, post4, tag1, tag2, tag3])
db.session.flush()
- _assert_tag_siblings(tags.get_tag_siblings(tag1), [('t3', 2), ('t2', 1)])
- _assert_tag_siblings(tags.get_tag_siblings(tag2), [('t1', 1), ('t3', 1)])
+ _assert_tag_siblings(tags.get_tag_siblings(tag1), [("t3", 2), ("t2", 1)])
+ _assert_tag_siblings(tags.get_tag_siblings(tag2), [("t1", 1), ("t3", 1)])
# even though tag2 is used more widely, tag1 is more relevant to tag3
- _assert_tag_siblings(tags.get_tag_siblings(tag3), [('t1', 2), ('t2', 1)])
+ _assert_tag_siblings(tags.get_tag_siblings(tag3), [("t1", 2), ("t2", 1)])
def test_delete(tag_factory):
- tag = tag_factory(names=['tag'])
- tag.suggestions = [tag_factory(names=['sug'])]
- tag.implications = [tag_factory(names=['imp'])]
+ tag = tag_factory(names=["tag"])
+ tag.suggestions = [tag_factory(names=["sug"])]
+ tag.implications = [tag_factory(names=["imp"])]
db.session.add(tag)
db.session.flush()
assert db.session.query(model.Tag).count() == 3
@@ -265,19 +299,19 @@ def test_delete(tag_factory):
def test_merge_tags_deletes_source_tag(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
db.session.add_all([source_tag, target_tag])
db.session.flush()
tags.merge_tags(source_tag, target_tag)
db.session.flush()
- assert tags.try_get_tag_by_name('source') is None
- tag = tags.get_tag_by_name('target')
+ assert tags.try_get_tag_by_name("source") is None
+ tag = tags.get_tag_by_name("target")
assert tag is not None
def test_merge_tags_with_itself(tag_factory):
- source_tag = tag_factory(names=['source'])
+ source_tag = tag_factory(names=["source"])
db.session.add(source_tag)
db.session.flush()
with pytest.raises(tags.InvalidTagRelationError):
@@ -285,8 +319,8 @@ def test_merge_tags_with_itself(tag_factory):
def test_merge_tags_moves_usages(tag_factory, post_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
post = post_factory()
post.tags = [source_tag]
db.session.add_all([source_tag, target_tag, post])
@@ -295,13 +329,13 @@ def test_merge_tags_moves_usages(tag_factory, post_factory):
assert target_tag.post_count == 0
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').post_count == 1
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").post_count == 1
def test_merge_tags_doesnt_duplicate_usages(tag_factory, post_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
post = post_factory()
post.tags = [source_tag, target_tag]
db.session.add_all([source_tag, target_tag, post])
@@ -310,13 +344,13 @@ def test_merge_tags_doesnt_duplicate_usages(tag_factory, post_factory):
assert target_tag.post_count == 1
tags.merge_tags(source_tag, target_tag)
db.session.flush()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').post_count == 1
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").post_count == 1
def test_merge_tags_moves_child_relations(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
related_tag = tag_factory()
source_tag.suggestions = [related_tag]
source_tag.implications = [related_tag]
@@ -328,14 +362,14 @@ def test_merge_tags_moves_child_relations(tag_factory):
assert target_tag.implication_count == 0
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').suggestion_count == 1
- assert tags.get_tag_by_name('target').implication_count == 1
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").suggestion_count == 1
+ assert tags.get_tag_by_name("target").implication_count == 1
def test_merge_tags_doesnt_duplicate_child_relations(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
related_tag = tag_factory()
source_tag.suggestions = [related_tag]
source_tag.implications = [related_tag]
@@ -349,15 +383,15 @@ def test_merge_tags_doesnt_duplicate_child_relations(tag_factory):
assert target_tag.implication_count == 1
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').suggestion_count == 1
- assert tags.get_tag_by_name('target').implication_count == 1
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").suggestion_count == 1
+ assert tags.get_tag_by_name("target").implication_count == 1
def test_merge_tags_moves_parent_relations(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
- related_tag = tag_factory(names=['related'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
+ related_tag = tag_factory(names=["related"])
related_tag.suggestions = [related_tag]
related_tag.implications = [related_tag]
db.session.add_all([source_tag, target_tag, related_tag])
@@ -368,16 +402,16 @@ def test_merge_tags_moves_parent_relations(tag_factory):
assert target_tag.implication_count == 0
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('related').suggestion_count == 1
- assert tags.get_tag_by_name('related').suggestion_count == 1
- assert tags.get_tag_by_name('target').suggestion_count == 0
- assert tags.get_tag_by_name('target').implication_count == 0
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("related").suggestion_count == 1
+ assert tags.get_tag_by_name("related").suggestion_count == 1
+ assert tags.get_tag_by_name("target").suggestion_count == 0
+ assert tags.get_tag_by_name("target").implication_count == 0
def test_merge_tags_doesnt_create_relation_loop_for_children(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
source_tag.suggestions = [target_tag]
source_tag.implications = [target_tag]
db.session.add_all([source_tag, target_tag])
@@ -388,14 +422,14 @@ def test_merge_tags_doesnt_create_relation_loop_for_children(tag_factory):
assert target_tag.implication_count == 0
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').suggestion_count == 0
- assert tags.get_tag_by_name('target').implication_count == 0
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").suggestion_count == 0
+ assert tags.get_tag_by_name("target").implication_count == 0
def test_merge_tags_doesnt_create_relation_loop_for_parents(tag_factory):
- source_tag = tag_factory(names=['source'])
- target_tag = tag_factory(names=['target'])
+ source_tag = tag_factory(names=["source"])
+ target_tag = tag_factory(names=["target"])
target_tag.suggestions = [source_tag]
target_tag.implications = [source_tag]
db.session.add_all([source_tag, target_tag])
@@ -406,33 +440,35 @@ def test_merge_tags_doesnt_create_relation_loop_for_parents(tag_factory):
assert target_tag.implication_count == 1
tags.merge_tags(source_tag, target_tag)
db.session.commit()
- assert tags.try_get_tag_by_name('source') is None
- assert tags.get_tag_by_name('target').suggestion_count == 0
- assert tags.get_tag_by_name('target').implication_count == 0
+ assert tags.try_get_tag_by_name("source") is None
+ assert tags.get_tag_by_name("target").suggestion_count == 0
+ assert tags.get_tag_by_name("target").implication_count == 0
def test_create_tag(fake_datetime):
- with patch('szurubooru.func.tags.update_tag_names'), \
- patch('szurubooru.func.tags.update_tag_category_name'), \
- patch('szurubooru.func.tags.update_tag_suggestions'), \
- patch('szurubooru.func.tags.update_tag_implications'), \
- fake_datetime('1997-01-01'):
- tag = tags.create_tag(['name'], 'cat', ['sug'], ['imp'])
+ with patch("szurubooru.func.tags.update_tag_names"), patch(
+ "szurubooru.func.tags.update_tag_category_name"
+ ), patch("szurubooru.func.tags.update_tag_suggestions"), patch(
+ "szurubooru.func.tags.update_tag_implications"
+ ), fake_datetime(
+ "1997-01-01"
+ ):
+ tag = tags.create_tag(["name"], "cat", ["sug"], ["imp"])
assert tag.creation_time == datetime(1997, 1, 1)
assert tag.last_edit_time is None
- tags.update_tag_names.assert_called_once_with(tag, ['name'])
- tags.update_tag_category_name.assert_called_once_with(tag, 'cat')
- tags.update_tag_suggestions.assert_called_once_with(tag, ['sug'])
- tags.update_tag_implications.assert_called_once_with(tag, ['imp'])
+ tags.update_tag_names.assert_called_once_with(tag, ["name"])
+ tags.update_tag_category_name.assert_called_once_with(tag, "cat")
+ tags.update_tag_suggestions.assert_called_once_with(tag, ["sug"])
+ tags.update_tag_implications.assert_called_once_with(tag, ["imp"])
def test_update_tag_category_name(tag_factory):
- with patch('szurubooru.func.tag_categories.get_category_by_name'):
- tag_categories.get_category_by_name.return_value = 'mocked'
+ with patch("szurubooru.func.tag_categories.get_category_by_name"):
+ tag_categories.get_category_by_name.return_value = "mocked"
tag = tag_factory()
- tags.update_tag_category_name(tag, 'cat')
- assert tag_categories.get_category_by_name.called_once_with('cat')
- assert tag.category == 'mocked'
+ tags.update_tag_category_name(tag, "cat")
+ tag_categories.get_category_by_name.assert_called_once_with("cat")
+ assert tag.category == "mocked"
def test_update_tag_names_to_empty(tag_factory):
@@ -442,44 +478,45 @@ def test_update_tag_names_to_empty(tag_factory):
def test_update_tag_names_with_invalid_name(config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-z]*$'})
+ config_injector({"tag_name_regex": "^[a-z]*$"})
tag = tag_factory()
with pytest.raises(tags.InvalidTagNameError):
- tags.update_tag_names(tag, ['0'])
+ tags.update_tag_names(tag, ["0"])
def test_update_tag_names_with_too_long_string(config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-z]*$'})
+ config_injector({"tag_name_regex": "^[a-z]*$"})
tag = tag_factory()
with pytest.raises(tags.InvalidTagNameError):
- tags.update_tag_names(tag, ['a' * 300])
+ tags.update_tag_names(tag, ["a" * 300])
def test_update_tag_names_with_duplicate_names(config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-z]*$'})
+ config_injector({"tag_name_regex": "^[a-z]*$"})
tag = tag_factory()
- tags.update_tag_names(tag, ['a', 'A'])
- assert [tag_name.name for tag_name in tag.names] == ['a']
+ tags.update_tag_names(tag, ["a", "A"])
+ assert [tag_name.name for tag_name in tag.names] == ["a"]
def test_update_tag_names_trying_to_use_taken_name(
- config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-zA-Z]*$'})
- existing_tag = tag_factory(names=['a'])
+ config_injector, tag_factory
+):
+ config_injector({"tag_name_regex": "^[a-zA-Z]*$"})
+ existing_tag = tag_factory(names=["a"])
db.session.add(existing_tag)
tag = tag_factory()
db.session.add(tag)
db.session.flush()
with pytest.raises(tags.TagAlreadyExistsError):
- tags.update_tag_names(tag, ['a'])
+ tags.update_tag_names(tag, ["a"])
with pytest.raises(tags.TagAlreadyExistsError):
- tags.update_tag_names(tag, ['A'])
+ tags.update_tag_names(tag, ["A"])
def test_update_tag_names_reusing_own_name(config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-zA-Z]*$'})
- for name in list('aA'):
- tag = tag_factory(names=['a'])
+ config_injector({"tag_name_regex": "^[a-zA-Z]*$"})
+ for name in list("aA"):
+ tag = tag_factory(names=["a"])
db.session.add(tag)
db.session.flush()
tags.update_tag_names(tag, [name])
@@ -488,48 +525,48 @@ def test_update_tag_names_reusing_own_name(config_injector, tag_factory):
def test_update_tag_names_changing_primary_name(config_injector, tag_factory):
- config_injector({'tag_name_regex': '^[a-zA-Z]*$'})
- tag = tag_factory(names=['a', 'b'])
+ config_injector({"tag_name_regex": "^[a-zA-Z]*$"})
+ tag = tag_factory(names=["a", "b"])
db.session.add(tag)
db.session.flush()
- tags.update_tag_names(tag, ['b', 'a'])
+ tags.update_tag_names(tag, ["b", "a"])
db.session.flush()
db.session.refresh(tag)
- assert [tag_name.name for tag_name in tag.names] == ['b', 'a']
+ assert [tag_name.name for tag_name in tag.names] == ["b", "a"]
db.session.rollback()
-@pytest.mark.parametrize('attempt', ['name', 'NAME', 'alias', 'ALIAS'])
+@pytest.mark.parametrize("attempt", ["name", "NAME", "alias", "ALIAS"])
def test_update_tag_suggestions_with_itself(attempt, tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
+ tag = tag_factory(names=["name", "ALIAS"])
with pytest.raises(tags.InvalidTagRelationError):
tags.update_tag_suggestions(tag, [attempt])
def test_update_tag_suggestions(tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
- with patch('szurubooru.func.tags.get_tags_by_names'):
- tags.get_tags_by_names.return_value = ['returned tags']
- tags.update_tag_suggestions(tag, ['test'])
- assert tag.suggestions == ['returned tags']
+ tag = tag_factory(names=["name", "ALIAS"])
+ with patch("szurubooru.func.tags.get_tags_by_names"):
+ tags.get_tags_by_names.return_value = ["returned tags"]
+ tags.update_tag_suggestions(tag, ["test"])
+ assert tag.suggestions == ["returned tags"]
-@pytest.mark.parametrize('attempt', ['name', 'NAME', 'alias', 'ALIAS'])
+@pytest.mark.parametrize("attempt", ["name", "NAME", "alias", "ALIAS"])
def test_update_tag_implications_with_itself(attempt, tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
+ tag = tag_factory(names=["name", "ALIAS"])
with pytest.raises(tags.InvalidTagRelationError):
tags.update_tag_implications(tag, [attempt])
def test_update_tag_implications(tag_factory):
- tag = tag_factory(names=['name', 'ALIAS'])
- with patch('szurubooru.func.tags.get_tags_by_names'):
- tags.get_tags_by_names.return_value = ['returned tags']
- tags.update_tag_implications(tag, ['test'])
- assert tag.implications == ['returned tags']
+ tag = tag_factory(names=["name", "ALIAS"])
+ with patch("szurubooru.func.tags.get_tags_by_names"):
+ tags.get_tags_by_names.return_value = ["returned tags"]
+ tags.update_tag_implications(tag, ["test"])
+ assert tag.implications == ["returned tags"]
def test_update_tag_description(tag_factory):
tag = tag_factory()
- tags.update_tag_description(tag, 'test')
- assert tag.description == 'test'
+ tags.update_tag_description(tag, "test")
+ assert tag.description == "test"
diff --git a/server/szurubooru/tests/func/test_user_tokens.py b/server/szurubooru/tests/func/test_user_tokens.py
index 8c3577c8..0422f4d3 100644
--- a/server/szurubooru/tests/func/test_user_tokens.py
+++ b/server/szurubooru/tests/func/test_user_tokens.py
@@ -1,32 +1,35 @@
-from datetime import datetime, timedelta
-from unittest.mock import patch
-import pytest
-import pytz
import random
import string
+from datetime import datetime, timedelta
+from unittest.mock import patch
+
+import pytest
+import pytz
+
from szurubooru import db, model
-from szurubooru.func import user_tokens, users, auth, util
+from szurubooru.func import auth, user_tokens, users, util
def test_serialize_user_token(user_token_factory):
user_token = user_token_factory()
db.session.add(user_token)
db.session.flush()
- with patch('szurubooru.func.users.get_avatar_url'):
- users.get_avatar_url.return_value = 'https://example.com/avatar.png'
+ with patch("szurubooru.func.users.get_avatar_url"):
+ users.get_avatar_url.return_value = "https://example.com/avatar.png"
result = user_tokens.serialize_user_token(user_token, user_token.user)
assert result == {
- 'creationTime': datetime(1997, 1, 1, 0, 0),
- 'enabled': True,
- 'expirationTime': None,
- 'lastEditTime': None,
- 'lastUsageTime': None,
- 'note': None,
- 'token': 'dummy',
- 'user': {
- 'avatarUrl': 'https://example.com/avatar.png',
- 'name': user_token.user.name},
- 'version': 1
+ "creationTime": datetime(1997, 1, 1, 0, 0),
+ "enabled": True,
+ "expirationTime": None,
+ "lastEditTime": None,
+ "lastUsageTime": None,
+ "note": None,
+ "token": "dummy",
+ "user": {
+ "avatarUrl": "https://example.com/avatar.png",
+ "name": user_token.user.name,
+ },
+ "version": 1,
}
@@ -41,7 +44,8 @@ def test_get_by_user_and_token(user_token_factory):
db.session.flush()
db.session.commit()
result = user_tokens.get_by_user_and_token(
- user_token.user, user_token.token)
+ user_token.user, user_token.token
+ )
assert result == user_token
@@ -61,10 +65,10 @@ def test_create_user_token(user_factory):
db.session.add(user)
db.session.flush()
db.session.commit()
- with patch('szurubooru.func.auth.generate_authorization_token'):
- auth.generate_authorization_token.return_value = 'test'
+ with patch("szurubooru.func.auth.generate_authorization_token"):
+ auth.generate_authorization_token.return_value = "test"
result = user_tokens.create_user_token(user, True)
- assert result.token == 'test'
+ assert result.token == "test"
assert result.user == user
@@ -85,8 +89,8 @@ def test_update_user_token_edit_time(user_token_factory):
def test_update_user_token_note(user_token_factory):
user_token = user_token_factory()
assert user_token.note is None
- user_tokens.update_user_token_note(user_token, ' Test Note ')
- assert user_token.note == 'Test Note'
+ user_tokens.update_user_token_note(user_token, " Test Note ")
+ assert user_token.note == "Test Note"
assert user_token.last_edit_time is not None
@@ -94,8 +98,9 @@ def test_update_user_token_note_input_too_long(user_token_factory):
user_token = user_token_factory()
assert user_token.note is None
note_max_length = util.get_column_size(model.UserToken.note) + 1
- note = ''.join(
- random.choice(string.ascii_letters) for _ in range(note_max_length))
+ note = "".join(
+ random.choice(string.ascii_letters) for _ in range(note_max_length)
+ )
with pytest.raises(user_tokens.InvalidNoteError):
user_tokens.update_user_token_note(user_token, note)
@@ -104,11 +109,11 @@ def test_update_user_token_expiration_time(user_token_factory):
user_token = user_token_factory()
assert user_token.expiration_time is None
expiration_time_str = (
- (datetime.utcnow() + timedelta(days=1))
- .replace(tzinfo=pytz.utc)
+ (datetime.utcnow() + timedelta(days=1)).replace(tzinfo=pytz.utc)
).isoformat()
user_tokens.update_user_token_expiration_time(
- user_token, expiration_time_str)
+ user_token, expiration_time_str
+ )
assert user_token.expiration_time.isoformat() == expiration_time_str
assert user_token.last_edit_time is not None
@@ -117,39 +122,45 @@ def test_update_user_token_expiration_time_in_past(user_token_factory):
user_token = user_token_factory()
assert user_token.expiration_time is None
expiration_time_str = (
- (datetime.utcnow() - timedelta(days=1))
- .replace(tzinfo=pytz.utc)
+ (datetime.utcnow() - timedelta(days=1)).replace(tzinfo=pytz.utc)
).isoformat()
with pytest.raises(
- user_tokens.InvalidExpirationError,
- match='Expiration cannot happen in the past'):
+ user_tokens.InvalidExpirationError,
+ match="Expiration cannot happen in the past",
+ ):
user_tokens.update_user_token_expiration_time(
- user_token, expiration_time_str)
+ user_token, expiration_time_str
+ )
-@pytest.mark.parametrize('expiration_time_str', [
- datetime.utcnow().isoformat(),
- (datetime.utcnow() - timedelta(days=1)).ctime(),
- '1970/01/01 00:00:01.0000Z',
- '70/01/01 00:00:01.0000Z',
- ''.join(random.choice(string.ascii_letters) for _ in range(15)),
- ''.join(random.choice(string.digits) for _ in range(8))
-])
+@pytest.mark.parametrize(
+ "expiration_time_str",
+ [
+ datetime.utcnow().isoformat(),
+ (datetime.utcnow() - timedelta(days=1)).ctime(),
+ "1970/01/01 00:00:01.0000Z",
+ "70/01/01 00:00:01.0000Z",
+ "".join(random.choice(string.ascii_letters) for _ in range(15)),
+ "".join(random.choice(string.digits) for _ in range(8)),
+ ],
+)
def test_update_user_token_expiration_time_invalid_format(
- expiration_time_str, user_token_factory):
+ expiration_time_str, user_token_factory
+):
user_token = user_token_factory()
assert user_token.expiration_time is None
with pytest.raises(
- user_tokens.InvalidExpirationError,
- match='Expiration is in an invalid format %s'
- % expiration_time_str):
+ user_tokens.InvalidExpirationError,
+ match="Expiration is in an invalid format %s" % expiration_time_str,
+ ):
user_tokens.update_user_token_expiration_time(
- user_token, expiration_time_str)
+ user_token, expiration_time_str
+ )
def test_bump_usage_time(user_token_factory, fake_datetime):
user_token = user_token_factory()
- with fake_datetime('1997-01-01'):
+ with fake_datetime("1997-01-01"):
user_tokens.bump_usage_time(user_token)
assert user_token.last_usage_time == datetime(1997, 1, 1)
diff --git a/server/szurubooru/tests/func/test_users.py b/server/szurubooru/tests/func/test_users.py
index 55061276..94e9c7c1 100644
--- a/server/szurubooru/tests/func/test_users.py
+++ b/server/szurubooru/tests/func/test_users.py
@@ -1,56 +1,70 @@
-from unittest.mock import patch
from datetime import datetime
-import pytest
-from szurubooru import db, model, errors
-from szurubooru.func import auth, users, files, util
+from unittest.mock import patch
+
+import pytest
+
+from szurubooru import db, errors, model
+from szurubooru.func import auth, files, users, util
EMPTY_PIXEL = (
- b'\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00'
- b'\xff\xff\xff\x21\xf9\x04\x01\x00\x00\x01\x00\x2c\x00\x00\x00\x00'
- b'\x01\x00\x01\x00\x00\x02\x02\x4c\x01\x00\x3b')
+ b"\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00"
+ b"\xff\xff\xff\x21\xf9\x04\x01\x00\x00\x01\x00\x2c\x00\x00\x00\x00"
+ b"\x01\x00\x01\x00\x00\x02\x02\x4c\x01\x00\x3b"
+)
-@pytest.mark.parametrize('user_name', ['test', 'TEST'])
+@pytest.mark.parametrize("user_name", ["test", "TEST"])
def test_get_avatar_path(user_name):
- assert users.get_avatar_path(user_name) == 'avatars/test.png'
+ assert users.get_avatar_path(user_name) == "avatars/test.png"
-@pytest.mark.parametrize('user_name,user_email,avatar_style,expected_url', [
- (
- 'user',
- None,
- model.User.AVATAR_GRAVATAR,
- ('https://gravatar.com/avatar/' +
- 'ee11cbb19052e40b07aac0ca060c23ee?d=retro&s=100'),
- ),
- (
- None,
- 'user@example.com',
- model.User.AVATAR_GRAVATAR,
- ('https://gravatar.com/avatar/' +
- 'b58996c504c5638798eb6b511e6f49af?d=retro&s=100'),
- ),
- (
- 'user',
- 'user@example.com',
- model.User.AVATAR_GRAVATAR,
- ('https://gravatar.com/avatar/' +
- 'b58996c504c5638798eb6b511e6f49af?d=retro&s=100'),
- ),
- (
- 'user',
- None,
- model.User.AVATAR_MANUAL,
- 'http://example.com/avatars/user.png',
- ),
-])
+@pytest.mark.parametrize(
+ "user_name,user_email,avatar_style,expected_url",
+ [
+ (
+ "user",
+ None,
+ model.User.AVATAR_GRAVATAR,
+ (
+ "https://gravatar.com/avatar/"
+ + "ee11cbb19052e40b07aac0ca060c23ee?d=retro&s=100"
+ ),
+ ),
+ (
+ None,
+ "user@example.com",
+ model.User.AVATAR_GRAVATAR,
+ (
+ "https://gravatar.com/avatar/"
+ + "b58996c504c5638798eb6b511e6f49af?d=retro&s=100"
+ ),
+ ),
+ (
+ "user",
+ "user@example.com",
+ model.User.AVATAR_GRAVATAR,
+ (
+ "https://gravatar.com/avatar/"
+ + "b58996c504c5638798eb6b511e6f49af?d=retro&s=100"
+ ),
+ ),
+ (
+ "user",
+ None,
+ model.User.AVATAR_MANUAL,
+ "http://example.com/avatars/user.png",
+ ),
+ ],
+)
def test_get_avatar_url(
- user_name, user_email, avatar_style, expected_url, config_injector):
- config_injector({
- 'data_url': 'http://example.com/',
- 'thumbnails': {'avatar_width': 100},
- })
+ user_name, user_email, avatar_style, expected_url, config_injector
+):
+ config_injector(
+ {
+ "data_url": "http://example.com/",
+ "thumbnails": {"avatar_width": 100},
+ }
+ )
user = model.User()
user.name = user_name
user.email = user_email
@@ -59,23 +73,21 @@ def test_get_avatar_url(
@pytest.mark.parametrize(
- 'same_user,can_edit_any_email,force_show,expected_email',
+ "same_user,can_edit_any_email,force_show,expected_email",
[
(False, False, False, False),
- (True, False, False, 'test@example.com'),
- (False, True, False, 'test@example.com'),
- (False, False, True, 'test@example.com'),
- ])
+ (True, False, False, "test@example.com"),
+ (False, True, False, "test@example.com"),
+ (False, False, True, "test@example.com"),
+ ],
+)
def test_get_email(
- same_user,
- can_edit_any_email,
- force_show,
- expected_email,
- user_factory):
- with patch('szurubooru.func.auth.has_privilege'):
+ same_user, can_edit_any_email, force_show, expected_email, user_factory
+):
+ with patch("szurubooru.func.auth.has_privilege"):
auth.has_privilege = lambda user, name: can_edit_any_email
user = user_factory()
- user.email = 'test@example.com'
+ user.email = "test@example.com"
auth_user = user if same_user else user_factory()
db.session.add_all([user, auth_user])
db.session.flush()
@@ -83,25 +95,28 @@ def test_get_email(
@pytest.mark.parametrize(
- 'same_user,score,expected_liked_post_count,expected_disliked_post_count',
+ "same_user,score,expected_liked_post_count,expected_disliked_post_count",
[
(False, 1, False, False),
(False, -1, False, False),
(True, 1, 1, 0),
(True, -1, 0, 1),
- ])
+ ],
+)
def test_get_liked_post_count(
- same_user,
- score,
- expected_liked_post_count,
- expected_disliked_post_count,
- user_factory,
- post_factory):
+ same_user,
+ score,
+ expected_liked_post_count,
+ expected_disliked_post_count,
+ user_factory,
+ post_factory,
+):
user = user_factory()
post = post_factory()
auth_user = user if same_user else user_factory()
score = model.PostScore(
- post=post, user=user, score=score, time=datetime.now())
+ post=post, user=user, score=score, time=datetime.now()
+ )
db.session.add_all([post, user, score])
db.session.flush()
actual_liked_post_count = users.get_liked_post_count(user, auth_user)
@@ -115,16 +130,17 @@ def test_serialize_user_when_empty():
def test_serialize_user(user_factory):
- with patch('szurubooru.func.users.get_email'), \
- patch('szurubooru.func.users.get_avatar_url'), \
- patch('szurubooru.func.users.get_liked_post_count'), \
- patch('szurubooru.func.users.get_disliked_post_count'):
- users.get_email.return_value = 'test@example.com'
- users.get_avatar_url.return_value = 'https://example.com/avatar.png'
+ with patch("szurubooru.func.users.get_email"), patch(
+ "szurubooru.func.users.get_avatar_url"
+ ), patch("szurubooru.func.users.get_liked_post_count"), patch(
+ "szurubooru.func.users.get_disliked_post_count"
+ ):
+ users.get_email.return_value = "test@example.com"
+ users.get_avatar_url.return_value = "https://example.com/avatar.png"
users.get_liked_post_count.return_value = 66
users.get_disliked_post_count.return_value = 33
auth_user = user_factory()
- user = user_factory(name='dummy user')
+ user = user_factory(name="dummy user")
user.creation_time = datetime(1997, 1, 1)
user.last_edit_time = datetime(1998, 1, 1)
user.avatar_style = model.User.AVATAR_MANUAL
@@ -132,36 +148,36 @@ def test_serialize_user(user_factory):
db.session.add(user)
db.session.flush()
assert users.serialize_user(user, auth_user) == {
- 'version': 1,
- 'name': 'dummy user',
- 'email': 'test@example.com',
- 'rank': 'administrator',
- 'creationTime': datetime(1997, 1, 1, 0, 0),
- 'lastLoginTime': None,
- 'avatarStyle': 'manual',
- 'avatarUrl': 'https://example.com/avatar.png',
- 'likedPostCount': 66,
- 'dislikedPostCount': 33,
- 'commentCount': 0,
- 'favoritePostCount': 0,
- 'uploadedPostCount': 0,
+ "version": 1,
+ "name": "dummy user",
+ "email": "test@example.com",
+ "rank": "administrator",
+ "creationTime": datetime(1997, 1, 1, 0, 0),
+ "lastLoginTime": None,
+ "avatarStyle": "manual",
+ "avatarUrl": "https://example.com/avatar.png",
+ "likedPostCount": 66,
+ "dislikedPostCount": 33,
+ "commentCount": 0,
+ "favoritePostCount": 0,
+ "uploadedPostCount": 0,
}
def test_serialize_micro_user(user_factory):
- with patch('szurubooru.func.users.get_avatar_url'):
- users.get_avatar_url.return_value = 'https://example.com/avatar.png'
+ with patch("szurubooru.func.users.get_avatar_url"):
+ users.get_avatar_url.return_value = "https://example.com/avatar.png"
auth_user = user_factory()
- user = user_factory(name='dummy user')
+ user = user_factory(name="dummy user")
db.session.add(user)
db.session.flush()
assert users.serialize_micro_user(user, auth_user) == {
- 'name': 'dummy user',
- 'avatarUrl': 'https://example.com/avatar.png',
+ "name": "dummy user",
+ "avatarUrl": "https://example.com/avatar.png",
}
-@pytest.mark.parametrize('count', [0, 1, 2])
+@pytest.mark.parametrize("count", [0, 1, 2])
def test_get_user_count(count, user_factory):
for _ in range(count):
db.session.add(user_factory())
@@ -170,72 +186,73 @@ def test_get_user_count(count, user_factory):
def test_try_get_user_by_name(user_factory):
- user = user_factory(name='name', email='email')
+ user = user_factory(name="name", email="email")
db.session.add(user)
db.session.flush()
- assert users.try_get_user_by_name('non-existing') is None
- assert users.try_get_user_by_name('email') is None
- assert users.try_get_user_by_name('name') is user
- assert users.try_get_user_by_name('NAME') is user
+ assert users.try_get_user_by_name("non-existing") is None
+ assert users.try_get_user_by_name("email") is None
+ assert users.try_get_user_by_name("name") is user
+ assert users.try_get_user_by_name("NAME") is user
def test_get_user_by_name(user_factory):
- user = user_factory(name='name', email='email')
+ user = user_factory(name="name", email="email")
db.session.add(user)
db.session.flush()
with pytest.raises(users.UserNotFoundError):
- assert users.get_user_by_name('non-existing')
+ assert users.get_user_by_name("non-existing")
with pytest.raises(users.UserNotFoundError):
- assert users.get_user_by_name('email')
- assert users.get_user_by_name('name') is user
- assert users.get_user_by_name('NAME') is user
+ assert users.get_user_by_name("email")
+ assert users.get_user_by_name("name") is user
+ assert users.get_user_by_name("NAME") is user
def test_try_get_user_by_name_or_email(user_factory):
- user = user_factory(name='name', email='email')
+ user = user_factory(name="name", email="email")
db.session.add(user)
db.session.flush()
- assert users.try_get_user_by_name_or_email('non-existing') is None
- assert users.try_get_user_by_name_or_email('email') is user
- assert users.try_get_user_by_name_or_email('EMAIL') is user
- assert users.try_get_user_by_name_or_email('name') is user
- assert users.try_get_user_by_name_or_email('NAME') is user
+ assert users.try_get_user_by_name_or_email("non-existing") is None
+ assert users.try_get_user_by_name_or_email("email") is user
+ assert users.try_get_user_by_name_or_email("EMAIL") is user
+ assert users.try_get_user_by_name_or_email("name") is user
+ assert users.try_get_user_by_name_or_email("NAME") is user
def test_get_user_by_name_or_email(user_factory):
- user = user_factory(name='name', email='email')
+ user = user_factory(name="name", email="email")
db.session.add(user)
db.session.flush()
with pytest.raises(users.UserNotFoundError):
- assert users.get_user_by_name_or_email('non-existing')
- assert users.get_user_by_name_or_email('email') is user
- assert users.get_user_by_name_or_email('EMAIL') is user
- assert users.get_user_by_name_or_email('name') is user
- assert users.get_user_by_name_or_email('NAME') is user
+ assert users.get_user_by_name_or_email("non-existing")
+ assert users.get_user_by_name_or_email("email") is user
+ assert users.get_user_by_name_or_email("EMAIL") is user
+ assert users.get_user_by_name_or_email("name") is user
+ assert users.get_user_by_name_or_email("NAME") is user
def test_create_user_for_first_user(fake_datetime):
- with patch('szurubooru.func.users.update_user_name'), \
- patch('szurubooru.func.users.update_user_password'), \
- patch('szurubooru.func.users.update_user_email'), \
- fake_datetime('1997-01-01'):
- user = users.create_user('name', 'password', 'email')
+ with patch("szurubooru.func.users.update_user_name"), patch(
+ "szurubooru.func.users.update_user_password"
+ ), patch("szurubooru.func.users.update_user_email"), fake_datetime(
+ "1997-01-01"
+ ):
+ user = users.create_user("name", "password", "email")
assert user.creation_time == datetime(1997, 1, 1)
assert user.last_login_time is None
assert user.rank == model.User.RANK_ADMINISTRATOR
- users.update_user_name.assert_called_once_with(user, 'name')
- users.update_user_password.assert_called_once_with(user, 'password')
- users.update_user_email.assert_called_once_with(user, 'email')
+ users.update_user_name.assert_called_once_with(user, "name")
+ users.update_user_password.assert_called_once_with(user, "password")
+ users.update_user_email.assert_called_once_with(user, "email")
def test_create_user_for_subsequent_users(user_factory, config_injector):
- config_injector({'default_rank': 'regular'})
+ config_injector({"default_rank": "regular"})
db.session.add(user_factory())
db.session.flush()
- with patch('szurubooru.func.users.update_user_name'), \
- patch('szurubooru.func.users.update_user_email'), \
- patch('szurubooru.func.users.update_user_password'):
- user = users.create_user('name', 'password', 'email')
+ with patch("szurubooru.func.users.update_user_name"), patch(
+ "szurubooru.func.users.update_user_email"
+ ), patch("szurubooru.func.users.update_user_password"):
+ user = users.create_user("name", "password", "email")
assert user.rank == model.User.RANK_REGULAR
@@ -248,56 +265,58 @@ def test_update_user_name_with_empty_string(user_factory):
def test_update_user_name_with_too_long_string(user_factory):
user = user_factory()
with pytest.raises(users.InvalidUserNameError):
- users.update_user_name(user, 'a' * 300)
+ users.update_user_name(user, "a" * 300)
def test_update_user_name_with_invalid_name(user_factory, config_injector):
- config_injector({'user_name_regex': '^[a-z]+$'})
+ config_injector({"user_name_regex": "^[a-z]+$"})
user = user_factory()
with pytest.raises(users.InvalidUserNameError):
- users.update_user_name(user, '0')
+ users.update_user_name(user, "0")
def test_update_user_name_with_duplicate_name(user_factory, config_injector):
- config_injector({'user_name_regex': '^[a-z]+$'})
+ config_injector({"user_name_regex": "^[a-z]+$"})
user = user_factory()
- existing_user = user_factory(name='dummy')
+ existing_user = user_factory(name="dummy")
db.session.add(existing_user)
db.session.flush()
with pytest.raises(users.UserAlreadyExistsError):
- users.update_user_name(user, 'dummy')
+ users.update_user_name(user, "dummy")
def test_update_user_name_reusing_own_name(user_factory, config_injector):
- config_injector({'user_name_regex': '^[a-z]+$'})
- user = user_factory(name='dummy')
+ config_injector({"user_name_regex": "^[a-z]+$"})
+ user = user_factory(name="dummy")
db.session.add(user)
db.session.flush()
- with patch('szurubooru.func.files.has'):
+ with patch("szurubooru.func.files.has"):
files.has.return_value = False
- users.update_user_name(user, 'dummy')
+ users.update_user_name(user, "dummy")
db.session.flush()
- assert users.try_get_user_by_name('dummy') is user
+ assert users.try_get_user_by_name("dummy") is user
def test_update_user_name_for_new_user(user_factory, config_injector):
- config_injector({'user_name_regex': '^[a-z]+$'})
+ config_injector({"user_name_regex": "^[a-z]+$"})
user = user_factory()
- with patch('szurubooru.func.files.has'):
+ with patch("szurubooru.func.files.has"):
files.has.return_value = False
- users.update_user_name(user, 'dummy')
- assert user.name == 'dummy'
+ users.update_user_name(user, "dummy")
+ assert user.name == "dummy"
def test_update_user_name_moves_avatar(user_factory, config_injector):
- config_injector({'user_name_regex': '^[a-z]+$'})
- user = user_factory(name='old')
- with patch('szurubooru.func.files.has'), \
- patch('szurubooru.func.files.move'):
+ config_injector({"user_name_regex": "^[a-z]+$"})
+ user = user_factory(name="old")
+ with patch("szurubooru.func.files.has"), patch(
+ "szurubooru.func.files.move"
+ ):
files.has.return_value = True
- users.update_user_name(user, 'new')
+ users.update_user_name(user, "new")
files.move.assert_called_once_with(
- 'avatars/old.png', 'avatars/new.png')
+ "avatars/old.png", "avatars/new.png"
+ )
def test_update_user_password_with_empty_string(user_factory):
@@ -307,72 +326,74 @@ def test_update_user_password_with_empty_string(user_factory):
def test_update_user_password_with_invalid_string(
- user_factory, config_injector):
- config_injector({'password_regex': '^[a-z]+$'})
+ user_factory, config_injector
+):
+ config_injector({"password_regex": "^[a-z]+$"})
user = user_factory()
with pytest.raises(users.InvalidPasswordError):
- users.update_user_password(user, '0')
+ users.update_user_password(user, "0")
def test_update_user_password(user_factory, config_injector):
- config_injector({'password_regex': '^[a-z]+$'})
+ config_injector({"password_regex": "^[a-z]+$"})
user = user_factory()
- with patch('szurubooru.func.auth.create_password'), \
- patch('szurubooru.func.auth.get_password_hash'):
- auth.create_password.return_value = 'salt'
- auth.get_password_hash.return_value = ('hash', 3)
- users.update_user_password(user, 'a')
- assert user.password_salt == 'salt'
- assert user.password_hash == 'hash'
+ with patch("szurubooru.func.auth.create_password"), patch(
+ "szurubooru.func.auth.get_password_hash"
+ ):
+ auth.create_password.return_value = "salt"
+ auth.get_password_hash.return_value = ("hash", 3)
+ users.update_user_password(user, "a")
+ assert user.password_salt == "salt"
+ assert user.password_hash == "hash"
assert user.password_revision == 3
def test_update_user_email_with_too_long_string(user_factory):
user = user_factory()
with pytest.raises(users.InvalidEmailError):
- users.update_user_email(user, 'a' * 300)
+ users.update_user_email(user, "a" * 300)
def test_update_user_email_with_invalid_email(user_factory):
user = user_factory()
- with patch('szurubooru.func.util.is_valid_email'):
+ with patch("szurubooru.func.util.is_valid_email"):
util.is_valid_email.return_value = False
with pytest.raises(users.InvalidEmailError):
- users.update_user_email(user, 'a')
+ users.update_user_email(user, "a")
def test_update_user_email_with_empty_string(user_factory):
user = user_factory()
- with patch('szurubooru.func.util.is_valid_email'):
+ with patch("szurubooru.func.util.is_valid_email"):
util.is_valid_email.return_value = True
- users.update_user_email(user, '')
+ users.update_user_email(user, "")
assert user.email is None
def test_update_user_email(user_factory):
user = user_factory()
- with patch('szurubooru.func.util.is_valid_email'):
+ with patch("szurubooru.func.util.is_valid_email"):
util.is_valid_email.return_value = True
- users.update_user_email(user, 'a')
- assert user.email == 'a'
+ users.update_user_email(user, "a")
+ assert user.email == "a"
def test_update_user_rank_with_empty_string(user_factory):
user = user_factory()
auth_user = user_factory()
with pytest.raises(users.InvalidRankError):
- users.update_user_rank(user, '', auth_user)
+ users.update_user_rank(user, "", auth_user)
def test_update_user_rank_with_invalid_string(user_factory):
user = user_factory()
auth_user = user_factory()
with pytest.raises(users.InvalidRankError):
- users.update_user_rank(user, 'invalid', auth_user)
+ users.update_user_rank(user, "invalid", auth_user)
with pytest.raises(users.InvalidRankError):
- users.update_user_rank(user, 'anonymous', auth_user)
+ users.update_user_rank(user, "anonymous", auth_user)
with pytest.raises(users.InvalidRankError):
- users.update_user_rank(user, 'nobody', auth_user)
+ users.update_user_rank(user, "nobody", auth_user)
def test_update_user_rank_with_higher_rank_than_possible(user_factory):
@@ -382,9 +403,9 @@ def test_update_user_rank_with_higher_rank_than_possible(user_factory):
auth_user = user_factory()
auth_user.rank = model.User.RANK_ANONYMOUS
with pytest.raises(errors.AuthError):
- users.update_user_rank(user, 'regular', auth_user)
+ users.update_user_rank(user, "regular", auth_user)
with pytest.raises(errors.AuthError):
- users.update_user_rank(auth_user, 'regular', auth_user)
+ users.update_user_rank(auth_user, "regular", auth_user)
def test_update_user_rank(user_factory):
@@ -393,8 +414,8 @@ def test_update_user_rank(user_factory):
user = user_factory()
auth_user = user_factory()
auth_user.rank = model.User.RANK_ADMINISTRATOR
- users.update_user_rank(user, 'regular', auth_user)
- users.update_user_rank(auth_user, 'regular', auth_user)
+ users.update_user_rank(user, "regular", auth_user)
+ users.update_user_rank(auth_user, "regular", auth_user)
assert user.rank == model.User.RANK_REGULAR
assert auth_user.rank == model.User.RANK_REGULAR
@@ -402,54 +423,57 @@ def test_update_user_rank(user_factory):
def test_update_user_avatar_with_invalid_style(user_factory):
user = user_factory()
with pytest.raises(users.InvalidAvatarError):
- users.update_user_avatar(user, 'invalid', b'')
+ users.update_user_avatar(user, "invalid", b"")
def test_update_user_avatar_to_gravatar(user_factory):
user = user_factory()
- users.update_user_avatar(user, 'gravatar')
+ users.update_user_avatar(user, "gravatar")
assert user.avatar_style == model.User.AVATAR_GRAVATAR
def test_update_user_avatar_to_empty_manual(user_factory):
user = user_factory()
- with patch('szurubooru.func.files.has'), \
- pytest.raises(users.InvalidAvatarError):
+ with patch("szurubooru.func.files.has"), pytest.raises(
+ users.InvalidAvatarError
+ ):
files.has.return_value = False
- users.update_user_avatar(user, 'manual', b'')
+ users.update_user_avatar(user, "manual", b"")
def test_update_user_avatar_to_previous_manual(user_factory):
user = user_factory()
- with patch('szurubooru.func.files.has'):
+ with patch("szurubooru.func.files.has"):
files.has.return_value = True
- users.update_user_avatar(user, 'manual', b'')
+ users.update_user_avatar(user, "manual", b"")
def test_update_user_avatar_to_new_manual(user_factory, config_injector):
config_injector(
- {'thumbnails': {'avatar_width': 500, 'avatar_height': 500}})
+ {"thumbnails": {"avatar_width": 500, "avatar_height": 500}}
+ )
user = user_factory()
- with patch('szurubooru.func.files.save'):
- users.update_user_avatar(user, 'manual', EMPTY_PIXEL)
+ with patch("szurubooru.func.files.save"):
+ users.update_user_avatar(user, "manual", EMPTY_PIXEL)
assert user.avatar_style == model.User.AVATAR_MANUAL
assert files.save.called
def test_bump_user_login_time(user_factory, fake_datetime):
user = user_factory()
- with fake_datetime('1997-01-01'):
+ with fake_datetime("1997-01-01"):
users.bump_user_login_time(user)
assert user.last_login_time == datetime(1997, 1, 1)
def test_reset_user_password(user_factory):
- with patch('szurubooru.func.auth.create_password'), \
- patch('szurubooru.func.auth.get_password_hash'):
+ with patch("szurubooru.func.auth.create_password"), patch(
+ "szurubooru.func.auth.get_password_hash"
+ ):
user = user_factory()
- auth.create_password.return_value = 'salt'
- auth.get_password_hash.return_value = ('hash', 3)
+ auth.create_password.return_value = "salt"
+ auth.get_password_hash.return_value = ("hash", 3)
users.reset_user_password(user)
- assert user.password_salt == 'salt'
- assert user.password_hash == 'hash'
+ assert user.password_salt == "salt"
+ assert user.password_hash == "hash"
assert user.password_revision == 3
diff --git a/server/szurubooru/tests/func/test_util.py b/server/szurubooru/tests/func/test_util.py
index 1307ab90..f42ba29e 100644
--- a/server/szurubooru/tests/func/test_util.py
+++ b/server/szurubooru/tests/func/test_util.py
@@ -1,40 +1,47 @@
from datetime import datetime
+
import pytest
+
from szurubooru import errors
from szurubooru.func import util
-
dt = datetime
def test_parsing_empty_date_time():
with pytest.raises(errors.ValidationError):
- util.parse_time_range('')
+ util.parse_time_range("")
-@pytest.mark.parametrize('output,input', [
- ((dt(1997, 1, 2, 0, 0, 0), dt(1997, 1, 2, 23, 59, 59)), 'today'),
- ((dt(1997, 1, 1, 0, 0, 0), dt(1997, 1, 1, 23, 59, 59)), 'yesterday'),
- ((dt(1999, 1, 1, 0, 0, 0), dt(1999, 12, 31, 23, 59, 59)), '1999'),
- ((dt(1999, 2, 1, 0, 0, 0), dt(1999, 2, 28, 23, 59, 59)), '1999-2'),
- ((dt(1999, 2, 1, 0, 0, 0), dt(1999, 2, 28, 23, 59, 59)), '1999-02'),
- ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), '1999-2-6'),
- ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), '1999-02-6'),
- ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), '1999-2-06'),
- ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), '1999-02-06'),
-])
+@pytest.mark.parametrize(
+ "output,input",
+ [
+ ((dt(1997, 1, 2, 0, 0, 0), dt(1997, 1, 2, 23, 59, 59)), "today"),
+ ((dt(1997, 1, 1, 0, 0, 0), dt(1997, 1, 1, 23, 59, 59)), "yesterday"),
+ ((dt(1999, 1, 1, 0, 0, 0), dt(1999, 12, 31, 23, 59, 59)), "1999"),
+ ((dt(1999, 2, 1, 0, 0, 0), dt(1999, 2, 28, 23, 59, 59)), "1999-2"),
+ ((dt(1999, 2, 1, 0, 0, 0), dt(1999, 2, 28, 23, 59, 59)), "1999-02"),
+ ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), "1999-2-6"),
+ ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), "1999-02-6"),
+ ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), "1999-2-06"),
+ ((dt(1999, 2, 6, 0, 0, 0), dt(1999, 2, 6, 23, 59, 59)), "1999-02-06"),
+ ],
+)
def test_parsing_date_time(fake_datetime, input, output):
- with fake_datetime('1997-01-02 03:04:05'):
+ with fake_datetime("1997-01-02 03:04:05"):
assert util.parse_time_range(input) == output
-@pytest.mark.parametrize('input,output', [
- ([], []),
- (['a', 'b', 'c'], ['a', 'b', 'c']),
- (['a', 'b', 'a'], ['a', 'b']),
- (['a', 'a', 'b'], ['a', 'b']),
- (['a', 'A', 'b'], ['a', 'b']),
- (['a', 'A', 'b', 'B'], ['a', 'b']),
-])
+@pytest.mark.parametrize(
+ "input,output",
+ [
+ ([], []),
+ (["a", "b", "c"], ["a", "b", "c"]),
+ (["a", "b", "a"], ["a", "b"]),
+ (["a", "a", "b"], ["a", "b"]),
+ (["a", "A", "b"], ["a", "b"]),
+ (["a", "A", "b", "B"], ["a", "b"]),
+ ],
+)
def test_icase_unique(input, output):
assert util.icase_unique(input) == output
diff --git a/server/szurubooru/tests/middleware/test_authenticator.py b/server/szurubooru/tests/middleware/test_authenticator.py
index be21a931..9a4a3cc6 100644
--- a/server/szurubooru/tests/middleware/test_authenticator.py
+++ b/server/szurubooru/tests/middleware/test_authenticator.py
@@ -1,7 +1,9 @@
from unittest.mock import patch
+
import pytest
+
from szurubooru import db
-from szurubooru.func import auth, users, user_tokens
+from szurubooru.func import auth, user_tokens, users
from szurubooru.middleware import authenticator
from szurubooru.rest import errors
@@ -17,14 +19,12 @@ def test_process_request_bump_login(context_factory, user_factory):
db.session.add(user)
db.session.flush()
ctx = context_factory(
- headers={
- 'Authorization': 'Basic dGVzdFVzZXI6dGVzdFRva2Vu'
- },
- params={
- 'bump-login': 'true'
- })
- with patch('szurubooru.func.auth.is_valid_password'), \
- patch('szurubooru.func.users.get_user_by_name'):
+ headers={"Authorization": "Basic dGVzdFVzZXI6dGVzdFRva2Vu"},
+ params={"bump-login": "true"},
+ )
+ with patch("szurubooru.func.auth.is_valid_password"), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ):
users.get_user_by_name.return_value = user
auth.is_valid_password.return_value = True
authenticator.process_request(ctx)
@@ -32,20 +32,18 @@ def test_process_request_bump_login(context_factory, user_factory):
def test_process_request_bump_login_with_token(
- context_factory, user_token_factory):
+ context_factory, user_token_factory
+):
user_token = user_token_factory()
db.session.add(user_token)
db.session.flush()
ctx = context_factory(
- headers={
- 'Authorization': 'Token dGVzdFVzZXI6dGVzdFRva2Vu'
- },
- params={
- 'bump-login': 'true'
- })
- with patch('szurubooru.func.auth.is_valid_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- patch('szurubooru.func.user_tokens.get_by_user_and_token'):
+ headers={"Authorization": "Token dGVzdFVzZXI6dGVzdFRva2Vu"},
+ params={"bump-login": "true"},
+ )
+ with patch("szurubooru.func.auth.is_valid_token"), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ), patch("szurubooru.func.user_tokens.get_by_user_and_token"):
users.get_user_by_name.return_value = user_token.user
user_tokens.get_by_user_and_token.return_value = user_token
auth.is_valid_token.return_value = True
@@ -57,11 +55,11 @@ def test_process_request_bump_login_with_token(
def test_process_request_basic_auth_valid(context_factory, user_factory):
user = user_factory()
ctx = context_factory(
- headers={
- 'Authorization': 'Basic dGVzdFVzZXI6dGVzdFBhc3N3b3Jk'
- })
- with patch('szurubooru.func.auth.is_valid_password'), \
- patch('szurubooru.func.users.get_user_by_name'):
+ headers={"Authorization": "Basic dGVzdFVzZXI6dGVzdFBhc3N3b3Jk"}
+ )
+ with patch("szurubooru.func.auth.is_valid_password"), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ):
users.get_user_by_name.return_value = user
auth.is_valid_password.return_value = True
authenticator.process_request(ctx)
@@ -71,12 +69,11 @@ def test_process_request_basic_auth_valid(context_factory, user_factory):
def test_process_request_token_auth_valid(context_factory, user_token_factory):
user_token = user_token_factory()
ctx = context_factory(
- headers={
- 'Authorization': 'Token dGVzdFVzZXI6dGVzdFRva2Vu'
- })
- with patch('szurubooru.func.auth.is_valid_token'), \
- patch('szurubooru.func.users.get_user_by_name'), \
- patch('szurubooru.func.user_tokens.get_by_user_and_token'):
+ headers={"Authorization": "Token dGVzdFVzZXI6dGVzdFRva2Vu"}
+ )
+ with patch("szurubooru.func.auth.is_valid_token"), patch(
+ "szurubooru.func.users.get_user_by_name"
+ ), patch("szurubooru.func.user_tokens.get_by_user_and_token"):
users.get_user_by_name.return_value = user_token.user
user_tokens.get_by_user_and_token.return_value = user_token
auth.is_valid_token.return_value = True
@@ -85,9 +82,6 @@ def test_process_request_token_auth_valid(context_factory, user_token_factory):
def test_process_request_bad_header(context_factory):
- ctx = context_factory(
- headers={
- 'Authorization': 'Secret SuperSecretValue'
- })
+ ctx = context_factory(headers={"Authorization": "Secret SuperSecretValue"})
with pytest.raises(errors.HttpBadRequest):
authenticator.process_request(ctx)
diff --git a/server/szurubooru/tests/model/test_comment.py b/server/szurubooru/tests/model/test_comment.py
index ffd51893..fcbf1763 100644
--- a/server/szurubooru/tests/model/test_comment.py
+++ b/server/szurubooru/tests/model/test_comment.py
@@ -1,4 +1,5 @@
from datetime import datetime
+
from szurubooru import db, model
@@ -6,7 +7,7 @@ def test_saving_comment(user_factory, post_factory):
user = user_factory()
post = post_factory()
comment = model.Comment()
- comment.text = 'long text' * 1000
+ comment.text = "long text" * 1000
comment.user = user
comment.post = post
comment.creation_time = datetime(1997, 1, 1)
@@ -17,7 +18,7 @@ def test_saving_comment(user_factory, post_factory):
db.session.refresh(comment)
assert not db.session.dirty
assert comment.user is not None and comment.user.user_id is not None
- assert comment.text == 'long text' * 1000
+ assert comment.text == "long text" * 1000
assert comment.creation_time == datetime(1997, 1, 1)
assert comment.last_edit_time == datetime(1998, 1, 1)
diff --git a/server/szurubooru/tests/model/test_pool.py b/server/szurubooru/tests/model/test_pool.py
index 589e9ba6..bec95605 100644
--- a/server/szurubooru/tests/model/test_pool.py
+++ b/server/szurubooru/tests/model/test_pool.py
@@ -1,24 +1,24 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, model
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'delete_source_files': False,
- 'secret': 'secret',
- 'data_dir': ''
- })
+ config_injector(
+ {"delete_source_files": False, "secret": "secret", "data_dir": ""}
+ )
def test_saving_pool(pool_factory, post_factory):
post1 = post_factory()
post2 = post_factory()
pool = model.Pool()
- pool.names = [model.PoolName('alias1', 0), model.PoolName('alias2', 1)]
+ pool.names = [model.PoolName("alias1", 0), model.PoolName("alias2", 1)]
pool.posts = []
- pool.category = model.PoolCategory('category')
+ pool.category = model.PoolCategory("category")
pool.creation_time = datetime(1997, 1, 1)
pool.last_edit_time = datetime(1998, 1, 1)
db.session.add_all([pool, post1, post2])
@@ -30,13 +30,13 @@ def test_saving_pool(pool_factory, post_factory):
db.session.commit()
pool = (
- db.session
- .query(model.Pool)
+ db.session.query(model.Pool)
.join(model.PoolName)
- .filter(model.PoolName.name == 'alias1')
- .one())
- assert [pool_name.name for pool_name in pool.names] == ['alias1', 'alias2']
- assert pool.category.name == 'category'
+ .filter(model.PoolName.name == "alias1")
+ .one()
+ )
+ assert [pool_name.name for pool_name in pool.names] == ["alias1", "alias2"]
+ assert pool.category.name == "category"
assert pool.creation_time == datetime(1997, 1, 1)
assert pool.last_edit_time == datetime(1998, 1, 1)
assert [post.post_id for post in pool.posts] == [1, 2]
@@ -46,9 +46,9 @@ def test_cascade_deletions(pool_factory, post_factory):
post1 = post_factory()
post2 = post_factory()
pool = model.Pool()
- pool.names = [model.PoolName('alias1', 0), model.PoolName('alias2', 1)]
+ pool.names = [model.PoolName("alias1", 0), model.PoolName("alias2", 1)]
pool.posts = []
- pool.category = model.PoolCategory('category')
+ pool.category = model.PoolCategory("category")
pool.creation_time = datetime(1997, 1, 1)
pool.last_edit_time = datetime(1998, 1, 1)
db.session.add_all([pool, post1, post2])
diff --git a/server/szurubooru/tests/model/test_post.py b/server/szurubooru/tests/model/test_post.py
index 47c088d3..f01455d3 100644
--- a/server/szurubooru/tests/model/test_post.py
+++ b/server/szurubooru/tests/model/test_post.py
@@ -1,15 +1,15 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, model
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'secret': 'secret',
- 'data_dir': '',
- 'delete_source_files': False
- })
+ config_injector(
+ {"secret": "secret", "data_dir": "", "delete_source_files": False}
+ )
def test_saving_post(post_factory, user_factory, tag_factory):
@@ -19,12 +19,12 @@ def test_saving_post(post_factory, user_factory, tag_factory):
related_post1 = post_factory()
related_post2 = post_factory()
post = model.Post()
- post.safety = 'safety'
- post.type = 'type'
- post.checksum = 'deadbeef'
+ post.safety = "safety"
+ post.type = "type"
+ post.checksum = "deadbeef"
post.creation_time = datetime(1997, 1, 1)
post.last_edit_time = datetime(1998, 1, 1)
- post.mime_type = 'application/whatever'
+ post.mime_type = "application/whatever"
db.session.add_all([user, tag1, tag2, related_post1, related_post2, post])
post.user = user
@@ -39,9 +39,9 @@ def test_saving_post(post_factory, user_factory, tag_factory):
db.session.refresh(related_post2)
assert not db.session.dirty
assert post.user.user_id is not None
- assert post.safety == 'safety'
- assert post.type == 'type'
- assert post.checksum == 'deadbeef'
+ assert post.safety == "safety"
+ assert post.type == "type"
+ assert post.checksum == "deadbeef"
assert post.creation_time == datetime(1997, 1, 1)
assert post.last_edit_time == datetime(1998, 1, 1)
assert len(post.relations) == 2
@@ -51,7 +51,8 @@ def test_saving_post(post_factory, user_factory, tag_factory):
def test_cascade_deletions(
- post_factory, user_factory, tag_factory, comment_factory):
+ post_factory, user_factory, tag_factory, comment_factory
+):
user = user_factory()
tag1 = tag_factory()
tag2 = tag_factory()
@@ -59,8 +60,9 @@ def test_cascade_deletions(
related_post2 = post_factory()
post = post_factory()
comment = comment_factory(post=post, user=user)
- db.session.add_all([
- user, tag1, tag2, post, related_post1, related_post2, comment])
+ db.session.add_all(
+ [user, tag1, tag2, post, related_post1, related_post2, comment]
+ )
db.session.flush()
score = model.PostScore()
@@ -78,11 +80,11 @@ def test_cascade_deletions(
feature.time = datetime(1997, 1, 1)
note = model.PostNote()
note.post = post
- note.polygon = ''
- note.text = ''
+ note.polygon = ""
+ note.text = ""
signature = model.PostSignature()
signature.post = post
- signature.signature = b'testvalue'
+ signature.signature = b"testvalue"
signature.words = list(range(50))
db.session.add_all([score, favorite, feature, note, signature])
db.session.flush()
diff --git a/server/szurubooru/tests/model/test_tag.py b/server/szurubooru/tests/model/test_tag.py
index b677eeff..9332f0da 100644
--- a/server/szurubooru/tests/model/test_tag.py
+++ b/server/szurubooru/tests/model/test_tag.py
@@ -1,27 +1,27 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, model
@pytest.fixture(autouse=True)
def inject_config(config_injector):
- config_injector({
- 'delete_source_files': False,
- 'secret': 'secret',
- 'data_dir': ''
- })
+ config_injector(
+ {"delete_source_files": False, "secret": "secret", "data_dir": ""}
+ )
def test_saving_tag(tag_factory):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- imp1 = tag_factory(names=['imp1'])
- imp2 = tag_factory(names=['imp2'])
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ imp1 = tag_factory(names=["imp1"])
+ imp2 = tag_factory(names=["imp2"])
tag = model.Tag()
- tag.names = [model.TagName('alias1', 0), model.TagName('alias2', 1)]
+ tag.names = [model.TagName("alias1", 0), model.TagName("alias2", 1)]
tag.suggestions = []
tag.implications = []
- tag.category = model.TagCategory('category')
+ tag.category = model.TagCategory("category")
tag.creation_time = datetime(1997, 1, 1)
tag.last_edit_time = datetime(1998, 1, 1)
db.session.add_all([tag, sug1, sug2, imp1, imp2])
@@ -39,31 +39,35 @@ def test_saving_tag(tag_factory):
db.session.commit()
tag = (
- db.session
- .query(model.Tag)
+ db.session.query(model.Tag)
.join(model.TagName)
- .filter(model.TagName.name == 'alias1')
- .one())
- assert [tag_name.name for tag_name in tag.names] == ['alias1', 'alias2']
- assert tag.category.name == 'category'
+ .filter(model.TagName.name == "alias1")
+ .one()
+ )
+ assert [tag_name.name for tag_name in tag.names] == ["alias1", "alias2"]
+ assert tag.category.name == "category"
assert tag.creation_time == datetime(1997, 1, 1)
assert tag.last_edit_time == datetime(1998, 1, 1)
- assert [relation.names[0].name for relation in tag.suggestions] \
- == ['sug1', 'sug2']
- assert [relation.names[0].name for relation in tag.implications] \
- == ['imp1', 'imp2']
+ assert [relation.names[0].name for relation in tag.suggestions] == [
+ "sug1",
+ "sug2",
+ ]
+ assert [relation.names[0].name for relation in tag.implications] == [
+ "imp1",
+ "imp2",
+ ]
def test_cascade_deletions(tag_factory):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- imp1 = tag_factory(names=['imp1'])
- imp2 = tag_factory(names=['imp2'])
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ imp1 = tag_factory(names=["imp1"])
+ imp2 = tag_factory(names=["imp2"])
tag = model.Tag()
- tag.names = [model.TagName('alias1', 0), model.TagName('alias2', 1)]
+ tag.names = [model.TagName("alias1", 0), model.TagName("alias2", 1)]
tag.suggestions = []
tag.implications = []
- tag.category = model.TagCategory('category')
+ tag.category = model.TagCategory("category")
tag.creation_time = datetime(1997, 1, 1)
tag.last_edit_time = datetime(1998, 1, 1)
tag.post_count = 1
diff --git a/server/szurubooru/tests/model/test_user.py b/server/szurubooru/tests/model/test_user.py
index ced3a5ef..20279a00 100644
--- a/server/szurubooru/tests/model/test_user.py
+++ b/server/szurubooru/tests/model/test_user.py
@@ -1,25 +1,26 @@
from datetime import datetime
+
from szurubooru import db, model
def test_saving_user():
user = model.User()
- user.name = 'name'
- user.password_salt = 'salt'
- user.password_hash = 'hash'
- user.email = 'email'
- user.rank = 'rank'
+ user.name = "name"
+ user.password_salt = "salt"
+ user.password_hash = "hash"
+ user.email = "email"
+ user.rank = "rank"
user.creation_time = datetime(1997, 1, 1)
user.avatar_style = model.User.AVATAR_GRAVATAR
db.session.add(user)
db.session.flush()
db.session.refresh(user)
assert not db.session.dirty
- assert user.name == 'name'
- assert user.password_salt == 'salt'
- assert user.password_hash == 'hash'
- assert user.email == 'email'
- assert user.rank == 'rank'
+ assert user.name == "name"
+ assert user.password_salt == "salt"
+ assert user.password_hash == "hash"
+ assert user.email == "email"
+ assert user.rank == "rank"
assert user.creation_time == datetime(1997, 1, 1)
assert user.avatar_style == model.User.AVATAR_GRAVATAR
@@ -43,10 +44,9 @@ def test_comment_count(user_factory, comment_factory):
db.session.add(user)
db.session.flush()
assert user.comment_count == 0
- db.session.add_all([
- comment_factory(user=user),
- comment_factory(),
- ])
+ db.session.add_all(
+ [comment_factory(user=user), comment_factory(),]
+ )
db.session.flush()
db.session.refresh(user)
assert user.comment_count == 1
@@ -60,10 +60,12 @@ def test_favorite_count(user_factory, post_factory):
assert user1.comment_count == 0
post1 = post_factory()
post2 = post_factory()
- db.session.add_all([
- model.PostFavorite(post=post1, time=datetime.utcnow(), user=user1),
- model.PostFavorite(post=post2, time=datetime.utcnow(), user=user2),
- ])
+ db.session.add_all(
+ [
+ model.PostFavorite(post=post1, time=datetime.utcnow(), user=user1),
+ model.PostFavorite(post=post2, time=datetime.utcnow(), user=user2),
+ ]
+ )
db.session.flush()
db.session.refresh(user1)
assert user1.favorite_post_count == 1
@@ -78,12 +80,16 @@ def test_liked_post_count(user_factory, post_factory):
assert user1.disliked_post_count == 0
post1 = post_factory()
post2 = post_factory()
- db.session.add_all([
- model.PostScore(
- post=post1, time=datetime.utcnow(), user=user1, score=1),
- model.PostScore(
- post=post2, time=datetime.utcnow(), user=user2, score=1),
- ])
+ db.session.add_all(
+ [
+ model.PostScore(
+ post=post1, time=datetime.utcnow(), user=user1, score=1
+ ),
+ model.PostScore(
+ post=post2, time=datetime.utcnow(), user=user2, score=1
+ ),
+ ]
+ )
db.session.flush()
db.session.refresh(user1)
assert user1.liked_post_count == 1
@@ -99,12 +105,16 @@ def test_disliked_post_count(user_factory, post_factory):
assert user1.disliked_post_count == 0
post1 = post_factory()
post2 = post_factory()
- db.session.add_all([
- model.PostScore(
- post=post1, time=datetime.utcnow(), user=user1, score=-1),
- model.PostScore(
- post=post2, time=datetime.utcnow(), user=user2, score=1),
- ])
+ db.session.add_all(
+ [
+ model.PostScore(
+ post=post1, time=datetime.utcnow(), user=user1, score=-1
+ ),
+ model.PostScore(
+ post=post2, time=datetime.utcnow(), user=user2, score=1
+ ),
+ ]
+ )
db.session.flush()
db.session.refresh(user1)
assert user1.liked_post_count == 0
@@ -147,10 +157,10 @@ def test_cascade_deletions(post_factory, user_factory, comment_factory):
snapshot = model.Snapshot()
snapshot.user = user
snapshot.creation_time = datetime(1997, 1, 1)
- snapshot.resource_type = '-'
+ snapshot.resource_type = "-"
snapshot.resource_pkey = 1
- snapshot.resource_name = '-'
- snapshot.operation = '-'
+ snapshot.resource_name = "-"
+ snapshot.operation = "-"
db.session.add_all([user, post, comment, snapshot])
db.session.commit()
diff --git a/server/szurubooru/tests/model/test_user_token.py b/server/szurubooru/tests/model/test_user_token.py
index 0280082e..cddb5cf5 100644
--- a/server/szurubooru/tests/model/test_user_token.py
+++ b/server/szurubooru/tests/model/test_user_token.py
@@ -1,4 +1,5 @@
from datetime import datetime
+
from szurubooru import db
@@ -9,6 +10,6 @@ def test_saving_user_token(user_token_factory):
db.session.refresh(user_token)
assert not db.session.dirty
assert user_token.user is not None
- assert user_token.token == 'dummy'
+ assert user_token.token == "dummy"
assert user_token.enabled is True
assert user_token.creation_time == datetime(1997, 1, 1)
diff --git a/server/szurubooru/tests/rest/test_context.py b/server/szurubooru/tests/rest/test_context.py
index 0de1e320..ec652b18 100644
--- a/server/szurubooru/tests/rest/test_context.py
+++ b/server/szurubooru/tests/rest/test_context.py
@@ -1,33 +1,38 @@
import unittest.mock
+
import pytest
-from szurubooru import rest, errors
+
+from szurubooru import errors, rest
from szurubooru.func import net
def test_has_param():
- ctx = rest.Context(env={}, method=None, url=None, params={'key': 'value'})
- assert ctx.has_param('key')
- assert not ctx.has_param('non-existing')
+ ctx = rest.Context(env={}, method=None, url=None, params={"key": "value"})
+ assert ctx.has_param("key")
+ assert not ctx.has_param("non-existing")
def test_get_file():
ctx = rest.Context(
- env={}, method=None, url=None, files={'key': b'content'})
- assert ctx.get_file('key') == b'content'
+ env={}, method=None, url=None, files={"key": b"content"}
+ )
+ assert ctx.get_file("key") == b"content"
with pytest.raises(errors.ValidationError):
- ctx.get_file('non-existing')
+ ctx.get_file("non-existing")
def test_get_file_from_url():
- with unittest.mock.patch('szurubooru.func.net.download'):
- net.download.return_value = b'content'
+ with unittest.mock.patch("szurubooru.func.net.download"):
+ net.download.return_value = b"content"
ctx = rest.Context(
- env={}, method=None, url=None, params={'keyUrl': 'example.com'})
- assert ctx.get_file('key') == b'content'
+ env={}, method=None, url=None, params={"keyUrl": "example.com"}
+ )
+ assert ctx.get_file("key") == b"content"
net.download.assert_called_once_with(
- 'example.com', use_video_downloader=False)
+ "example.com", use_video_downloader=False
+ )
with pytest.raises(errors.ValidationError):
- assert ctx.get_file('non-existing')
+ assert ctx.get_file("non-existing")
def test_getting_list_parameter():
@@ -35,12 +40,13 @@ def test_getting_list_parameter():
env={},
method=None,
url=None,
- params={'key': 'value', 'list': ['1', '2', '3']})
- assert ctx.get_param_as_list('key') == ['value']
- assert ctx.get_param_as_list('list') == ['1', '2', '3']
+ params={"key": "value", "list": ["1", "2", "3"]},
+ )
+ assert ctx.get_param_as_list("key") == ["value"]
+ assert ctx.get_param_as_list("list") == ["1", "2", "3"]
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_list('non-existing')
- assert ctx.get_param_as_list('non-existing', default=['def']) == ['def']
+ ctx.get_param_as_list("non-existing")
+ assert ctx.get_param_as_list("non-existing", default=["def"]) == ["def"]
def test_getting_string_parameter():
@@ -48,12 +54,13 @@ def test_getting_string_parameter():
env={},
method=None,
url=None,
- params={'key': 'value', 'list': ['1', '2', '3']})
- assert ctx.get_param_as_string('key') == 'value'
- assert ctx.get_param_as_string('list') == '1,2,3'
+ params={"key": "value", "list": ["1", "2", "3"]},
+ )
+ assert ctx.get_param_as_string("key") == "value"
+ assert ctx.get_param_as_string("list") == "1,2,3"
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_string('non-existing')
- assert ctx.get_param_as_string('non-existing', default='x') == 'x'
+ ctx.get_param_as_string("non-existing")
+ assert ctx.get_param_as_string("non-existing", default="x") == "x"
def test_getting_int_parameter():
@@ -61,55 +68,57 @@ def test_getting_int_parameter():
env={},
method=None,
url=None,
- params={'key': '50', 'err': 'invalid', 'list': [1, 2, 3]})
- assert ctx.get_param_as_int('key') == 50
+ params={"key": "50", "err": "invalid", "list": [1, 2, 3]},
+ )
+ assert ctx.get_param_as_int("key") == 50
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_int('list')
+ ctx.get_param_as_int("list")
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_int('non-existing')
- assert ctx.get_param_as_int('non-existing', default=5) == 5
+ ctx.get_param_as_int("non-existing")
+ assert ctx.get_param_as_int("non-existing", default=5) == 5
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_int('err')
+ ctx.get_param_as_int("err")
with pytest.raises(errors.ValidationError):
- assert ctx.get_param_as_int('key', min=50) == 50
- ctx.get_param_as_int('key', min=51)
+ assert ctx.get_param_as_int("key", min=50) == 50
+ ctx.get_param_as_int("key", min=51)
with pytest.raises(errors.ValidationError):
- assert ctx.get_param_as_int('key', max=50) == 50
- ctx.get_param_as_int('key', max=49)
+ assert ctx.get_param_as_int("key", max=50) == 50
+ ctx.get_param_as_int("key", max=49)
def test_getting_bool_parameter():
def test(value):
ctx = rest.Context(
- env={}, method=None, url=None, params={'key': value})
- return ctx.get_param_as_bool('key')
+ env={}, method=None, url=None, params={"key": value}
+ )
+ return ctx.get_param_as_bool("key")
- assert test('1') is True
- assert test('y') is True
- assert test('yes') is True
- assert test('yep') is True
- assert test('yup') is True
- assert test('yeah') is True
- assert test('t') is True
- assert test('true') is True
- assert test('TRUE') is True
+ assert test("1") is True
+ assert test("y") is True
+ assert test("yes") is True
+ assert test("yep") is True
+ assert test("yup") is True
+ assert test("yeah") is True
+ assert test("t") is True
+ assert test("true") is True
+ assert test("TRUE") is True
- assert test('0') is False
- assert test('n') is False
- assert test('no') is False
- assert test('nope') is False
- assert test('f') is False
- assert test('false') is False
- assert test('FALSE') is False
+ assert test("0") is False
+ assert test("n") is False
+ assert test("no") is False
+ assert test("nope") is False
+ assert test("f") is False
+ assert test("false") is False
+ assert test("FALSE") is False
with pytest.raises(errors.ValidationError):
- test('herp')
+ test("herp")
with pytest.raises(errors.ValidationError):
- test('2')
+ test("2")
with pytest.raises(errors.ValidationError):
- test(['1', '2'])
+ test(["1", "2"])
ctx = rest.Context(env={}, method=None, url=None)
with pytest.raises(errors.ValidationError):
- ctx.get_param_as_bool('non-existing')
- assert ctx.get_param_as_bool('non-existing', default=True) is True
+ ctx.get_param_as_bool("non-existing")
+ assert ctx.get_param_as_bool("non-existing", default=True) is True
diff --git a/server/szurubooru/tests/search/configs/test_comment_search_config.py b/server/szurubooru/tests/search/configs/test_comment_search_config.py
index 7279c1be..a592b302 100644
--- a/server/szurubooru/tests/search/configs/test_comment_search_config.py
+++ b/server/szurubooru/tests/search/configs/test_comment_search_config.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, search
@@ -12,22 +14,28 @@ def executor():
def verify_unpaged(executor):
def verify(input, expected_comment_text):
actual_count, actual_comments = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_comment_text = [c.text for c in actual_comments]
assert actual_count == len(expected_comment_text)
assert actual_comment_text == expected_comment_text
+
return verify
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('creation-time:2014', ['t2', 't1']),
- ('creation-date:2014', ['t2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [
+ ("creation-time:2014", ["t2", "t1"]),
+ ("creation-date:2014", ["t2", "t1"]),
+ ],
+)
def test_filter_by_creation_time(
- verify_unpaged, comment_factory, input, expected_comment_text):
- comment1 = comment_factory(text='t1')
- comment2 = comment_factory(text='t2')
- comment3 = comment_factory(text='t3')
+ verify_unpaged, comment_factory, input, expected_comment_text
+):
+ comment1 = comment_factory(text="t1")
+ comment2 = comment_factory(text="t2")
+ comment3 = comment_factory(text="t3")
comment1.creation_time = datetime(2014, 1, 1)
comment2.creation_time = datetime(2014, 6, 1)
comment3.creation_time = datetime(2015, 1, 1)
@@ -36,109 +44,107 @@ def test_filter_by_creation_time(
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('text:t1', ['t1']),
- ('text:t2', ['t2']),
- ('text:t1,t2', ['t1', 't2']),
- ('text:t*', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [
+ ("text:t1", ["t1"]),
+ ("text:t2", ["t2"]),
+ ("text:t1,t2", ["t1", "t2"]),
+ ("text:t*", ["t1", "t2"]),
+ ],
+)
def test_filter_by_text(
- verify_unpaged, comment_factory, input, expected_comment_text):
- comment1 = comment_factory(text='t1')
- comment2 = comment_factory(text='t2')
+ verify_unpaged, comment_factory, input, expected_comment_text
+):
+ comment1 = comment_factory(text="t1")
+ comment2 = comment_factory(text="t2")
db.session.add_all([comment1, comment2])
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('user:u1', ['t1']),
- ('user:u2', ['t2']),
- ('user:u1,u2', ['t2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [("user:u1", ["t1"]), ("user:u2", ["t2"]), ("user:u1,u2", ["t2", "t1"]),],
+)
def test_filter_by_user(
- verify_unpaged,
- comment_factory,
- user_factory,
- input,
- expected_comment_text):
- db.session.add(comment_factory(text='t2', user=user_factory(name='u2')))
- db.session.add(comment_factory(text='t1', user=user_factory(name='u1')))
+ verify_unpaged, comment_factory, user_factory, input, expected_comment_text
+):
+ db.session.add(comment_factory(text="t2", user=user_factory(name="u2")))
+ db.session.add(comment_factory(text="t1", user=user_factory(name="u1")))
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('post:1', ['t1']),
- ('post:2', ['t2']),
- ('post:1,2', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [("post:1", ["t1"]), ("post:2", ["t2"]), ("post:1,2", ["t1", "t2"]),],
+)
def test_filter_by_post(
- verify_unpaged,
- comment_factory,
- post_factory,
- input,
- expected_comment_text):
- db.session.add(comment_factory(text='t1', post=post_factory(id=1)))
- db.session.add(comment_factory(text='t2', post=post_factory(id=2)))
+ verify_unpaged, comment_factory, post_factory, input, expected_comment_text
+):
+ db.session.add(comment_factory(text="t1", post=post_factory(id=1)))
+ db.session.add(comment_factory(text="t2", post=post_factory(id=2)))
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('', ['t1', 't2']),
- ('t1', ['t1']),
- ('t2', ['t2']),
- ('t1,t2', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [
+ ("", ["t1", "t2"]),
+ ("t1", ["t1"]),
+ ("t2", ["t2"]),
+ ("t1,t2", ["t1", "t2"]),
+ ],
+)
def test_anonymous(
- verify_unpaged, comment_factory, input, expected_comment_text):
- db.session.add(comment_factory(text='t1'))
- db.session.add(comment_factory(text='t2'))
+ verify_unpaged, comment_factory, input, expected_comment_text
+):
+ db.session.add(comment_factory(text="t1"))
+ db.session.add(comment_factory(text="t2"))
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('sort:user', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text", [("sort:user", ["t1", "t2"]),]
+)
def test_sort_by_user(
- verify_unpaged,
- comment_factory,
- user_factory,
- input,
- expected_comment_text):
- db.session.add(comment_factory(text='t2', user=user_factory(name='u2')))
- db.session.add(comment_factory(text='t1', user=user_factory(name='u1')))
+ verify_unpaged, comment_factory, user_factory, input, expected_comment_text
+):
+ db.session.add(comment_factory(text="t2", user=user_factory(name="u2")))
+ db.session.add(comment_factory(text="t1", user=user_factory(name="u1")))
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('sort:post', ['t2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text", [("sort:post", ["t2", "t1"]),]
+)
def test_sort_by_post(
- verify_unpaged,
- comment_factory,
- post_factory,
- input,
- expected_comment_text):
- db.session.add(comment_factory(text='t1', post=post_factory(id=1)))
- db.session.add(comment_factory(text='t2', post=post_factory(id=2)))
+ verify_unpaged, comment_factory, post_factory, input, expected_comment_text
+):
+ db.session.add(comment_factory(text="t1", post=post_factory(id=1)))
+ db.session.add(comment_factory(text="t2", post=post_factory(id=2)))
db.session.flush()
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('', ['t3', 't2', 't1']),
- ('sort:creation-date', ['t3', 't2', 't1']),
- ('sort:creation-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [
+ ("", ["t3", "t2", "t1"]),
+ ("sort:creation-date", ["t3", "t2", "t1"]),
+ ("sort:creation-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_creation_time(
- verify_unpaged, comment_factory, input, expected_comment_text):
- comment1 = comment_factory(text='t1')
- comment2 = comment_factory(text='t2')
- comment3 = comment_factory(text='t3')
+ verify_unpaged, comment_factory, input, expected_comment_text
+):
+ comment1 = comment_factory(text="t1")
+ comment2 = comment_factory(text="t2")
+ comment3 = comment_factory(text="t3")
comment1.creation_time = datetime(1991, 1, 1)
comment2.creation_time = datetime(1991, 1, 2)
comment3.creation_time = datetime(1991, 1, 3)
@@ -147,17 +153,21 @@ def test_sort_by_creation_time(
verify_unpaged(input, expected_comment_text)
-@pytest.mark.parametrize('input,expected_comment_text', [
- ('sort:last-edit-date', ['t3', 't2', 't1']),
- ('sort:last-edit-time', ['t3', 't2', 't1']),
- ('sort:edit-date', ['t3', 't2', 't1']),
- ('sort:edit-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_comment_text",
+ [
+ ("sort:last-edit-date", ["t3", "t2", "t1"]),
+ ("sort:last-edit-time", ["t3", "t2", "t1"]),
+ ("sort:edit-date", ["t3", "t2", "t1"]),
+ ("sort:edit-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_last_edit_time(
- verify_unpaged, comment_factory, input, expected_comment_text):
- comment1 = comment_factory(text='t1')
- comment2 = comment_factory(text='t2')
- comment3 = comment_factory(text='t3')
+ verify_unpaged, comment_factory, input, expected_comment_text
+):
+ comment1 = comment_factory(text="t1")
+ comment2 = comment_factory(text="t2")
+ comment3 = comment_factory(text="t3")
comment1.last_edit_time = datetime(1991, 1, 1)
comment2.last_edit_time = datetime(1991, 1, 2)
comment3.last_edit_time = datetime(1991, 1, 3)
diff --git a/server/szurubooru/tests/search/configs/test_pool_search_config.py b/server/szurubooru/tests/search/configs/test_pool_search_config.py
index 731a6767..201bf793 100644
--- a/server/szurubooru/tests/search/configs/test_pool_search_config.py
+++ b/server/szurubooru/tests/search/configs/test_pool_search_config.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, errors, search
@@ -12,77 +14,106 @@ def executor():
def verify_unpaged(executor):
def verify(input, expected_pool_names):
actual_count, actual_pools = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_pool_names = [u.names[0].name for u in actual_pools]
assert actual_count == len(expected_pool_names)
assert actual_pool_names == expected_pool_names
+
return verify
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('', ['t1', 't2']),
- ('t1', ['t1']),
- ('t2', ['t2']),
- ('t1,t2', ['t1', 't2']),
- ('T1,T2', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("", ["t1", "t2"]),
+ ("t1", ["t1"]),
+ ("t2", ["t2"]),
+ ("t1,t2", ["t1", "t2"]),
+ ("T1,T2", ["t1", "t2"]),
+ ],
+)
def test_filter_anonymous(
- verify_unpaged, pool_factory, input, expected_pool_names):
- db.session.add(pool_factory(id=1, names=['t1']))
- db.session.add(pool_factory(id=2, names=['t2']))
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ db.session.add(pool_factory(id=1, names=["t1"]))
+ db.session.add(pool_factory(id=2, names=["t2"]))
db.session.flush()
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('db_driver,input,expected_pool_names', [
- (None, ',', None),
- (None, 't1,', None),
- (None, 't1,t2', ['t1', 't2']),
- (None, 't1\\,', []),
- (None, 'asd..asd', None),
- (None, 'asd\\..asd', []),
- (None, 'asd.\\.asd', []),
- (None, 'asd\\.\\.asd', []),
- (None, '-', None),
- (None, '\\-', ['-']),
- (None, '--', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-asd',
- ]),
- (None, '\\--', []),
- (None, '-\\-', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-asd',
- ]),
- (None, '-*', []),
- (None, '\\-*', ['-', '-asd']),
- (None, ':', None),
- (None, '\\:', [':']),
- (None, '\\:asd', []),
- (None, '*\\:*', [':', 'asd:asd']),
- (None, 'asd:asd', None),
- (None, 'asd\\:asd', ['asd:asd']),
- (None, '*', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-', '-asd'
- ]),
- (None, '\\*', ['*']),
- (None, '\\', None),
- (None, '\\asd', None),
- ('psycopg2', '\\\\', ['\\']),
- ('psycopg2', '\\\\asd', ['\\asd']),
-])
+@pytest.mark.parametrize(
+ "db_driver,input,expected_pool_names",
+ [
+ (None, ",", None),
+ (None, "t1,", None),
+ (None, "t1,t2", ["t1", "t2"]),
+ (None, "t1\\,", []),
+ (None, "asd..asd", None),
+ (None, "asd\\..asd", []),
+ (None, "asd.\\.asd", []),
+ (None, "asd\\.\\.asd", []),
+ (None, "-", None),
+ (None, "\\-", ["-"]),
+ (
+ None,
+ "--",
+ ["t1", "t2", "*", "*asd*", ":", "asd:asd", "\\", "\\asd", "-asd",],
+ ),
+ (None, "\\--", []),
+ (
+ None,
+ "-\\-",
+ ["t1", "t2", "*", "*asd*", ":", "asd:asd", "\\", "\\asd", "-asd",],
+ ),
+ (None, "-*", []),
+ (None, "\\-*", ["-", "-asd"]),
+ (None, ":", None),
+ (None, "\\:", [":"]),
+ (None, "\\:asd", []),
+ (None, "*\\:*", [":", "asd:asd"]),
+ (None, "asd:asd", None),
+ (None, "asd\\:asd", ["asd:asd"]),
+ (
+ None,
+ "*",
+ [
+ "t1",
+ "t2",
+ "*",
+ "*asd*",
+ ":",
+ "asd:asd",
+ "\\",
+ "\\asd",
+ "-",
+ "-asd",
+ ],
+ ),
+ (None, "\\*", ["*"]),
+ (None, "\\", None),
+ (None, "\\asd", None),
+ ("psycopg2", "\\\\", ["\\"]),
+ ("psycopg2", "\\\\asd", ["\\asd"]),
+ ],
+)
def test_escaping(
- executor, pool_factory, input, expected_pool_names, db_driver):
- db.session.add_all([
- pool_factory(id=1, names=['t1']),
- pool_factory(id=2, names=['t2']),
- pool_factory(id=3, names=['*']),
- pool_factory(id=4, names=['*asd*']),
- pool_factory(id=5, names=[':']),
- pool_factory(id=6, names=['asd:asd']),
- pool_factory(id=7, names=['\\']),
- pool_factory(id=8, names=['\\asd']),
- pool_factory(id=9, names=['-']),
- pool_factory(id=10, names=['-asd'])
- ])
+ executor, pool_factory, input, expected_pool_names, db_driver
+):
+ db.session.add_all(
+ [
+ pool_factory(id=1, names=["t1"]),
+ pool_factory(id=2, names=["t2"]),
+ pool_factory(id=3, names=["*"]),
+ pool_factory(id=4, names=["*asd*"]),
+ pool_factory(id=5, names=[":"]),
+ pool_factory(id=6, names=["asd:asd"]),
+ pool_factory(id=7, names=["\\"]),
+ pool_factory(id=8, names=["\\asd"]),
+ pool_factory(id=9, names=["-"]),
+ pool_factory(id=10, names=["-asd"]),
+ ]
+ )
db.session.flush()
if db_driver and db.session.get_bind().driver != db_driver:
@@ -92,99 +123,112 @@ def test_escaping(
executor.execute(input, offset=0, limit=100)
else:
actual_count, actual_pools = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_pool_names = [u.names[0].name for u in actual_pools]
assert actual_count == len(expected_pool_names)
assert sorted(actual_pool_names) == sorted(expected_pool_names)
def test_filter_anonymous_starting_with_colon(verify_unpaged, pool_factory):
- db.session.add(pool_factory(id=1, names=[':t']))
+ db.session.add(pool_factory(id=1, names=[":t"]))
db.session.flush()
with pytest.raises(errors.SearchError):
- verify_unpaged(':t', [':t'])
- verify_unpaged('\\:t', [':t'])
+ verify_unpaged(":t", [":t"])
+ verify_unpaged("\\:t", [":t"])
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('name:pool1', ['pool1']),
- ('name:pool2', ['pool2']),
- ('name:none', []),
- ('name:', []),
- ('name:*1', ['pool1']),
- ('name:*2', ['pool2']),
- ('name:*', ['pool1', 'pool2', 'pool3', 'pool4']),
- ('name:p*', ['pool1', 'pool2', 'pool3', 'pool4']),
- ('name:*o*', ['pool1', 'pool2', 'pool3', 'pool4']),
- ('name:*!*', []),
- ('name:!*', []),
- ('name:*!', []),
- ('-name:pool1', ['pool2', 'pool3', 'pool4']),
- ('-name:pool2', ['pool1', 'pool3', 'pool4']),
- ('name:pool1,pool2', ['pool1', 'pool2']),
- ('-name:pool1,pool3', ['pool2', 'pool4']),
- ('name:pool4', ['pool4']),
- ('name:pool5', ['pool4']),
- ('name:pool4,pool5', ['pool4']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("name:pool1", ["pool1"]),
+ ("name:pool2", ["pool2"]),
+ ("name:none", []),
+ ("name:", []),
+ ("name:*1", ["pool1"]),
+ ("name:*2", ["pool2"]),
+ ("name:*", ["pool1", "pool2", "pool3", "pool4"]),
+ ("name:p*", ["pool1", "pool2", "pool3", "pool4"]),
+ ("name:*o*", ["pool1", "pool2", "pool3", "pool4"]),
+ ("name:*!*", []),
+ ("name:!*", []),
+ ("name:*!", []),
+ ("-name:pool1", ["pool2", "pool3", "pool4"]),
+ ("-name:pool2", ["pool1", "pool3", "pool4"]),
+ ("name:pool1,pool2", ["pool1", "pool2"]),
+ ("-name:pool1,pool3", ["pool2", "pool4"]),
+ ("name:pool4", ["pool4"]),
+ ("name:pool5", ["pool4"]),
+ ("name:pool4,pool5", ["pool4"]),
+ ],
+)
def test_filter_by_name(
- verify_unpaged, pool_factory, input, expected_pool_names):
- db.session.add(pool_factory(id=1, names=['pool1']))
- db.session.add(pool_factory(id=2, names=['pool2']))
- db.session.add(pool_factory(id=3, names=['pool3']))
- db.session.add(pool_factory(id=4, names=['pool4', 'pool5', 'pool6']))
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ db.session.add(pool_factory(id=1, names=["pool1"]))
+ db.session.add(pool_factory(id=2, names=["pool2"]))
+ db.session.add(pool_factory(id=3, names=["pool3"]))
+ db.session.add(pool_factory(id=4, names=["pool4", "pool5", "pool6"]))
db.session.flush()
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('category:cat1', ['t1', 't2']),
- ('category:cat2', ['t3']),
- ('category:cat1,cat2', ['t1', 't2', 't3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("category:cat1", ["t1", "t2"]),
+ ("category:cat2", ["t3"]),
+ ("category:cat1,cat2", ["t1", "t2", "t3"]),
+ ],
+)
def test_filter_by_category(
- verify_unpaged,
- pool_factory,
- pool_category_factory,
- input,
- expected_pool_names):
- cat1 = pool_category_factory(name='cat1')
- cat2 = pool_category_factory(name='cat2')
- pool1 = pool_factory(id=1, names=['t1'], category=cat1)
- pool2 = pool_factory(id=2, names=['t2'], category=cat1)
- pool3 = pool_factory(id=3, names=['t3'], category=cat2)
+ verify_unpaged,
+ pool_factory,
+ pool_category_factory,
+ input,
+ expected_pool_names,
+):
+ cat1 = pool_category_factory(name="cat1")
+ cat2 = pool_category_factory(name="cat2")
+ pool1 = pool_factory(id=1, names=["t1"], category=cat1)
+ pool2 = pool_factory(id=2, names=["t2"], category=cat1)
+ pool3 = pool_factory(id=3, names=["t3"], category=cat2)
db.session.add_all([pool1, pool2, pool3])
db.session.flush()
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('creation-time:2014', ['t1', 't2']),
- ('creation-date:2014', ['t1', 't2']),
- ('-creation-time:2014', ['t3']),
- ('-creation-date:2014', ['t3']),
- ('creation-time:2014..2014-06', ['t1', 't2']),
- ('creation-time:2014-06..2015-01-01', ['t2', 't3']),
- ('creation-time:2014-06..', ['t2', 't3']),
- ('creation-time:..2014-06', ['t1', 't2']),
- ('-creation-time:2014..2014-06', ['t3']),
- ('-creation-time:2014-06..2015-01-01', ['t1']),
- ('creation-date:2014..2014-06', ['t1', 't2']),
- ('creation-date:2014-06..2015-01-01', ['t2', 't3']),
- ('creation-date:2014-06..', ['t2', 't3']),
- ('creation-date:..2014-06', ['t1', 't2']),
- ('-creation-date:2014..2014-06', ['t3']),
- ('-creation-date:2014-06..2015-01-01', ['t1']),
- ('creation-time:2014-01,2015', ['t1', 't3']),
- ('creation-date:2014-01,2015', ['t1', 't3']),
- ('-creation-time:2014-01,2015', ['t2']),
- ('-creation-date:2014-01,2015', ['t2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("creation-time:2014", ["t1", "t2"]),
+ ("creation-date:2014", ["t1", "t2"]),
+ ("-creation-time:2014", ["t3"]),
+ ("-creation-date:2014", ["t3"]),
+ ("creation-time:2014..2014-06", ["t1", "t2"]),
+ ("creation-time:2014-06..2015-01-01", ["t2", "t3"]),
+ ("creation-time:2014-06..", ["t2", "t3"]),
+ ("creation-time:..2014-06", ["t1", "t2"]),
+ ("-creation-time:2014..2014-06", ["t3"]),
+ ("-creation-time:2014-06..2015-01-01", ["t1"]),
+ ("creation-date:2014..2014-06", ["t1", "t2"]),
+ ("creation-date:2014-06..2015-01-01", ["t2", "t3"]),
+ ("creation-date:2014-06..", ["t2", "t3"]),
+ ("creation-date:..2014-06", ["t1", "t2"]),
+ ("-creation-date:2014..2014-06", ["t3"]),
+ ("-creation-date:2014-06..2015-01-01", ["t1"]),
+ ("creation-time:2014-01,2015", ["t1", "t3"]),
+ ("creation-date:2014-01,2015", ["t1", "t3"]),
+ ("-creation-time:2014-01,2015", ["t2"]),
+ ("-creation-date:2014-01,2015", ["t2"]),
+ ],
+)
def test_filter_by_creation_time(
- verify_unpaged, pool_factory, input, expected_pool_names):
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
- pool3 = pool_factory(id=3, names=['t3'])
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
+ pool3 = pool_factory(id=3, names=["t3"])
pool1.creation_time = datetime(2014, 1, 1)
pool2.creation_time = datetime(2014, 6, 1)
pool3.creation_time = datetime(2015, 1, 1)
@@ -193,17 +237,21 @@ def test_filter_by_creation_time(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('last-edit-date:2014', ['t1', 't3']),
- ('last-edit-time:2014', ['t1', 't3']),
- ('edit-date:2014', ['t1', 't3']),
- ('edit-time:2014', ['t1', 't3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("last-edit-date:2014", ["t1", "t3"]),
+ ("last-edit-time:2014", ["t1", "t3"]),
+ ("edit-date:2014", ["t1", "t3"]),
+ ("edit-time:2014", ["t1", "t3"]),
+ ],
+)
def test_filter_by_edit_time(
- verify_unpaged, pool_factory, input, expected_pool_names):
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
- pool3 = pool_factory(id=3, names=['t3'])
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
+ pool3 = pool_factory(id=3, names=["t3"])
pool1.last_edit_time = datetime(2014, 1, 1)
pool2.last_edit_time = datetime(2015, 1, 1)
pool3.last_edit_time = datetime(2014, 1, 1)
@@ -212,24 +260,24 @@ def test_filter_by_edit_time(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('post-count:2', ['t1']),
- ('post-count:1', ['t2']),
- ('post-count:1..', ['t1', 't2']),
- ('post-count-min:1', ['t1', 't2']),
- ('post-count:..1', ['t2']),
- ('post-count-max:1', ['t2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("post-count:2", ["t1"]),
+ ("post-count:1", ["t2"]),
+ ("post-count:1..", ["t1", "t2"]),
+ ("post-count-min:1", ["t1", "t2"]),
+ ("post-count:..1", ["t2"]),
+ ("post-count-max:1", ["t2"]),
+ ],
+)
def test_filter_by_post_count(
- verify_unpaged,
- pool_factory,
- post_factory,
- input,
- expected_pool_names):
+ verify_unpaged, pool_factory, post_factory, input, expected_pool_names
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
db.session.add_all([post1, post2, pool1, pool2])
pool1.posts.append(post1)
pool1.posts.append(post2)
@@ -238,49 +286,57 @@ def test_filter_by_post_count(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input', [
- 'post-count:..',
- 'post-count:asd',
- 'post-count:asd,1',
- 'post-count:1,asd',
- 'post-count:asd..1',
- 'post-count:1..asd',
-])
+@pytest.mark.parametrize(
+ "input",
+ [
+ "post-count:..",
+ "post-count:asd",
+ "post-count:asd,1",
+ "post-count:1,asd",
+ "post-count:asd..1",
+ "post-count:1..asd",
+ ],
+)
def test_filter_by_invalid_input(executor, input):
with pytest.raises(errors.SearchError):
executor.execute(input, offset=0, limit=100)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('', ['t1', 't2']),
- ('sort:name', ['t1', 't2']),
- ('-sort:name', ['t2', 't1']),
- ('sort:name,asc', ['t1', 't2']),
- ('sort:name,desc', ['t2', 't1']),
- ('-sort:name,asc', ['t2', 't1']),
- ('-sort:name,desc', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("", ["t1", "t2"]),
+ ("sort:name", ["t1", "t2"]),
+ ("-sort:name", ["t2", "t1"]),
+ ("sort:name,asc", ["t1", "t2"]),
+ ("sort:name,desc", ["t2", "t1"]),
+ ("-sort:name,asc", ["t2", "t1"]),
+ ("-sort:name,desc", ["t1", "t2"]),
+ ],
+)
def test_sort_by_name(
- verify_unpaged,
- pool_factory,
- input,
- expected_pool_names):
- db.session.add(pool_factory(id=2, names=['t2']))
- db.session.add(pool_factory(id=1, names=['t1']))
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ db.session.add(pool_factory(id=2, names=["t2"]))
+ db.session.add(pool_factory(id=1, names=["t1"]))
db.session.flush()
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('', ['t1', 't2', 't3']),
- ('sort:creation-date', ['t3', 't2', 't1']),
- ('sort:creation-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("", ["t1", "t2", "t3"]),
+ ("sort:creation-date", ["t3", "t2", "t1"]),
+ ("sort:creation-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_creation_time(
- verify_unpaged, pool_factory, input, expected_pool_names):
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
- pool3 = pool_factory(id=3, names=['t3'])
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
+ pool3 = pool_factory(id=3, names=["t3"])
pool1.creation_time = datetime(1991, 1, 1)
pool2.creation_time = datetime(1991, 1, 2)
pool3.creation_time = datetime(1991, 1, 3)
@@ -289,18 +345,22 @@ def test_sort_by_creation_time(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('', ['t1', 't2', 't3']),
- ('sort:last-edit-date', ['t3', 't2', 't1']),
- ('sort:last-edit-time', ['t3', 't2', 't1']),
- ('sort:edit-date', ['t3', 't2', 't1']),
- ('sort:edit-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names",
+ [
+ ("", ["t1", "t2", "t3"]),
+ ("sort:last-edit-date", ["t3", "t2", "t1"]),
+ ("sort:last-edit-time", ["t3", "t2", "t1"]),
+ ("sort:edit-date", ["t3", "t2", "t1"]),
+ ("sort:edit-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_last_edit_time(
- verify_unpaged, pool_factory, input, expected_pool_names):
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
- pool3 = pool_factory(id=3, names=['t3'])
+ verify_unpaged, pool_factory, input, expected_pool_names
+):
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
+ pool3 = pool_factory(id=3, names=["t3"])
pool1.last_edit_time = datetime(1991, 1, 1)
pool2.last_edit_time = datetime(1991, 1, 2)
pool3.last_edit_time = datetime(1991, 1, 3)
@@ -309,19 +369,16 @@ def test_sort_by_last_edit_time(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('sort:post-count', ['t2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names", [("sort:post-count", ["t2", "t1"]),]
+)
def test_sort_by_post_count(
- verify_unpaged,
- pool_factory,
- post_factory,
- input,
- expected_pool_names):
+ verify_unpaged, pool_factory, post_factory, input, expected_pool_names
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
- pool1 = pool_factory(id=1, names=['t1'])
- pool2 = pool_factory(id=2, names=['t2'])
+ pool1 = pool_factory(id=1, names=["t1"])
+ pool2 = pool_factory(id=2, names=["t2"])
db.session.add_all([post1, post2, pool1, pool2])
pool1.posts.append(post1)
pool2.posts.append(post1)
@@ -330,20 +387,21 @@ def test_sort_by_post_count(
verify_unpaged(input, expected_pool_names)
-@pytest.mark.parametrize('input,expected_pool_names', [
- ('sort:category', ['t3', 't1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_pool_names", [("sort:category", ["t3", "t1", "t2"]),]
+)
def test_sort_by_category(
- verify_unpaged,
- pool_factory,
- pool_category_factory,
- input,
- expected_pool_names):
- cat1 = pool_category_factory(name='cat1')
- cat2 = pool_category_factory(name='cat2')
- pool1 = pool_factory(id=1, names=['t1'], category=cat2)
- pool2 = pool_factory(id=2, names=['t2'], category=cat2)
- pool3 = pool_factory(id=3, names=['t3'], category=cat1)
+ verify_unpaged,
+ pool_factory,
+ pool_category_factory,
+ input,
+ expected_pool_names,
+):
+ cat1 = pool_category_factory(name="cat1")
+ cat2 = pool_category_factory(name="cat2")
+ pool1 = pool_factory(id=1, names=["t1"], category=cat2)
+ pool2 = pool_factory(id=2, names=["t2"], category=cat2)
+ pool3 = pool_factory(id=3, names=["t3"], category=cat1)
db.session.add_all([pool1, pool2, pool3])
db.session.flush()
verify_unpaged(input, expected_pool_names)
diff --git a/server/szurubooru/tests/search/configs/test_post_search_config.py b/server/szurubooru/tests/search/configs/test_post_search_config.py
index 462594c6..172d1ff4 100644
--- a/server/szurubooru/tests/search/configs/test_post_search_config.py
+++ b/server/szurubooru/tests/search/configs/test_post_search_config.py
@@ -1,15 +1,17 @@
from datetime import datetime
+
import pytest
-from szurubooru import db, model, errors, search
+
+from szurubooru import db, errors, model, search
@pytest.fixture
def fav_factory(user_factory):
def factory(post, user=None):
return model.PostFavorite(
- post=post,
- user=user or user_factory(),
- time=datetime.utcnow())
+ post=post, user=user or user_factory(), time=datetime.utcnow()
+ )
+
return factory
@@ -20,14 +22,17 @@ def score_factory(user_factory):
post=post,
user=user or user_factory(),
time=datetime.utcnow(),
- score=score)
+ score=score,
+ )
+
return factory
@pytest.fixture
def note_factory():
- def factory(text='...'):
- return model.PostNote(polygon='...', text=text)
+ def factory(text="..."):
+ return model.PostNote(polygon="...", text=text)
+
return factory
@@ -36,11 +41,10 @@ def feature_factory(user_factory):
def factory(post=None):
if post:
return model.PostFeature(
- time=datetime.utcnow(),
- user=user_factory(),
- post=post)
- return model.PostFeature(
- time=datetime.utcnow(), user=user_factory())
+ time=datetime.utcnow(), user=user_factory(), post=post
+ )
+ return model.PostFeature(time=datetime.utcnow(), user=user_factory())
+
return factory
@@ -57,6 +61,7 @@ def auth_executor(executor, user_factory):
db.session.flush()
executor.config.user = auth_user
return auth_user
+
return wrapper
@@ -64,21 +69,22 @@ def auth_executor(executor, user_factory):
def verify_unpaged(executor):
def verify(input, expected_post_ids, test_order=False):
actual_count, actual_posts = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_post_ids = list([p.post_id for p in actual_posts])
if not test_order:
actual_post_ids = sorted(actual_post_ids)
expected_post_ids = sorted(expected_post_ids)
assert actual_post_ids == expected_post_ids
assert actual_count == len(expected_post_ids)
+
return verify
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('id:1', [1]),
- ('id:3', [3]),
- ('id:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("id:1", [1]), ("id:3", [3]), ("id:1,3", [1, 3]),],
+)
def test_filter_by_id(verify_unpaged, post_factory, input, expected_post_ids):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
@@ -88,35 +94,39 @@ def test_filter_by_id(verify_unpaged, post_factory, input, expected_post_ids):
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('tag:t1', [1]),
- ('tag:t2', [2]),
- ('tag:t1,t2', [1, 2]),
- ('tag:t4a', [4]),
- ('tag:t4b', [4]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("tag:t1", [1]),
+ ("tag:t2", [2]),
+ ("tag:t1,t2", [1, 2]),
+ ("tag:t4a", [4]),
+ ("tag:t4b", [4]),
+ ],
+)
def test_filter_by_tag(
- verify_unpaged, post_factory, tag_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, tag_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
post4 = post_factory(id=4)
- post1.tags = [tag_factory(names=['t1'])]
- post2.tags = [tag_factory(names=['t2'])]
- post3.tags = [tag_factory(names=['t3'])]
- post4.tags = [tag_factory(names=['t4a', 't4b'])]
+ post1.tags = [tag_factory(names=["t1"])]
+ post2.tags = [tag_factory(names=["t2"])]
+ post3.tags = [tag_factory(names=["t3"])]
+ post4.tags = [tag_factory(names=["t4a", "t4b"])]
db.session.add_all([post1, post2, post3, post4])
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('score:1', [1]),
- ('score:3', [3]),
- ('score:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("score:1", [1]), ("score:3", [3]), ("score:1,3", [1, 3]),],
+)
def test_filter_by_score(
- verify_unpaged, post_factory, user_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, user_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -126,96 +136,111 @@ def test_filter_by_score(
score=post.post_id,
time=datetime.utcnow(),
post=post,
- user=user_factory()))
+ user=user_factory(),
+ )
+ )
db.session.add_all([post1, post2, post3])
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('uploader:', [4]),
- ('uploader:u1', [1]),
- ('uploader:u3', [3]),
- ('uploader:u1,u3', [1, 3]),
- ('upload:', [4]),
- ('upload:u1', [1]),
- ('upload:u3', [3]),
- ('upload:u1,u3', [1, 3]),
- ('submit:', [4]),
- ('submit:u1', [1]),
- ('submit:u3', [3]),
- ('submit:u1,u3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("uploader:", [4]),
+ ("uploader:u1", [1]),
+ ("uploader:u3", [3]),
+ ("uploader:u1,u3", [1, 3]),
+ ("upload:", [4]),
+ ("upload:u1", [1]),
+ ("upload:u3", [3]),
+ ("upload:u1,u3", [1, 3]),
+ ("submit:", [4]),
+ ("submit:u1", [1]),
+ ("submit:u3", [3]),
+ ("submit:u1,u3", [1, 3]),
+ ],
+)
def test_filter_by_uploader(
- verify_unpaged, post_factory, user_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, user_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
post4 = post_factory(id=4)
- post1.user = user_factory(name='u1')
- post2.user = user_factory(name='u2')
- post3.user = user_factory(name='u3')
+ post1.user = user_factory(name="u1")
+ post2.user = user_factory(name="u2")
+ post3.user = user_factory(name="u3")
db.session.add_all([post1, post2, post3, post4])
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('comment:u1', [1]),
- ('comment:u3', [3]),
- ('comment:u1,u3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("comment:u1", [1]), ("comment:u3", [3]), ("comment:u1,u3", [1, 3]),],
+)
def test_filter_by_commenter(
- verify_unpaged,
- post_factory,
- user_factory,
- comment_factory,
- input,
- expected_post_ids):
+ verify_unpaged,
+ post_factory,
+ user_factory,
+ comment_factory,
+ input,
+ expected_post_ids,
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- comment_factory(post=post1, user=user_factory(name='u1')),
- comment_factory(post=post2, user=user_factory(name='u2')),
- comment_factory(post=post3, user=user_factory(name='u3')),
- post1, post2, post3,
- ])
+ db.session.add_all(
+ [
+ comment_factory(post=post1, user=user_factory(name="u1")),
+ comment_factory(post=post2, user=user_factory(name="u2")),
+ comment_factory(post=post3, user=user_factory(name="u3")),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('fav:u1', [1]),
- ('fav:u3', [3]),
- ('fav:u1,u3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("fav:u1", [1]), ("fav:u3", [3]), ("fav:u1,u3", [1, 3]),],
+)
def test_filter_by_favorite(
- verify_unpaged,
- post_factory,
- user_factory,
- fav_factory,
- input,
- expected_post_ids):
+ verify_unpaged,
+ post_factory,
+ user_factory,
+ fav_factory,
+ input,
+ expected_post_ids,
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- fav_factory(post=post1, user=user_factory(name='u1')),
- fav_factory(post=post2, user=user_factory(name='u2')),
- fav_factory(post=post3, user=user_factory(name='u3')),
- post1, post2, post3])
+ db.session.add_all(
+ [
+ fav_factory(post=post1, user=user_factory(name="u1")),
+ fav_factory(post=post2, user=user_factory(name="u2")),
+ fav_factory(post=post3, user=user_factory(name="u3")),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('tag-count:1', [1]),
- ('tag-count:3', [3]),
- ('tag-count:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("tag-count:1", [1]), ("tag-count:3", [3]), ("tag-count:1,3", [1, 3]),],
+)
def test_filter_by_tag_count(
- verify_unpaged, post_factory, tag_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, tag_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -227,61 +252,75 @@ def test_filter_by_tag_count(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('comment-count:1', [1]),
- ('comment-count:3', [3]),
- ('comment-count:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("comment-count:1", [1]),
+ ("comment-count:3", [3]),
+ ("comment-count:1,3", [1, 3]),
+ ],
+)
def test_filter_by_comment_count(
- verify_unpaged,
- post_factory,
- comment_factory,
- input,
- expected_post_ids):
+ verify_unpaged, post_factory, comment_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- comment_factory(post=post1),
- comment_factory(post=post2),
- comment_factory(post=post2),
- comment_factory(post=post3),
- comment_factory(post=post3),
- comment_factory(post=post3),
- post1, post2, post3])
+ db.session.add_all(
+ [
+ comment_factory(post=post1),
+ comment_factory(post=post2),
+ comment_factory(post=post2),
+ comment_factory(post=post3),
+ comment_factory(post=post3),
+ comment_factory(post=post3),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('fav-count:1', [1]),
- ('fav-count:3', [3]),
- ('fav-count:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [("fav-count:1", [1]), ("fav-count:3", [3]), ("fav-count:1,3", [1, 3]),],
+)
def test_filter_by_favorite_count(
- verify_unpaged, post_factory, fav_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, fav_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- fav_factory(post=post1),
- fav_factory(post=post2),
- fav_factory(post=post2),
- fav_factory(post=post3),
- fav_factory(post=post3),
- fav_factory(post=post3),
- post1, post2, post3])
+ db.session.add_all(
+ [
+ fav_factory(post=post1),
+ fav_factory(post=post2),
+ fav_factory(post=post2),
+ fav_factory(post=post3),
+ fav_factory(post=post3),
+ fav_factory(post=post3),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('note-count:1', [1]),
- ('note-count:3', [3]),
- ('note-count:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("note-count:1", [1]),
+ ("note-count:3", [3]),
+ ("note-count:1,3", [1, 3]),
+ ],
+)
def test_filter_by_note_count(
- verify_unpaged, post_factory, note_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, note_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -293,36 +332,40 @@ def test_filter_by_note_count(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('note-text:*', [1, 2, 3]),
- ('note-text:text2', [2]),
- ('note-text:text3*', [3]),
- ('note-text:text3a,text2', [2, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("note-text:*", [1, 2, 3]),
+ ("note-text:text2", [2]),
+ ("note-text:text3*", [3]),
+ ("note-text:text3a,text2", [2, 3]),
+ ],
+)
def test_filter_by_note_text(
- verify_unpaged, post_factory, note_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, note_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- post1.notes = [note_factory(text='text1')]
- post2.notes = [note_factory(text='text2'), note_factory(text='text2')]
- post3.notes = [note_factory(text='text3a'), note_factory(text='text3b')]
+ post1.notes = [note_factory(text="text1")]
+ post2.notes = [note_factory(text="text2"), note_factory(text="text2")]
+ post3.notes = [note_factory(text="text3a"), note_factory(text="text3b")]
db.session.add_all([post1, post2, post3])
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('feature-count:1', [1]),
- ('feature-count:3', [3]),
- ('feature-count:1,3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("feature-count:1", [1]),
+ ("feature-count:3", [3]),
+ ("feature-count:1,3", [1, 3]),
+ ],
+)
def test_filter_by_feature_count(
- verify_unpaged,
- post_factory,
- feature_factory,
- input,
- expected_post_ids):
+ verify_unpaged, post_factory, feature_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -334,18 +377,22 @@ def test_filter_by_feature_count(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('type:image', [1]),
- ('type:anim', [2]),
- ('type:animation', [2]),
- ('type:gif', [2]),
- ('type:video', [3]),
- ('type:webm', [3]),
- ('type:flash', [4]),
- ('type:swf', [4]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("type:image", [1]),
+ ("type:anim", [2]),
+ ("type:animation", [2]),
+ ("type:gif", [2]),
+ ("type:video", [3]),
+ ("type:webm", [3]),
+ ("type:flash", [4]),
+ ("type:swf", [4]),
+ ],
+)
def test_filter_by_type(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -359,14 +406,18 @@ def test_filter_by_type(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('safety:safe', [1]),
- ('safety:sketchy', [2]),
- ('safety:questionable', [2]),
- ('safety:unsafe', [3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("safety:safe", [1]),
+ ("safety:sketchy", [2]),
+ ("safety:questionable", [2]),
+ ("safety:unsafe", [3]),
+ ],
+)
def test_filter_by_safety(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -380,34 +431,42 @@ def test_filter_by_safety(
def test_filter_by_invalid_type(executor):
with pytest.raises(errors.SearchError):
- executor.execute('type:invalid', offset=0, limit=100)
+ executor.execute("type:invalid", offset=0, limit=100)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('content-checksum:checksum1', [1]),
- ('content-checksum:checksum3', [3]),
- ('content-checksum:checksum1,checksum3', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("content-checksum:checksum1", [1]),
+ ("content-checksum:checksum3", [3]),
+ ("content-checksum:checksum1,checksum3", [1, 3]),
+ ],
+)
def test_filter_by_content_checksum(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- post1.checksum = 'checksum1'
- post2.checksum = 'checksum2'
- post3.checksum = 'checksum3'
+ post1.checksum = "checksum1"
+ post2.checksum = "checksum2"
+ post3.checksum = "checksum3"
db.session.add_all([post1, post2, post3])
db.session.flush()
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('file-size:100', [1]),
- ('file-size:102', [3]),
- ('file-size:100,102', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("file-size:100", [1]),
+ ("file-size:102", [3]),
+ ("file-size:100,102", [1, 3]),
+ ],
+)
def test_filter_by_file_size(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -419,25 +478,29 @@ def test_filter_by_file_size(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('image-width:100', [1]),
- ('image-width:200', [2]),
- ('image-width:100,300', [1, 3]),
- ('image-height:200', [1]),
- ('image-height:100', [2]),
- ('image-height:200,300', [1, 3]),
- ('image-area:20000', [1, 2]),
- ('image-area:90000', [3]),
- ('image-area:20000,90000', [1, 2, 3]),
- ('image-ar:1', [3]),
- ('image-ar:..0.9', [1, 4]),
- ('image-ar:1.1..', [2]),
- ('image-ar:1/1..1/1', [3]),
- ('image-ar:1:1..1:1', [3]),
- ('image-ar:0.62..0.63', [4]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("image-width:100", [1]),
+ ("image-width:200", [2]),
+ ("image-width:100,300", [1, 3]),
+ ("image-height:200", [1]),
+ ("image-height:100", [2]),
+ ("image-height:200,300", [1, 3]),
+ ("image-area:20000", [1, 2]),
+ ("image-area:90000", [3]),
+ ("image-area:20000,90000", [1, 2, 3]),
+ ("image-ar:1", [3]),
+ ("image-ar:..0.9", [1, 4]),
+ ("image-ar:1.1..", [2]),
+ ("image-ar:1/1..1/1", [3]),
+ ("image-ar:1:1..1:1", [3]),
+ ("image-ar:0.62..0.63", [4]),
+ ],
+)
def test_filter_by_image_size(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -457,25 +520,29 @@ def test_filter_by_image_size(
def test_filter_by_invalid_aspect_ratio(executor):
with pytest.raises(errors.SearchError):
- executor.execute('image-ar:1:1:1', offset=0, limit=100)
+ executor.execute("image-ar:1:1:1", offset=0, limit=100)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('creation-date:2014', [1]),
- ('creation-date:2016', [3]),
- ('creation-date:2014,2016', [1, 3]),
- ('creation-time:2014', [1]),
- ('creation-time:2016', [3]),
- ('creation-time:2014,2016', [1, 3]),
- ('date:2014', [1]),
- ('date:2016', [3]),
- ('date:2014,2016', [1, 3]),
- ('time:2014', [1]),
- ('time:2016', [3]),
- ('time:2014,2016', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("creation-date:2014", [1]),
+ ("creation-date:2016", [3]),
+ ("creation-date:2014,2016", [1, 3]),
+ ("creation-time:2014", [1]),
+ ("creation-time:2016", [3]),
+ ("creation-time:2014,2016", [1, 3]),
+ ("date:2014", [1]),
+ ("date:2016", [3]),
+ ("date:2014,2016", [1, 3]),
+ ("time:2014", [1]),
+ ("time:2016", [3]),
+ ("time:2014,2016", [1, 3]),
+ ],
+)
def test_filter_by_creation_time(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -487,22 +554,26 @@ def test_filter_by_creation_time(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('last-edit-date:2014', [1]),
- ('last-edit-date:2016', [3]),
- ('last-edit-date:2014,2016', [1, 3]),
- ('last-edit-time:2014', [1]),
- ('last-edit-time:2016', [3]),
- ('last-edit-time:2014,2016', [1, 3]),
- ('edit-date:2014', [1]),
- ('edit-date:2016', [3]),
- ('edit-date:2014,2016', [1, 3]),
- ('edit-time:2014', [1]),
- ('edit-time:2016', [3]),
- ('edit-time:2014,2016', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("last-edit-date:2014", [1]),
+ ("last-edit-date:2016", [3]),
+ ("last-edit-date:2014,2016", [1, 3]),
+ ("last-edit-time:2014", [1]),
+ ("last-edit-time:2016", [3]),
+ ("last-edit-time:2014,2016", [1, 3]),
+ ("edit-date:2014", [1]),
+ ("edit-date:2016", [3]),
+ ("edit-date:2014,2016", [1, 3]),
+ ("edit-time:2014", [1]),
+ ("edit-time:2016", [3]),
+ ("edit-time:2014,2016", [1, 3]),
+ ],
+)
def test_filter_by_last_edit_time(
- verify_unpaged, post_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -514,20 +585,20 @@ def test_filter_by_last_edit_time(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('comment-date:2014', [1]),
- ('comment-date:2016', [3]),
- ('comment-date:2014,2016', [1, 3]),
- ('comment-time:2014', [1]),
- ('comment-time:2016', [3]),
- ('comment-time:2014,2016', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("comment-date:2014", [1]),
+ ("comment-date:2016", [3]),
+ ("comment-date:2014,2016", [1, 3]),
+ ("comment-time:2014", [1]),
+ ("comment-time:2016", [3]),
+ ("comment-time:2014,2016", [1, 3]),
+ ],
+)
def test_filter_by_comment_date(
- verify_unpaged,
- post_factory,
- comment_factory,
- input,
- expected_post_ids):
+ verify_unpaged, post_factory, comment_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -542,16 +613,20 @@ def test_filter_by_comment_date(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('fav-date:2014', [1]),
- ('fav-date:2016', [3]),
- ('fav-date:2014,2016', [1, 3]),
- ('fav-time:2014', [1]),
- ('fav-time:2016', [3]),
- ('fav-time:2014,2016', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("fav-date:2014", [1]),
+ ("fav-date:2016", [3]),
+ ("fav-date:2014,2016", [1, 3]),
+ ("fav-time:2014", [1]),
+ ("fav-time:2016", [3]),
+ ("fav-time:2014,2016", [1, 3]),
+ ],
+)
def test_filter_by_fav_date(
- verify_unpaged, post_factory, fav_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, fav_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -566,20 +641,20 @@ def test_filter_by_fav_date(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('feature-date:2014', [1]),
- ('feature-date:2016', [3]),
- ('feature-date:2014,2016', [1, 3]),
- ('feature-time:2014', [1]),
- ('feature-time:2016', [3]),
- ('feature-time:2014,2016', [1, 3]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("feature-date:2014", [1]),
+ ("feature-date:2016", [3]),
+ ("feature-date:2014,2016", [1, 3]),
+ ("feature-time:2014", [1]),
+ ("feature-time:2016", [3]),
+ ("feature-time:2014,2016", [1, 3]),
+ ],
+)
def test_filter_by_feature_date(
- verify_unpaged,
- post_factory,
- feature_factory,
- input,
- expected_post_ids):
+ verify_unpaged, post_factory, feature_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
@@ -594,37 +669,40 @@ def test_filter_by_feature_date(
verify_unpaged(input, expected_post_ids)
-@pytest.mark.parametrize('input', [
- 'sort:random',
- 'sort:id',
- 'sort:score',
- 'sort:tag-count',
- 'sort:comment-count',
- 'sort:fav-count',
- 'sort:note-count',
- 'sort:feature-count',
- 'sort:file-size',
- 'sort:image-width',
- 'sort:width',
- 'sort:image-height',
- 'sort:height',
- 'sort:image-area',
- 'sort:area',
- 'sort:creation-date',
- 'sort:creation-time',
- 'sort:date',
- 'sort:time',
- 'sort:last-edit-date',
- 'sort:last-edit-time',
- 'sort:edit-date',
- 'sort:edit-time',
- 'sort:comment-date',
- 'sort:comment-time',
- 'sort:fav-date',
- 'sort:fav-time',
- 'sort:feature-date',
- 'sort:feature-time',
-])
+@pytest.mark.parametrize(
+ "input",
+ [
+ "sort:random",
+ "sort:id",
+ "sort:score",
+ "sort:tag-count",
+ "sort:comment-count",
+ "sort:fav-count",
+ "sort:note-count",
+ "sort:feature-count",
+ "sort:file-size",
+ "sort:image-width",
+ "sort:width",
+ "sort:image-height",
+ "sort:height",
+ "sort:image-area",
+ "sort:area",
+ "sort:creation-date",
+ "sort:creation-time",
+ "sort:date",
+ "sort:time",
+ "sort:last-edit-date",
+ "sort:last-edit-time",
+ "sort:edit-date",
+ "sort:edit-time",
+ "sort:comment-date",
+ "sort:comment-time",
+ "sort:fav-date",
+ "sort:fav-time",
+ "sort:feature-date",
+ "sort:feature-time",
+ ],
+)
def test_sort_tokens(verify_unpaged, post_factory, input):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
@@ -634,115 +712,124 @@ def test_sort_tokens(verify_unpaged, post_factory, input):
verify_unpaged(input, [1, 2, 3])
-@pytest.mark.parametrize('input,expected_post_ids', [
- ('', [1, 2, 3, 4]),
- ('t1', [1]),
- ('t2', [2]),
- ('t1,t2', [1, 2]),
- ('t4a', [4]),
- ('t4b', [4]),
-])
+@pytest.mark.parametrize(
+ "input,expected_post_ids",
+ [
+ ("", [1, 2, 3, 4]),
+ ("t1", [1]),
+ ("t2", [2]),
+ ("t1,t2", [1, 2]),
+ ("t4a", [4]),
+ ("t4b", [4]),
+ ],
+)
def test_anonymous(
- verify_unpaged, post_factory, tag_factory, input, expected_post_ids):
+ verify_unpaged, post_factory, tag_factory, input, expected_post_ids
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
post4 = post_factory(id=4)
- post1.tags = [tag_factory(names=['t1'])]
- post2.tags = [tag_factory(names=['t2'])]
- post3.tags = [tag_factory(names=['t3'])]
- post4.tags = [tag_factory(names=['t4a', 't4b'])]
+ post1.tags = [tag_factory(names=["t1"])]
+ post2.tags = [tag_factory(names=["t2"])]
+ post3.tags = [tag_factory(names=["t3"])]
+ post4.tags = [tag_factory(names=["t4a", "t4b"])]
db.session.add_all([post1, post2, post3, post4])
db.session.flush()
verify_unpaged(input, expected_post_ids)
def test_own_liked(
- auth_executor,
- post_factory,
- score_factory,
- user_factory,
- verify_unpaged):
+ auth_executor, post_factory, score_factory, user_factory, verify_unpaged
+):
auth_user = auth_executor()
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- score_factory(post=post1, user=auth_user, score=1),
- score_factory(post=post2, user=user_factory(name='dummy'), score=1),
- score_factory(post=post3, user=auth_user, score=-1),
- post1, post2, post3,
- ])
+ db.session.add_all(
+ [
+ score_factory(post=post1, user=auth_user, score=1),
+ score_factory(
+ post=post2, user=user_factory(name="dummy"), score=1
+ ),
+ score_factory(post=post3, user=auth_user, score=-1),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
- verify_unpaged('special:liked', [1])
- verify_unpaged('-special:liked', [2, 3])
+ verify_unpaged("special:liked", [1])
+ verify_unpaged("-special:liked", [2, 3])
def test_own_disliked(
- auth_executor,
- post_factory,
- score_factory,
- user_factory,
- verify_unpaged):
+ auth_executor, post_factory, score_factory, user_factory, verify_unpaged
+):
auth_user = auth_executor()
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
- db.session.add_all([
- score_factory(post=post1, user=auth_user, score=-1),
- score_factory(post=post2, user=user_factory(name='dummy'), score=-1),
- score_factory(post=post3, user=auth_user, score=1),
- post1, post2, post3,
- ])
+ db.session.add_all(
+ [
+ score_factory(post=post1, user=auth_user, score=-1),
+ score_factory(
+ post=post2, user=user_factory(name="dummy"), score=-1
+ ),
+ score_factory(post=post3, user=auth_user, score=1),
+ post1,
+ post2,
+ post3,
+ ]
+ )
db.session.flush()
- verify_unpaged('special:disliked', [1])
- verify_unpaged('-special:disliked', [2, 3])
+ verify_unpaged("special:disliked", [1])
+ verify_unpaged("-special:disliked", [2, 3])
-@pytest.mark.parametrize('input', [
- 'liked:x',
- 'disliked:x',
-])
+@pytest.mark.parametrize("input", ["liked:x", "disliked:x",])
def test_someones_score(executor, input):
with pytest.raises(errors.SearchError):
executor.execute(input, offset=0, limit=100)
def test_own_fav(
- auth_executor,
- post_factory,
- fav_factory,
- user_factory,
- verify_unpaged):
+ auth_executor, post_factory, fav_factory, user_factory, verify_unpaged
+):
auth_user = auth_executor()
post1 = post_factory(id=1)
post2 = post_factory(id=2)
- db.session.add_all([
- fav_factory(post=post1, user=auth_user),
- fav_factory(post=post2, user=user_factory(name='unrelated')),
- post1, post2,
- ])
+ db.session.add_all(
+ [
+ fav_factory(post=post1, user=auth_user),
+ fav_factory(post=post2, user=user_factory(name="unrelated")),
+ post1,
+ post2,
+ ]
+ )
db.session.flush()
- verify_unpaged('special:fav', [1])
- verify_unpaged('-special:fav', [2])
+ verify_unpaged("special:fav", [1])
+ verify_unpaged("-special:fav", [2])
def test_tumbleweed(
- post_factory,
- fav_factory,
- comment_factory,
- score_factory,
- verify_unpaged):
+ post_factory, fav_factory, comment_factory, score_factory, verify_unpaged
+):
post1 = post_factory(id=1)
post2 = post_factory(id=2)
post3 = post_factory(id=3)
post4 = post_factory(id=4)
- db.session.add_all([
- comment_factory(post=post1),
- score_factory(post=post2),
- fav_factory(post=post3),
- post1, post2, post3, post4,
- ])
+ db.session.add_all(
+ [
+ comment_factory(post=post1),
+ score_factory(post=post2),
+ fav_factory(post=post3),
+ post1,
+ post2,
+ post3,
+ post4,
+ ]
+ )
db.session.flush()
- verify_unpaged('special:tumbleweed', [4])
- verify_unpaged('-special:tumbleweed', [1, 2, 3])
+ verify_unpaged("special:tumbleweed", [4])
+ verify_unpaged("-special:tumbleweed", [1, 2, 3])
diff --git a/server/szurubooru/tests/search/configs/test_tag_search_config.py b/server/szurubooru/tests/search/configs/test_tag_search_config.py
index 09a4c403..2ba1b9bc 100644
--- a/server/szurubooru/tests/search/configs/test_tag_search_config.py
+++ b/server/szurubooru/tests/search/configs/test_tag_search_config.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, errors, search
@@ -12,77 +14,104 @@ def executor():
def verify_unpaged(executor):
def verify(input, expected_tag_names):
actual_count, actual_tags = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_tag_names = [u.names[0].name for u in actual_tags]
assert actual_count == len(expected_tag_names)
assert actual_tag_names == expected_tag_names
+
return verify
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('', ['t1', 't2']),
- ('t1', ['t1']),
- ('t2', ['t2']),
- ('t1,t2', ['t1', 't2']),
- ('T1,T2', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("", ["t1", "t2"]),
+ ("t1", ["t1"]),
+ ("t2", ["t2"]),
+ ("t1,t2", ["t1", "t2"]),
+ ("T1,T2", ["t1", "t2"]),
+ ],
+)
def test_filter_anonymous(
- verify_unpaged, tag_factory, input, expected_tag_names):
- db.session.add(tag_factory(names=['t1']))
- db.session.add(tag_factory(names=['t2']))
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ db.session.add(tag_factory(names=["t1"]))
+ db.session.add(tag_factory(names=["t2"]))
db.session.flush()
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('db_driver,input,expected_tag_names', [
- (None, ',', None),
- (None, 't1,', None),
- (None, 't1,t2', ['t1', 't2']),
- (None, 't1\\,', []),
- (None, 'asd..asd', None),
- (None, 'asd\\..asd', []),
- (None, 'asd.\\.asd', []),
- (None, 'asd\\.\\.asd', []),
- (None, '-', None),
- (None, '\\-', ['-']),
- (None, '--', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-asd',
- ]),
- (None, '\\--', []),
- (None, '-\\-', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-asd',
- ]),
- (None, '-*', []),
- (None, '\\-*', ['-', '-asd']),
- (None, ':', None),
- (None, '\\:', [':']),
- (None, '\\:asd', []),
- (None, '*\\:*', [':', 'asd:asd']),
- (None, 'asd:asd', None),
- (None, 'asd\\:asd', ['asd:asd']),
- (None, '*', [
- 't1', 't2', '*', '*asd*', ':', 'asd:asd', '\\', '\\asd', '-', '-asd'
- ]),
- (None, '\\*', ['*']),
- (None, '\\', None),
- (None, '\\asd', None),
- ('psycopg2', '\\\\', ['\\']),
- ('psycopg2', '\\\\asd', ['\\asd']),
-])
-def test_escaping(
- executor, tag_factory, input, expected_tag_names, db_driver):
- db.session.add_all([
- tag_factory(names=['t1']),
- tag_factory(names=['t2']),
- tag_factory(names=['*']),
- tag_factory(names=['*asd*']),
- tag_factory(names=[':']),
- tag_factory(names=['asd:asd']),
- tag_factory(names=['\\']),
- tag_factory(names=['\\asd']),
- tag_factory(names=['-']),
- tag_factory(names=['-asd'])
- ])
+@pytest.mark.parametrize(
+ "db_driver,input,expected_tag_names",
+ [
+ (None, ",", None),
+ (None, "t1,", None),
+ (None, "t1,t2", ["t1", "t2"]),
+ (None, "t1\\,", []),
+ (None, "asd..asd", None),
+ (None, "asd\\..asd", []),
+ (None, "asd.\\.asd", []),
+ (None, "asd\\.\\.asd", []),
+ (None, "-", None),
+ (None, "\\-", ["-"]),
+ (
+ None,
+ "--",
+ ["t1", "t2", "*", "*asd*", ":", "asd:asd", "\\", "\\asd", "-asd",],
+ ),
+ (None, "\\--", []),
+ (
+ None,
+ "-\\-",
+ ["t1", "t2", "*", "*asd*", ":", "asd:asd", "\\", "\\asd", "-asd",],
+ ),
+ (None, "-*", []),
+ (None, "\\-*", ["-", "-asd"]),
+ (None, ":", None),
+ (None, "\\:", [":"]),
+ (None, "\\:asd", []),
+ (None, "*\\:*", [":", "asd:asd"]),
+ (None, "asd:asd", None),
+ (None, "asd\\:asd", ["asd:asd"]),
+ (
+ None,
+ "*",
+ [
+ "t1",
+ "t2",
+ "*",
+ "*asd*",
+ ":",
+ "asd:asd",
+ "\\",
+ "\\asd",
+ "-",
+ "-asd",
+ ],
+ ),
+ (None, "\\*", ["*"]),
+ (None, "\\", None),
+ (None, "\\asd", None),
+ ("psycopg2", "\\\\", ["\\"]),
+ ("psycopg2", "\\\\asd", ["\\asd"]),
+ ],
+)
+def test_escaping(executor, tag_factory, input, expected_tag_names, db_driver):
+ db.session.add_all(
+ [
+ tag_factory(names=["t1"]),
+ tag_factory(names=["t2"]),
+ tag_factory(names=["*"]),
+ tag_factory(names=["*asd*"]),
+ tag_factory(names=[":"]),
+ tag_factory(names=["asd:asd"]),
+ tag_factory(names=["\\"]),
+ tag_factory(names=["\\asd"]),
+ tag_factory(names=["-"]),
+ tag_factory(names=["-asd"]),
+ ]
+ )
db.session.flush()
if db_driver and db.session.get_bind().driver != db_driver:
@@ -92,99 +121,112 @@ def test_escaping(
executor.execute(input, offset=0, limit=100)
else:
actual_count, actual_tags = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_tag_names = [u.names[0].name for u in actual_tags]
assert actual_count == len(expected_tag_names)
assert sorted(actual_tag_names) == sorted(expected_tag_names)
def test_filter_anonymous_starting_with_colon(verify_unpaged, tag_factory):
- db.session.add(tag_factory(names=[':t']))
+ db.session.add(tag_factory(names=[":t"]))
db.session.flush()
with pytest.raises(errors.SearchError):
- verify_unpaged(':t', [':t'])
- verify_unpaged('\\:t', [':t'])
+ verify_unpaged(":t", [":t"])
+ verify_unpaged("\\:t", [":t"])
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('name:tag1', ['tag1']),
- ('name:tag2', ['tag2']),
- ('name:none', []),
- ('name:', []),
- ('name:*1', ['tag1']),
- ('name:*2', ['tag2']),
- ('name:*', ['tag1', 'tag2', 'tag3', 'tag4']),
- ('name:t*', ['tag1', 'tag2', 'tag3', 'tag4']),
- ('name:*a*', ['tag1', 'tag2', 'tag3', 'tag4']),
- ('name:*!*', []),
- ('name:!*', []),
- ('name:*!', []),
- ('-name:tag1', ['tag2', 'tag3', 'tag4']),
- ('-name:tag2', ['tag1', 'tag3', 'tag4']),
- ('name:tag1,tag2', ['tag1', 'tag2']),
- ('-name:tag1,tag3', ['tag2', 'tag4']),
- ('name:tag4', ['tag4']),
- ('name:tag5', ['tag4']),
- ('name:tag4,tag5', ['tag4']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("name:tag1", ["tag1"]),
+ ("name:tag2", ["tag2"]),
+ ("name:none", []),
+ ("name:", []),
+ ("name:*1", ["tag1"]),
+ ("name:*2", ["tag2"]),
+ ("name:*", ["tag1", "tag2", "tag3", "tag4"]),
+ ("name:t*", ["tag1", "tag2", "tag3", "tag4"]),
+ ("name:*a*", ["tag1", "tag2", "tag3", "tag4"]),
+ ("name:*!*", []),
+ ("name:!*", []),
+ ("name:*!", []),
+ ("-name:tag1", ["tag2", "tag3", "tag4"]),
+ ("-name:tag2", ["tag1", "tag3", "tag4"]),
+ ("name:tag1,tag2", ["tag1", "tag2"]),
+ ("-name:tag1,tag3", ["tag2", "tag4"]),
+ ("name:tag4", ["tag4"]),
+ ("name:tag5", ["tag4"]),
+ ("name:tag4,tag5", ["tag4"]),
+ ],
+)
def test_filter_by_name(
- verify_unpaged, tag_factory, input, expected_tag_names):
- db.session.add(tag_factory(names=['tag1']))
- db.session.add(tag_factory(names=['tag2']))
- db.session.add(tag_factory(names=['tag3']))
- db.session.add(tag_factory(names=['tag4', 'tag5', 'tag6']))
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ db.session.add(tag_factory(names=["tag1"]))
+ db.session.add(tag_factory(names=["tag2"]))
+ db.session.add(tag_factory(names=["tag3"]))
+ db.session.add(tag_factory(names=["tag4", "tag5", "tag6"]))
db.session.flush()
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('category:cat1', ['t1', 't2']),
- ('category:cat2', ['t3']),
- ('category:cat1,cat2', ['t1', 't2', 't3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("category:cat1", ["t1", "t2"]),
+ ("category:cat2", ["t3"]),
+ ("category:cat1,cat2", ["t1", "t2", "t3"]),
+ ],
+)
def test_filter_by_category(
- verify_unpaged,
- tag_factory,
- tag_category_factory,
- input,
- expected_tag_names):
- cat1 = tag_category_factory(name='cat1')
- cat2 = tag_category_factory(name='cat2')
- tag1 = tag_factory(names=['t1'], category=cat1)
- tag2 = tag_factory(names=['t2'], category=cat1)
- tag3 = tag_factory(names=['t3'], category=cat2)
+ verify_unpaged,
+ tag_factory,
+ tag_category_factory,
+ input,
+ expected_tag_names,
+):
+ cat1 = tag_category_factory(name="cat1")
+ cat2 = tag_category_factory(name="cat2")
+ tag1 = tag_factory(names=["t1"], category=cat1)
+ tag2 = tag_factory(names=["t2"], category=cat1)
+ tag3 = tag_factory(names=["t3"], category=cat2)
db.session.add_all([tag1, tag2, tag3])
db.session.flush()
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('creation-time:2014', ['t1', 't2']),
- ('creation-date:2014', ['t1', 't2']),
- ('-creation-time:2014', ['t3']),
- ('-creation-date:2014', ['t3']),
- ('creation-time:2014..2014-06', ['t1', 't2']),
- ('creation-time:2014-06..2015-01-01', ['t2', 't3']),
- ('creation-time:2014-06..', ['t2', 't3']),
- ('creation-time:..2014-06', ['t1', 't2']),
- ('-creation-time:2014..2014-06', ['t3']),
- ('-creation-time:2014-06..2015-01-01', ['t1']),
- ('creation-date:2014..2014-06', ['t1', 't2']),
- ('creation-date:2014-06..2015-01-01', ['t2', 't3']),
- ('creation-date:2014-06..', ['t2', 't3']),
- ('creation-date:..2014-06', ['t1', 't2']),
- ('-creation-date:2014..2014-06', ['t3']),
- ('-creation-date:2014-06..2015-01-01', ['t1']),
- ('creation-time:2014-01,2015', ['t1', 't3']),
- ('creation-date:2014-01,2015', ['t1', 't3']),
- ('-creation-time:2014-01,2015', ['t2']),
- ('-creation-date:2014-01,2015', ['t2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("creation-time:2014", ["t1", "t2"]),
+ ("creation-date:2014", ["t1", "t2"]),
+ ("-creation-time:2014", ["t3"]),
+ ("-creation-date:2014", ["t3"]),
+ ("creation-time:2014..2014-06", ["t1", "t2"]),
+ ("creation-time:2014-06..2015-01-01", ["t2", "t3"]),
+ ("creation-time:2014-06..", ["t2", "t3"]),
+ ("creation-time:..2014-06", ["t1", "t2"]),
+ ("-creation-time:2014..2014-06", ["t3"]),
+ ("-creation-time:2014-06..2015-01-01", ["t1"]),
+ ("creation-date:2014..2014-06", ["t1", "t2"]),
+ ("creation-date:2014-06..2015-01-01", ["t2", "t3"]),
+ ("creation-date:2014-06..", ["t2", "t3"]),
+ ("creation-date:..2014-06", ["t1", "t2"]),
+ ("-creation-date:2014..2014-06", ["t3"]),
+ ("-creation-date:2014-06..2015-01-01", ["t1"]),
+ ("creation-time:2014-01,2015", ["t1", "t3"]),
+ ("creation-date:2014-01,2015", ["t1", "t3"]),
+ ("-creation-time:2014-01,2015", ["t2"]),
+ ("-creation-date:2014-01,2015", ["t2"]),
+ ],
+)
def test_filter_by_creation_time(
- verify_unpaged, tag_factory, input, expected_tag_names):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
- tag3 = tag_factory(names=['t3'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
+ tag3 = tag_factory(names=["t3"])
tag1.creation_time = datetime(2014, 1, 1)
tag2.creation_time = datetime(2014, 6, 1)
tag3.creation_time = datetime(2015, 1, 1)
@@ -193,17 +235,21 @@ def test_filter_by_creation_time(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('last-edit-date:2014', ['t1', 't3']),
- ('last-edit-time:2014', ['t1', 't3']),
- ('edit-date:2014', ['t1', 't3']),
- ('edit-time:2014', ['t1', 't3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("last-edit-date:2014", ["t1", "t3"]),
+ ("last-edit-time:2014", ["t1", "t3"]),
+ ("edit-date:2014", ["t1", "t3"]),
+ ("edit-time:2014", ["t1", "t3"]),
+ ],
+)
def test_filter_by_edit_time(
- verify_unpaged, tag_factory, input, expected_tag_names):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
- tag3 = tag_factory(names=['t3'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
+ tag3 = tag_factory(names=["t3"])
tag1.last_edit_time = datetime(2014, 1, 1)
tag2.last_edit_time = datetime(2015, 1, 1)
tag3.last_edit_time = datetime(2014, 1, 1)
@@ -212,24 +258,28 @@ def test_filter_by_edit_time(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('post-count:2', ['t1']),
- ('post-count:1', ['t2']),
- ('post-count:1..', ['t1', 't2']),
- ('post-count-min:1', ['t1', 't2']),
- ('post-count:..1', ['t2']),
- ('post-count-max:1', ['t2']),
- ('usage-count:2', ['t1']),
- ('usage-count:1', ['t2']),
- ('usages:2', ['t1']),
- ('usages:1', ['t2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("post-count:2", ["t1"]),
+ ("post-count:1", ["t2"]),
+ ("post-count:1..", ["t1", "t2"]),
+ ("post-count-min:1", ["t1", "t2"]),
+ ("post-count:..1", ["t2"]),
+ ("post-count-max:1", ["t2"]),
+ ("usage-count:2", ["t1"]),
+ ("usage-count:1", ["t2"]),
+ ("usages:2", ["t1"]),
+ ("usages:1", ["t2"]),
+ ],
+)
def test_filter_by_post_count(
- verify_unpaged, tag_factory, post_factory, input, expected_tag_names):
+ verify_unpaged, tag_factory, post_factory, input, expected_tag_names
+):
post1 = post_factory()
post2 = post_factory()
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([post1, post2, tag1, tag2])
post1.tags.append(tag1)
post1.tags.append(tag2)
@@ -238,31 +288,38 @@ def test_filter_by_post_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input', [
- 'post-count:..',
- 'post-count:asd',
- 'post-count:asd,1',
- 'post-count:1,asd',
- 'post-count:asd..1',
- 'post-count:1..asd',
-])
+@pytest.mark.parametrize(
+ "input",
+ [
+ "post-count:..",
+ "post-count:asd",
+ "post-count:asd,1",
+ "post-count:1,asd",
+ "post-count:asd..1",
+ "post-count:1..asd",
+ ],
+)
def test_filter_by_invalid_input(executor, input):
with pytest.raises(errors.SearchError):
executor.execute(input, offset=0, limit=100)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('suggestion-count:2', ['t1']),
- ('suggestion-count:1', ['t2']),
- ('suggestion-count:0', ['sug1', 'sug2', 'sug3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("suggestion-count:2", ["t1"]),
+ ("suggestion-count:1", ["t2"]),
+ ("suggestion-count:0", ["sug1", "sug2", "sug3"]),
+ ],
+)
def test_filter_by_suggestion_count(
- verify_unpaged, tag_factory, input, expected_tag_names):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- sug3 = tag_factory(names=['sug3'])
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ sug3 = tag_factory(names=["sug3"])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([sug1, sug3, tag2, sug2, tag1])
tag1.suggestions.append(sug1)
tag1.suggestions.append(sug2)
@@ -271,18 +328,22 @@ def test_filter_by_suggestion_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('implication-count:2', ['t1']),
- ('implication-count:1', ['t2']),
- ('implication-count:0', ['sug1', 'sug2', 'sug3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("implication-count:2", ["t1"]),
+ ("implication-count:1", ["t2"]),
+ ("implication-count:0", ["sug1", "sug2", "sug3"]),
+ ],
+)
def test_filter_by_implication_count(
- verify_unpaged, tag_factory, input, expected_tag_names):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- sug3 = tag_factory(names=['sug3'])
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ sug3 = tag_factory(names=["sug3"])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([sug1, sug3, tag2, sug2, tag1])
tag1.implications.append(sug1)
tag1.implications.append(sug2)
@@ -291,32 +352,39 @@ def test_filter_by_implication_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('', ['t1', 't2']),
- ('sort:name', ['t1', 't2']),
- ('-sort:name', ['t2', 't1']),
- ('sort:name,asc', ['t1', 't2']),
- ('sort:name,desc', ['t2', 't1']),
- ('-sort:name,asc', ['t2', 't1']),
- ('-sort:name,desc', ['t1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("", ["t1", "t2"]),
+ ("sort:name", ["t1", "t2"]),
+ ("-sort:name", ["t2", "t1"]),
+ ("sort:name,asc", ["t1", "t2"]),
+ ("sort:name,desc", ["t2", "t1"]),
+ ("-sort:name,asc", ["t2", "t1"]),
+ ("-sort:name,desc", ["t1", "t2"]),
+ ],
+)
def test_sort_by_name(verify_unpaged, tag_factory, input, expected_tag_names):
- db.session.add(tag_factory(names=['t2']))
- db.session.add(tag_factory(names=['t1']))
+ db.session.add(tag_factory(names=["t2"]))
+ db.session.add(tag_factory(names=["t1"]))
db.session.flush()
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('', ['t1', 't2', 't3']),
- ('sort:creation-date', ['t3', 't2', 't1']),
- ('sort:creation-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("", ["t1", "t2", "t3"]),
+ ("sort:creation-date", ["t3", "t2", "t1"]),
+ ("sort:creation-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_creation_time(
- verify_unpaged, tag_factory, input, expected_tag_names):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
- tag3 = tag_factory(names=['t3'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
+ tag3 = tag_factory(names=["t3"])
tag1.creation_time = datetime(1991, 1, 1)
tag2.creation_time = datetime(1991, 1, 2)
tag3.creation_time = datetime(1991, 1, 3)
@@ -325,18 +393,22 @@ def test_sort_by_creation_time(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('', ['t1', 't2', 't3']),
- ('sort:last-edit-date', ['t3', 't2', 't1']),
- ('sort:last-edit-time', ['t3', 't2', 't1']),
- ('sort:edit-date', ['t3', 't2', 't1']),
- ('sort:edit-time', ['t3', 't2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("", ["t1", "t2", "t3"]),
+ ("sort:last-edit-date", ["t3", "t2", "t1"]),
+ ("sort:last-edit-time", ["t3", "t2", "t1"]),
+ ("sort:edit-date", ["t3", "t2", "t1"]),
+ ("sort:edit-time", ["t3", "t2", "t1"]),
+ ],
+)
def test_sort_by_last_edit_time(
- verify_unpaged, tag_factory, input, expected_tag_names):
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
- tag3 = tag_factory(names=['t3'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
+ tag3 = tag_factory(names=["t3"])
tag1.last_edit_time = datetime(1991, 1, 1)
tag2.last_edit_time = datetime(1991, 1, 2)
tag3.last_edit_time = datetime(1991, 1, 3)
@@ -345,17 +417,21 @@ def test_sort_by_last_edit_time(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('sort:post-count', ['t2', 't1']),
- ('sort:usage-count', ['t2', 't1']),
- ('sort:usages', ['t2', 't1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [
+ ("sort:post-count", ["t2", "t1"]),
+ ("sort:usage-count", ["t2", "t1"]),
+ ("sort:usages", ["t2", "t1"]),
+ ],
+)
def test_sort_by_post_count(
- verify_unpaged, tag_factory, post_factory, input, expected_tag_names):
+ verify_unpaged, tag_factory, post_factory, input, expected_tag_names
+):
post1 = post_factory()
post2 = post_factory()
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([post1, post2, tag1, tag2])
post1.tags.append(tag1)
post1.tags.append(tag2)
@@ -364,16 +440,18 @@ def test_sort_by_post_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('sort:suggestion-count', ['t1', 't2', 'sug1', 'sug2', 'sug3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [("sort:suggestion-count", ["t1", "t2", "sug1", "sug2", "sug3"]),],
+)
def test_sort_by_suggestion_count(
- verify_unpaged, tag_factory, input, expected_tag_names):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- sug3 = tag_factory(names=['sug3'])
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ sug3 = tag_factory(names=["sug3"])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([sug1, sug3, tag2, sug2, tag1])
tag1.suggestions.append(sug1)
tag1.suggestions.append(sug2)
@@ -382,16 +460,18 @@ def test_sort_by_suggestion_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('sort:implication-count', ['t1', 't2', 'sug1', 'sug2', 'sug3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names",
+ [("sort:implication-count", ["t1", "t2", "sug1", "sug2", "sug3"]),],
+)
def test_sort_by_implication_count(
- verify_unpaged, tag_factory, input, expected_tag_names):
- sug1 = tag_factory(names=['sug1'])
- sug2 = tag_factory(names=['sug2'])
- sug3 = tag_factory(names=['sug3'])
- tag1 = tag_factory(names=['t1'])
- tag2 = tag_factory(names=['t2'])
+ verify_unpaged, tag_factory, input, expected_tag_names
+):
+ sug1 = tag_factory(names=["sug1"])
+ sug2 = tag_factory(names=["sug2"])
+ sug3 = tag_factory(names=["sug3"])
+ tag1 = tag_factory(names=["t1"])
+ tag2 = tag_factory(names=["t2"])
db.session.add_all([sug1, sug3, tag2, sug2, tag1])
tag1.implications.append(sug1)
tag1.implications.append(sug2)
@@ -400,20 +480,21 @@ def test_sort_by_implication_count(
verify_unpaged(input, expected_tag_names)
-@pytest.mark.parametrize('input,expected_tag_names', [
- ('sort:category', ['t3', 't1', 't2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_tag_names", [("sort:category", ["t3", "t1", "t2"]),]
+)
def test_sort_by_category(
- verify_unpaged,
- tag_factory,
- tag_category_factory,
- input,
- expected_tag_names):
- cat1 = tag_category_factory(name='cat1')
- cat2 = tag_category_factory(name='cat2')
- tag1 = tag_factory(names=['t1'], category=cat2)
- tag2 = tag_factory(names=['t2'], category=cat2)
- tag3 = tag_factory(names=['t3'], category=cat1)
+ verify_unpaged,
+ tag_factory,
+ tag_category_factory,
+ input,
+ expected_tag_names,
+):
+ cat1 = tag_category_factory(name="cat1")
+ cat2 = tag_category_factory(name="cat2")
+ tag1 = tag_factory(names=["t1"], category=cat2)
+ tag2 = tag_factory(names=["t2"], category=cat2)
+ tag3 = tag_factory(names=["t3"], category=cat1)
db.session.add_all([tag1, tag2, tag3])
db.session.flush()
verify_unpaged(input, expected_tag_names)
diff --git a/server/szurubooru/tests/search/configs/test_user_search_config.py b/server/szurubooru/tests/search/configs/test_user_search_config.py
index e9cea5a6..485ab1c6 100644
--- a/server/szurubooru/tests/search/configs/test_user_search_config.py
+++ b/server/szurubooru/tests/search/configs/test_user_search_config.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import pytest
+
from szurubooru import db, errors, search
@@ -12,135 +14,48 @@ def executor():
def verify_unpaged(executor):
def verify(input, expected_user_names):
actual_count, actual_users = executor.execute(
- input, offset=0, limit=100)
+ input, offset=0, limit=100
+ )
actual_user_names = [u.name for u in actual_users]
assert actual_count == len(expected_user_names)
assert actual_user_names == expected_user_names
+
return verify
-@pytest.mark.parametrize('input,expected_user_names', [
- ('creation-time:2014', ['u1', 'u2']),
- ('creation-date:2014', ['u1', 'u2']),
- ('-creation-time:2014', ['u3']),
- ('-creation-date:2014', ['u3']),
- ('creation-time:2014..2014-06', ['u1', 'u2']),
- ('creation-time:2014-06..2015-01-01', ['u2', 'u3']),
- ('creation-time:2014-06..', ['u2', 'u3']),
- ('creation-time:..2014-06', ['u1', 'u2']),
- ('creation-time-min:2014-06', ['u2', 'u3']),
- ('creation-time-max:2014-06', ['u1', 'u2']),
- ('-creation-time:2014..2014-06', ['u3']),
- ('-creation-time:2014-06..2015-01-01', ['u1']),
- ('creation-date:2014..2014-06', ['u1', 'u2']),
- ('creation-date:2014-06..2015-01-01', ['u2', 'u3']),
- ('creation-date:2014-06..', ['u2', 'u3']),
- ('creation-date:..2014-06', ['u1', 'u2']),
- ('-creation-date:2014..2014-06', ['u3']),
- ('-creation-date:2014-06..2015-01-01', ['u1']),
- ('creation-time:2014-01,2015', ['u1', 'u3']),
- ('creation-date:2014-01,2015', ['u1', 'u3']),
- ('-creation-time:2014-01,2015', ['u2']),
- ('-creation-date:2014-01,2015', ['u2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("creation-time:2014", ["u1", "u2"]),
+ ("creation-date:2014", ["u1", "u2"]),
+ ("-creation-time:2014", ["u3"]),
+ ("-creation-date:2014", ["u3"]),
+ ("creation-time:2014..2014-06", ["u1", "u2"]),
+ ("creation-time:2014-06..2015-01-01", ["u2", "u3"]),
+ ("creation-time:2014-06..", ["u2", "u3"]),
+ ("creation-time:..2014-06", ["u1", "u2"]),
+ ("creation-time-min:2014-06", ["u2", "u3"]),
+ ("creation-time-max:2014-06", ["u1", "u2"]),
+ ("-creation-time:2014..2014-06", ["u3"]),
+ ("-creation-time:2014-06..2015-01-01", ["u1"]),
+ ("creation-date:2014..2014-06", ["u1", "u2"]),
+ ("creation-date:2014-06..2015-01-01", ["u2", "u3"]),
+ ("creation-date:2014-06..", ["u2", "u3"]),
+ ("creation-date:..2014-06", ["u1", "u2"]),
+ ("-creation-date:2014..2014-06", ["u3"]),
+ ("-creation-date:2014-06..2015-01-01", ["u1"]),
+ ("creation-time:2014-01,2015", ["u1", "u3"]),
+ ("creation-date:2014-01,2015", ["u1", "u3"]),
+ ("-creation-time:2014-01,2015", ["u2"]),
+ ("-creation-date:2014-01,2015", ["u2"]),
+ ],
+)
def test_filter_by_creation_time(
- verify_unpaged, input, expected_user_names, user_factory):
- user1 = user_factory(name='u1')
- user2 = user_factory(name='u2')
- user3 = user_factory(name='u3')
- user1.creation_time = datetime(2014, 1, 1)
- user2.creation_time = datetime(2014, 6, 1)
- user3.creation_time = datetime(2015, 1, 1)
- db.session.add_all([user1, user2, user3])
- db.session.flush()
- verify_unpaged(input, expected_user_names)
-
-
-@pytest.mark.parametrize('input,expected_user_names', [
- ('name:user1', ['user1']),
- ('name:user2', ['user2']),
- ('name:none', []),
- ('name:', []),
- ('name:*1', ['user1']),
- ('name:*2', ['user2']),
- ('name:*', ['user1', 'user2', 'user3']),
- ('name:u*', ['user1', 'user2', 'user3']),
- ('name:*ser*', ['user1', 'user2', 'user3']),
- ('name:*zer*', []),
- ('name:zer*', []),
- ('name:*zer', []),
- ('-name:user1', ['user2', 'user3']),
- ('-name:user2', ['user1', 'user3']),
- ('name:user1,user2', ['user1', 'user2']),
- ('-name:user1,user3', ['user2']),
-])
-def test_filter_by_name(
- verify_unpaged, input, expected_user_names, user_factory):
- db.session.add(user_factory(name='user1'))
- db.session.add(user_factory(name='user2'))
- db.session.add(user_factory(name='user3'))
- db.session.flush()
- verify_unpaged(input, expected_user_names)
-
-
-@pytest.mark.parametrize('input,expected_user_names', [
- ('name:u1', ['u1']),
- ('name:u2*', ['u2..']),
- ('name:u1,u3..x', ['u1', 'u3..x']),
- ('name:u2..', None),
- ('name:*..*', None),
- ('name:u3..x', None),
- ('name:*..x', None),
- ('name:u2\\..', ['u2..']),
- ('name:*\\..*', ['u2..', 'u3..x']),
- ('name:u3\\..x', ['u3..x']),
- ('name:*\\..x', ['u3..x']),
- ('name:u2.\\.', ['u2..']),
- ('name:*.\\.*', ['u2..', 'u3..x']),
- ('name:u3.\\.x', ['u3..x']),
- ('name:*.\\.x', ['u3..x']),
- ('name:u2\\.\\.', ['u2..']),
- ('name:*\\.\\.*', ['u2..', 'u3..x']),
- ('name:u3\\.\\.x', ['u3..x']),
- ('name:*\\.\\.x', ['u3..x']),
-])
-def test_filter_by_name_that_looks_like_range(
- verify_unpaged, input, expected_user_names, user_factory):
- db.session.add(user_factory(name='u1'))
- db.session.add(user_factory(name='u2..'))
- db.session.add(user_factory(name='u3..x'))
- db.session.flush()
- if not expected_user_names:
- with pytest.raises(errors.SearchError):
- verify_unpaged(input, expected_user_names)
- else:
- verify_unpaged(input, expected_user_names)
-
-
-@pytest.mark.parametrize('input,expected_user_names', [
- ('', ['u1', 'u2']),
- ('u1', ['u1']),
- ('u2', ['u2']),
- ('u1,u2', ['u1', 'u2']),
-])
-def test_anonymous(
- verify_unpaged, input, expected_user_names, user_factory):
- db.session.add(user_factory(name='u1'))
- db.session.add(user_factory(name='u2'))
- db.session.flush()
- verify_unpaged(input, expected_user_names)
-
-
-@pytest.mark.parametrize('input,expected_user_names', [
- ('creation-time:2014 u1', ['u1']),
- ('creation-time:2014 u2', ['u2']),
- ('creation-time:2016 u2', []),
-])
-def test_combining_tokens(
- verify_unpaged, input, expected_user_names, user_factory):
- user1 = user_factory(name='u1')
- user2 = user_factory(name='u2')
- user3 = user_factory(name='u3')
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ user1 = user_factory(name="u1")
+ user2 = user_factory(name="u2")
+ user3 = user_factory(name="u3")
user1.creation_time = datetime(2014, 1, 1)
user2.creation_time = datetime(2014, 6, 1)
user3.creation_time = datetime(2015, 1, 1)
@@ -150,61 +65,184 @@ def test_combining_tokens(
@pytest.mark.parametrize(
- 'offset,limit,expected_total_count,expected_user_names', [
- (0, 1, 2, ['u1']),
- (1, 1, 2, ['u2']),
+ "input,expected_user_names",
+ [
+ ("name:user1", ["user1"]),
+ ("name:user2", ["user2"]),
+ ("name:none", []),
+ ("name:", []),
+ ("name:*1", ["user1"]),
+ ("name:*2", ["user2"]),
+ ("name:*", ["user1", "user2", "user3"]),
+ ("name:u*", ["user1", "user2", "user3"]),
+ ("name:*ser*", ["user1", "user2", "user3"]),
+ ("name:*zer*", []),
+ ("name:zer*", []),
+ ("name:*zer", []),
+ ("-name:user1", ["user2", "user3"]),
+ ("-name:user2", ["user1", "user3"]),
+ ("name:user1,user2", ["user1", "user2"]),
+ ("-name:user1,user3", ["user2"]),
+ ],
+)
+def test_filter_by_name(
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ db.session.add(user_factory(name="user1"))
+ db.session.add(user_factory(name="user2"))
+ db.session.add(user_factory(name="user3"))
+ db.session.flush()
+ verify_unpaged(input, expected_user_names)
+
+
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("name:u1", ["u1"]),
+ ("name:u2*", ["u2.."]),
+ ("name:u1,u3..x", ["u1", "u3..x"]),
+ ("name:u2..", None),
+ ("name:*..*", None),
+ ("name:u3..x", None),
+ ("name:*..x", None),
+ ("name:u2\\..", ["u2.."]),
+ ("name:*\\..*", ["u2..", "u3..x"]),
+ ("name:u3\\..x", ["u3..x"]),
+ ("name:*\\..x", ["u3..x"]),
+ ("name:u2.\\.", ["u2.."]),
+ ("name:*.\\.*", ["u2..", "u3..x"]),
+ ("name:u3.\\.x", ["u3..x"]),
+ ("name:*.\\.x", ["u3..x"]),
+ ("name:u2\\.\\.", ["u2.."]),
+ ("name:*\\.\\.*", ["u2..", "u3..x"]),
+ ("name:u3\\.\\.x", ["u3..x"]),
+ ("name:*\\.\\.x", ["u3..x"]),
+ ],
+)
+def test_filter_by_name_that_looks_like_range(
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ db.session.add(user_factory(name="u1"))
+ db.session.add(user_factory(name="u2.."))
+ db.session.add(user_factory(name="u3..x"))
+ db.session.flush()
+ if not expected_user_names:
+ with pytest.raises(errors.SearchError):
+ verify_unpaged(input, expected_user_names)
+ else:
+ verify_unpaged(input, expected_user_names)
+
+
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("", ["u1", "u2"]),
+ ("u1", ["u1"]),
+ ("u2", ["u2"]),
+ ("u1,u2", ["u1", "u2"]),
+ ],
+)
+def test_anonymous(verify_unpaged, input, expected_user_names, user_factory):
+ db.session.add(user_factory(name="u1"))
+ db.session.add(user_factory(name="u2"))
+ db.session.flush()
+ verify_unpaged(input, expected_user_names)
+
+
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("creation-time:2014 u1", ["u1"]),
+ ("creation-time:2014 u2", ["u2"]),
+ ("creation-time:2016 u2", []),
+ ],
+)
+def test_combining_tokens(
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ user1 = user_factory(name="u1")
+ user2 = user_factory(name="u2")
+ user3 = user_factory(name="u3")
+ user1.creation_time = datetime(2014, 1, 1)
+ user2.creation_time = datetime(2014, 6, 1)
+ user3.creation_time = datetime(2015, 1, 1)
+ db.session.add_all([user1, user2, user3])
+ db.session.flush()
+ verify_unpaged(input, expected_user_names)
+
+
+@pytest.mark.parametrize(
+ "offset,limit,expected_total_count,expected_user_names",
+ [
+ (0, 1, 2, ["u1"]),
+ (1, 1, 2, ["u2"]),
(2, 1, 2, []),
(-1, 1, 2, []),
- (-1, 2, 2, ['u1']),
- (0, 2, 2, ['u1', 'u2']),
+ (-1, 2, 2, ["u1"]),
+ (0, 2, 2, ["u1", "u2"]),
(3, 1, 2, []),
(0, 0, 2, []),
- ])
+ ],
+)
def test_paging(
- executor, user_factory, offset, limit,
- expected_total_count, expected_user_names):
- db.session.add(user_factory(name='u1'))
- db.session.add(user_factory(name='u2'))
+ executor,
+ user_factory,
+ offset,
+ limit,
+ expected_total_count,
+ expected_user_names,
+):
+ db.session.add(user_factory(name="u1"))
+ db.session.add(user_factory(name="u2"))
db.session.flush()
actual_count, actual_users = executor.execute(
- '', offset=offset, limit=limit)
+ "", offset=offset, limit=limit
+ )
actual_user_names = [u.name for u in actual_users]
assert actual_count == expected_total_count
assert actual_user_names == expected_user_names
-@pytest.mark.parametrize('input,expected_user_names', [
- ('', ['u1', 'u2']),
- ('sort:name', ['u1', 'u2']),
- ('-sort:name', ['u2', 'u1']),
- ('sort:name,asc', ['u1', 'u2']),
- ('sort:name,desc', ['u2', 'u1']),
- ('-sort:name,asc', ['u2', 'u1']),
- ('-sort:name,desc', ['u1', 'u2']),
-])
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("", ["u1", "u2"]),
+ ("sort:name", ["u1", "u2"]),
+ ("-sort:name", ["u2", "u1"]),
+ ("sort:name,asc", ["u1", "u2"]),
+ ("sort:name,desc", ["u2", "u1"]),
+ ("-sort:name,asc", ["u2", "u1"]),
+ ("-sort:name,desc", ["u1", "u2"]),
+ ],
+)
def test_sort_by_name(
- verify_unpaged, input, expected_user_names, user_factory):
- db.session.add(user_factory(name='u2'))
- db.session.add(user_factory(name='u1'))
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ db.session.add(user_factory(name="u2"))
+ db.session.add(user_factory(name="u1"))
db.session.flush()
verify_unpaged(input, expected_user_names)
-@pytest.mark.parametrize('input,expected_user_names', [
- ('', ['u1', 'u2', 'u3']),
- ('sort:creation-date', ['u3', 'u2', 'u1']),
- ('sort:creation-time', ['u3', 'u2', 'u1']),
- ('-sort:creation-date', ['u1', 'u2', 'u3']),
- ('sort:creation-date,asc', ['u1', 'u2', 'u3']),
- ('sort:creation-date,desc', ['u3', 'u2', 'u1']),
- ('-sort:creation-date,asc', ['u3', 'u2', 'u1']),
- ('-sort:creation-date,desc', ['u1', 'u2', 'u3']),
-])
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("", ["u1", "u2", "u3"]),
+ ("sort:creation-date", ["u3", "u2", "u1"]),
+ ("sort:creation-time", ["u3", "u2", "u1"]),
+ ("-sort:creation-date", ["u1", "u2", "u3"]),
+ ("sort:creation-date,asc", ["u1", "u2", "u3"]),
+ ("sort:creation-date,desc", ["u3", "u2", "u1"]),
+ ("-sort:creation-date,asc", ["u3", "u2", "u1"]),
+ ("-sort:creation-date,desc", ["u1", "u2", "u3"]),
+ ],
+)
def test_sort_by_creation_time(
- verify_unpaged, input, expected_user_names, user_factory):
- user1 = user_factory(name='u1')
- user2 = user_factory(name='u2')
- user3 = user_factory(name='u3')
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ user1 = user_factory(name="u1")
+ user2 = user_factory(name="u2")
+ user3 = user_factory(name="u3")
user1.creation_time = datetime(1991, 1, 1)
user2.creation_time = datetime(1991, 1, 2)
user3.creation_time = datetime(1991, 1, 3)
@@ -213,18 +251,22 @@ def test_sort_by_creation_time(
verify_unpaged(input, expected_user_names)
-@pytest.mark.parametrize('input,expected_user_names', [
- ('', ['u1', 'u2', 'u3']),
- ('sort:last-login-date', ['u3', 'u2', 'u1']),
- ('sort:last-login-time', ['u3', 'u2', 'u1']),
- ('sort:login-date', ['u3', 'u2', 'u1']),
- ('sort:login-time', ['u3', 'u2', 'u1']),
-])
+@pytest.mark.parametrize(
+ "input,expected_user_names",
+ [
+ ("", ["u1", "u2", "u3"]),
+ ("sort:last-login-date", ["u3", "u2", "u1"]),
+ ("sort:last-login-time", ["u3", "u2", "u1"]),
+ ("sort:login-date", ["u3", "u2", "u1"]),
+ ("sort:login-time", ["u3", "u2", "u1"]),
+ ],
+)
def test_sort_by_last_login_time(
- verify_unpaged, input, expected_user_names, user_factory):
- user1 = user_factory(name='u1')
- user2 = user_factory(name='u2')
- user3 = user_factory(name='u3')
+ verify_unpaged, input, expected_user_names, user_factory
+):
+ user1 = user_factory(name="u1")
+ user2 = user_factory(name="u2")
+ user3 = user_factory(name="u3")
user1.last_login_time = datetime(1991, 1, 1)
user2.last_login_time = datetime(1991, 1, 2)
user3.last_login_time = datetime(1991, 1, 3)
@@ -234,39 +276,43 @@ def test_sort_by_last_login_time(
def test_random_sort(executor, user_factory):
- user1 = user_factory(name='u1')
- user2 = user_factory(name='u2')
- user3 = user_factory(name='u3')
+ user1 = user_factory(name="u1")
+ user2 = user_factory(name="u2")
+ user3 = user_factory(name="u3")
db.session.add_all([user3, user1, user2])
db.session.flush()
actual_count, actual_users = executor.execute(
- 'sort:random', offset=0, limit=100)
+ "sort:random", offset=0, limit=100
+ )
actual_user_names = [u.name for u in actual_users]
assert actual_count == 3
assert len(actual_user_names) == 3
- assert 'u1' in actual_user_names
- assert 'u2' in actual_user_names
- assert 'u3' in actual_user_names
+ assert "u1" in actual_user_names
+ assert "u2" in actual_user_names
+ assert "u3" in actual_user_names
-@pytest.mark.parametrize('input,expected_error', [
- ('creation-date:..', errors.SearchError),
- ('creation-date-min:..', errors.ValidationError),
- ('creation-date-min:..2014-01-01', errors.ValidationError),
- ('creation-date-min:2014-01-01..', errors.ValidationError),
- ('creation-date-max:..2014-01-01', errors.ValidationError),
- ('creation-date-max:2014-01-01..', errors.ValidationError),
- ('creation-date-max:yesterday,today', errors.ValidationError),
- ('creation-date:bad..', errors.ValidationError),
- ('creation-date:..bad', errors.ValidationError),
- ('creation-date:bad..bad', errors.ValidationError),
- ('sort:', errors.SearchError),
- ('sort:nam', errors.SearchError),
- ('sort:name,as', errors.SearchError),
- ('sort:name,asc,desc', errors.SearchError),
- ('bad:x', errors.SearchError),
- ('special:unsupported', errors.SearchError),
-])
+@pytest.mark.parametrize(
+ "input,expected_error",
+ [
+ ("creation-date:..", errors.SearchError),
+ ("creation-date-min:..", errors.ValidationError),
+ ("creation-date-min:..2014-01-01", errors.ValidationError),
+ ("creation-date-min:2014-01-01..", errors.ValidationError),
+ ("creation-date-max:..2014-01-01", errors.ValidationError),
+ ("creation-date-max:2014-01-01..", errors.ValidationError),
+ ("creation-date-max:yesterday,today", errors.ValidationError),
+ ("creation-date:bad..", errors.ValidationError),
+ ("creation-date:..bad", errors.ValidationError),
+ ("creation-date:bad..bad", errors.ValidationError),
+ ("sort:", errors.SearchError),
+ ("sort:nam", errors.SearchError),
+ ("sort:name,as", errors.SearchError),
+ ("sort:name,asc,desc", errors.SearchError),
+ ("bad:x", errors.SearchError),
+ ("special:unsupported", errors.SearchError),
+ ],
+)
def test_bad_tokens(executor, input, expected_error):
with pytest.raises(expected_error):
executor.execute(input, offset=0, limit=100)
diff --git a/server/szurubooru/tests/search/test_executor.py b/server/szurubooru/tests/search/test_executor.py
index e1b2dacb..4530beec 100644
--- a/server/szurubooru/tests/search/test_executor.py
+++ b/server/szurubooru/tests/search/test_executor.py
@@ -1,23 +1,27 @@
import unittest.mock
+
import pytest
+
from szurubooru import search
from szurubooru.func import cache
def test_retrieving_from_cache():
config = unittest.mock.MagicMock()
- with unittest.mock.patch('szurubooru.func.cache.has'), \
- unittest.mock.patch('szurubooru.func.cache.get'):
+ with unittest.mock.patch("szurubooru.func.cache.has"), unittest.mock.patch(
+ "szurubooru.func.cache.get"
+ ):
cache.has.side_effect = lambda *args: True
executor = search.Executor(config)
- executor.execute('test:whatever', 1, 10)
+ executor.execute("test:whatever", 1, 10)
assert cache.get.called
def test_putting_equivalent_queries_into_cache():
config = search.configs.PostSearchConfig()
- with unittest.mock.patch('szurubooru.func.cache.has'), \
- unittest.mock.patch('szurubooru.func.cache.put'):
+ with unittest.mock.patch("szurubooru.func.cache.has"), unittest.mock.patch(
+ "szurubooru.func.cache.put"
+ ):
hashes = []
def appender(key, _value):
@@ -26,20 +30,21 @@ def test_putting_equivalent_queries_into_cache():
cache.has.side_effect = lambda *args: False
cache.put.side_effect = appender
executor = search.Executor(config)
- executor.execute('safety:safe test', 1, 10)
- executor.execute('safety:safe test', 1, 10)
- executor.execute('safety:safe test ', 1, 10)
- executor.execute(' safety:safe test', 1, 10)
- executor.execute(' SAFETY:safe test', 1, 10)
- executor.execute('test safety:safe', 1, 10)
+ executor.execute("safety:safe test", 1, 10)
+ executor.execute("safety:safe test", 1, 10)
+ executor.execute("safety:safe test ", 1, 10)
+ executor.execute(" safety:safe test", 1, 10)
+ executor.execute(" SAFETY:safe test", 1, 10)
+ executor.execute("test safety:safe", 1, 10)
assert len(hashes) == 6
assert len(set(hashes)) == 1
def test_putting_non_equivalent_queries_into_cache():
config = search.configs.PostSearchConfig()
- with unittest.mock.patch('szurubooru.func.cache.has'), \
- unittest.mock.patch('szurubooru.func.cache.put'):
+ with unittest.mock.patch("szurubooru.func.cache.has"), unittest.mock.patch(
+ "szurubooru.func.cache.put"
+ ):
hashes = []
def appender(key, _value):
@@ -49,42 +54,42 @@ def test_putting_non_equivalent_queries_into_cache():
cache.put.side_effect = appender
executor = search.Executor(config)
args = [
- ('', 1, 10),
- ('creation-time:2016', 1, 10),
- ('creation-time:2015', 1, 10),
- ('creation-time:2016-01', 1, 10),
- ('creation-time:2016-02', 1, 10),
- ('creation-time:2016-01-01', 1, 10),
- ('creation-time:2016-01-02', 1, 10),
- ('tag-count:1,3', 1, 10),
- ('tag-count:1,2', 1, 10),
- ('tag-count:1', 1, 10),
- ('tag-count:1..3', 1, 10),
- ('tag-count:1..4', 1, 10),
- ('tag-count:2..3', 1, 10),
- ('tag-count:1..', 1, 10),
- ('tag-count:2..', 1, 10),
- ('tag-count:..3', 1, 10),
- ('tag-count:..4', 1, 10),
- ('-tag-count:1..3', 1, 10),
- ('-tag-count:1..4', 1, 10),
- ('-tag-count:2..3', 1, 10),
- ('-tag-count:1..', 1, 10),
- ('-tag-count:2..', 1, 10),
- ('-tag-count:..3', 1, 10),
- ('-tag-count:..4', 1, 10),
- ('safety:safe', 1, 10),
- ('safety:safe', 1, 20),
- ('safety:safe', 2, 10),
- ('safety:sketchy', 1, 10),
- ('safety:safe test', 1, 10),
- ('-safety:safe', 1, 10),
- ('-safety:safe', 1, 20),
- ('-safety:safe', 2, 10),
- ('-safety:sketchy', 1, 10),
- ('-safety:safe test', 1, 10),
- ('safety:safe -test', 1, 10),
- ('-test', 1, 10),
+ ("", 1, 10),
+ ("creation-time:2016", 1, 10),
+ ("creation-time:2015", 1, 10),
+ ("creation-time:2016-01", 1, 10),
+ ("creation-time:2016-02", 1, 10),
+ ("creation-time:2016-01-01", 1, 10),
+ ("creation-time:2016-01-02", 1, 10),
+ ("tag-count:1,3", 1, 10),
+ ("tag-count:1,2", 1, 10),
+ ("tag-count:1", 1, 10),
+ ("tag-count:1..3", 1, 10),
+ ("tag-count:1..4", 1, 10),
+ ("tag-count:2..3", 1, 10),
+ ("tag-count:1..", 1, 10),
+ ("tag-count:2..", 1, 10),
+ ("tag-count:..3", 1, 10),
+ ("tag-count:..4", 1, 10),
+ ("-tag-count:1..3", 1, 10),
+ ("-tag-count:1..4", 1, 10),
+ ("-tag-count:2..3", 1, 10),
+ ("-tag-count:1..", 1, 10),
+ ("-tag-count:2..", 1, 10),
+ ("-tag-count:..3", 1, 10),
+ ("-tag-count:..4", 1, 10),
+ ("safety:safe", 1, 10),
+ ("safety:safe", 1, 20),
+ ("safety:safe", 2, 10),
+ ("safety:sketchy", 1, 10),
+ ("safety:safe test", 1, 10),
+ ("-safety:safe", 1, 10),
+ ("-safety:safe", 1, 20),
+ ("-safety:safe", 2, 10),
+ ("-safety:sketchy", 1, 10),
+ ("-safety:safe test", 1, 10),
+ ("safety:safe -test", 1, 10),
+ ("-test", 1, 10),
]
for arg in args:
executor.execute(*arg)
@@ -92,18 +97,22 @@ def test_putting_non_equivalent_queries_into_cache():
assert len(set(hashes)) == len(args)
-@pytest.mark.parametrize('input', [
- 'special:fav',
- 'special:liked',
- 'special:disliked',
- '-special:fav',
- '-special:liked',
- '-special:disliked',
-])
+@pytest.mark.parametrize(
+ "input",
+ [
+ "special:fav",
+ "special:liked",
+ "special:disliked",
+ "-special:fav",
+ "-special:liked",
+ "-special:disliked",
+ ],
+)
def test_putting_auth_dependent_queries_into_cache(user_factory, input):
config = search.configs.PostSearchConfig()
- with unittest.mock.patch('szurubooru.func.cache.has'), \
- unittest.mock.patch('szurubooru.func.cache.put'):
+ with unittest.mock.patch("szurubooru.func.cache.has"), unittest.mock.patch(
+ "szurubooru.func.cache.put"
+ ):
hashes = []
def appender(key, _value):