Merge remote-tracking branch 'origin/master' into robo/og-tags

Commit 22835fa5c7 by Ben Klein, 2023-08-18 20:08:23 -04:00
65 changed files with 906 additions and 486 deletions

.gitattributes (vendored, new file, 5 additions)

@ -0,0 +1,5 @@
# Auto detect text files and perform LF normalization
* text=auto
# Shell scripts require LF
*.sh text eol=lf

.github/workflows/build-containers.yml (vendored, new file, 108 additions)

@ -0,0 +1,108 @@
name: Build Docker containers
on:
push:
branches:
- master
jobs:
build-client:
name: Build and push client/ Docker container
runs-on: ubuntu-latest
steps:
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Determine metadata
run: |
CLOSEST_VER="$(git describe --tags --abbrev=0 $GITHUB_SHA)"
CLOSEST_MAJOR_VER="$(echo ${CLOSEST_VER} | cut -d'.' -f1)"
CLOSEST_MINOR_VER="$(echo ${CLOSEST_VER} | cut -d'.' -f2)"
SHORT_COMMIT=$(echo $GITHUB_SHA | cut -c1-8)
BUILD_INFO="v${CLOSEST_VER}-${SHORT_COMMIT}"
BUILD_DATE="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
echo "major_tag=${CLOSEST_MAJOR_VER}" >> $GITHUB_ENV
echo "minor_tag=${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}" >> $GITHUB_ENV
echo "build_info=${BUILD_INFO}" >> $GITHUB_ENV
echo "build_date=${BUILD_DATE}" >> $GITHUB_ENV
echo "Build Info: ${BUILD_INFO}"
echo "Build Date: ${BUILD_DATE}"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Build container
run: >
docker buildx build --push
--platform linux/amd64,linux/arm/v7,linux/arm64/v8
--build-arg BUILD_INFO=${{ env.build_info }}
--build-arg BUILD_DATE=${{ env.build_date }}
--build-arg SOURCE_COMMIT=$GITHUB_SHA
--build-arg DOCKER_REPO=szurubooru/client
-t "szurubooru/client:latest"
-t "szurubooru/client:${{ env.major_tag }}"
-t "szurubooru/client:${{ env.minor_tag }}"
./client
build-server:
name: Build and push server/ Docker container
runs-on: ubuntu-latest
steps:
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Determine metadata
run: |
CLOSEST_VER="$(git describe --tags --abbrev=0 $GITHUB_SHA)"
CLOSEST_MAJOR_VER="$(echo ${CLOSEST_VER} | cut -d'.' -f1)"
CLOSEST_MINOR_VER="$(echo ${CLOSEST_VER} | cut -d'.' -f2)"
SHORT_COMMIT=$(echo $GITHUB_SHA | cut -c1-8)
BUILD_INFO="v${CLOSEST_VER}-${SHORT_COMMIT}"
BUILD_DATE="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
echo "major_tag=${CLOSEST_MAJOR_VER}" >> $GITHUB_ENV
echo "minor_tag=${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}" >> $GITHUB_ENV
echo "build_info=${BUILD_INFO}" >> $GITHUB_ENV
echo "build_date=${BUILD_DATE}" >> $GITHUB_ENV
echo "Build Info: ${BUILD_INFO}"
echo "Build Date: ${BUILD_DATE}"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Build container
run: >
docker buildx build --push
--platform linux/amd64,linux/arm/v7,linux/arm64/v8
--build-arg BUILD_DATE=${{ env.build_date }}
--build-arg SOURCE_COMMIT=$GITHUB_SHA
--build-arg DOCKER_REPO=szurubooru/server
-t "szurubooru/server:latest"
-t "szurubooru/server:${{ env.major_tag }}"
-t "szurubooru/server:${{ env.minor_tag }}"
./server

.github/workflows/run-unit-tests.yml (vendored, new file, 28 additions)

@ -0,0 +1,28 @@
name: Run unit tests
on: [push, pull_request]
jobs:
test-server:
name: Run pytest for server/
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Build test container
run: >
docker buildx build --load
--platform linux/amd64 --target testing
-t test_container
./server
- name: Run unit tests
run: >
docker run --rm -t test_container
--color=no
--cov-report=term-missing:skip-covered
--cov=szurubooru
szurubooru/


@ -1,28 +1,29 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: mixed-line-ending
- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.1.9
rev: v1.4.2
hooks:
- id: remove-tabs
- repo: https://github.com/psf/black
rev: 20.8b1
rev: '23.1.0'
hooks:
- id: black
files: 'server/'
types: [python]
language_version: python3.8
language_version: python3.9
- repo: https://github.com/timothycrosley/isort
rev: '5.4.2'
- repo: https://github.com/PyCQA/isort
rev: '5.12.0'
hooks:
- id: isort
files: 'server/'
@ -31,8 +32,8 @@ repos:
additional_dependencies:
- toml
- repo: https://github.com/prettier/prettier
rev: '2.1.1'
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.7.1
hooks:
- id: prettier
files: client/js/
@ -40,7 +41,7 @@ repos:
args: ['--config', 'client/.prettierrc.yml']
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v7.8.0
rev: v8.33.0
hooks:
- id: eslint
files: client/js/
@ -48,8 +49,8 @@ repos:
additional_dependencies:
- eslint-config-prettier
- repo: https://gitlab.com/pycqa/flake8
rev: '3.8.3'
- repo: https://github.com/PyCQA/flake8
rev: '6.0.0'
hooks:
- id: flake8
files: server/szurubooru/
@ -57,44 +58,5 @@ repos:
- flake8-print
args: ['--config=server/.flake8']
- repo: local
hooks:
- id: docker-build-client
name: Docker - build client
entry: bash -c 'docker build client/'
language: system
types: [file]
files: client/
pass_filenames: false
- id: docker-build-server
name: Docker - build server
entry: bash -c 'docker build server/'
language: system
types: [file]
files: server/
pass_filenames: false
- id: pytest
name: pytest
entry: bash -c 'docker run --rm -t $(docker build --target testing -q server/) szurubooru/'
language: system
types: [python]
files: server/szurubooru/
exclude: server/szurubooru/migrations/
pass_filenames: false
stages: [push]
- id: pytest-cov
name: pytest
entry: bash -c 'docker run --rm -t $(docker build --target testing -q server/) --cov-report=term-missing:skip-covered --cov=szurubooru szurubooru/'
language: system
types: [python]
files: server/szurubooru/
exclude: server/szurubooru/migrations/
pass_filenames: false
verbose: true
stages: [manual]
fail_fast: true
exclude: LICENSE.md


@ -3,7 +3,7 @@
Szurubooru is an image board engine inspired by services such as Danbooru,
Gelbooru and Moebooru dedicated for small and medium communities. Its name [has
its roots in Polish language and has onomatopoeic meaning of scraping or
scrubbing](http://sjp.pwn.pl/sjp/;2527372). It is pronounced as *shoorubooru*.
scrubbing](https://sjp.pwn.pl/sjp/;2527372). It is pronounced as *shoorubooru*.
## Features


@ -1,8 +1,7 @@
FROM node:lts as builder
FROM --platform=$BUILDPLATFORM node:lts as builder
WORKDIR /opt/app
COPY package.json package-lock.json ./
RUN npm install -g npm@lts
RUN npm install
COPY . ./
@ -12,7 +11,7 @@ ARG CLIENT_BUILD_ARGS=""
RUN BASE_URL="__BASEURL__" node build.js --gzip ${CLIENT_BUILD_ARGS}
FROM scratch as approot
FROM --platform=$BUILDPLATFORM scratch as approot
COPY docker-start.sh /


@ -300,10 +300,10 @@ a .access-key
background-size: 20px 20px
img
opacity: 0
width: 100%
width: auto
height: 100%
video
width: 100%
width: auto
height: 100%
.flexbox-dummy


@ -114,6 +114,29 @@
&[data-disabled]
background: rgba(200, 200, 200, 0.7)
.delete-flipper
display: inline-block
padding: 0.5em
box-sizing: border-box
border: 0
&:after
display: inline-block
width: 1em
height: 1em
text-align: center
line-height: 1em
font-size: 2.2em
&.delete
background: rgba(255, 0, 0, 0.7)
&:after
color: white
font-family: FontAwesome;
content: "\f1f8"; // fa-trash
&:not(.delete)
background: rgba(200, 200, 200, 0.7)
&:after
color: white
content: '-'
.thumbnail
width: 100%
@ -215,7 +238,19 @@
.append
@media (max-width: 1000px)
margin-left: 0
.bulk-edit-delete
&.opened
.start
@media (max-width: 1000px)
margin-left: 0
&:not(.opened)
.start
display: none
.append.open
@media (max-width: 1000px)
margin-left: 0
.start
margin-left: 1em
.safety
margin-right: 0.25em
&.safety-safe


@ -14,9 +14,11 @@ $cancel-button-color = tomato
&.inactive input[type=submit],
&.inactive .skip-duplicates
&.inactive .always-upload-similar
&.inactive .pause-remain-on-error
&.uploading input[type=submit],
&.uploading .skip-duplicates,
&.uploading .always-upload-similar
&.uploading .pause-remain-on-error
&:not(.uploading) .cancel
display: none
@ -44,6 +46,9 @@ $cancel-button-color = tomato
.always-upload-similar
margin-left: 1em
.pause-remain-on-error
margin-left: 1em
form>.messages
margin-top: 1em
@ -57,6 +62,14 @@ $cancel-button-color = tomato
margin: 0 0 1.2em 0
padding-left: 13em
img
width: 100%
height: 100%
video
width: 100%
height: 100%
&>.thumbnail-wrapper
float: left
width: 12em


@ -1,16 +0,0 @@
#!/bin/sh
CLOSEST_VER=$(git describe --tags --abbrev=0 ${SOURCE_COMMIT})
if git describe --exact-match --abbrev=0 ${SOURCE_COMMIT} 2> /dev/null; then
BUILD_INFO="v${CLOSEST_VER}"
else
BUILD_INFO="v${CLOSEST_VER}-edge-$(git rev-parse --short ${SOURCE_COMMIT})"
fi
echo "Using BUILD_INFO=${BUILD_INFO}"
docker build \
--build-arg BUILD_INFO=${BUILD_INFO} \
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg SOURCE_COMMIT \
--build-arg DOCKER_REPO \
-f $DOCKERFILE_PATH -t $IMAGE_NAME .


@ -1,19 +0,0 @@
#!/bin/sh
add_tag() {
echo "Also tagging image as ${DOCKER_REPO}:${1}"
docker tag $IMAGE_NAME $DOCKER_REPO:$1
docker push $DOCKER_REPO:$1
}
CLOSEST_VER=$(git describe --tags --abbrev=0)
CLOSEST_MAJOR_VER=$(echo ${CLOSEST_VER} | cut -d'.' -f1)
CLOSEST_MINOR_VER=$(echo ${CLOSEST_VER} | cut -d'.' -f2)
add_tag "${CLOSEST_MAJOR_VER}-edge"
add_tag "${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}-edge"
if git describe --exact-match --abbrev=0 2> /dev/null; then
add_tag "${CLOSEST_MAJOR_VER}"
add_tag "${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}"
fi


@ -42,6 +42,7 @@
'image/heic': 'HEIC',
'video/webm': 'WEBM',
'video/mp4': 'MPEG-4',
'video/quicktime': 'MOV',
'application/x-shockwave-flash': 'SWF',
}[ctx.post.mimeType] +
' (' +


@ -15,6 +15,7 @@
'image/heic': 'HEIC',
'video/webm': 'WEBM',
'video/mp4': 'MPEG-4',
'video/quicktime': 'MOV',
'application/x-shockwave-flash': 'SWF',
}[ctx.post.mimeType] %>
</a>


@ -7,7 +7,7 @@
<span class='skip-duplicates'>
<%= ctx.makeCheckbox({
text: 'Skip duplicates',
text: 'Skip duplicate',
name: 'skip-duplicates',
checked: false,
}) %>
@ -15,12 +15,20 @@
<span class='always-upload-similar'>
<%= ctx.makeCheckbox({
text: 'Always upload similar',
text: 'Force upload similar',
name: 'always-upload-similar',
checked: false,
}) %>
</span>
<span class='pause-remain-on-error'>
<%= ctx.makeCheckbox({
text: 'Pause on error',
name: 'pause-remain-on-error',
checked: true,
}) %>
</span>
<input type='button' value='Cancel' class='cancel'/>
</div>


@ -28,4 +28,11 @@
%><a href class='mousetrap button append close'>Stop editing safety</a><%
%></form><%
%><% } %><%
%><% if (ctx.canBulkDelete) { %><%
%><form class='horizontal bulk-edit bulk-edit-delete'><%
%><a href class='mousetrap button append open'>Mass delete</a><%
%><input class='mousetrap start' type='submit' value='Delete selected posts'/><%
%><a href class='mousetrap button append close'>Stop deleting</a><%
%></form><%
%><% } %><%
%></div>


@ -50,6 +50,10 @@
<% } %>
</span>
<% } %>
<% if (ctx.canBulkDelete && ctx.parameters && ctx.parameters.delete) { %>
<a href class='delete-flipper'>
</a>
<% } %>
</span>
</li>
<% } %>


@ -44,6 +44,7 @@ class PostListController {
enableSafety: api.safetyEnabled(),
canBulkEditTags: api.hasPrivilege("posts:bulk-edit:tags"),
canBulkEditSafety: api.hasPrivilege("posts:bulk-edit:safety"),
canBulkDelete: api.hasPrivilege("posts:bulk-edit:delete"),
bulkEdit: {
tags: this._bulkEditTags,
},
@ -52,6 +53,16 @@ class PostListController {
this._evtNavigate(e)
);
if (this._headerView._bulkDeleteEditor) {
this._headerView._bulkDeleteEditor.addEventListener(
"deleteSelectedPosts",
(e) => {
this._evtDeleteSelectedPosts(e);
}
);
}
this._postsMarkedForDeletion = [];
this._syncPageController();
}
@ -91,6 +102,38 @@ class PostListController {
e.detail.post.save().catch((error) => window.alert(error.message));
}
_evtMarkForDeletion(e) {
const postId = e.detail;
// Add or remove post from delete list
if (e.detail.delete) {
this._postsMarkedForDeletion.push(e.detail.post);
} else {
this._postsMarkedForDeletion = this._postsMarkedForDeletion.filter(
(x) => x.id != e.detail.post.id
);
}
}
_evtDeleteSelectedPosts(e) {
if (this._postsMarkedForDeletion.length == 0) return;
if (
confirm(
`Are you sure you want to delete ${this._postsMarkedForDeletion.length} posts?`
)
) {
Promise.all(
this._postsMarkedForDeletion.map((post) => post.delete())
)
.catch((error) => window.alert(error.message))
.then(() => {
this._postsMarkedForDeletion = [];
this._headerView._navigate();
});
}
}
_syncPageController() {
this._pageController.run({
parameters: this._ctx.parameters,
@ -117,8 +160,10 @@ class PostListController {
canBulkEditSafety: api.hasPrivilege(
"posts:bulk-edit:safety"
),
canBulkDelete: api.hasPrivilege("posts:bulk-edit:delete"),
bulkEdit: {
tags: this._bulkEditTags,
markedForDeletion: this._postsMarkedForDeletion,
},
postFlow: settings.get().postFlow,
});
@ -128,6 +173,9 @@ class PostListController {
view.addEventListener("changeSafety", (e) =>
this._evtChangeSafety(e)
);
view.addEventListener("markForDeletion", (e) =>
this._evtMarkForDeletion(e)
);
return view;
},
});


@ -90,21 +90,30 @@ class PostUploadController {
uploadable
);
}
if (e.detail.pauseRemainOnError) {
return Promise.reject();
}
})
),
Promise.resolve()
)
.then(() => {
if (anyFailures) {
this._view.showError(genericErrorMessage);
this._view.enableForm();
} else {
return Promise.reject();
}
})
.then(
() => {
this._view.clearMessages();
misc.disableExitConfirmation();
const ctx = router.show(uri.formatClientLink("posts"));
ctx.controller.showSuccess("Posts uploaded.");
},
(error) => {
this._view.showError(genericErrorMessage);
this._view.enableForm();
}
});
);
}
_uploadSinglePost(uploadable, skipDuplicates, alwaysUploadSimilar) {


@ -31,9 +31,8 @@ class UserController {
userTokenPromise = UserToken.get(userName).then(
(userTokens) => {
return userTokens.map((token) => {
token.isCurrentAuthToken = api.isCurrentAuthToken(
token
);
token.isCurrentAuthToken =
api.isCurrentAuthToken(token);
return token;
});
},


@ -45,9 +45,8 @@ class ExpanderControl {
// eslint-disable-next-line accessor-pairs
set title(newTitle) {
if (this._expanderNode) {
this._expanderNode.querySelector(
"header span"
).textContent = newTitle;
this._expanderNode.querySelector("header span").textContent =
newTitle;
}
}


@ -203,9 +203,8 @@ class PostEditSidebarControl extends events.EventTarget {
);
if (this._formNode) {
const inputNodes = this._formNode.querySelectorAll(
"input, textarea"
);
const inputNodes =
this._formNode.querySelectorAll("input, textarea");
for (let node of inputNodes) {
node.addEventListener("change", (e) =>
this.dispatchEvent(new CustomEvent("change"))


@ -727,9 +727,8 @@ class PostNotesOverlayControl extends events.EventTarget {
}
_showNoteText(note) {
this._textNode.querySelector(
".wrapper"
).innerHTML = misc.formatMarkdown(note.text);
this._textNode.querySelector(".wrapper").innerHTML =
misc.formatMarkdown(note.text);
this._textNode.style.display = "block";
const bodyRect = document.body.getBoundingClientRect();
const noteRect = this._textNode.getBoundingClientRect();


@ -65,17 +65,6 @@ class TagPermalinkFixWrapper extends BaseMarkdownWrapper {
// post, user and tags permalinks
class EntityPermalinkWrapper extends BaseMarkdownWrapper {
preprocess(text) {
// URL-based permalinks
text = text.replace(new RegExp("\\b/post/(\\d+)/?\\b", "g"), "@$1");
text = text.replace(
new RegExp("\\b/tag/([a-zA-Z0-9_-]+?)/?", "g"),
"#$1"
);
text = text.replace(
new RegExp("\\b/user/([a-zA-Z0-9_-]+?)/?", "g"),
"+$1"
);
text = text.replace(
/(^|^\(|(?:[^\]])\(|[\s<>\[\]\)])([+#@][a-zA-Z0-9_-]+)/g,
"$1[$2]($2)"
@ -136,12 +125,8 @@ function createRenderer() {
const renderer = new marked.Renderer();
renderer.image = (href, title, alt) => {
let [
_,
url,
width,
height,
] = /^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/.exec(href);
let [_, url, width, height] =
/^(.+?)(?:\s=\s*(\d*)\s*x\s*(\d*)\s*)?$/.exec(href);
let res = '<img src="' + sanitize(url) + '" alt="' + sanitize(alt);
if (width) {
res += '" width="' + width;
@ -174,7 +159,7 @@ function formatMarkdown(text) {
for (let wrapper of wrappers) {
text = wrapper.preprocess(text);
}
text = marked(text, options);
text = marked.parse(text, options);
wrappers.reverse();
for (let wrapper of wrappers) {
text = wrapper.postprocess(text);
@ -200,7 +185,7 @@ function formatInlineMarkdown(text) {
for (let wrapper of wrappers) {
text = wrapper.preprocess(text);
}
text = marked.inlineLexer(text, [], options);
text = marked.parseInline(text, options);
wrappers.reverse();
for (let wrapper of wrappers) {
text = wrapper.postprocess(text);


@ -25,9 +25,8 @@ class PostMainView {
views.replaceContent(this._hostNode, sourceNode);
views.syncScrollPosition();
const topNavigationNode = document.body.querySelector(
"#top-navigation"
);
const topNavigationNode =
document.body.querySelector("#top-navigation");
this._postContentControl = new PostContentControl(
postContainerNode,


@ -22,6 +22,7 @@ function _mimeTypeToPostType(mimeType) {
"image/heic": "image",
"video/mp4": "video",
"video/webm": "video",
"video/quicktime": "video",
}[mimeType] || "unknown"
);
}
@ -120,6 +121,7 @@ class Url extends Uploadable {
heif: "image/heif",
heic: "image/heic",
mp4: "video/mp4",
mov: "video/quicktime",
webm: "video/webm",
};
for (let extension of Object.keys(mime)) {
@ -285,7 +287,7 @@ class PostUploadView extends events.EventTarget {
for (let uploadable of this._uploadables) {
this._updateUploadableFromDom(uploadable);
}
this._submitButtonNode.value = "Resume upload";
this._submitButtonNode.value = "Resume";
this._emit("submit");
}
@ -360,8 +362,10 @@ class PostUploadView extends events.EventTarget {
detail: {
uploadables: this._uploadables,
skipDuplicates: this._skipDuplicatesCheckboxNode.checked,
alwaysUploadSimilar: this._alwaysUploadSimilarCheckboxNode
.checked,
alwaysUploadSimilar:
this._alwaysUploadSimilarCheckboxNode.checked,
pauseRemainOnError:
this._pauseRemainOnErrorCheckboxNode.checked,
},
})
);
@ -431,6 +435,12 @@ class PostUploadView extends events.EventTarget {
);
}
get _pauseRemainOnErrorCheckboxNode() {
return this._hostNode.querySelector(
"form [name=pause-remain-on-error]"
);
}
get _submitButtonNode() {
return this._hostNode.querySelector("form [type=submit]");
}


@ -141,6 +141,34 @@ class BulkTagEditor extends BulkEditor {
}
}
class BulkDeleteEditor extends BulkEditor {
constructor(hostNode) {
super(hostNode);
this._hostNode.addEventListener("submit", (e) =>
this._evtFormSubmit(e)
);
}
_evtFormSubmit(e) {
e.preventDefault();
this.dispatchEvent(
new CustomEvent("deleteSelectedPosts", { detail: {} })
);
}
_evtOpenLinkClick(e) {
e.preventDefault();
this.toggleOpen(true);
this.dispatchEvent(new CustomEvent("open", { detail: {} }));
}
_evtCloseLinkClick(e) {
e.preventDefault();
this.toggleOpen(false);
this.dispatchEvent(new CustomEvent("close", { detail: {} }));
}
}
class PostsHeaderView extends events.EventTarget {
constructor(ctx) {
super();
@ -186,6 +214,13 @@ class PostsHeaderView extends events.EventTarget {
this._bulkEditors.push(this._bulkSafetyEditor);
}
if (this._bulkEditDeleteNode) {
this._bulkDeleteEditor = new BulkDeleteEditor(
this._bulkEditDeleteNode
);
this._bulkEditors.push(this._bulkDeleteEditor);
}
for (let editor of this._bulkEditors) {
editor.addEventListener("submit", (e) => {
this._navigate();
@ -204,6 +239,8 @@ class PostsHeaderView extends events.EventTarget {
this._openBulkEditor(this._bulkTagEditor);
} else if (ctx.parameters.safety && this._bulkSafetyEditor) {
this._openBulkEditor(this._bulkSafetyEditor);
} else if (ctx.parameters.delete && this._bulkDeleteEditor) {
this._openBulkEditor(this._bulkDeleteEditor);
}
}
@ -227,6 +264,10 @@ class PostsHeaderView extends events.EventTarget {
return this._hostNode.querySelector(".bulk-edit-safety");
}
get _bulkEditDeleteNode() {
return this._hostNode.querySelector(".bulk-edit-delete");
}
_openBulkEditor(editor) {
editor.toggleOpen(true);
this._hideBulkEditorsExcept(editor);
@ -253,9 +294,8 @@ class PostsHeaderView extends events.EventTarget {
e.target.classList.toggle("disabled");
const safety = e.target.getAttribute("data-safety");
let browsingSettings = settings.get();
browsingSettings.listPosts[safety] = !browsingSettings.listPosts[
safety
];
browsingSettings.listPosts[safety] =
!browsingSettings.listPosts[safety];
settings.save(browsingSettings, true);
this.dispatchEvent(
new CustomEvent("navigate", {
@ -294,6 +334,10 @@ class PostsHeaderView extends events.EventTarget {
this._bulkSafetyEditor && this._bulkSafetyEditor.opened
? "1"
: null;
parameters.delete =
this._bulkDeleteEditor && this._bulkDeleteEditor.opened
? "1"
: null;
this.dispatchEvent(
new CustomEvent("navigate", { detail: { parameters: parameters } })
);


@ -39,6 +39,13 @@ class PostsPageView extends events.EventTarget {
);
}
}
const deleteFlipperNode = this._getDeleteFlipperNode(listItemNode);
if (deleteFlipperNode) {
deleteFlipperNode.addEventListener("click", (e) =>
this._evtBulkToggleDeleteClick(e, post)
);
}
}
this._syncBulkEditorsHighlights();
@ -56,6 +63,10 @@ class PostsPageView extends events.EventTarget {
return listItemNode.querySelector(".safety-flipper");
}
_getDeleteFlipperNode(listItemNode) {
return listItemNode.querySelector(".delete-flipper");
}
_evtPostChange(e) {
const listItemNode = this._postIdToListItemNode[e.detail.post.id];
for (let node of listItemNode.querySelectorAll("[data-disabled]")) {
@ -99,6 +110,20 @@ class PostsPageView extends events.EventTarget {
);
}
_evtBulkToggleDeleteClick(e, post) {
e.preventDefault();
const linkNode = e.target;
linkNode.classList.toggle("delete");
this.dispatchEvent(
new CustomEvent("markForDeletion", {
detail: {
post,
delete: linkNode.classList.contains("delete"),
},
})
);
}
_syncBulkEditorsHighlights() {
for (let listItemNode of this._listItemNodes) {
const postId = listItemNode.getAttribute("data-post-id");
@ -123,6 +148,16 @@ class PostsPageView extends events.EventTarget {
);
}
}
const deleteFlipperNode = this._getDeleteFlipperNode(listItemNode);
if (deleteFlipperNode) {
deleteFlipperNode.classList.toggle(
"delete",
this._ctx.bulkEdit.markedForDeletion.some(
(x) => x.id == postId
)
);
}
}
}
}


@ -72,9 +72,8 @@ class UserTokenView extends events.EventTarget {
_evtDelete(e) {
e.preventDefault();
const userToken = this._tokens[
parseInt(e.target.getAttribute("data-token-id"))
];
const userToken =
this._tokens[parseInt(e.target.getAttribute("data-token-id"))];
this.dispatchEvent(
new CustomEvent("delete", {
detail: {
@ -110,9 +109,8 @@ class UserTokenView extends events.EventTarget {
_evtChangeNoteClick(e) {
e.preventDefault();
const userToken = this._tokens[
parseInt(e.target.getAttribute("data-token-id"))
];
const userToken =
this._tokens[parseInt(e.target.getAttribute("data-token-id"))];
const text = window.prompt(
"Please enter the new name:",
userToken.note !== null ? userToken.note : undefined

client/package-lock.json (generated, 274 lines changed)

@ -10,7 +10,7 @@
"font-awesome": "^4.7.0",
"ios-inner-height": "^1.0.3",
"js-cookie": "^2.2.0",
"marked": "^0.7.0",
"marked": "^4.0.10",
"mousetrap": "^1.6.2",
"nprogress": "^0.2.0",
"superagent": "^3.8.3"
@ -28,7 +28,7 @@
"jimp": "^0.13.0",
"pretty-error": "^3.0.3",
"stylus": "^0.54.8",
"terser": "^3.7.7",
"terser": "^4.8.1",
"underscore": "^1.12.1",
"watchify": "^4.0.0",
"ws": "^7.4.6"
@ -476,24 +476,6 @@
"node": ">= 8"
}
},
"node_modules/array-filter": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz",
"integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=",
"dev": true
},
"node_modules/array-map": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz",
"integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=",
"dev": true
},
"node_modules/array-reduce": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz",
"integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=",
"dev": true
},
"node_modules/asn1.js": {
"version": "4.10.1",
"resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz",
@ -1506,16 +1488,15 @@
"dev": true
},
"node_modules/cached-path-relative": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.0.2.tgz",
"integrity": "sha512-5r2GqsoEb4qMTTN9J+WzXfjov+hjxT+j3u5K+kIVNIwAd99DLCJE9pBIMP1qVeybV6JiijL385Oz0DcYxfbOIg==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.1.0.tgz",
"integrity": "sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==",
"dev": true
},
"node_modules/call-bind": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
"integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
"dev": true,
"dependencies": {
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2"
@ -1682,9 +1663,9 @@
"dev": true
},
"node_modules/cookiejar": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.2.tgz",
"integrity": "sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA=="
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz",
"integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="
},
"node_modules/core-js": {
"version": "2.5.7",
@ -1850,9 +1831,9 @@
}
},
"node_modules/decode-uri-component": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=",
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz",
"integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==",
"dev": true,
"engines": {
"node": ">=0.10"
@ -2254,8 +2235,7 @@
"node_modules/function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
"dev": true
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
},
"node_modules/get-assigned-identifiers": {
"version": "1.2.0",
@ -2267,7 +2247,6 @@
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
"integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
"dev": true,
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
@ -2351,7 +2330,6 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"dev": true,
"dependencies": {
"function-bind": "^1.1.1"
},
@ -2384,7 +2362,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
"integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==",
"dev": true,
"engines": {
"node": ">= 0.4"
},
@ -2859,9 +2836,9 @@
"dev": true
},
"node_modules/jpeg-js": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.0.tgz",
"integrity": "sha512-960VHmtN1vTpasX/1LupLohdP5odwAT7oK/VSm6mW0M58LbrBnowLAPWAZhWGhDAGjzbMnPXZxzB/QYgBwkN0w==",
"version": "0.4.4",
"resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz",
"integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==",
"dev": true
},
"node_modules/js-cookie": {
@ -2997,14 +2974,14 @@
"dev": true
},
"node_modules/marked": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/marked/-/marked-0.7.0.tgz",
"integrity": "sha512-c+yYdCZJQrsRjTPhUx7VKkApw9bwDkNbHUKo1ovgcfDjb2kc8rLuRbIFyXL5WOEUwzSSKo3IXpph2K6DqB/KZg==",
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/marked/-/marked-4.0.10.tgz",
"integrity": "sha512-+QvuFj0nGgO970fySghXGmuw+Fd0gD2x3+MqCWLIPf5oxdv1Ka6b2q+z9RP01P/IaKPMEramy+7cNy/Lw8c3hw==",
"bin": {
"marked": "bin/marked"
"marked": "bin/marked.js"
},
"engines": {
"node": ">=0.10.0"
"node": ">= 12"
}
},
"node_modules/md5.js": {
@ -3108,9 +3085,9 @@
}
},
"node_modules/minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"node_modules/mkdirp": {
@ -3224,7 +3201,6 @@
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz",
"integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==",
"dev": true,
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@ -3385,9 +3361,9 @@
}
},
"node_modules/path-parse": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true
},
"node_modules/path-platform": {
@ -3507,11 +3483,17 @@
"dev": true
},
"node_modules/qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==",
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
"dependencies": {
"side-channel": "^1.0.4"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/querystring": {
@ -3770,15 +3752,22 @@
}
},
"node_modules/shell-quote": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.6.1.tgz",
"integrity": "sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c=",
"dev": true,
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz",
"integrity": "sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==",
"dev": true
},
"node_modules/side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
"dependencies": {
"array-filter": "~0.0.0",
"array-map": "~0.0.0",
"array-reduce": "~0.0.0",
"jsonify": "~0.0.0"
"call-bind": "^1.0.0",
"get-intrinsic": "^1.0.2",
"object-inspect": "^1.9.0"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/simple-concat": {
@ -3983,12 +3972,6 @@
"minimist": "^1.1.0"
}
},
"node_modules/subarg/node_modules/minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"dev": true
},
"node_modules/superagent": {
"version": "3.8.3",
"resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz",
@ -4028,26 +4011,26 @@
}
},
"node_modules/terser": {
"version": "3.7.7",
"resolved": "https://registry.npmjs.org/terser/-/terser-3.7.7.tgz",
"integrity": "sha512-RRLIxE7S52vSOI9cEbOaisgBd2y6MNgfg2ihUkidsFnuP1eDmZ79+lBWbyvgfFTAc/r8nSjL0k3cpZDDIYiYiA==",
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/terser/-/terser-4.8.1.tgz",
"integrity": "sha512-4GnLC0x667eJG0ewJTa6z/yXrbLGv80D9Ru6HIpCQmO+Q4PfEtBFi0ObSckqwL6VyQv/7ENJieXHo2ANmdQwgw==",
"dev": true,
"dependencies": {
"commander": "~2.14.1",
"commander": "^2.20.0",
"source-map": "~0.6.1",
"source-map-support": "~0.5.6"
"source-map-support": "~0.5.12"
},
"bin": {
"terser": "bin/uglifyjs"
"terser": "bin/terser"
},
"engines": {
"node": ">=0.8.0"
"node": ">=6.0.0"
}
},
"node_modules/terser/node_modules/commander": {
"version": "2.14.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.14.1.tgz",
"integrity": "sha512-+YR16o3rK53SmWHU3rEM3tPAh2rwb1yPcQX5irVn7mb0gXbwuCCrnkbV5+PBfETdfg1vui07nM6PCG1zndcjQw==",
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"dev": true
},
"node_modules/terser/node_modules/source-map": {
@ -4060,9 +4043,9 @@
}
},
"node_modules/terser/node_modules/source-map-support": {
"version": "0.5.6",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.6.tgz",
"integrity": "sha512-N4KXEz7jcKqPf2b2vZF11lQIz9W5ZMuUcIOGj243lduidkf2fjkVKJS9vNxVWn3u/uxX38AcE8U9nnH9FPcq+g==",
"version": "0.5.21",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
"dev": true,
"dependencies": {
"buffer-from": "^1.0.0",
@ -5047,24 +5030,6 @@
"picomatch": "^2.0.4"
}
},
"array-filter": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz",
"integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=",
"dev": true
},
"array-map": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz",
"integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=",
"dev": true
},
"array-reduce": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz",
"integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=",
"dev": true
},
"asn1.js": {
"version": "4.10.1",
"resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz",
@ -6053,16 +6018,15 @@
"dev": true
},
"cached-path-relative": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.0.2.tgz",
"integrity": "sha512-5r2GqsoEb4qMTTN9J+WzXfjov+hjxT+j3u5K+kIVNIwAd99DLCJE9pBIMP1qVeybV6JiijL385Oz0DcYxfbOIg==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/cached-path-relative/-/cached-path-relative-1.1.0.tgz",
"integrity": "sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==",
"dev": true
},
"call-bind": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
"integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
"dev": true,
"requires": {
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2"
@ -6211,9 +6175,9 @@
"dev": true
},
"cookiejar": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.2.tgz",
"integrity": "sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA=="
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz",
"integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="
},
"core-js": {
"version": "2.5.7",
@ -6363,9 +6327,9 @@
}
},
"decode-uri-component": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz",
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=",
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz",
"integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==",
"dev": true
},
"define-properties": {
@ -6697,8 +6661,7 @@
"function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
"dev": true
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
},
"get-assigned-identifiers": {
"version": "1.2.0",
@ -6710,7 +6673,6 @@
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
"integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
"dev": true,
"requires": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
@ -6778,7 +6740,6 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"dev": true,
"requires": {
"function-bind": "^1.1.1"
}
@ -6801,8 +6762,7 @@
"has-symbols": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz",
"integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==",
"dev": true
"integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw=="
},
"hash-base": {
"version": "3.0.4",
@ -7158,9 +7118,9 @@
}
},
"jpeg-js": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.0.tgz",
"integrity": "sha512-960VHmtN1vTpasX/1LupLohdP5odwAT7oK/VSm6mW0M58LbrBnowLAPWAZhWGhDAGjzbMnPXZxzB/QYgBwkN0w==",
"version": "0.4.4",
"resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.4.tgz",
"integrity": "sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==",
"dev": true
},
"js-cookie": {
@ -7280,9 +7240,9 @@
"dev": true
},
"marked": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/marked/-/marked-0.7.0.tgz",
"integrity": "sha512-c+yYdCZJQrsRjTPhUx7VKkApw9bwDkNbHUKo1ovgcfDjb2kc8rLuRbIFyXL5WOEUwzSSKo3IXpph2K6DqB/KZg=="
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/marked/-/marked-4.0.10.tgz",
"integrity": "sha512-+QvuFj0nGgO970fySghXGmuw+Fd0gD2x3+MqCWLIPf5oxdv1Ka6b2q+z9RP01P/IaKPMEramy+7cNy/Lw8c3hw=="
},
"md5.js": {
"version": "1.3.4",
@ -7364,9 +7324,9 @@
}
},
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
"dev": true
},
"mkdirp": {
@ -7466,8 +7426,7 @@
"object-inspect": {
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.10.3.tgz",
"integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==",
"dev": true
"integrity": "sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw=="
},
"object-keys": {
"version": "1.1.1",
@ -7607,9 +7566,9 @@
"dev": true
},
"path-parse": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
"integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
"dev": true
},
"path-platform": {
@ -7705,9 +7664,12 @@
"dev": true
},
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
"requires": {
"side-channel": "^1.0.4"
}
},
"querystring": {
"version": "0.2.0",
@ -7936,15 +7898,19 @@
}
},
"shell-quote": {
"version": "1.6.1",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.6.1.tgz",
"integrity": "sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c=",
"dev": true,
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz",
"integrity": "sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==",
"dev": true
},
"side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
"requires": {
"array-filter": "~0.0.0",
"array-map": "~0.0.0",
"array-reduce": "~0.0.0",
"jsonify": "~0.0.0"
"call-bind": "^1.0.0",
"get-intrinsic": "^1.0.2",
"object-inspect": "^1.9.0"
}
},
"simple-concat": {
@ -8116,14 +8082,6 @@
"dev": true,
"requires": {
"minimist": "^1.1.0"
},
"dependencies": {
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
"dev": true
}
}
},
"superagent": {
@ -8159,20 +8117,20 @@
}
},
"terser": {
"version": "3.7.7",
"resolved": "https://registry.npmjs.org/terser/-/terser-3.7.7.tgz",
"integrity": "sha512-RRLIxE7S52vSOI9cEbOaisgBd2y6MNgfg2ihUkidsFnuP1eDmZ79+lBWbyvgfFTAc/r8nSjL0k3cpZDDIYiYiA==",
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/terser/-/terser-4.8.1.tgz",
"integrity": "sha512-4GnLC0x667eJG0ewJTa6z/yXrbLGv80D9Ru6HIpCQmO+Q4PfEtBFi0ObSckqwL6VyQv/7ENJieXHo2ANmdQwgw==",
"dev": true,
"requires": {
"commander": "~2.14.1",
"commander": "^2.20.0",
"source-map": "~0.6.1",
"source-map-support": "~0.5.6"
"source-map-support": "~0.5.12"
},
"dependencies": {
"commander": {
"version": "2.14.1",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.14.1.tgz",
"integrity": "sha512-+YR16o3rK53SmWHU3rEM3tPAh2rwb1yPcQX5irVn7mb0gXbwuCCrnkbV5+PBfETdfg1vui07nM6PCG1zndcjQw==",
"version": "2.20.3",
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"dev": true
},
"source-map": {
@ -8182,9 +8140,9 @@
"dev": true
},
"source-map-support": {
"version": "0.5.6",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.6.tgz",
"integrity": "sha512-N4KXEz7jcKqPf2b2vZF11lQIz9W5ZMuUcIOGj243lduidkf2fjkVKJS9vNxVWn3u/uxX38AcE8U9nnH9FPcq+g==",
"version": "0.5.21",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
"dev": true,
"requires": {
"buffer-from": "^1.0.0",


@ -3,14 +3,15 @@
"private": true,
"scripts": {
"build": "node build.js",
"watch": "node build.js --watch"
"watch": "node build.js --watch",
"build-container": "docker build -t szurubooru/client:dev ."
},
"dependencies": {
"dompurify": "^2.0.17",
"font-awesome": "^4.7.0",
"ios-inner-height": "^1.0.3",
"js-cookie": "^2.2.0",
"marked": "^0.7.0",
"marked": "^4.0.10",
"mousetrap": "^1.6.2",
"nprogress": "^0.2.0",
"superagent": "^3.8.3"
@ -28,7 +29,7 @@
"jimp": "^0.13.0",
"pretty-error": "^3.0.3",
"stylus": "^0.54.8",
"terser": "^3.7.7",
"terser": "^4.8.1",
"underscore": "^1.12.1",
"watchify": "^4.0.0",
"ws": "^7.4.6"


@ -37,6 +37,7 @@
- [Creating post](#creating-post)
- [Updating post](#updating-post)
- [Getting post](#getting-post)
- [Getting around post](#getting-around-post)
- [Deleting post](#deleting-post)
- [Merging posts](#merging-posts)
- [Rating post](#rating-post)
@ -322,7 +323,7 @@ data.
{
"name": <name>,
"color": <color>,
"order": <order> // optional
"order": <order>
}
```
@ -951,6 +952,29 @@ data.
Retrieves information about an existing post.
## Getting around post
- **Request**
`GET /post/<id>/around`
- **Output**
```json5
{
"prev": <post-resource>,
"next": <post-resource>
}
```
- **Errors**
- the post does not exist
- privileges are too low
- **Description**
Retrieves information about posts that are before or after an existing post.
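For example (a minimal sketch, assuming an instance served at `http://localhost:8080`, the API mounted under `/api`, and HTTP basic authentication), the endpoint could be queried like this:
```console
$ curl -u "user:password" \
    -H "Accept: application/json" \
    "http://localhost:8080/api/post/1337/around"
```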
## Deleting post
- **Request**
@ -2467,7 +2491,7 @@ One file together with its metadata posted to the site.
## Micro post
**Description**
A [post resource](#post) stripped down to `name` and `thumbnailUrl` fields.
A [post resource](#post) stripped down to `id` and `thumbnailUrl` fields.
## Note
**Description**


@ -34,33 +34,79 @@ and Docker Compose (version 1.6.0 or greater) already installed.
Read the comments to guide you. Note that `.env` should be in the root
directory of this repository.
### Running the Application
4. Pull the containers:
Download containers:
```console
user@host:szuru$ docker-compose pull
```
This pulls the latest containers from docker.io:
```console
user@host:szuru$ docker-compose pull
```
For first run, it is recommended to start the database separately:
```console
user@host:szuru$ docker-compose up -d sql
```
If you have modified the application's source and would like to manually
build it, follow the instructions in [**Building**](#Building) instead,
then read here once you're done.
To start all containers:
```console
user@host:szuru$ docker-compose up -d
```
5. Run it!
To view/monitor the application logs:
```console
user@host:szuru$ docker-compose logs -f
# (CTRL+C to exit)
```
For first run, it is recommended to start the database separately:
```console
user@host:szuru$ docker-compose up -d sql
```
To start all containers:
```console
user@host:szuru$ docker-compose up -d
```
To view/monitor the application logs:
```console
user@host:szuru$ docker-compose logs -f
# (CTRL+C to exit)
```
### Building
1. Edit `docker-compose.yml` to tell Docker to build instead of pull containers:
```diff yaml
...
server:
- image: szurubooru/server:latest
+ build: server
...
client:
- image: szurubooru/client:latest
+ build: client
...
```
You can choose to build either one from source.
2. Build the containers:
```console
user@host:szuru$ docker-compose build
```
That will attempt to build both containers, but you can specify `client`
or `server` to make it build only one.
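For example, to rebuild only the server image:
```console
user@host:szuru$ docker-compose build server
```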
If `docker-compose build` spits out:
```
ERROR: Service 'server' failed to build: failed to parse platform : "" is an invalid component of "": platform specifier component must match "^[A-Za-z0-9_-]+$": invalid argument
```
...you will need to export Docker BuildKit flags:
```console
user@host:szuru$ export DOCKER_BUILDKIT=1; export COMPOSE_DOCKER_CLI_BUILD=1
```
...and run `docker-compose build` again.
*Note: If your changes are not taking effect in your builds, consider building
with `--no-cache`.*
To stop all containers:
```console
user@host:szuru$ docker-compose down
```
### Additional Features


@ -10,6 +10,12 @@ BUILD_INFO=latest
# otherwise the port specified here will be publicly accessible
PORT=8080
# How many waitress threads to start
# 4 is the default number of threads. If you experience performance
# degradation with a large number of posts, increasing this may
# improve performance, since waitress is most likely clogging up with tasks.
THREADS=4
# URL base to run szurubooru under
# See "Additional Features" section in INSTALL.md
BASE_URL=/
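A minimal sketch of tuning this, assuming the API service is named `server` in `docker-compose.yml`: set the value in `.env` and recreate the container.
```console
user@host:szuru$ sed -i 's/^THREADS=.*/THREADS=8/' .env
user@host:szuru$ docker-compose up -d server
```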


@ -21,6 +21,7 @@ services:
#POSTGRES_DB: defaults to same as POSTGRES_USER
#POSTGRES_PORT: 5432
#LOG_SQL: 0 (1 for verbose SQL logs)
THREADS:
volumes:
- "${MOUNT_DATA}:/data"
- "./server/config.yaml:/opt/app/config.yaml"


@ -7,8 +7,13 @@ WORKDIR /opt/app
RUN apk --no-cache add \
python3 \
python3-dev \
ffmpeg \
py3-pip \
build-base \
libheif \
libheif-dev \
libavif \
libavif-dev \
ffmpeg \
# from requirements.txt:
py3-yaml \
py3-psycopg2 \
@ -19,25 +24,20 @@ RUN apk --no-cache add \
py3-pynacl \
py3-tz \
py3-pyrfc3339 \
build-base \
&& apk --no-cache add \
libheif \
libavif \
libheif-dev \
libavif-dev \
&& pip3 install --no-cache-dir --disable-pip-version-check \
alembic \
"alembic>=0.8.5" \
"coloredlogs==5.0" \
"pyheif==0.6.1" \
"heif-image-plugin>=0.3.2" \
youtube_dl \
pillow-avif-plugin \
pyheif-pillow-opener \
"pillow-avif-plugin>=1.1.0" \
&& apk --no-cache del py3-pip
COPY ./ /opt/app/
RUN rm -rf /opt/app/szurubooru/tests
FROM prereqs as testing
FROM --platform=$BUILDPLATFORM prereqs as testing
WORKDIR /opt/app
RUN apk --no-cache add \
@ -83,6 +83,9 @@ ARG PORT=6666
ENV PORT=${PORT}
EXPOSE ${PORT}
ARG THREADS=4
ENV THREADS=${THREADS}
VOLUME ["/data/"]
ARG DOCKER_REPO


@ -115,6 +115,7 @@ privileges:
'posts:favorite': regular
'posts:bulk-edit:tags': power
'posts:bulk-edit:safety': power
'posts:bulk-edit:delete': power
'tags:create': regular
'tags:edit:names': power


@ -4,5 +4,5 @@ cd /opt/app
alembic upgrade head
echo "Starting szurubooru API on port ${PORT}"
exec waitress-serve-3 --port ${PORT} szurubooru.facade:app
echo "Starting szurubooru API on port ${PORT} - Running on ${THREADS} threads"
exec waitress-serve-3 --port ${PORT} --threads ${THREADS} szurubooru.facade:app


@ -1,7 +0,0 @@
#!/bin/sh
docker build \
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg SOURCE_COMMIT \
--build-arg DOCKER_REPO \
-f $DOCKERFILE_PATH -t $IMAGE_NAME .


@ -1,19 +0,0 @@
#!/bin/sh
add_tag() {
echo "Also tagging image as ${DOCKER_REPO}:${1}"
docker tag $IMAGE_NAME $DOCKER_REPO:$1
docker push $DOCKER_REPO:$1
}
CLOSEST_VER=$(git describe --tags --abbrev=0)
CLOSEST_MAJOR_VER=$(echo ${CLOSEST_VER} | cut -d'.' -f1)
CLOSEST_MINOR_VER=$(echo ${CLOSEST_VER} | cut -d'.' -f2)
add_tag "${CLOSEST_MAJOR_VER}-edge"
add_tag "${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}-edge"
if git describe --exact-match --abbrev=0 2> /dev/null; then
add_tag "${CLOSEST_MAJOR_VER}"
add_tag "${CLOSEST_MAJOR_VER}.${CLOSEST_MINOR_VER}"
fi


@ -1,8 +0,0 @@
#!/bin/sh
set -e
docker run --rm \
-t $(docker build --target testing -q .) \
--color=no szurubooru/
exit $?


@ -1,14 +1,15 @@
alembic>=0.8.5
pyyaml>=3.11
psycopg2-binary>=2.6.1
SQLAlchemy>=1.0.12, <1.4
coloredlogs==5.0
certifi>=2017.11.5
coloredlogs==5.0
heif-image-plugin==0.3.2
numpy>=1.8.2
pillow>=4.3.0
pynacl>=1.2.1
pytz>=2018.3
pyRFC3339>=1.0
pillow-avif-plugin>=1.1.0
pyheif-pillow-opener>=0.1.0
pillow>=4.3.0
psycopg2-binary>=2.6.1
pyheif==0.6.1
pynacl>=1.2.1
pyRFC3339>=1.0
pytz>=2018.3
pyyaml>=3.11
SQLAlchemy>=1.0.12, <1.4
youtube_dl


@ -91,6 +91,15 @@ def reset_filenames() -> None:
rename_in_dir("posts/custom-thumbnails/")
def regenerate_thumbnails() -> None:
for post in db.session.query(model.Post).all():
print("Generating tumbnail for post %d ..." % post.post_id, end="\r")
try:
postfuncs.generate_post_thumbnail(post)
except Exception:
pass
def main() -> None:
parser_top = ArgumentParser(
description="Collection of CLI commands for an administrator to use",
@ -114,6 +123,12 @@ def main() -> None:
help="reset and rename the content and thumbnail "
"filenames in case of a lost/changed secret key",
)
parser.add_argument(
"--regenerate-thumbnails",
action="store_true",
help="regenerate the thumbnails for posts if the "
"thumbnail files are missing",
)
command = parser_top.parse_args()
try:
@ -123,6 +138,8 @@ def main() -> None:
check_audio()
elif command.reset_filenames:
reset_filenames()
elif command.regenerate_thumbnails:
regenerate_thumbnails()
except errors.BaseError as e:
print(e, file=stderr)


@ -33,7 +33,7 @@ def _docker_config() -> Dict:
"show_sql": int(os.getenv("LOG_SQL", 0)),
"data_url": os.getenv("DATA_URL", "data/"),
"data_dir": "/data/",
"database": "postgres://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s"
"database": "postgresql://%(user)s:%(pass)s@%(host)s:%(port)d/%(db)s"
% {
"user": os.getenv("POSTGRES_USER"),
"pass": os.getenv("POSTGRES_PASSWORD"),


@ -135,7 +135,7 @@ _live_migrations = (
def create_app() -> Callable[[Any, Any], Any]:
""" Create a WSGI compatible App object. """
"""Create a WSGI compatible App object."""
validate_config()
coloredlogs.install(fmt="[%(asctime)-15s] %(name)s %(message)s")
if config.config["debug"]:


@ -25,7 +25,7 @@ RANK_MAP = OrderedDict(
def get_password_hash(salt: str, password: str) -> Tuple[str, int]:
""" Retrieve argon2id password hash. """
"""Retrieve argon2id password hash."""
return (
pwhash.argon2id.str(
(config.config["secret"] + salt + password).encode("utf8")
@ -37,7 +37,7 @@ def get_password_hash(salt: str, password: str) -> Tuple[str, int]:
def get_sha256_legacy_password_hash(
salt: str, password: str
) -> Tuple[str, int]:
""" Retrieve old-style sha256 password hash. """
"""Retrieve old-style sha256 password hash."""
digest = hashlib.sha256()
digest.update(config.config["secret"].encode("utf8"))
digest.update(salt.encode("utf8"))
@ -46,7 +46,7 @@ def get_sha256_legacy_password_hash(
def get_sha1_legacy_password_hash(salt: str, password: str) -> Tuple[str, int]:
""" Retrieve old-style sha1 password hash. """
"""Retrieve old-style sha1 password hash."""
digest = hashlib.sha1()
digest.update(b"1A2/$_4xVa")
digest.update(salt.encode("utf8"))
@ -125,7 +125,7 @@ def verify_privilege(user: model.User, privilege_name: str) -> None:
def generate_authentication_token(user: model.User) -> str:
""" Generate nonguessable challenge (e.g. links in password reminder). """
"""Generate nonguessable challenge (e.g. links in password reminder)."""
assert user
digest = hashlib.md5()
digest.update(config.config["secret"].encode("utf8"))


@ -4,16 +4,13 @@ from datetime import datetime
from io import BytesIO
from typing import Any, Callable, List, Optional, Set, Tuple
import HeifImagePlugin
import numpy as np
import pillow_avif
import pyheif
from PIL import Image
from pyheif_pillow_opener import register_heif_opener
from szurubooru import config, errors
register_heif_opener()
logger = logging.getLogger(__name__)
# Math based on paper from H. Chi Wong, Marshall Bern and David Goldberg


@ -7,6 +7,8 @@ import subprocess
from io import BytesIO
from typing import List
import HeifImagePlugin
import pillow_avif
from PIL import Image as PILImage
from szurubooru import errors
@ -277,10 +279,10 @@ class Image:
proc = subprocess.Popen(
cli,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stdin=subprocess.DEVNULL,
stderr=subprocess.PIPE,
)
out, err = proc.communicate(input=self.content)
out, err = proc.communicate()
if proc.returncode != 0:
logger.warning(
"Failed to execute ffmpeg command (cli=%r, err=%r)",


@ -36,9 +36,12 @@ def get_mime_type(content: bytes) -> str:
if content[0:4] == b"\x1A\x45\xDF\xA3":
return "video/webm"
if content[4:12] in (b"ftypisom", b"ftypiso5", b"ftypmp42", b"ftypM4V "):
if content[4:12] in (b"ftypisom", b"ftypiso5", b"ftypiso6", b"ftypmp42", b"ftypM4V "):
return "video/mp4"
if content[4:12] == b"ftypqt ":
return "video/quicktime"
return "application/octet-stream"
@ -54,6 +57,7 @@ def get_extension(mime_type: str) -> Optional[str]:
"image/heif": "heif",
"image/heic": "heic",
"video/mp4": "mp4",
"video/quicktime": "mov",
"video/webm": "webm",
"application/octet-stream": "dat",
}
@ -65,7 +69,12 @@ def is_flash(mime_type: str) -> bool:
def is_video(mime_type: str) -> bool:
return mime_type.lower() in ("application/ogg", "video/mp4", "video/webm")
return mime_type.lower() in (
"application/ogg",
"video/mp4",
"video/quicktime",
"video/webm",
)
def is_image(mime_type: str) -> bool:


@ -39,13 +39,20 @@ def download(url: str, use_video_downloader: bool = False) -> bytes:
length_tally = 0
try:
with urllib.request.urlopen(request) as handle:
while (chunk := handle.read(_dl_chunk_size)) :
while chunk := handle.read(_dl_chunk_size):
length_tally += len(chunk)
if length_tally > config.config["max_dl_filesize"]:
raise DownloadTooLargeError(url)
raise DownloadTooLargeError(
"Download target exceeds maximum. (%d)"
% (config.config["max_dl_filesize"]),
extra_fields={"URL": url},
)
content_buffer += chunk
except urllib.error.HTTPError as ex:
raise DownloadError(url) from ex
raise DownloadError(
"Download target returned HTTP %d. (%s)" % (ex.code, ex.reason),
extra_fields={"URL": url},
) from ex
if (
youtube_dl_error
@ -69,7 +76,8 @@ def _get_youtube_dl_content_url(url: str) -> str:
)
except subprocess.CalledProcessError:
raise errors.ThirdPartyError(
"Could not extract content location from %s" % (url)
"Could not extract content location from URL.",
extra_fields={"URL": url},
) from None
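The loop body above is a standard size-capped chunked read; a standalone sketch of the same pattern, with illustrative limits in place of the config values:

    import urllib.request

    def capped_download(url: str, max_size: int = 4 * 1024 * 1024,
                        chunk_size: int = 64 * 1024) -> bytes:
        content = b""
        with urllib.request.urlopen(url) as handle:
            # The walrus form reads until an empty chunk signals end of stream.
            while chunk := handle.read(chunk_size):
                content += chunk
                if len(content) > max_size:
                    raise ValueError(
                        "Download target exceeds maximum. (%d)" % max_size)
        return content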

View file

@ -83,12 +83,12 @@ def flip(source: Dict[Any, Any]) -> Dict[Any, Any]:
def is_valid_email(email: Optional[str]) -> bool:
""" Return whether given email address is valid or empty. """
"""Return whether given email address is valid or empty."""
return not email or re.match(r"^[^@]*@[^@]*\.[^@]*$", email) is not None
class dotdict(dict):
""" dot.notation access to dictionary attributes. """
"""dot.notation access to dictionary attributes."""
def __getattr__(self, attr: str) -> Any:
return self.get(attr)
@ -98,7 +98,7 @@ class dotdict(dict):
def parse_time_range(value: str) -> Tuple[datetime, datetime]:
""" Return tuple containing min/max time for given text representation. """
"""Return tuple containing min/max time for given text representation."""
one_day = timedelta(days=1)
one_second = timedelta(seconds=1)
almost_one_day = one_day - one_second

View file

@ -7,7 +7,7 @@ from szurubooru.rest.errors import HttpBadRequest
def _authenticate_basic_auth(username: str, password: str) -> model.User:
""" Try to authenticate user. Throw AuthError for invalid users. """
"""Try to authenticate user. Throw AuthError for invalid users."""
user = users.get_user_by_name(username)
if not auth.is_valid_password(user, password):
raise errors.AuthError("Invalid password.")
@ -17,7 +17,7 @@ def _authenticate_basic_auth(username: str, password: str) -> model.User:
def _authenticate_token(
username: str, token: str
) -> Tuple[model.User, model.UserToken]:
""" Try to authenticate user. Throw AuthError for invalid users. """
"""Try to authenticate user. Throw AuthError for invalid users."""
user = users.get_user_by_name(username)
user_token = user_tokens.get_by_user_and_token(user, token)
if not auth.is_valid_token(user_token):
@ -72,7 +72,7 @@ def _get_user(ctx: rest.Context, bump_login: bool) -> Optional[model.User]:
def process_request(ctx: rest.Context) -> None:
""" Bind the user to request. Update last login time if needed. """
"""Bind the user to request. Update last login time if needed."""
bump_login = ctx.get_param_as_bool("bump-login", default=False)
auth_user = _get_user(ctx, bump_login)
if auth_user:

View file

@ -11,7 +11,7 @@ from szurubooru.rest import context, errors, middleware, routes
def _json_serializer(obj: Any) -> str:
""" JSON serializer for objects not serializable by default JSON code """
"""JSON serializer for objects not serializable by default JSON code"""
if isinstance(obj, datetime):
serial = obj.isoformat("T") + "Z"
return serial

View file

@ -122,6 +122,34 @@ def _pool_filter(
)(query, criterion, negated)
def _category_filter(
query: SaQuery, criterion: Optional[criteria.BaseCriterion], negated: bool
) -> SaQuery:
assert criterion
# Step 1. find the id for the category
q1 = db.session.query(model.TagCategory.tag_category_id).filter(
model.TagCategory.name == criterion.value
)
# Step 2. find the tags with that category
q2 = db.session.query(model.Tag.tag_id).filter(
model.Tag.category_id.in_(q1)
)
# Step 3. find all posts that have at least one of those tags
q3 = db.session.query(model.PostTag.post_id).filter(
model.PostTag.tag_id.in_(q2)
)
# Step 4. keep only the posts whose id appears in that set
expr = model.Post.post_id.in_(q3)
if negated:
expr = ~expr
return query.filter(expr)
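The three nested IN () subqueries could equally be written as a single joined query; a sketch, not project code, assuming the join columns match the model attributes used above:

    def _category_filter_joined(query, criterion, negated):
        # Same behaviour as _category_filter, expressed with explicit joins:
        # posts that carry at least one tag whose category name matches.
        assert criterion
        matching_post_ids = (
            db.session.query(model.PostTag.post_id)
            .join(model.Tag, model.Tag.tag_id == model.PostTag.tag_id)
            .join(
                model.TagCategory,
                model.TagCategory.tag_category_id == model.Tag.category_id,
            )
            .filter(model.TagCategory.name == criterion.value)
        )
        expr = model.Post.post_id.in_(matching_post_ids)
        if negated:
            expr = ~expr
        return query.filter(expr)

Either form backs the same "category" entry registered in the search config below, so a query such as category:cat1 resolves to posts tagged with any tag in that category.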
class PostSearchConfig(BaseSearchConfig):
def __init__(self) -> None:
self.user = None # type: Optional[model.User]
@ -349,6 +377,7 @@ class PostSearchConfig(BaseSearchConfig):
),
),
(["pool"], _pool_filter),
(["category"], _category_filter),
]
)

View file

@ -145,8 +145,9 @@ def test_trying_to_update_without_privileges(
)
@pytest.mark.parametrize("type", ["suggestions", "implications"])
def test_trying_to_create_tags_without_privileges(
config_injector, context_factory, tag_factory, user_factory
config_injector, context_factory, tag_factory, user_factory, type
):
tag = tag_factory(names=["tag"])
db.session.add(tag)
@ -165,16 +166,7 @@ def test_trying_to_create_tags_without_privileges(
with pytest.raises(errors.AuthError):
api.tag_api.update_tag(
context_factory(
params={"suggestions": ["tag1", "tag2"], "version": 1},
user=user_factory(rank=model.User.RANK_REGULAR),
),
{"tag_name": "tag"},
)
db.session.rollback()
with pytest.raises(errors.AuthError):
api.tag_api.update_tag(
context_factory(
params={"implications": ["tag1", "tag2"], "version": 1},
params={type: ["tag1", "tag2"], "version": 1},
user=user_factory(rank=model.User.RANK_REGULAR),
),
{"tag_name": "tag"},

Binary file not shown.

View file

@ -43,14 +43,26 @@ def query_logger(pytestconfig):
logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
@pytest.yield_fixture(scope="function", autouse=True)
def session(query_logger, postgresql_db):
@pytest.fixture(scope="function", autouse=True)
def session(query_logger, transacted_postgresql_db):
db.session = transacted_postgresql_db.session
transacted_postgresql_db.create_table(*model.Base.metadata.sorted_tables)
try:
yield transacted_postgresql_db.session
finally:
transacted_postgresql_db.reset_db()
@pytest.fixture(scope="function")
def nontransacted_session(query_logger, postgresql_db):
old_db_session = db.session
db.session = postgresql_db.session
postgresql_db.create_table(*model.Base.metadata.sorted_tables)
try:
yield postgresql_db.session
finally:
postgresql_db.reset_db()
db.session = old_db_session
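A hypothetical test that needs real commits rather than a rolled-back transaction would request the second fixture explicitly; a sketch, assuming the usual model factories are available:

    from szurubooru import model

    def test_needs_real_commits(nontransacted_session, post_factory):
        # db.session is temporarily bound to the non-transacted postgresql_db
        # session, so commit() genuinely persists rows for this test only.
        post = post_factory()
        nontransacted_session.add(post)
        nontransacted_session.commit()
        assert nontransacted_session.query(model.Post).count() == 1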
@pytest.fixture

View file

@ -7,6 +7,7 @@ from szurubooru.func import mime
"input_path,expected_mime_type",
[
("mp4.mp4", "video/mp4"),
("mov.mov", "video/quicktime"),
("webm.webm", "video/webm"),
("flash.swf", "application/x-shockwave-flash"),
("png.png", "image/png"),
@ -35,6 +36,7 @@ def test_get_mime_type_for_empty_file():
[
("video/mp4", "mp4"),
("video/webm", "webm"),
("video/quicktime", "mov"),
("application/x-shockwave-flash", "swf"),
("image/png", "png"),
("image/jpeg", "jpg"),
@ -70,6 +72,8 @@ def test_is_flash(input_mime_type, expected_state):
("VIDEO/WEBM", True),
("video/mp4", True),
("VIDEO/MP4", True),
("video/quicktime", True),
("VIDEO/QUICKTIME", True),
("video/anything_else", False),
("application/ogg", True),
("not a video", False),

View file

@ -1,3 +1,5 @@
import os
import pytest
from szurubooru import errors
@ -16,6 +18,9 @@ def inject_config(tmpdir, config_injector):
)
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
def test_download():
url = "http://info.cern.ch/hypertext/WWW/TheProject.html"
@ -62,6 +67,9 @@ def test_download():
assert actual_content == expected_content
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
@pytest.mark.parametrize(
"url",
[
@ -74,6 +82,9 @@ def test_too_large_download(url):
net.download(url, use_video_downloader=True)
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
@pytest.mark.parametrize(
"url,expected_sha1",
[
@ -96,6 +107,9 @@ def test_content_download(url, expected_sha1):
assert get_sha1(actual_content) == expected_sha1
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
def test_bad_content_download():
url = "http://info.cern.ch/hypertext/WWW/TheProject.html"
with pytest.raises(errors.ThirdPartyError):
@ -108,11 +122,13 @@ def test_no_webhooks(config_injector):
assert len(res) == 0
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
@pytest.mark.parametrize(
"webhook,status_code",
[
("https://postman-echo.com/post", 200),
("http://localhost/", 400),
("https://postman-echo.com/get", 400),
],
)
@ -121,6 +137,9 @@ def test_single_webhook(config_injector, webhook, status_code):
assert ret == status_code
@pytest.mark.skipif(
"TEST_NET" not in os.environ, reason="Network tests skipped by default."
)
def test_multiple_webhooks(config_injector):
config_injector(
{

View file

@ -1,7 +1,7 @@
from datetime import datetime
from unittest.mock import patch
import pytest
import pytest # noqa: F401
from szurubooru import db, model
from szurubooru.func import snapshots, users
@ -144,46 +144,6 @@ def test_create(tag_factory, user_factory):
assert results[0].data == "mocked"
def test_modify_saves_non_empty_diffs(post_factory, user_factory):
if "sqlite" in db.session.get_bind().driver:
pytest.xfail(
"SQLite doesn't support transaction isolation, "
"which is required to retrieve original entity"
)
post = post_factory()
post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="old")]
user = user_factory()
db.session.add_all([post, user])
db.session.commit()
post.source = "new source"
post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="new")]
db.session.flush()
with patch("szurubooru.func.snapshots._post_to_webhooks"):
snapshots.modify(post, user)
db.session.flush()
results = db.session.query(model.Snapshot).all()
assert len(results) == 1
assert results[0].data == {
"type": "object change",
"value": {
"source": {
"type": "primitive change",
"old-value": None,
"new-value": "new source",
},
"notes": {
"type": "list change",
"removed": [
{"polygon": [[0, 0], [0, 1], [1, 1]], "text": "old"}
],
"added": [
{"polygon": [[0, 0], [0, 1], [1, 1]], "text": "new"}
],
},
},
}
def test_modify_doesnt_save_empty_diffs(tag_factory, user_factory):
tag = tag_factory(names=["dummy"])
user = user_factory()

View file

@ -0,0 +1,59 @@
from unittest.mock import patch
import pytest
from szurubooru import db, model
from szurubooru.func import snapshots
@pytest.fixture(autouse=True)
def session(query_logger, postgresql_db):
"""
Override db session for this specific test section only
"""
db.session = postgresql_db.session
postgresql_db.create_table(*model.Base.metadata.sorted_tables)
try:
yield postgresql_db.session
finally:
postgresql_db.reset_db()
def test_modify_saves_non_empty_diffs(post_factory, user_factory):
if "sqlite" in db.session.get_bind().driver:
pytest.xfail(
"SQLite doesn't support transaction isolation, "
"which is required to retrieve original entity"
)
post = post_factory()
post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="old")]
user = user_factory()
db.session.add_all([post, user])
db.session.commit()
post.source = "new source"
post.notes = [model.PostNote(polygon=[(0, 0), (0, 1), (1, 1)], text="new")]
db.session.flush()
with patch("szurubooru.func.snapshots._post_to_webhooks"):
snapshots.modify(post, user)
db.session.flush()
results = db.session.query(model.Snapshot).all()
assert len(results) == 1
assert results[0].data == {
"type": "object change",
"value": {
"source": {
"type": "primitive change",
"old-value": None,
"new-value": "new source",
},
"notes": {
"type": "list change",
"removed": [
{"polygon": [[0, 0], [0, 1], [1, 1]], "text": "old"}
],
"added": [
{"polygon": [[0, 0], [0, 1], [1, 1]], "text": "new"}
],
},
},
}
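Presumably this test lives in its own module so it can override the now transaction-wrapped default session fixture: it calls db.session.commit() and relies on reading the entity's previously committed state to build the snapshot diff, which the rolled-back transacted session used elsewhere would not expose.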

View file

@ -107,17 +107,16 @@ def test_update_category_name_reusing_other_name(
tag_categories.update_category_name(category, "NAME")
@pytest.mark.parametrize("name", ["name", "NAME"])
def test_update_category_name_reusing_own_name(
config_injector, tag_category_factory
config_injector, tag_category_factory, name
):
config_injector({"tag_category_name_regex": ".*"})
for name in ["name", "NAME"]:
category = tag_category_factory(name="name")
db.session.add(category)
db.session.flush()
tag_categories.update_category_name(category, name)
assert category.name == name
db.session.rollback()
category = tag_category_factory(name="name")
db.session.add(category)
db.session.flush()
tag_categories.update_category_name(category, name)
assert category.name == name
def test_update_category_color_with_empty_string(tag_category_factory):

View file

@ -513,15 +513,14 @@ def test_update_tag_names_trying_to_use_taken_name(
tags.update_tag_names(tag, ["A"])
def test_update_tag_names_reusing_own_name(config_injector, tag_factory):
@pytest.mark.parametrize("name", list("aA"))
def test_update_tag_names_reusing_own_name(config_injector, tag_factory, name):
config_injector({"tag_name_regex": "^[a-zA-Z]*$"})
for name in list("aA"):
tag = tag_factory(names=["a"])
db.session.add(tag)
db.session.flush()
tags.update_tag_names(tag, [name])
assert [tag_name.name for tag_name in tag.names] == [name]
db.session.rollback()
tag = tag_factory(names=["a"])
db.session.add(tag)
db.session.flush()
tags.update_tag_names(tag, [name])
assert [tag_name.name for tag_name in tag.names] == [name]
def test_update_tag_names_changing_primary_name(config_injector, tag_factory):
@ -533,7 +532,6 @@ def test_update_tag_names_changing_primary_name(config_injector, tag_factory):
db.session.flush()
db.session.refresh(tag)
assert [tag_name.name for tag_name in tag.names] == ["b", "a"]
db.session.rollback()
@pytest.mark.parametrize("attempt", ["name", "NAME", "alias", "ALIAS"])

View file

@ -136,8 +136,6 @@ def test_escaping(
)
db.session.flush()
if db_driver and db.session.get_bind().driver != db_driver:
pytest.xfail()
if expected_pool_names is None:
with pytest.raises(errors.SearchError):
executor.execute(input, offset=0, limit=100)

View file

@ -863,3 +863,55 @@ def test_tumbleweed(
db.session.flush()
verify_unpaged("special:tumbleweed", [4])
verify_unpaged("-special:tumbleweed", [1, 2, 3])
@pytest.mark.parametrize(
"input,expected_post_ids",
[
("category:cat1", [1, 2, 3]),
("category:cat2", [3, 4]),
],
)
def test_search_by_tag_category(
verify_unpaged,
post_factory,
tag_factory,
tag_category_factory,
input,
expected_post_ids,
):
cat1 = tag_category_factory(name="cat1")
cat2 = tag_category_factory(name="cat2")
tag1 = tag_factory(names=["t1"], category=cat1)
tag2 = tag_factory(names=["t2"], category=cat1)
tag3 = tag_factory(names=["t3"], category=cat2)
post1 = post_factory(id=1)
post1.tags.append(tag1)
post2 = post_factory(id=2)
post2.tags.append(tag2)
post3 = post_factory(id=3)
post3.tags.append(tag1)
post3.tags.append(tag3)
post4 = post_factory(id=4)
post4.tags.append(tag3)
post5 = post_factory(id=5)
db.session.add_all(
[
tag1,
tag2,
tag3,
post1,
post2,
post3,
post4,
post5,
]
)
db.session.flush()
verify_unpaged(input, expected_post_ids)

View file

@ -134,8 +134,6 @@ def test_escaping(executor, tag_factory, input, expected_tag_names, db_driver):
)
db.session.flush()
if db_driver and db.session.get_bind().driver != db_driver:
pytest.xfail()
if expected_tag_names is None:
with pytest.raises(errors.SearchError):
executor.execute(input, offset=0, limit=100)