Merge branch 'develop' into 'main'

release: v20240504

Co-authored-by: 老周部落 <laozhoubuluo@gmail.com>
Co-authored-by: Lhcfl <Lhcfl@outlook.com>
Co-authored-by: Linca <lhcfllinca@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Gary O'Regan Kelly <gmoregan@icloud.com>

See merge request firefish/firefish!10790
This commit is contained in:
naskya 2024-05-04 07:05:40 +00:00
commit ac70abf6a1
36 changed files with 368 additions and 154 deletions

View file

@ -39,6 +39,9 @@ COPY packages/backend-rs packages/backend-rs/
# Compile backend-rs
RUN NODE_ENV='production' pnpm run --filter backend-rs build
# Copy/Overwrite index.js to mitigate the bug in napi-rs codegen
COPY packages/backend-rs/index.js packages/backend-rs/built/index.js
# Copy in the rest of the files to compile
COPY . ./
RUN NODE_ENV='production' pnpm run --filter firefish-js build

View file

@ -3,7 +3,7 @@ export
.PHONY: pre-commit
pre-commit: format entities napi-index
pre-commit: format entities napi
.PHONY: format
format:
@ -11,11 +11,12 @@ format:
.PHONY: entities
entities:
pnpm --filter=backend run build:debug
pnpm run migrate
$(MAKE) -C ./packages/backend-rs regenerate-entities
.PHONY: napi-index
napi-index:
.PHONY: napi
napi:
$(MAKE) -C ./packages/backend-rs update-index

View file

@ -7,6 +7,8 @@
- Node.js
- pnpm
- Rust toolchain
- Python 3
- Perl
- FFmpeg
- Container runtime
- [Docker](https://docs.docker.com/get-docker/)
@ -31,7 +33,7 @@ You can refer to [local-installation.md](./local-installation.md) to install the
1. Copy example config file
```sh
cp dev/config.example.env dev/config.env
# If you use container runtime other than Docker, you need to modify the "COMPOSE" variable
# If you use a container runtime other than Podman, you need to modify the "COMPOSE" variable
# vim dev/config.env
```
1. Create `.config/default.yml` with the following content
@ -51,12 +53,7 @@ You can refer to [local-installation.md](./local-installation.md) to install the
host: localhost
port: 26379
logLevel: [
'error',
'success',
'warning',
'info'
]
maxLogLevel: 'debug' # or 'trace'
```
1. Start database containers
```sh
@ -84,6 +81,19 @@ You can refer to [local-installation.md](./local-installation.md) to install the
DONE * [core boot] Now listening on port 3000 on http://localhost:3000
```
## Update auto-generated files in `packages/backend-rs`
You need to install `sea-orm-cli` to regenerate database entities.
```sh
cargo install sea-orm-cli
```
```sh
make entities
make napi
```
## Reset the environment
You can recreate a fresh local Firefish environment by recreating the database containers:

View file

@ -141,12 +141,7 @@ sudo apt install ffmpeg
host: localhost
port: 6379
logLevel: [
'error',
'success',
'warning',
'info'
]
maxLogLevel: 'debug' # or 'trace'
```
## 4. Build and start Firefish

View file

@ -2,6 +2,10 @@
Breaking changes are indicated by the :warning: icon.
## v20240504
- :warning: Removed `release` endpoint.
## v20240424
- Added `antennaLimit` field to the response of `meta` and `admin/meta`, and the request of `admin/update-meta` (optional).

View file

@ -5,6 +5,10 @@ Critical security updates are indicated by the :warning: icon.
- Server administrators should check [notice-for-admins.md](./notice-for-admins.md) as well.
- Third-party client/bot developers may want to check [api-change.md](./api-change.md) as well.
## [v20240504](https://firefish.dev/firefish/firefish/-/merge_requests/10790/commits)
- Fix bugs
## :warning: [v20240430](https://firefish.dev/firefish/firefish/-/merge_requests/10781/commits)
- Add ability to group similar notifications

View file

@ -1,6 +1,7 @@
BEGIN;
DELETE FROM "migrations" WHERE name IN (
'DropUnusedIndexes1714643926317',
'AlterAkaType1714099399879',
'AddDriveFileUsage1713451569342',
'ConvertCwVarcharToText1713225866247',
@ -25,6 +26,22 @@ DELETE FROM "migrations" WHERE name IN (
'RemoveNativeUtilsMigration1705877093218'
);
-- drop-unused-indexes
CREATE INDEX "IDX_01f4581f114e0ebd2bbb876f0b" ON "note_reaction" ("createdAt");
CREATE INDEX "IDX_0610ebcfcfb4a18441a9bcdab2" ON "poll" ("userId");
CREATE INDEX "IDX_25dfc71b0369b003a4cd434d0b" ON "note" ("attachedFileTypes");
CREATE INDEX "IDX_2710a55f826ee236ea1a62698f" ON "hashtag" ("mentionedUsersCount");
CREATE INDEX "IDX_4c02d38a976c3ae132228c6fce" ON "hashtag" ("mentionedRemoteUsersCount");
CREATE INDEX "IDX_51c063b6a133a9cb87145450f5" ON "note" ("fileIds");
CREATE INDEX "IDX_54ebcb6d27222913b908d56fd8" ON "note" ("mentions");
CREATE INDEX "IDX_7fa20a12319c7f6dc3aed98c0a" ON "poll" ("userHost");
CREATE INDEX "IDX_88937d94d7443d9a99a76fa5c0" ON "note" ("tags");
CREATE INDEX "IDX_b11a5e627c41d4dc3170f1d370" ON "notification" ("createdAt");
CREATE INDEX "IDX_c8dfad3b72196dd1d6b5db168a" ON "drive_file" ("createdAt");
CREATE INDEX "IDX_d57f9030cd3af7f63ffb1c267c" ON "hashtag" ("attachedUsersCount");
CREATE INDEX "IDX_e5848eac4940934e23dbc17581" ON "drive_file" ("uri");
CREATE INDEX "IDX_fa99d777623947a5b05f394cae" ON "user" ("tags");
-- alter-aka-type
ALTER TABLE "user" RENAME COLUMN "alsoKnownAs" TO "alsoKnownAsOld";
ALTER TABLE "user" ADD COLUMN "alsoKnownAs" text;

View file

@ -24,6 +24,7 @@ Firefish depends on the following software.
- `build-essential` on Debian/Ubuntu Linux
- `base-devel` on Arch Linux
- [Python 3](https://www.python.org/)
- [Perl](https://www.perl.org/)
This document shows an example procedure for installing these dependencies and Firefish on Debian 12. Note that there is much room for customizing the server setup; this document merely demonstrates a simple installation.
@ -269,7 +270,7 @@ In this instruction, we use [Caddy](https://caddyserver.com/) to make the Firefi
WorkingDirectory=/home/firefish/firefish
Environment="NODE_ENV=production"
Environment="npm_config_cache=/tmp"
Environment="NODE_OPTIONS=--max-old-space-size=3072"
Environment="NODE_OPTIONS=--max-old-space-size=3072"
# uncomment the following line if you use jemalloc (note that the path varies on different environments)
# Environment="LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2"
StandardOutput=journal

View file

@ -10,24 +10,22 @@ You can control the verbosity of the server log by adding `maxLogLevel` in `.con
### For systemd/pm2 users
Not only Firefish but also Node.js has recently fixed a few security issues:
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
So, it is highly recommended that you upgrade your Node.js version as well. The new versions are
- Node v18.20.2 (v18.x LTS)
- Node v20.12.2 (v20.x LTS)
- Node v21.7.3 (v21.x)
You can check your Node.js version by this command:
```sh
node --version
```
[Node v22](https://nodejs.org/en/blog/announcements/v22-release-announce) was also released several days ago, but we have not yet tested Firefish with this version.
- You need to install Perl to build Firefish. Since Git depends on Perl in many packaging systems, you probably already have Perl installed on your system. You can check the Perl version by this command:
```sh
perl --version
```
- Not only Firefish but also Node.js has recently fixed a few security issues:
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases
- https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
So, it is highly recommended that you upgrade your Node.js version as well. The new versions are
- Node v18.20.2 (v18.x LTS)
- Node v20.12.2 (v20.x LTS)
- Node v21.7.3 (v21.x)
- You can check your Node.js version by this command:
```sh
node --version
```
[Node v22](https://nodejs.org/en/blog/announcements/v22-release-announce) was also released several days ago, but we have not yet tested Firefish with this version.
## v20240413

View file

@ -928,7 +928,7 @@ colored: "Coloré"
label: "Étiquette"
localOnly: "Local seulement"
account: "Comptes"
getQrCode: "Obtenir le code QR"
getQrCode: "Afficher le code QR"
_emailUnavailable:
used: "Adresse non disponible"
@ -1836,6 +1836,7 @@ _notification:
reacted: a réagi à votre publication
renoted: a boosté votre publication
voted: a voté pour votre sondage
andCountUsers: et {count} utilisateur(s) de plus {acted}
_deck:
alwaysShowMainColumn: "Toujours afficher la colonne principale"
columnAlign: "Aligner les colonnes"
@ -2321,3 +2322,13 @@ markLocalFilesNsfwByDefaultDescription: Indépendamment de ce réglage, les util
ne sont pas affectés.
noteEditHistory: Historique des publications
media: Multimédia
antennaLimit: Le nombre maximal d'antennes que chaque utilisateur peut créer
showAddFileDescriptionAtFirstPost: Ouvrez automatiquement un formulaire pour écrire
une description lorsque vous tentez de publier des fichiers sans description
foldNotification: Grouper les notifications similaires
cannotEditVisibility: Vous ne pouvez pas modifier la visibilité
useThisAccountConfirm: Voulez-vous continuer avec ce compte?
inputAccountId: Veuillez saisir votre compte (par exemple, @firefish@info.firefish.dev)
remoteFollow: Abonnement à distance
copyRemoteFollowUrl: Copier l'URL d'abonnement à distance
slashQuote: Citation enchaînée

View file

@ -1,6 +1,6 @@
{
"name": "firefish",
"version": "20240430",
"version": "20240504",
"repository": {
"type": "git",
"url": "https://firefish.dev/firefish/firefish.git"

View file

@ -261,6 +261,7 @@ export interface NoteLikeForGetNoteSummary {
hasPoll: boolean
}
export function getNoteSummary(note: NoteLikeForGetNoteSummary): string
export function latestVersion(): Promise<string>
export function toMastodonId(firefishId: string): string | null
export function fromMastodonId(mastodonId: string): string | null
export function fetchMeta(useCache: boolean): Promise<Meta>

View file

@ -310,7 +310,7 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
const { SECOND, MINUTE, HOUR, DAY, USER_ONLINE_THRESHOLD, USER_ACTIVE_THRESHOLD, FILE_TYPE_BROWSERSAFE, loadEnv, loadConfig, stringToAcct, acctToString, addNoteToAntenna, isBlockedServer, isSilencedServer, isAllowedServer, checkWordMute, getFullApAccount, isSelfHost, isSameOrigin, extractHost, toPuny, isUnicodeEmoji, sqlLikeEscape, safeForSql, formatMilliseconds, getImageSizeFromUrl, getNoteSummary, latestVersion, toMastodonId, fromMastodonId, fetchMeta, metaToPugArgs, nyaify, hashPassword, verifyPassword, isOldPasswordAlgorithm, decodeReaction, countReactions, toDbReaction, removeOldAttestationChallenges, AntennaSrcEnum, DriveFileUsageHintEnum, MutedNoteReasonEnum, NoteVisibilityEnum, NotificationTypeEnum, PageVisibilityEnum, PollNotevisibilityEnum, RelayStatusEnum, UserEmojimodpermEnum, UserProfileFfvisibilityEnum, UserProfileMutingnotificationtypesEnum, initializeRustLogger, watchNote, unwatchNote, publishToChannelStream, ChatEvent, publishToChatStream, ChatIndexEvent, publishToChatIndexStream, publishToBroadcastStream, publishToGroupChatStream, publishToModerationStream, getTimestamp, genId, genIdAt, secureRndstr } = nativeBinding
module.exports.SECOND = SECOND
module.exports.MINUTE = MINUTE
@ -339,6 +339,7 @@ module.exports.safeForSql = safeForSql
module.exports.formatMilliseconds = formatMilliseconds
module.exports.getImageSizeFromUrl = getImageSizeFromUrl
module.exports.getNoteSummary = getNoteSummary
module.exports.latestVersion = latestVersion
module.exports.toMastodonId = toMastodonId
module.exports.fromMastodonId = fromMastodonId
module.exports.fetchMeta = fetchMeta

View file

@ -2,8 +2,14 @@ use crate::database::{redis_conn, redis_key};
use redis::{Commands, RedisError};
use serde::{Deserialize, Serialize};
#[derive(strum::Display)]
pub enum Category {
#[strum(serialize = "fetchUrl")]
FetchUrl,
}
#[derive(thiserror::Error, Debug)]
pub enum CacheError {
pub enum Error {
#[error("Redis error: {0}")]
RedisError(#[from] RedisError),
#[error("Data serialization error: {0}")]
@ -12,15 +18,19 @@ pub enum CacheError {
DeserializeError(#[from] rmp_serde::decode::Error),
}
fn categorize(category: Category, key: &str) -> String {
format!("{}:{}", category, key)
}
fn prefix_key(key: &str) -> String {
redis_key(format!("cache:{}", key))
}
pub fn set_cache<V: for<'a> Deserialize<'a> + Serialize>(
pub fn set<V: for<'a> Deserialize<'a> + Serialize>(
key: &str,
value: &V,
expire_seconds: u64,
) -> Result<(), CacheError> {
) -> Result<(), Error> {
redis_conn()?.set_ex(
prefix_key(key),
rmp_serde::encode::to_vec(&value)?,
@ -29,9 +39,7 @@ pub fn set_cache<V: for<'a> Deserialize<'a> + Serialize>(
Ok(())
}
pub fn get_cache<V: for<'a> Deserialize<'a> + Serialize>(
key: &str,
) -> Result<Option<V>, CacheError> {
pub fn get<V: for<'a> Deserialize<'a> + Serialize>(key: &str) -> Result<Option<V>, Error> {
let serialized_value: Option<Vec<u8>> = redis_conn()?.get(prefix_key(key))?;
Ok(match serialized_value {
Some(v) => Some(rmp_serde::from_slice::<V>(v.as_ref())?),
@ -39,13 +47,35 @@ pub fn get_cache<V: for<'a> Deserialize<'a> + Serialize>(
})
}
pub fn delete_cache(key: &str) -> Result<(), CacheError> {
pub fn delete(key: &str) -> Result<(), Error> {
Ok(redis_conn()?.del(prefix_key(key))?)
}
pub fn set_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
value: &V,
expire_seconds: u64,
) -> Result<(), Error> {
set(&categorize(category, key), value, expire_seconds)
}
pub fn get_one<V: for<'a> Deserialize<'a> + Serialize>(
category: Category,
key: &str,
) -> Result<Option<V>, Error> {
get(&categorize(category, key))
}
pub fn delete_one(category: Category, key: &str) -> Result<(), Error> {
delete(&categorize(category, key))
}
// TODO: set_all(), get_all(), delete_all()
#[cfg(test)]
mod unit_test {
use super::{get_cache, set_cache};
use super::{get, set};
use pretty_assertions::assert_eq;
#[test]
@ -68,13 +98,13 @@ mod unit_test {
kind: "prime number".to_string(),
};
set_cache(key_1, &value_1, 1).unwrap();
set_cache(key_2, &value_2, 1).unwrap();
set_cache(key_3, &value_3, 1).unwrap();
set(key_1, &value_1, 1).unwrap();
set(key_2, &value_2, 1).unwrap();
set(key_3, &value_3, 1).unwrap();
let cached_value_1: Vec<i32> = get_cache(key_1).unwrap().unwrap();
let cached_value_2: String = get_cache(key_2).unwrap().unwrap();
let cached_value_3: Data = get_cache(key_3).unwrap().unwrap();
let cached_value_1: Vec<i32> = get(key_1).unwrap().unwrap();
let cached_value_2: String = get(key_2).unwrap().unwrap();
let cached_value_3: Data = get(key_3).unwrap().unwrap();
assert_eq!(value_1, cached_value_1);
assert_eq!(value_2, cached_value_2);
@ -83,9 +113,9 @@ mod unit_test {
// wait for the cache to expire
std::thread::sleep(std::time::Duration::from_millis(1100));
let expired_value_1: Option<Vec<i32>> = get_cache(key_1).unwrap();
let expired_value_2: Option<Vec<i32>> = get_cache(key_2).unwrap();
let expired_value_3: Option<Vec<i32>> = get_cache(key_3).unwrap();
let expired_value_1: Option<Vec<i32>> = get(key_1).unwrap();
let expired_value_2: Option<Vec<i32>> = get(key_2).unwrap();
let expired_value_3: Option<Vec<i32>> = get(key_3).unwrap();
assert!(expired_value_1.is_none());
assert!(expired_value_2.is_none());

View file

@ -2,5 +2,6 @@ pub use postgresql::db_conn;
pub use redis::key as redis_key;
pub use redis::redis_conn;
pub mod cache;
pub mod postgresql;
pub mod redis;

View file

@ -1,4 +1,4 @@
use crate::misc::redis_cache::{get_cache, set_cache, CacheError};
use crate::database::cache;
use crate::util::http_client;
use image::{io::Reader, ImageError, ImageFormat};
use nom_exif::{parse_jpeg_exif, EntryValue, ExifTag};
@ -8,7 +8,7 @@ use tokio::sync::Mutex;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Redis cache error: {0}")]
CacheErr(#[from] CacheError),
CacheErr(#[from] cache::Error),
#[error("Reqwest error: {0}")]
ReqwestErr(#[from] reqwest::Error),
#[error("Image decoding error: {0}")]
@ -50,11 +50,10 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
{
let _ = MTX_GUARD.lock().await;
let key = format!("fetchImage:{}", url);
attempted = get_cache::<bool>(&key)?.is_some();
attempted = cache::get_one::<bool>(cache::Category::FetchUrl, url)?.is_some();
if !attempted {
set_cache(&key, &true, 10 * 60)?;
cache::set_one(cache::Category::FetchUrl, url, &true, 10 * 60)?;
}
}
@ -109,7 +108,7 @@ pub async fn get_image_size_from_url(url: &str) -> Result<ImageSize, Error> {
#[cfg(test)]
mod unit_test {
use super::{get_image_size_from_url, ImageSize};
use crate::misc::redis_cache::delete_cache;
use crate::database::cache;
use pretty_assertions::assert_eq;
#[tokio::test]
@ -126,15 +125,15 @@ mod unit_test {
// Delete caches in case you run this test multiple times
// (should be disabled in CI tasks)
delete_cache(&format!("fetchImage:{}", png_url_1)).unwrap();
delete_cache(&format!("fetchImage:{}", png_url_2)).unwrap();
delete_cache(&format!("fetchImage:{}", png_url_3)).unwrap();
delete_cache(&format!("fetchImage:{}", rotated_jpeg_url)).unwrap();
delete_cache(&format!("fetchImage:{}", webp_url_1)).unwrap();
delete_cache(&format!("fetchImage:{}", webp_url_2)).unwrap();
delete_cache(&format!("fetchImage:{}", ico_url)).unwrap();
delete_cache(&format!("fetchImage:{}", gif_url)).unwrap();
delete_cache(&format!("fetchImage:{}", mp3_url)).unwrap();
cache::delete_one(cache::Category::FetchUrl, png_url_1).unwrap();
cache::delete_one(cache::Category::FetchUrl, png_url_2).unwrap();
cache::delete_one(cache::Category::FetchUrl, png_url_3).unwrap();
cache::delete_one(cache::Category::FetchUrl, rotated_jpeg_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, webp_url_1).unwrap();
cache::delete_one(cache::Category::FetchUrl, webp_url_2).unwrap();
cache::delete_one(cache::Category::FetchUrl, ico_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, gif_url).unwrap();
cache::delete_one(cache::Category::FetchUrl, mp3_url).unwrap();
let png_size_1 = ImageSize {
width: 1024,

View file

@ -0,0 +1,91 @@
use crate::database::cache;
use crate::util::http_client::http_client;
use serde::{Deserialize, Serialize};
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// The Redis cache layer failed (connection or (de)serialization).
    #[error("Cache error: {0}")]
    CacheErr(#[from] cache::Error),
    /// The HTTP request for the upstream package.json failed.
    #[error("Reqwest error: {0}")]
    ReqwestErr(#[from] reqwest::Error),
    /// The fetched package.json could not be parsed as JSON.
    #[error("Failed to deserialize JSON: {0}")]
    JsonErr(#[from] serde_json::Error),
}
/// package.json of the upstream `main` branch; its `version` field is the
/// latest Firefish release.
// NOTE: `&str` (not `&'static str`) — a `'static` lifetime on a `const` is
// redundant and triggers clippy::redundant_static_lifetimes.
const UPSTREAM_PACKAGE_JSON_URL: &str =
    "https://firefish.dev/firefish/firefish/-/raw/main/package.json";

/// Fetches the upstream package.json and extracts its `version` field.
async fn get_latest_version() -> Result<String, Error> {
    // Only the `version` field of package.json is of interest here.
    #[derive(Debug, Deserialize, Serialize)]
    struct Response {
        version: String,
    }

    // Download the raw file as text, then parse it.
    let res = http_client()?
        .get(UPSTREAM_PACKAGE_JSON_URL)
        .send()
        .await?
        .text()
        .await?;
    let res_parsed: Response = serde_json::from_str(&res)?;

    Ok(res_parsed.version)
}
/// Returns the version string of the latest Firefish release.
///
/// The value fetched from the upstream repository is cached (3 hours),
/// so not every call results in a network request.
#[crate::export]
pub async fn latest_version() -> Result<String, Error> {
    let cached: Option<String> =
        cache::get_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL)?;

    match cached {
        Some(v) => {
            tracing::trace!("use cached value: {}", v);
            Ok(v)
        }
        None => {
            tracing::trace!("cache is expired, fetching the latest version");
            let fetched = get_latest_version().await?;
            tracing::trace!("fetched value: {}", fetched);
            cache::set_one(
                cache::Category::FetchUrl,
                UPSTREAM_PACKAGE_JSON_URL,
                &fetched,
                3 * 60 * 60,
            )?;
            Ok(fetched)
        }
    }
}
#[cfg(test)]
mod unit_test {
    use super::{latest_version, UPSTREAM_PACKAGE_JSON_URL};
    use crate::database::cache;

    /// Asserts that a version string is a plausible `YYYYMMDD` date.
    fn validate_version(version: String) {
        // version: YYYYMMDD
        assert!(version.len() == 8);
        assert!(version.chars().all(|c| c.is_ascii_digit()));

        // YYYY — lexicographic comparison works because all chars are ASCII digits
        assert!(&version[..4] >= "2024");
        // MM
        assert!(&version[4..6] >= "01");
        assert!(&version[4..6] <= "12");
        // DD
        assert!(&version[6..] >= "01");
        assert!(&version[6..] <= "31");
    }

    #[tokio::test]
    async fn check_version() {
        // Drop any stale cache entry so the first call actually hits the network.
        // TODO: don't need to do this in CI tasks
        cache::delete_one(cache::Category::FetchUrl, UPSTREAM_PACKAGE_JSON_URL).unwrap();

        // fetch from firefish.dev
        validate_version(latest_version().await.unwrap());

        // use cache
        validate_version(latest_version().await.unwrap());
    }
}

View file

@ -8,10 +8,10 @@ pub mod escape_sql;
pub mod format_milliseconds;
pub mod get_image_size;
pub mod get_note_summary;
pub mod latest_version;
pub mod mastodon_id;
pub mod meta;
pub mod nyaify;
pub mod password;
pub mod reaction;
pub mod redis_cache;
pub mod remove_old_attestation_challenges;

View file

@ -19,6 +19,13 @@ export function fromHtml(html: string, hashtagNames?: string[]): string {
return appendChildren(childNodes, background).join("").trim();
}
/**
 * Wraps the given text in <plain> tags when it could be misread as MFM.
 *
 * We only exclude text containing asterisks, since the other marks can
 * almost be considered intentionally used.
 */
function escapeAmbiguousMfmMarks(text: string) {
	if (text.includes("*")) {
		return `<plain>${text}</plain>`;
	}
	return text;
}
/**
* Get only the text, ignoring all formatting inside
* @param node
@ -62,7 +69,7 @@ export function fromHtml(html: string, hashtagNames?: string[]): string {
background = "",
): (string | string[])[] {
if (treeAdapter.isTextNode(node)) {
return [node.value];
return [escapeAmbiguousMfmMarks(node.value)];
}
// Skip comment or document type node

View file

@ -0,0 +1,65 @@
import type { MigrationInterface, QueryRunner } from "typeorm";
export class DropUnusedIndexes1714643926317 implements MigrationInterface {
	// Indexes that are not used by any query, paired with the table/column
	// definitions needed to recreate them on rollback.
	private static readonly indexes: [
		name: string,
		table: string,
		column: string,
	][] = [
		["IDX_01f4581f114e0ebd2bbb876f0b", "note_reaction", "createdAt"],
		["IDX_0610ebcfcfb4a18441a9bcdab2", "poll", "userId"],
		["IDX_25dfc71b0369b003a4cd434d0b", "note", "attachedFileTypes"],
		["IDX_2710a55f826ee236ea1a62698f", "hashtag", "mentionedUsersCount"],
		["IDX_4c02d38a976c3ae132228c6fce", "hashtag", "mentionedRemoteUsersCount"],
		["IDX_51c063b6a133a9cb87145450f5", "note", "fileIds"],
		["IDX_54ebcb6d27222913b908d56fd8", "note", "mentions"],
		["IDX_7fa20a12319c7f6dc3aed98c0a", "poll", "userHost"],
		["IDX_88937d94d7443d9a99a76fa5c0", "note", "tags"],
		["IDX_b11a5e627c41d4dc3170f1d370", "notification", "createdAt"],
		["IDX_c8dfad3b72196dd1d6b5db168a", "drive_file", "createdAt"],
		["IDX_d57f9030cd3af7f63ffb1c267c", "hashtag", "attachedUsersCount"],
		["IDX_e5848eac4940934e23dbc17581", "drive_file", "uri"],
		["IDX_fa99d777623947a5b05f394cae", "user", "tags"],
	];

	/** Drops each unused index, in the order listed above. */
	public async up(queryRunner: QueryRunner): Promise<void> {
		for (const [name] of DropUnusedIndexes1714643926317.indexes) {
			await queryRunner.query(`DROP INDEX "${name}"`);
		}
	}

	/** Recreates every dropped index so the migration can be reverted. */
	public async down(queryRunner: QueryRunner): Promise<void> {
		for (const [name, table, column] of DropUnusedIndexes1714643926317.indexes) {
			await queryRunner.query(
				`CREATE INDEX "${name}" ON "${table}" ("${column}")`,
			);
		}
	}
}

View file

@ -23,7 +23,6 @@ export class DriveFile {
@PrimaryColumn(id())
public id: string;
@Index()
@Column("timestamp without time zone", {
comment: "The created date of the DriveFile.",
})
@ -147,7 +146,6 @@ export class DriveFile {
})
public webpublicAccessKey: string | null;
@Index()
@Column("varchar", {
length: 512,
nullable: true,

View file

@ -19,7 +19,6 @@ export class Hashtag {
})
public mentionedUserIds: User["id"][];
@Index()
@Column("integer", {
default: 0,
})
@ -43,7 +42,6 @@ export class Hashtag {
})
public mentionedRemoteUserIds: User["id"][];
@Index()
@Column("integer", {
default: 0,
})
@ -55,7 +53,6 @@ export class Hashtag {
})
public attachedUserIds: User["id"][];
@Index()
@Column("integer", {
default: 0,
})

View file

@ -17,7 +17,6 @@ export class NoteReaction {
@PrimaryColumn(id())
public id: string;
@Index()
@Column("timestamp without time zone", {
comment: "The created date of the NoteReaction.",
})

View file

@ -139,7 +139,6 @@ export class Note {
// FIXME: file id is not removed from this array even if the file is deleted
// TODO: drop this column and use note_files
@Index()
@Column({
...id(),
array: true,
@ -147,7 +146,6 @@ export class Note {
})
public fileIds: DriveFile["id"][];
@Index()
@Column("varchar", {
length: 256,
array: true,
@ -163,7 +161,6 @@ export class Note {
})
public visibleUserIds: User["id"][];
@Index()
@Column({
...id(),
array: true,
@ -184,7 +181,6 @@ export class Note {
})
public emojis: string[];
@Index()
@Column("varchar", {
length: 128,
array: true,

View file

@ -20,7 +20,6 @@ export class Notification {
@PrimaryColumn(id())
public id: string;
@Index()
@Column("timestamp without time zone", {
comment: "The created date of the Notification.",
})

View file

@ -44,14 +44,12 @@ export class Poll {
})
public noteVisibility: (typeof noteVisibilities)[number];
@Index()
@Column({
...id(),
comment: "[Denormalized]",
})
public userId: User["id"];
@Index()
@Column("varchar", {
length: 512,
nullable: true,

View file

@ -116,7 +116,6 @@ export class User {
})
public bannerId: DriveFile["id"] | null;
@Index()
@Column("varchar", {
length: 128,
array: true,

View file

@ -527,7 +527,7 @@ export const WellKnownContext = {
manuallyApprovesFollowers: "as:manuallyApprovesFollowers",
movedTo: {
"@id": "https://www.w3.org/ns/activitystreams#movedTo",
"@type": "@id"
"@type": "@id",
},
movedToUri: "as:movedTo",
sensitive: "as:sensitive",

View file

@ -286,7 +286,6 @@ import * as ep___pinnedUsers from "./endpoints/pinned-users.js";
import * as ep___customMotd from "./endpoints/custom-motd.js";
import * as ep___customSplashIcons from "./endpoints/custom-splash-icons.js";
import * as ep___latestVersion from "./endpoints/latest-version.js";
import * as ep___release from "./endpoints/release.js";
import * as ep___promo_read from "./endpoints/promo/read.js";
import * as ep___requestResetPassword from "./endpoints/request-reset-password.js";
import * as ep___resetPassword from "./endpoints/reset-password.js";
@ -635,7 +634,6 @@ const eps = [
["custom-motd", ep___customMotd],
["custom-splash-icons", ep___customSplashIcons],
["latest-version", ep___latestVersion],
["release", ep___release],
["promo/read", ep___promo_read],
["request-reset-password", ep___requestResetPassword],
["reset-password", ep___resetPassword],

View file

@ -1,4 +1,5 @@
import define from "@/server/api/define.js";
import { latestVersion } from "backend-rs";
export const meta = {
tags: ["meta"],
@ -14,14 +15,7 @@ export const paramDef = {
} as const;
export default define(meta, paramDef, async () => {
let latest_version;
await fetch("https://firefish.dev/firefish/firefish/-/raw/main/package.json")
.then((response) => response.json())
.then((data) => {
latest_version = data.version;
});
return {
latest_version,
latest_version: await latestVersion(),
};
});

View file

@ -1,28 +0,0 @@
import define from "@/server/api/define.js";
export const meta = {
tags: ["meta"],
description: "Get release notes from Codeberg",
requireCredential: false,
requireCredentialPrivateMode: false,
} as const;
export const paramDef = {
type: "object",
properties: {},
required: [],
} as const;
export default define(meta, paramDef, async () => {
let release;
await fetch(
"https://firefish.dev/firefish/firefish/-/raw/develop/release.json",
)
.then((response) => response.json())
.then((data) => {
release = data;
});
return release;
});

View file

@ -71,7 +71,7 @@ import { foldNotifications } from "@/scripts/fold";
import { defaultStore } from "@/store";
const props = defineProps<{
includeTypes?: (typeof notificationTypes)[number][];
includeTypes?: (typeof notificationTypes)[number][] | null;
unreadOnly?: boolean;
}>();

View file

@ -173,11 +173,15 @@ const rootEl = ref<HTMLElement>();
const items = ref<Item[]>([]);
const foldedItems = ref([]) as Ref<Fold[]>;
function toReversed<T>(arr: T[]) {
return [...arr].reverse();
}
// To improve performance, we do not use vues `computed` here
function calculateItems() {
function getItems<T>(folder: (ns: Item[]) => T[]) {
const res = [
folder(prepended.value.toReversed()),
folder(toReversed(prepended.value)),
...arrItems.value.map((arr) => folder(arr)),
folder(appended.value),
].flat(1);
@ -351,7 +355,7 @@ async function fetch(firstFetching?: boolean) {
if (firstFetching && props.folder != null) {
// In this way, prepended has some initial values for folding
prepended.value = res.toReversed();
prepended.value = toReversed(res);
} else {
// For ascending and offset modes, append and prepend may cause item duplication
// so they need to be filtered out.
@ -398,7 +402,7 @@ const prepend = (...item: Item[]): void => {
prepended.value.length >
(props.pagination.secondFetchLimit || SECOND_FETCH_LIMIT_DEFAULT)
) {
arrItems.value.unshift(prepended.value.toReversed());
arrItems.value.unshift(toReversed(prepended.value));
prepended.value = [];
// We don't need to calculate here because it won't cause any changes in items
}

View file

@ -44,6 +44,7 @@ const FIRE_THRESHOLD = defaultStore.state.pullToRefreshThreshold;
const RELEASE_TRANSITION_DURATION = 200;
const PULL_BRAKE_BASE = 1.5;
const PULL_BRAKE_FACTOR = 170;
const MAX_PULL_TAN_ANGLE = Math.tan((1 / 6) * Math.PI); // 30°
const pullStarted = ref(false);
const pullEnded = ref(false);
@ -53,6 +54,7 @@ const pullDistance = ref(0);
let disabled = false;
const supportPointerDesktop = false;
let startScreenY: number | null = null;
let startScreenX: number | null = null;
const rootEl = shallowRef<HTMLDivElement>();
let scrollEl: HTMLElement | null = null;
@ -72,11 +74,16 @@ function getScreenY(event) {
if (supportPointerDesktop) return event.screenY;
return event.touches[0].screenY;
}
function getScreenX(event) {
if (supportPointerDesktop) return event.screenX;
return event.touches[0].screenX;
}
function moveStart(event) {
if (!pullStarted.value && !isRefreshing.value && !disabled) {
pullStarted.value = true;
startScreenY = getScreenY(event);
startScreenX = getScreenX(event);
pullDistance.value = 0;
}
}
@ -117,6 +124,7 @@ async function closeContent() {
function moveEnd() {
if (pullStarted.value && !isRefreshing.value) {
startScreenY = null;
startScreenX = null;
if (pullEnded.value) {
pullEnded.value = false;
isRefreshing.value = true;
@ -146,11 +154,17 @@ function moving(event: TouchEvent | PointerEvent) {
moveEnd();
return;
}
if (startScreenY === null) {
startScreenY = getScreenY(event);
}
startScreenX ??= getScreenX(event);
startScreenY ??= getScreenY(event);
const moveScreenY = getScreenY(event);
const moveScreenX = getScreenX(event);
const moveHeight = moveScreenY - startScreenY!;
const moveWidth = moveScreenX - startScreenX!;
if (Math.abs(moveWidth / moveHeight) > MAX_PULL_TAN_ANGLE) {
if (Math.abs(moveWidth) > 30) pullStarted.value = false;
return;
}
pullDistance.value = Math.min(Math.max(moveHeight, 0), MAX_PULL_DISTANCE);
if (pullDistance.value > 0) {

View file

@ -27,6 +27,7 @@
>
<swiper-slide>
<XNotifications
:key="'tab1'"
class="notifications"
:include-types="includeTypes"
:unread-only="false"
@ -34,16 +35,18 @@
</swiper-slide>
<swiper-slide>
<XNotifications
v-if="tab === 'reactions'"
:key="'tab2'"
class="notifications"
:include-types="['reaction']"
:unread-only="false"
/>
</swiper-slide>
<swiper-slide>
<XNotes :pagination="mentionsPagination" />
<XNotes v-if="tab === 'mentions'" :key="'tab3'" :pagination="mentionsPagination" />
</swiper-slide>
<swiper-slide>
<XNotes :pagination="directNotesPagination" />
<XNotes v-if="tab === 'directNotes'" :key="'tab4'" :pagination="directNotesPagination" />
</swiper-slide>
</swiper>
</MkSpacer>
@ -54,6 +57,7 @@
import { computed, ref, watch } from "vue";
import { Virtual } from "swiper/modules";
import { Swiper, SwiperSlide } from "swiper/vue";
import type { Swiper as SwiperType } from "swiper/types";
import { notificationTypes } from "firefish-js";
import XNotifications from "@/components/MkNotifications.vue";
import XNotes from "@/components/MkNotes.vue";
@ -70,7 +74,7 @@ const tabs = ["all", "reactions", "mentions", "directNotes"];
const tab = ref(tabs[0]);
watch(tab, () => syncSlide(tabs.indexOf(tab.value)));
const includeTypes = ref<string[] | null>(null);
const includeTypes = ref<(typeof notificationTypes)[number][] | null>(null);
os.api("notifications/mark-all-as-read");
const MOBILE_THRESHOLD = 500;
@ -98,7 +102,7 @@ const directNotesPagination = {
function setFilter(ev) {
const typeItems = notificationTypes.map((t) => ({
text: i18n.t(`_notification._types.${t}`),
active: includeTypes.value && includeTypes.value.includes(t),
active: includeTypes.value?.includes(t),
action: () => {
includeTypes.value = [t];
},
@ -121,25 +125,23 @@ function setFilter(ev) {
}
const headerActions = computed(() =>
[
tab.value === "all"
? {
tab.value === "all"
? [
{
text: i18n.ts.filter,
icon: `${icon("ph-funnel")}`,
highlighted: includeTypes.value != null,
handler: setFilter,
}
: undefined,
tab.value === "all"
? {
},
{
text: i18n.ts.markAllAsRead,
icon: `${icon("ph-check")}`,
handler: () => {
os.apiWithDialog("notifications/mark-all-as-read");
},
}
: undefined,
].filter((x) => x !== undefined),
},
]
: [],
);
const headerTabs = computed(() => [
@ -172,18 +174,19 @@ definePageMetadata(
})),
);
let swiperRef = null;
let swiperRef: SwiperType | null = null;
function setSwiperRef(swiper) {
function setSwiperRef(swiper: SwiperType) {
swiperRef = swiper;
syncSlide(tabs.indexOf(tab.value));
}
function onSlideChange() {
tab.value = tabs[swiperRef.activeIndex];
if (tab.value !== tabs[swiperRef!.activeIndex])
tab.value = tabs[swiperRef!.activeIndex];
}
function syncSlide(index) {
swiperRef.slideTo(index);
function syncSlide(index: number) {
if (index !== swiperRef!.activeIndex) swiperRef!.slideTo(index);
}
</script>

View file

@ -265,14 +265,18 @@ export function getUserMenu(user, router: Router = mainRouter) {
icon: "ph-qr-code ph-bold ph-lg",
text: i18n.ts.getQrCode,
action: () => {
os.displayQrCode(`https://${host}/follow-me?acct=${user.username}`);
os.displayQrCode(
`https://${host}/follow-me?acct=${acct.toString(user)}`,
);
},
},
{
icon: `${icon("ph-hand-waving")}`,
text: i18n.ts.copyRemoteFollowUrl,
action: () => {
copyToClipboard(`https://${host}/follow-me?acct=${user.username}`);
copyToClipboard(
`https://${host}/follow-me?acct=${acct.toString(user)}`,
);
os.success();
},
},
@ -321,7 +325,7 @@ export function getUserMenu(user, router: Router = mainRouter) {
icon: `${icon("ph-hand-waving")}`,
text: i18n.ts.remoteFollow,
action: () => {
router.push(`/follow-me?acct=${user.username}`);
router.push(`/follow-me?acct=${acct.toString(user)}`);
},
}
: undefined,