Merge branch 'master' into sticker
commit e1b991dfd1
2407  api/package-lock.json  generated
File diff suppressed because it is too large
api/package.json
@@ -38,6 +38,8 @@
"homepage": "https://fosscord.com",
"devDependencies": {
"@babel/core": "^7.15.5",
"@babel/preset-env": "^7.15.8",
"@babel/preset-typescript": "^7.15.0",
"@types/amqplib": "^0.8.1",
"@types/bcrypt": "^5.0.0",
"@types/express": "^4.17.9",
@@ -45,6 +47,7 @@
"@types/jest": "^27.0.1",
"@types/jest-expect-message": "^1.0.3",
"@types/jsonwebtoken": "^8.5.0",
"@types/morgan": "^1.9.3",
"@types/multer": "^1.4.5",
"@types/node": "^14.17.9",
"@types/node-fetch": "^2.5.7",
@@ -57,8 +60,7 @@
"ts-node-dev": "^1.1.6",
"ts-patch": "^1.4.4",
"typescript": "^4.4.2",
"typescript-json-schema": "0.50.1",
"@types/morgan": "^1.9.3"
"typescript-json-schema": "0.50.1"
},
"dependencies": {
"@fosscord/util": "file:../util",
@@ -77,7 +79,7 @@
"i18next-node-fs-backend": "^2.1.3",
"image-size": "^1.0.0",
"jsonwebtoken": "^8.5.1",
"lambert-server": "^1.2.11",
"lambert-server": "^1.2.12",
"missing-native-js-functions": "^1.2.17",
"morgan": "^1.10.0",
"multer": "^1.4.2",
@@ -9,6 +9,8 @@ export const NO_AUTHORIZATION_ROUTES = [
	"/ping",
	"/gateway",
	"/experiments",
	"/-/readyz",
	"/-/healthz",
	/\/guilds\/\d+\/widget\.(json|png)/
];

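Review note: the two new entries, "/-/readyz" and "/-/healthz", let the probe endpoints added below be reached without a token. The list is presumably consulted by the authentication middleware before token verification; the following is a minimal sketch of how such an exemption check typically looks (the middleware body is illustrative and not taken from this commit):

import { NextFunction, Request, Response } from "express";

const NO_AUTHORIZATION_ROUTES: (string | RegExp)[] = [
	"/ping",
	"/gateway",
	"/experiments",
	"/-/readyz",
	"/-/healthz",
	/\/guilds\/\d+\/widget\.(json|png)/
];

// Illustrative middleware: skip auth for exempt routes, otherwise verify the token.
export function Authentication(req: Request, res: Response, next: NextFunction) {
	const exempt = NO_AUTHORIZATION_ROUTES.some((x) =>
		typeof x === "string" ? req.url.startsWith(x) : x.test(req.url)
	);
	if (exempt) return next();
	// ... token verification would go here ...
	return next();
}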
17  api/src/routes/-/healthz.ts  Normal file
@@ -0,0 +1,17 @@
import { Router, Response, Request } from "express";
import { route } from "@fosscord/api";
import { getConnection } from "typeorm";

const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	try {
		// test that the database is alive & responding
		getConnection();
		return res.sendStatus(200);
	} catch(e) {
		res.sendStatus(503);
	}
});

export default router;
17  api/src/routes/-/readyz.ts  Normal file
@@ -0,0 +1,17 @@
import { Router, Response, Request } from "express";
import { route } from "@fosscord/api";
import { getConnection } from "typeorm";

const router = Router();

router.get("/", route({}), (req: Request, res: Response) => {
	try {
		// test that the database is alive & responding
		getConnection();
		return res.sendStatus(200);
	} catch(e) {
		res.sendStatus(503);
	}
});

export default router;
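Review note: readyz.ts is identical to healthz.ts. On both, keep in mind that typeorm's getConnection() only looks up the already-registered default connection and throws if none exists; it does not round-trip to the database. If a stricter probe is wanted, a trivial query gives a better signal. A sketch, not part of this commit:

import { getConnection } from "typeorm";

// Runs "SELECT 1" so the probe fails when the database stops responding,
// not only when no connection object is registered.
export async function databaseResponds(): Promise<boolean> {
	try {
		await getConnection().query("SELECT 1");
		return true;
	} catch {
		return false;
	}
}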
@@ -10,7 +10,8 @@ import {
	getPermission,
	Message,
	MessageCreateEvent,
	uploadFile
	uploadFile,
	Member
} from "@fosscord/util";
import { HTTPError } from "lambert-server";
import { handleMessage, postHandleMessage, route } from "@fosscord/api";
@@ -187,33 +188,34 @@ router.post(

	message = await message.save();

	await channel.assign({ last_message_id: message.id }).save();

	if (channel.isDm()) {
		const channel_dto = await DmChannelDTO.from(channel);

		for (let recipient of channel.recipients!) {
			if (recipient.closed) {
				await emitEvent({
					event: "CHANNEL_CREATE",
					data: channel_dto.excludedRecipients([recipient.user_id]),
					user_id: recipient.user_id
				});
			}
		}

		// Only one recipient should be closed here, since in group DMs the recipient is deleted, not closed

		await Promise.all(
			channel
				.recipients!.filter((r) => r.closed)
				.map(async (r) => {
					r.closed = false;
					return await r.save();
				})
			channel.recipients!.map((recipient) => {
				if (recipient.closed) {
					recipient.closed = false;
					return Promise.all([
						recipient.save(),
						emitEvent({
							event: "CHANNEL_CREATE",
							data: channel_dto.excludedRecipients([recipient.user_id]),
							user_id: recipient.user_id
						})
					]);
				}
			})
		);
	}

	await emitEvent({ event: "MESSAGE_CREATE", channel_id: channel_id, data: message } as MessageCreateEvent);
	await Promise.all([
		channel.assign({ last_message_id: message.id }).save(),
		new Member({ id: req.user_id, last_message_id: message.id }).save(),
		emitEvent({ event: "MESSAGE_CREATE", channel_id: channel_id, data: message } as MessageCreateEvent)
	]);

	postHandleMessage(message).catch((e) => {}); // no await as it shouldn't block the message send function; silently catch errors

	return res.json(message);

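Review note on the rewritten recipient handling: the map callback returns undefined for recipients that are not closed, which is fine because Promise.all accepts plain values and resolves them as-is. A minimal illustration:

// Promise.all tolerates non-promise values (including undefined) in its input array.
async function demo() {
	const results = await Promise.all(
		[1, 2, 3].map((n) => (n % 2 ? Promise.resolve(n * 10) : undefined))
	);
	console.log(results); // [10, undefined, 30]
}

The batched Promise.all at the end also stores last_message_id on the author's Member row (new Member({ id: req.user_id, last_message_id: message.id }).save()), which is what the new prune endpoint below uses to measure activity.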
82  api/src/routes/guilds/#guild_id/prune.ts  Normal file
@@ -0,0 +1,82 @@
import { Router, Request, Response } from "express";
import { Guild, Member, Snowflake } from "@fosscord/util";
import { LessThan, IsNull } from "typeorm";
import { route } from "@fosscord/api";
const router = Router();

// Returns all inactive members, respecting role hierarchy
export const inactiveMembers = async (guild_id: string, user_id: string, days: number, roles: string[] = []) => {
	var date = new Date();
	date.setDate(date.getDate() - days);
	// Snowflake should have `generateFromTime` method? Or similar?
	var minId = BigInt(date.valueOf() - Snowflake.EPOCH) << BigInt(22);

	var members = await Member.find({
		where: [
			{
				guild_id,
				last_message_id: LessThan(minId.toString())
			},
			{
				last_message_id: IsNull()
			}
		],
		relations: ["roles"]
	});
	console.log(members);
	if (!members.length) return [];

	// I'm sure I can do this in the above db query (and it would probably be better to do so), but oh well.
	if (roles.length && members.length) members = members.filter((user) => user.roles?.some((role) => roles.includes(role.id)));

	const me = await Member.findOneOrFail({ id: user_id, guild_id }, { relations: ["roles"] });
	const myHighestRole = Math.max(...(me.roles?.map((x) => x.position) || []));

	const guild = await Guild.findOneOrFail({ where: { id: guild_id } });

	members = members.filter(
		(member) =>
			member.id !== guild.owner_id && // can't kick owner
			member.roles?.some(
				(role) =>
					role.position < myHighestRole || // roles higher than me can't be kicked
					me.id === guild.owner_id // owner can kick anyone
			)
	);

	return members;
};

router.get("/", route({ permission: "KICK_MEMBERS" }), async (req: Request, res: Response) => {
	const days = parseInt(req.query.days as string);

	var roles = req.query.include_roles;
	if (typeof roles === "string") roles = [roles]; // express will return array otherwise

	const members = await inactiveMembers(req.params.guild_id, req.user_id, days, roles as string[]);

	res.send({ pruned: members.length });
});

export interface PruneSchema {
	/**
	 * @min 0
	 */
	days: number;
}

router.post("/", route({ permission: "KICK_MEMBERS" }), async (req: Request, res: Response) => {
	const days = parseInt(req.body.days);

	var roles = req.query.include_roles;
	if (typeof roles === "string") roles = [roles];

	const { guild_id } = req.params;
	const members = await inactiveMembers(guild_id, req.user_id, days, roles as string[]);

	await Promise.all(members.map((x) => Member.removeFromGuild(x.id, guild_id)));

	res.send({ purged: members.length });
});

export default router;
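Review notes on prune.ts: the cutoff id is computed inline as BigInt(date.valueOf() - Snowflake.EPOCH) << BigInt(22), and the in-code comment asks for a Snowflake.generateFromTime helper. A hedged sketch of what that helper could look like, assuming the same bit layout generate() uses (milliseconds since the epoch shifted left by 22 bits, with the worker/process/increment bits left at zero so the result is the smallest id at that instant):

// Sketch only; not part of this commit. The epoch is passed in rather than
// assumed, so it stays consistent with whatever Snowflake.EPOCH is.
export function minSnowflakeAtTime(date: Date, epoch: number): string {
	return (BigInt(date.valueOf() - epoch) << BigInt(22)).toString();
}

export function snowflakeToDate(id: string, epoch: number): Date {
	return new Date(Number(BigInt(id) >> BigInt(22)) + epoch);
}

Two other things stand out as written: the second branch of the Member.find where array only specifies last_message_id: IsNull() and does not repeat guild_id, so never-posted members of other guilds appear to match as well; and the hierarchy filter uses .some(...), meaning a member passes if any one of their roles sits below the caller's highest role rather than all of them. Both may be intentional, but they are worth a second look, as is the stray console.log(members) left from debugging.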
@@ -10,9 +10,9 @@ export async function initInstance() {

	if (autoJoin.enabled && !autoJoin.guilds?.length) {
		let guild = await Guild.findOne({});
		if (!guild) guild = await Guild.createGuild({});

		// @ts-ignore
		await Config.set({ guild: { autoJoin: { guilds: [guild.id] } } });
		if (guild) {
			// @ts-ignore
			await Config.set({ guild: { autoJoin: { guilds: [guild.id] } } });
		}
	}
}
@@ -43,7 +43,7 @@ const request = async (path: string, opts: any = {}): Promise<any> => {

	var data = await response.text();
	try {
		data = JSON.stringify(data);
		data = JSON.parse(data);
		if (response.status >= 400) throw data;
		return data;
	} catch (error) {
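Review note: swapping JSON.stringify for JSON.parse looks right here, since response.text() already yields a string; stringify would only double-encode it, while parse turns the body into the object the surrounding code throws or returns. For illustration:

const body = '{"ok":true}';
JSON.stringify(body); // '"{\"ok\":true}"' (still a string, now double-encoded)
JSON.parse(body);     // { ok: true }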
2365  bundle/package-lock.json  generated
File diff suppressed because it is too large
bundle/package.json
@@ -47,7 +47,7 @@
"jest": "^27.0.6",
"jest-expect-message": "^1.0.2",
"jest-runtime": "^27.2.1",
"ts-node": "^9.1.1",
"ts-node": "^10.2.1",
"ts-node-dev": "^1.1.6",
"ts-patch": "^1.4.4",
"typescript": "^4.2.3",
@@ -93,6 +93,8 @@
"typescript": "^4.1.2",
"typescript-json-schema": "^0.50.1",
"ws": "^7.4.2",
"cheerio": "^1.0.0-rc.10"
"cheerio": "^1.0.0-rc.10",
"@aws-sdk/client-s3": "^3.36.1",
"@aws-sdk/node-http-handler": "^3.36.0"
}
}
}
@@ -8,12 +8,12 @@ const dirs = ["api", "util", "cdn", "gateway", "bundle"];

const verbose = argv.includes("verbose") || argv.includes("v");

if(argv.includes("clean")){
	dirs.forEach(a=>{
		var d = "../"+a+"/dist";
		if(fse.existsSync(d)) {
			fse.rmSync(d,{recursive: true});
			if(verbose) console.log(`Deleted ${d}!`);
if (argv.includes("clean")) {
	dirs.forEach((a) => {
		var d = "../" + a + "/dist";
		if (fse.existsSync(d)) {
			fse.rmSync(d, { recursive: true });
			if (verbose) console.log(`Deleted ${d}!`);
		}
	});
}
@@ -24,9 +24,9 @@ fse.copySync(
	path.join(__dirname, "..", "dist", "api", "client_test")
);
fse.copySync(path.join(__dirname, "..", "..", "api", "locales"), path.join(__dirname, "..", "dist", "api", "locales"));
dirs.forEach(a=>{
	fse.copySync("../"+a+"/src", "dist/"+a+"/src");
	if(verbose) console.log(`Copied ${"../"+a+"/dist"} -> ${"dist/"+a+"/src"}!`);
dirs.forEach((a) => {
	fse.copySync("../" + a + "/src", "dist/" + a + "/src");
	if (verbose) console.log(`Copied ${"../" + a + "/dist"} -> ${"dist/" + a + "/src"}!`);
});

console.log("Copying src files done");
@@ -34,10 +34,11 @@ console.log("Compiling src files ...");

console.log(
	execSync(
		"node \"" +
		'node "' +
			path.join(__dirname, "..", "node_modules", "typescript", "lib", "tsc.js") +
		"\" -p \"" +
		path.join(__dirname, "..") + "\"",
			'" -p "' +
			path.join(__dirname, "..") +
			'"',
		{
			cwd: path.join(__dirname, ".."),
			shell: true,
15169  cdn/package-lock.json  generated
File diff suppressed because it is too large
cdn/package.json
@@ -36,6 +36,8 @@
"ts-patch": "^1.4.4"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.36.1",
"@aws-sdk/node-http-handler": "^3.36.0",
"@fosscord/util": "file:../util",
"body-parser": "^1.19.0",
"btoa": "^1.2.1",
60  cdn/src/util/S3Storage.ts  Normal file
@@ -0,0 +1,60 @@
import { S3 } from "@aws-sdk/client-s3";
import { Readable } from "stream";
import { Storage } from "./Storage";

const readableToBuffer = (readable: Readable): Promise<Buffer> =>
	new Promise((resolve, reject) => {
		const chunks: Buffer[] = [];
		readable.on('data', chunk => chunks.push(chunk));
		readable.on('error', reject);
		readable.on('end', () => resolve(Buffer.concat(chunks)));
	});

export class S3Storage implements Storage {
	public constructor(
		private client: S3,
		private bucket: string,
		private basePath?: string,
	) {}

	/**
	 * Always return a string, to ensure consistency.
	 */
	get bucketBasePath() {
		return this.basePath ?? '';
	}

	async set(path: string, data: Buffer): Promise<void> {
		await this.client.putObject({
			Bucket: this.bucket,
			Key: `${this.bucketBasePath}${path}`,
			Body: data
		});
	}

	async get(path: string): Promise<Buffer | null> {
		try {
			const s3Object = await this.client.getObject({
				Bucket: this.bucket,
				Key: `${this.bucketBasePath ?? ''}${path}`
			});

			if (!s3Object.Body) return null;

			const body = s3Object.Body;

			return await readableToBuffer(<Readable> body);
		} catch(err) {
			console.error(`[CDN] Unable to get S3 object at path ${path}.`);
			console.error(err);
			return null;
		}
	}

	async delete(path: string): Promise<void> {
		await this.client.deleteObject({
			Bucket: this.bucket,
			Key: `${this.bucketBasePath}${path}`
		});
	}
}
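Review note: a quick usage sketch of the new class (the region, bucket name and base path below are placeholders; the Storage.ts change further down wires this up from STORAGE_PROVIDER=s3, STORAGE_REGION, STORAGE_BUCKET and STORAGE_LOCATION instead):

import { S3 } from "@aws-sdk/client-s3";
import { S3Storage } from "./S3Storage";

async function example() {
	// placeholder region, bucket and base path
	const storage = new S3Storage(new S3({ region: "eu-central-1" }), "my-cdn-bucket", "cdn/");
	await storage.set("avatars/123.png", Buffer.from("example"));
	const file = await storage.get("avatars/123.png"); // Buffer | null
	await storage.delete("avatars/123.png");
}

Since bucketBasePath is prepended verbatim to the object key, a trailing slash (or its absence) on the configured base path directly changes the resulting keys; the ?? '' inside get() is also redundant, because the bucketBasePath getter already falls back to an empty string.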
@@ -2,6 +2,8 @@ import { FileStorage } from "./FileStorage";
import path from "path";
import fse from "fs-extra";
import { bgCyan, black } from "nanocolors";
import { S3 } from '@aws-sdk/client-s3';
import { S3Storage } from "./S3Storage";
process.cwd();

export interface Storage {
@@ -10,10 +12,10 @@ export interface Storage {
	delete(path: string): Promise<void>;
}

var storage: Storage;
let storage: Storage;

if (process.env.STORAGE_PROVIDER === "file" || !process.env.STORAGE_PROVIDER) {
	var location = process.env.STORAGE_LOCATION;
	let location = process.env.STORAGE_LOCATION;
	if (location) {
		location = path.resolve(location);
	} else {
@@ -24,6 +26,32 @@ if (process.env.STORAGE_PROVIDER === "file" || !process.env.STORAGE_PROVIDER) {
	process.env.STORAGE_LOCATION = location;

	storage = new FileStorage();
} else if (process.env.STORAGE_PROVIDER === "s3") {
	const
		region = process.env.STORAGE_REGION,
		bucket = process.env.STORAGE_BUCKET;

	if (!region) {
		console.error(`[CDN] You must provide a region when using the S3 storage provider.`);
		process.exit(1);
	}

	if (!bucket) {
		console.error(`[CDN] You must provide a bucket when using the S3 storage provider.`);
		process.exit(1);
	}

	// in the S3 provider, this should be the root path in the bucket
	let location = process.env.STORAGE_LOCATION;

	if (!location) {
		console.warn(`[CDN] STORAGE_LOCATION unconfigured for S3 provider, defaulting to the bucket root...`);
		location = undefined;
	}

	const client = new S3({ region });

	storage = new S3Storage(client, bucket, location);
}

export { storage };
14  util/package-lock.json  generated
@@ -12,7 +12,7 @@
"dependencies": {
"amqplib": "^0.8.0",
"jsonwebtoken": "^8.5.1",
"lambert-server": "^1.2.11",
"lambert-server": "^1.2.12",
"missing-native-js-functions": "^1.2.17",
"multer": "^1.4.3",
"nanocolors": "^0.2.12",
@@ -4409,9 +4409,9 @@
}
},
"node_modules/lambert-server": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/lambert-server/-/lambert-server-1.2.11.tgz",
"integrity": "sha512-6Uqritpx+ryNWrzLv9YXNqSfCZ10EQh97cHtz8nZMH2WiZm0wL+C3vSMhevdKAE8xdtmeIQXc3idH94nQEx9mA==",
"version": "1.2.12",
"resolved": "https://registry.npmjs.org/lambert-server/-/lambert-server-1.2.12.tgz",
"integrity": "sha512-TY6k60KLVfBpPrl9lcrN54RJdTBg9f8JqJPoHg5d/FMLnnwwQtT4budpoQjyLDwBLhS+zpXo0aBCwnnGgTVGaw==",
"dependencies": {
"body-parser": "^1.19.0",
"chalk": "^4.1.1",
@@ -10845,9 +10845,9 @@
"dev": true
},
"lambert-server": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/lambert-server/-/lambert-server-1.2.11.tgz",
"integrity": "sha512-6Uqritpx+ryNWrzLv9YXNqSfCZ10EQh97cHtz8nZMH2WiZm0wL+C3vSMhevdKAE8xdtmeIQXc3idH94nQEx9mA==",
"version": "1.2.12",
"resolved": "https://registry.npmjs.org/lambert-server/-/lambert-server-1.2.12.tgz",
"integrity": "sha512-TY6k60KLVfBpPrl9lcrN54RJdTBg9f8JqJPoHg5d/FMLnnwwQtT4budpoQjyLDwBLhS+zpXo0aBCwnnGgTVGaw==",
"requires": {
"body-parser": "^1.19.0",
"chalk": "^4.1.1",
util/package.json
@@ -40,7 +40,7 @@
"dependencies": {
"amqplib": "^0.8.0",
"jsonwebtoken": "^8.5.1",
"lambert-server": "^1.2.11",
"lambert-server": "^1.2.12",
"missing-native-js-functions": "^1.2.17",
"multer": "^1.4.3",
"nanocolors": "^0.2.12",
@@ -84,6 +84,9 @@ export class Member extends BaseClassWithoutId {
	@Column({ type: "simple-json" })
	settings: UserGuildSettings;

	@Column({ nullable: true })
	last_message_id?: string;

	// TODO: update
	// @Column({ type: "simple-json" })
	// read_state: ReadState;
@@ -84,7 +84,7 @@ export class Snowflake {
	}

	static generate() {
		var time = BigInt(Date.now() - Snowflake.EPOCH) << 22n;
		var time = BigInt(Date.now() - Snowflake.EPOCH) << BigInt(22);
		var worker = Snowflake.workerId << 17n;
		var process = Snowflake.processId << 12n;
		var increment = Snowflake.INCREMENT++;