Format with prettier

parent 7bf5b18fd6
commit e0fd96ab78

src/example.ts (105)

@@ -12,13 +12,14 @@ F95_PASSWORD = YOUR_PASSWORD
 import dotenv from "dotenv";

 // Modules from file
-import { login,
-  getUserData,
-  getLatestUpdates,
-  LatestSearchQuery,
-  Game,
-  searchHandiwork,
-  HandiworkSearchQuery
+import {
+  login,
+  getUserData,
+  getLatestUpdates,
+  LatestSearchQuery,
+  Game,
+  searchHandiwork,
+  HandiworkSearchQuery
 } from "./index.js";

 // Configure the .env reader
@@ -27,54 +28,62 @@ dotenv.config();
 main();

 async function main() {
   // Local variables
-  const gameList = [
-    "City of broken dreamers",
-    "Seeds of chaos",
-    "MIST"
-  ];
+  const gameList = ["City of broken dreamers", "Seeds of chaos", "MIST"];

   // Log in the platform
   console.log("Authenticating...");
-  const result = await login(process.env.F95_USERNAME, process.env.F95_PASSWORD);
+  const result = await login(
+    process.env.F95_USERNAME,
+    process.env.F95_PASSWORD
+  );
   console.log(`Authentication result: ${result.message}\n`);

   // Get user data
   console.log("Fetching user data...");
   const userdata = await getUserData();
-  const gameThreads = userdata.watched.filter(e => e.forum === "Games").length;
-  console.log(`${userdata.name} follows ${userdata.watched.length} threads of which ${gameThreads} are games\n`);
+  const gameThreads = userdata.watched.filter((e) => e.forum === "Games")
+    .length;
+  console.log(
+    `${userdata.name} follows ${userdata.watched.length} threads of which ${gameThreads} are games\n`
+  );

   // Get latest game update
   const latestQuery: LatestSearchQuery = new LatestSearchQuery();
   latestQuery.category = "games";
   latestQuery.includedTags = ["3d game"];

   const latestUpdates = await getLatestUpdates<Game>(latestQuery, 1);
-  console.log(`"${latestUpdates.shift().name}" was the last "3d game" tagged game to be updated\n`);
+  console.log(
+    `"${
+      latestUpdates.shift().name
+    }" was the last "3d game" tagged game to be updated\n`
+  );

   // Get game data
-  for(const gamename of gameList) {
+  for (const gamename of gameList) {
     console.log(`Searching '${gamename}'...`);

     // Prepare the query
     const query: HandiworkSearchQuery = new HandiworkSearchQuery();
     query.category = "games";
     query.keywords = gamename;
     query.order = "likes"; // To find the most popular games

     // Fetch the first result
     const searchResult = await searchHandiwork<Game>(query, 1);

     // No game found
     if (searchResult.length === 0) {
       console.log(`No data found for '${gamename}'\n`);
       continue;
     }

     // Extract first game
     const gamedata = searchResult.shift();
     const authors = gamedata.authors.map((a, idx) => a.name).join(", ");
-    console.log(`Found: ${gamedata.name} (${gamedata.version}) by ${authors}\n`);
+    console.log(
+      `Found: ${gamedata.name} (${gamedata.version}) by ${authors}\n`
+    );
   }
 }
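
The style applied throughout this commit (double quotes, semicolons, no trailing commas, 2-space indentation, parenthesized arrow parameters, roughly 80-column wrapping) matches Prettier's defaults. A configuration along these lines would produce it; the file below is illustrative only and is not part of this commit.

// .prettierrc.js (illustrative sketch, not included in this commit)
module.exports = {
  printWidth: 80,        // wrap lines at ~80 columns, as seen in the reformatted calls above
  tabWidth: 2,           // 2-space indentation
  semi: true,            // keep semicolons
  singleQuote: false,    // double quotes, as in the output above
  trailingComma: "none", // no trailing commas after the last item
  arrowParens: "always"  // (e) => ... instead of e => ...
};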

src/index.ts (173)

@@ -52,7 +52,9 @@ shared.logger.level = "warn"; // By default log only the warn messages
 /**
  * Indicates whether a user is logged in to the F95Zone platform or not.
  */
-export function isLogged(): boolean { return shared.isLogged; };
+export function isLogged(): boolean {
+  return shared.isLogged;
+}
 //#endregion Export properties

 //#region Export methods
@@ -62,68 +64,73 @@ export function isLogged(): boolean { return shared.isLogged; };
 *
 * This **must** be the first operation performed before accessing any other script functions.
 */
-export async function login(username: string, password: string): Promise<LoginResult> {
+export async function login(
+  username: string,
+  password: string
+): Promise<LoginResult> {
   // Try to load a previous session
   await shared.session.load();

   // If the session is valid, return
   if (shared.session.isValid(username, password)) {
     shared.logger.info(`Loading previous session for ${username}`);

     // Load platform data
     await fetchPlatformData();

     shared.setIsLogged(true);
     return new LoginResult(true, `${username} already authenticated (session)`);
   }

   // Creating credentials and fetch unique platform token
   shared.logger.trace("Fetching token...");
   const creds = new Credentials(username, password);
   await creds.fetchToken();

   shared.logger.trace(`Authentication for ${username}`);
   const result = await authenticate(creds);
   shared.setIsLogged(result.success);

   if (result.success) {
     // Load platform data
     await fetchPlatformData();

     // Recreate the session, overwriting the old one
     shared.session.create(username, password, creds.token);
     await shared.session.save();

     shared.logger.info("User logged in through the platform");
   } else shared.logger.warn(`Error during authentication: ${result.message}`);

   return result;
-};
+}

 /**
  * Chek if exists a new version of the handiwork.
  *
  * You **must** be logged in to the portal before calling this method.
  */
-export async function checkIfHandiworkHasUpdate(hw: HandiWork): Promise<boolean> {
+export async function checkIfHandiworkHasUpdate(
+  hw: HandiWork
+): Promise<boolean> {
   // Local variables
   let hasUpdate = false;

   // Check if the user is logged
   if (!shared.isLogged) throw new UserNotLogged(USER_NOT_LOGGED);

   // F95 change URL at every game update,
   // so if the URL is different an update is available
   if (await urlExists(hw.url, true)) {
     // Fetch the online handiwork
     const onlineHw = await getHandiworkFromURL<HandiWork>(hw.url);

     // Compare the versions
     hasUpdate = onlineHw.version?.toUpperCase() !== hw.version?.toUpperCase();
   }

   return hasUpdate;
-};
+}

 /**
  * Search for one or more handiworks identified by a specific query.
@@ -133,30 +140,35 @@ export async function checkIfHandiworkHasUpdate(hw: HandiWork): Promise<boolean>
 * @param {HandiworkSearchQuery} query Parameters used for the search.
 * @param {Number} limit Maximum number of results. Default: 10
 */
-export async function searchHandiwork<T extends IBasic>(query: HandiworkSearchQuery, limit: number = 10): Promise<T[]> {
+export async function searchHandiwork<T extends IBasic>(
+  query: HandiworkSearchQuery,
+  limit = 10
+): Promise<T[]> {
   // Check if the user is logged
   if (!shared.isLogged) throw new UserNotLogged(USER_NOT_LOGGED);

   return search<T>(query, limit);
-};
+}

 /**
  * Given the url, it gets all the information about the handiwork requested.
  *
  * You **must** be logged in to the portal before calling this method.
  */
-export async function getHandiworkFromURL<T extends IBasic>(url: string): Promise<T> {
+export async function getHandiworkFromURL<T extends IBasic>(
+  url: string
+): Promise<T> {
   // Check if the user is logged
   if (!shared.isLogged) throw new UserNotLogged(USER_NOT_LOGGED);

   // Check URL validity
   const exists = await urlExists(url);
   if (!exists) throw new URIError(`${url} is not a valid URL`);
   if (!isF95URL(url)) throw new Error(`${url} is not a valid F95Zone URL`);

   // Get game data
   return getHandiworkInformation<T>(url);
-};
+}

 /**
  * Gets the data of the currently logged in user.
@@ -166,15 +178,15 @@ export async function getHandiworkFromURL<T extends IBasic>(url: string): Promis
 * @returns {Promise<UserProfile>} Data of the user currently logged in
 */
 export async function getUserData(): Promise<UserProfile> {
   // Check if the user is logged
   if (!shared.isLogged) throw new UserNotLogged(USER_NOT_LOGGED);

   // Create and fetch profile data
   const profile = new UserProfile();
   await profile.fetch();

   return profile;
-};
+}

 /**
  * Gets the latest updated games that match the specified parameters.
@@ -184,19 +196,22 @@ export async function getUserData(): Promise<UserProfile> {
 * @param {LatestSearchQuery} query Parameters used for the search.
 * @param {Number} limit Maximum number of results. Default: 10
 */
-export async function getLatestUpdates<T extends IBasic>(query: LatestSearchQuery, limit: number = 10): Promise<T[]> {
+export async function getLatestUpdates<T extends IBasic>(
+  query: LatestSearchQuery,
+  limit = 10
+): Promise<T[]> {
   // Check limit value
   if (limit <= 0) throw new Error("limit must be greater than 0");

   // Check if the user is logged
   if (!shared.isLogged) throw new UserNotLogged(USER_NOT_LOGGED);

   // Fetch the results
   const urls = await fetchLatestHandiworkURLs(query, limit);

   // Get the data from urls
   const promiseList = urls.map((u: string) => getHandiworkInformation<T>(u));
   return Promise.all(promiseList);
-};
+}

 //#endregion
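
A usage sketch of the API reformatted above. The import path for HandiWork and the helper function are assumptions for illustration only; they are not part of this commit.

// Illustrative only: check whether a previously fetched handiwork has an update.
import { login, getHandiworkFromURL, checkIfHandiworkHasUpdate } from "./index.js";
import HandiWork from "./classes/handiwork/handiwork.js"; // assumed path

async function checkForUpdate(url: string): Promise<void> {
  // Login is required before calling any other exported function.
  await login(process.env.F95_USERNAME, process.env.F95_PASSWORD);

  // Fetch the current data for the thread, then ask the portal if a newer version exists.
  const hw = await getHandiworkFromURL<HandiWork>(url);
  const updated = await checkIfHandiworkHasUpdate(hw);
  console.log(`${hw.name}: update available = ${updated}`);
}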
@@ -7,28 +7,28 @@ import { getF95Token } from "../network-helper.js";
 * Represents the credentials used to access the platform.
 */
export default class Credentials {
  /**
   * Username
   */
  public username: string;
  /**
   * Password of the user.
   */
  public password: string;
  /**
   * One time token used during login.
   */
  public token: string = null;

  constructor(username: string, password: string) {
    this.username = username;
    this.password = password;
  }

  /**
   * Fetch and save the token used to log in to F95Zone.
   */
  async fetchToken(): Promise<void> {
    this.token = await getF95Token();
  }
}
@@ -1,44 +1,44 @@
 "use strict";

 interface IBaseError {
   /**
    * Unique identifier of the error.
    */
-  id: number,
+  id: number;
   /**
    * Error message.
    */
-  message: string,
+  message: string;
   /**
    * Error to report.
    */
-  error: Error,
+  error: Error;
 }

 export class GenericAxiosError extends Error implements IBaseError {
   id: number;
   message: string;
   error: Error;

   constructor(args: IBaseError) {
     super();
     this.id = args.id;
     this.message = args.message;
     this.error = args.error;
   }
 }

 export class UnexpectedResponseContentType extends Error implements IBaseError {
   id: number;
   message: string;
   error: Error;

   constructor(args: IBaseError) {
     super();
     this.id = args.id;
     this.message = args.message;
     this.error = args.error;
   }
 }

 export class InvalidF95Token extends Error {}
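
A minimal construction sketch for the error classes above; the values are placeholders, not taken from this commit.

// Illustrative only: filling the IBaseError shape shown above.
const failure = new GenericAxiosError({
  id: 1, // arbitrary identifier for the example
  message: "Unexpected response from the server",
  error: new Error("request failed")
});
throw failure;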
@@ -4,28 +4,26 @@
import { TAuthor, IAnimation, TRating, TCategory } from "../../interfaces";

export default class Animation implements IAnimation {
  //#region Properties
  censored: boolean;
  genre: string[];
  installation: string;
  language: string[];
  lenght: string;
  pages: string;
  resolution: string[];
  authors: TAuthor[];
  category: TCategory;
  changelog: string[];
  cover: string;
  id: number;
  lastThreadUpdate: Date;
  name: string;
  overview: string;
  prefixes: string[];
  rating: TRating;
  tags: string[];
  threadPublishingDate: Date;
  url: string;
  //#endregion Properties
}
@@ -4,27 +4,25 @@
import { TAuthor, IAsset, TRating, TCategory } from "../../interfaces";

export default class Asset implements IAsset {
  //#region Properties
  assetLink: string;
  associatedAssets: string[];
  compatibleSoftware: string;
  includedAssets: string[];
  officialLinks: string[];
  sku: string;
  authors: TAuthor[];
  category: TCategory;
  changelog: string[];
  cover: string;
  id: number;
  lastThreadUpdate: Date;
  name: string;
  overview: string;
  prefixes: string[];
  rating: TRating;
  tags: string[];
  threadPublishingDate: Date;
  url: string;
  //#endregion Properties
}
@@ -4,23 +4,22 @@
import { TAuthor, IComic, TRating, TCategory } from "../../interfaces";

export default class Comic implements IComic {
  //#region Properties
  genre: string[];
  pages: string;
  resolution: string[];
  authors: TAuthor[];
  category: TCategory;
  changelog: string[];
  cover: string;
  id: number;
  lastThreadUpdate: Date;
  name: string;
  overview: string;
  prefixes: string[];
  rating: TRating;
  tags: string[];
  threadPublishingDate: Date;
  url: string;
  //#endregion Properties
}
@@ -1,34 +1,39 @@
"use strict";

// Modules from files
import {
  TAuthor,
  TEngine,
  IGame,
  TRating,
  TStatus,
  TCategory
} from "../../interfaces";

export default class Game implements IGame {
  //#region Properties
  censored: boolean;
  engine: TEngine;
  genre: string[];
  installation: string;
  language: string[];
  lastRelease: Date;
  mod: boolean;
  os: string[];
  status: TStatus;
  version: string;
  authors: TAuthor[];
  category: TCategory;
  changelog: string[];
  cover: string;
  id: number;
  lastThreadUpdate: Date;
  name: string;
  overview: string;
  prefixes: string[];
  rating: TRating;
  tags: string[];
  threadPublishingDate: Date;
  url: string;
  //#endregion Properties
}
@@ -1,46 +1,51 @@
"use strict";

// Modules from files
import {
  TAuthor,
  TRating,
  IHandiwork,
  TEngine,
  TCategory,
  TStatus
} from "../../interfaces";

/**
 * It represents a generic work, be it a game, a comic, an animation or an asset.
 */
export default class HandiWork implements IHandiwork {
  //#region Properties
  censored: boolean;
  engine: TEngine;
  genre: string[];
  installation: string;
  language: string[];
  lastRelease: Date;
  mod: boolean;
  os: string[];
  status: TStatus;
  version: string;
  authors: TAuthor[];
  category: TCategory;
  changelog: string[];
  cover: string;
  id: number;
  lastThreadUpdate: Date;
  name: string;
  overview: string;
  prefixes: string[];
  rating: TRating;
  tags: string[];
  threadPublishingDate: Date;
  url: string;
  pages: string;
  resolution: string[];
  lenght: string;
  assetLink: string;
  associatedAssets: string[];
  compatibleSoftware: string;
  includedAssets: string[];
  officialLinks: string[];
  sku: string;
  //#endregion Properties
}
@@ -4,17 +4,17 @@
 * Object obtained in response to an attempt to login to the portal.
 */
export default class LoginResult {
  /**
   * Result of the login operation
   */
  success: boolean;
  /**
   * Login response message
   */
  message: string;

  constructor(success: boolean, message: string) {
    this.success = success;
    this.message = message;
  }
}
@@ -13,144 +13,181 @@ import { GENERIC, MEMBER } from "../../constants/css-selector.js";
 * Represents a generic user registered on the platform.
 */
export default class PlatformUser {
  //#region Fields

  private _id: number;
  private _name: string;
  private _title: string;
  private _banners: string[];
  private _messages: number;
  private _reactionScore: number;
  private _points: number;
  private _ratingsReceived: number;
  private _joined: Date;
  private _lastSeen: Date;
  private _followed: boolean;
  private _ignored: boolean;
  private _private: boolean;
  private _avatar: string;
  private _amountDonated: number;

  //#endregion Fields

  //#region Getters

  /**
   * Unique user ID.
   */
  public get id() {
    return this._id;
  }
  /**
   * Username.
   */
  public get name() {
    return this._name;
  }
  /**
   * Title assigned to the user by the platform.
   */
  public get title() {
    return this._title;
  }
  /**
   * List of banners assigned by the platform.
   */
  public get banners() {
    return this._banners;
  }
  /**
   * Number of messages written by the user.
   */
  public get messages() {
    return this._messages;
  }
  /**
   * @todo Reaction score.
   */
  public get reactionScore() {
    return this._reactionScore;
  }
  /**
   * @todo Points.
   */
  public get points() {
    return this._points;
  }
  /**
   * Number of ratings received.
   */
  public get ratingsReceived() {
    return this._ratingsReceived;
  }
  /**
   * Date of joining the platform.
   */
  public get joined() {
    return this._joined;
  }
  /**
   * Date of the last connection to the platform.
   */
  public get lastSeen() {
    return this._lastSeen;
  }
  /**
   * Indicates whether the user is followed by the currently logged in user.
   */
  public get followed() {
    return this._followed;
  }
  /**
   * Indicates whether the user is ignored by the currently logged on user.
   */
  public get ignored() {
    return this._ignored;
  }
  /**
   * Indicates that the profile is private and not viewable by the user.
   */
  public get private() {
    return this._private;
  }
  /**
   * URL of the image used as the user's avatar.
   */
  public get avatar() {
    return this._avatar;
  }
  /**
   * Value of donations made.
   */
  public get donation() {
    return this._amountDonated;
  }

  //#endregion Getters

  constructor(id?: number) {
    this._id = id;
  }

  //#region Public methods

  public setID(id: number) {
    this._id = id;
  }

  public async fetch() {
    // Check ID
    if (!this.id && this.id < 1) throw new Error("Invalid user ID");

    // Prepare the URL
    const url = new URL(this.id.toString(), `${urls.F95_MEMBERS}/`).toString();

    // Fetch the page
    const htmlResponse = await fetchHTML(url);

    if (htmlResponse.isSuccess()) {
      // Prepare cheerio
      const $ = cheerio.load(htmlResponse.value);

      // Check if the profile is private
      this._private =
        $(GENERIC.ERROR_BANNER)?.text().trim() ===
        "This member limits who may view their full profile.";

      if (!this._private) {
        // Parse the elements
        this._name = $(MEMBER.NAME).text();
        this._title = $(MEMBER.TITLE).text();
        this._banners = $(MEMBER.BANNERS)
          .toArray()
          .map((el, idx) => $(el).text().trim())
          .filter((el) => el);
        this._avatar = $(MEMBER.AVATAR).attr("src");
        this._followed = $(MEMBER.FOLLOWED).text() === "Unfollow";
        this._ignored = $(MEMBER.IGNORED).text() === "Unignore";
        this._messages = parseInt($(MEMBER.MESSAGES).text(), 10);
        this._reactionScore = parseInt($(MEMBER.REACTION_SCORE).text(), 10);
        this._points = parseInt($(MEMBER.POINTS).text(), 10);
        this._ratingsReceived = parseInt($(MEMBER.RATINGS_RECEIVED).text(), 10);

        // Parse date
        const joined = $(MEMBER.JOINED)?.attr("datetime");
        if (luxon.DateTime.fromISO(joined).isValid)
          this._joined = new Date(joined);

        const lastSeen = $(MEMBER.LAST_SEEN)?.attr("datetime");
        if (luxon.DateTime.fromISO(lastSeen).isValid)
          this._joined = new Date(lastSeen);

        // Parse donation
        const donation = $(MEMBER.AMOUNT_DONATED)?.text().replace("$", "");
        this._amountDonated = donation ? parseInt(donation, 10) : 0;
      }
    } else throw htmlResponse.value;
  }

  //#endregion Public method
}
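
A short usage sketch for the class above; the member ID is a placeholder, not taken from this commit.

// Illustrative only: fetch the public data of a platform member.
const user = new PlatformUser(12345); // placeholder ID
await user.fetch();
console.log(`${user.name} joined on ${user.joined} and wrote ${user.messages} messages`);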
@@ -5,7 +5,10 @@ import cheerio from "cheerio";

// Modules from file
import PlatformUser from "./platform-user.js";
import {
  IPostElement,
  parseF95ThreadPost
} from "../../scrape-data/post-parse.js";
import { POST, THREAD } from "../../constants/css-selector.js";
import { urls } from "../../constants/url.js";
import { fetchHTML } from "../../network-helper.js";
@@ -14,122 +17,150 @@ import { fetchHTML } from "../../network-helper.js";
 * Represents a post published by a user on the F95Zone platform.
 */
export default class Post {
  //#region Fields

  private _id: number;
  private _number: number;
  private _published: Date;
  private _lastEdit: Date;
  private _owner: PlatformUser;
  private _bookmarked: boolean;
  private _message: string;
  private _body: IPostElement[];

  //#endregion Fields

  //#region Getters

  /**
   * Represents a post published by a user on the F95Zone platform.
   */
  public get id() {
    return this._id;
  }
  /**
   * Unique ID of the post within the thread in which it is present.
   */
  public get number() {
    return this._number;
  }
  /**
   * Date the post was first published.
   */
  public get published() {
    return this._published;
  }
  /**
   * Date the post was last modified.
   */
  public get lastEdit() {
    return this._lastEdit;
  }
  /**
   * User who owns the post.
   */
  public get owner() {
    return this._owner;
  }
  /**
   * Indicates whether the post has been bookmarked.
   */
  public get bookmarked() {
    return this._bookmarked;
  }
  /**
   * Post message text.
   */
  public get message() {
    return this._message;
  }
  /**
   * Set of the elements that make up the body of the post.
   */
  public get body() {
    return this._body;
  }

  //#endregion Getters

  constructor(id: number) {
    this._id = id;
  }

  //#region Public methods

  /**
   * Gets the post data starting from its unique ID for the entire platform.
   */
  public async fetch() {
    // Fetch HTML page containing the post
    const url = new URL(this.id.toString(), urls.F95_POSTS).toString();
    const htmlResponse = await fetchHTML(url);

    if (htmlResponse.isSuccess()) {
      // Load cheerio and find post
      const $ = cheerio.load(htmlResponse.value);

      const post = $(THREAD.POSTS_IN_PAGE)
        .toArray()
        .find((el, idx) => {
          // Fetch the ID and check if it is what we are searching
          const sid: string = $(el)
            .find(POST.ID)
            .attr("id")
            .replace("post-", "");
          const id = parseInt(sid, 10);

          if (id === this.id) return el;
        });

      // Finally parse the post
      await this.parsePost($, $(post));
    } else throw htmlResponse.value;
  }

  //#endregion Public methods

  //#region Private methods

  private async parsePost(
    $: cheerio.Root,
    post: cheerio.Cheerio
  ): Promise<void> {
    // Find post's ID
    const sid: string = post.find(POST.ID).attr("id").replace("post-", "");
    this._id = parseInt(sid, 10);

    // Find post's number
    const sNumber: string = post.find(POST.NUMBER).text().replace("#", "");
    this._number = parseInt(sNumber, 10);

    // Find post's publishing date
    const sPublishing: string = post.find(POST.PUBLISH_DATE).attr("datetime");
    this._published = new Date(sPublishing);

    // Find post's last edit date
    const sLastEdit: string = post.find(POST.LAST_EDIT).attr("datetime");
    this._lastEdit = new Date(sLastEdit);

    // Find post's owner
    const sOwnerID: string = post
      .find(POST.OWNER_ID)
      .attr("data-user-id")
      .trim();
    this._owner = new PlatformUser(parseInt(sOwnerID, 10));
    await this._owner.fetch();

    // Find if the post is bookmarked
    this._bookmarked = post.find(POST.BOOKMARKED).length !== 0;

    // Find post's message
    this._message = post.find(POST.BODY).text();

    // Parse post's body
    const body = post.find(POST.BODY);
    this._body = parseF95ThreadPost($, body);
  }

  //#endregion
}
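
A short usage sketch for the class above; the post ID is a placeholder, not taken from this commit.

// Illustrative only: load a single post by its platform-wide ID.
const post = new Post(67890); // placeholder ID
await post.fetch();
console.log(`#${post.number} by ${post.owner.name}: ${post.message}`);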
@@ -12,7 +12,11 @@ import { urls } from "../../constants/url.js";
import { POST, THREAD } from "../../constants/css-selector.js";
import { fetchHTML, fetchPOSTResponse } from "../../network-helper.js";
import Shared from "../../shared.js";
import {
  GenericAxiosError,
  ParameterError,
  UnexpectedResponseContentType
} from "../errors.js";
import { Result } from "../result.js";
import { getJSONLD, TJsonLD } from "../../scrape-data/json-ld.js";
@@ -20,263 +24,289 @@ import { getJSONLD, TJsonLD } from "../../scrape-data/json-ld.js";
 * Represents a generic F95Zone platform thread.
 */
export default class Thread {
  //#region Fields

  private POST_FOR_PAGE = 20;
  private _id: number;
  private _url: string;
  private _title: string;
  private _tags: string[];
  private _prefixes: string[];
  private _rating: TRating;
  private _owner: PlatformUser;
  private _publication: Date;
  private _modified: Date;
  private _category: TCategory;

  //#endregion Fields

  //#region Getters

  /**
   * Unique ID of the thread on the platform.
   */
  public get id() {
    return this._id;
  }
  /**
   * URL of the thread.
   *
   * It may vary depending on any versions of the contained product.
   */
  public get url() {
    return this._url;
  }
  /**
   * Thread title.
   */
  public get title() {
    return this._title;
  }
  /**
   * Tags associated with the thread.
   */
  public get tags() {
    return this._tags;
  }
  /**
   * Prefixes associated with the thread
   */
  public get prefixes() {
    return this._prefixes;
  }
  /**
   * Rating assigned to the thread.
   */
  public get rating() {
    return this._rating;
  }
  /**
   * Owner of the thread.
   */
  public get owner() {
    return this._owner;
  }
  /**
   * Date the thread was first published.
   */
  public get publication() {
    return this._publication;
  }
  /**
   * Date the thread was last modified.
   */
  public get modified() {
    return this._modified;
  }
  /**
   * Category to which the content of the thread belongs.
   */
  public get category() {
    return this._category;
  }

  //#endregion Getters

  /**
   * Initializes an object for mapping a thread.
   *
   * The unique ID of the thread must be specified.
   */
  constructor(id: number) {
    this._id = id;
  }

  //#region Private methods

  /**
   * Set the number of posts to display for the current thread.
   */
  private async setMaximumPostsForPage(n: 20 | 40 | 60 | 100): Promise<void> {
    // Prepare the parameters to send via POST request
    const params = {
      _xfResponseType: "json",
      _xfRequestUri: `/account/dpp-update?content_type=thread&content_id=${this.id}`,
      _xfToken: Shared.session.token,
      _xfWithData: "1",
      content_id: this.id.toString(),
      content_type: "thread",
      "dpp_custom_config[posts]": n.toString()
    };

    // Send POST request
    const response = await fetchPOSTResponse(urls.F95_POSTS_NUMBER, params);
    if (response.isFailure()) throw response.value;
  }

  /**
   * Gets all posts on a page.
   */
  private parsePostsInPage(html: string): Post[] {
    // Load the HTML
    const $ = cheerio.load(html);

    // Start parsing the posts
    const posts = $(THREAD.POSTS_IN_PAGE)
      .toArray()
      .map((el, idx) => {
        const id = $(el).find(POST.ID).attr("id").replace("post-", "");
        return new Post(parseInt(id, 10));
      });

    // Wait for the post to be fetched
    return posts;
  }

  /**
   * Gets all posts in the thread.
   */
  private async fetchPosts(pages: number): Promise<Post[]> {
    // Local variables
    type TFetchResult = Promise<
      Result<GenericAxiosError | UnexpectedResponseContentType, string>
    >;
    const htmlPromiseList: TFetchResult[] = [];
    const fetchedPosts: Post[] = [];

    // Fetch posts for every page in the thread
    for (let i = 1; i <= pages; i++) {
      // Prepare the URL
      const url = new URL(`page-${i}`, `${this.url}/`).toString();

      // Fetch the HTML source
      const htmlResponse = fetchHTML(url);
      htmlPromiseList.push(htmlResponse);
    }

    // Wait for all the pages to load
    const responses = await Promise.all(htmlPromiseList);

    // Scrape the pages
    for (const response of responses) {
      if (response.isSuccess()) {
        const posts = this.parsePostsInPage(response.value);
        fetchedPosts.push(...posts);
      } else throw response.value;
    }

    // Sorts the list of posts
    return fetchedPosts.sort((a, b) =>
      a.id > b.id ? 1 : b.id > a.id ? -1 : 0
    );
  }

  /**
   * It processes the rating of the thread
   * starting from the data contained in the JSON+LD tag.
   */
  private parseRating(data: TJsonLD): TRating {
    const ratingTree = data["aggregateRating"] as TJsonLD;
    const rating: TRating = {
      average: ratingTree ? parseFloat(ratingTree["ratingValue"] as string) : 0,
      best: ratingTree ? parseInt(ratingTree["bestRating"] as string, 10) : 0,
      count: ratingTree ? parseInt(ratingTree["ratingCount"] as string, 10) : 0
    };

    return rating;
  }

  /**
   * Clean the title of a thread, removing prefixes
   * and generic elements between square brackets, and
   * returns the clean title of the work.
   */
  private cleanHeadline(headline: string): string {
    // From the title we can extract: Name, author and version
    // [PREFIXES] TITLE [VERSION] [AUTHOR]
    const matches = headline.match(/\[(.*?)\]/g);

    // Get the title name
    let name = headline;
    if (matches) matches.forEach((e) => (name = name.replace(e, "")));
    return name.trim();
  }

  //#endregion Private methods

  //#region Public methods

  /**
   * Gets information about this thread.
   */
  public async fetch() {
    // Prepare the url
    this._url = new URL(this.id.toString(), urls.F95_THREADS).toString();

    // Fetch the HTML source
    const htmlResponse = await fetchHTML(this.url);

    if (htmlResponse.isSuccess()) {
      // Load the HTML
      const $ = cheerio.load(htmlResponse.value);

      // Fetch data from selectors
      const ownerID = $(THREAD.OWNER_ID).attr("data-user-id");
      const tagArray = $(THREAD.TAGS).toArray();
      const prefixArray = $(THREAD.PREFIXES).toArray();
      const JSONLD = getJSONLD($("body"));
      const published = JSONLD["datePublished"] as string;
      const modified = JSONLD["dateModified"] as string;

      // Parse the thread's data
      this._title = this.cleanHeadline(JSONLD["headline"] as string);
      this._tags = tagArray.map((el) => $(el).text().trim());
      this._prefixes = prefixArray.map((el) => $(el).text().trim());
      this._owner = new PlatformUser(parseInt(ownerID, 10));
      await this._owner.fetch();
      this._rating = this.parseRating(JSONLD);
      this._category = JSONLD["articleSection"] as TCategory;

      // Validate the dates
|
||||||
|
if (luxon.DateTime.fromISO(modified).isValid)
|
||||||
|
this._modified = new Date(modified);
|
||||||
|
if (luxon.DateTime.fromISO(published).isValid)
|
||||||
|
this._publication = new Date(published);
|
||||||
|
} else throw htmlResponse.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the post in the `index` position with respect to the posts in the thread.
|
||||||
|
*
|
||||||
|
* `index` must be greater or equal to 1.
|
||||||
|
* If the post is not found, `null` is returned.
|
||||||
|
*/
|
||||||
|
public async getPost(index: number): Promise<Post | null> {
|
||||||
|
// Validate parameters
|
||||||
|
if (index < 1)
|
||||||
|
throw new ParameterError("Index must be greater or equal than 1");
|
||||||
|
|
||||||
|
// Local variables
|
||||||
|
let returnValue = null;
|
||||||
|
|
||||||
|
// Get the page number of the post
|
||||||
|
const page = Math.ceil(index / this.POST_FOR_PAGE);
|
||||||
|
|
||||||
|
// Fetch the page
|
||||||
|
const url = new URL(`page-${page}`, `${this.url}/`).toString();
|
||||||
|
const htmlResponse = await fetchHTML(url);
|
||||||
|
|
||||||
|
if (htmlResponse.isSuccess()) {
|
||||||
|
// Parse the post
|
||||||
|
const posts = this.parsePostsInPage(htmlResponse.value);
|
||||||
|
|
||||||
|
// Find the searched post
|
||||||
|
for (const p of posts) {
|
||||||
|
await p.fetch();
|
||||||
|
|
||||||
|
if (p.number === index) {
|
||||||
|
returnValue = p;
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Wait for all the pages to load
|
return returnValue;
|
||||||
const responses = await Promise.all(htmlPromiseList);
|
} else throw htmlResponse.value;
|
||||||
|
}
|
||||||
// Scrape the pages
|
|
||||||
for (const response of responses) {
|
|
||||||
if (response.isSuccess()) {
|
|
||||||
const posts = this.parsePostsInPage(response.value);
|
|
||||||
fetchedPosts.push(...posts);
|
|
||||||
} else throw response.value;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sorts the list of posts
|
|
||||||
return fetchedPosts.sort((a, b) => (a.id > b.id) ? 1 : ((b.id > a.id) ? -1 : 0));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* It processes the rating of the thread
|
|
||||||
* starting from the data contained in the JSON+LD tag.
|
|
||||||
*/
|
|
||||||
private parseRating(data: TJsonLD): TRating {
|
|
||||||
const ratingTree = data["aggregateRating"] as TJsonLD;
|
|
||||||
const rating: TRating = {
|
|
||||||
average: ratingTree ? parseFloat(ratingTree["ratingValue"] as string) : 0,
|
|
||||||
best: ratingTree ? parseInt(ratingTree["bestRating"] as string, 10) : 0,
|
|
||||||
count: ratingTree ? parseInt(ratingTree["ratingCount"] as string, 10) : 0,
|
|
||||||
};
|
|
||||||
|
|
||||||
return rating;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean the title of a thread, removing prefixes
|
|
||||||
* and generic elements between square brackets, and
|
|
||||||
* returns the clean title of the work.
|
|
||||||
*/
|
|
||||||
private cleanHeadline(headline: string): string {
|
|
||||||
// From the title we can extract: Name, author and version
|
|
||||||
// [PREFIXES] TITLE [VERSION] [AUTHOR]
|
|
||||||
const matches = headline.match(/\[(.*?)\]/g);
|
|
||||||
|
|
||||||
// Get the title name
|
|
||||||
let name = headline;
|
|
||||||
if (matches) matches.forEach(e => name = name.replace(e, ""));
|
|
||||||
return name.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
//#endregion Private methods
|
|
||||||
|
|
||||||
//#region Public methods
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets information about this thread.
|
|
||||||
*/
|
|
||||||
public async fetch() {
|
|
||||||
// Prepare the url
|
|
||||||
this._url = new URL(this.id.toString(), urls.F95_THREADS).toString();
|
|
||||||
|
|
||||||
// Fetch the HTML source
|
|
||||||
const htmlResponse = await fetchHTML(this.url);
|
|
||||||
|
|
||||||
if (htmlResponse.isSuccess()) {
|
|
||||||
// Load the HTML
|
|
||||||
const $ = cheerio.load(htmlResponse.value);
|
|
||||||
|
|
||||||
// Fetch data from selectors
|
|
||||||
const ownerID = $(THREAD.OWNER_ID).attr("data-user-id");
|
|
||||||
const tagArray = $(THREAD.TAGS).toArray();
|
|
||||||
const prefixArray = $(THREAD.PREFIXES).toArray();
|
|
||||||
const JSONLD = getJSONLD($("body"));
|
|
||||||
const published = JSONLD["datePublished"] as string;
|
|
||||||
const modified = JSONLD["dateModified"] as string;
|
|
||||||
|
|
||||||
// Parse the thread's data
|
|
||||||
this._title = this.cleanHeadline(JSONLD["headline"] as string);
|
|
||||||
this._tags = tagArray.map(el => $(el).text().trim());
|
|
||||||
this._prefixes = prefixArray.map(el => $(el).text().trim());
|
|
||||||
this._owner = new PlatformUser(parseInt(ownerID, 10));
|
|
||||||
await this._owner.fetch();
|
|
||||||
this._rating = this.parseRating(JSONLD);
|
|
||||||
this._category = JSONLD["articleSection"] as TCategory;
|
|
||||||
|
|
||||||
// Validate the dates
|
|
||||||
if (luxon.DateTime.fromISO(modified).isValid) this._modified = new Date(modified);
|
|
||||||
if (luxon.DateTime.fromISO(published).isValid) this._publication = new Date(published);
|
|
||||||
|
|
||||||
} else throw htmlResponse.value;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Gets the post in the `index` position with respect to the posts in the thread.
|
|
||||||
*
|
|
||||||
* `index` must be greater or equal to 1.
|
|
||||||
* If the post is not found, `null` is returned.
|
|
||||||
*/
|
|
||||||
public async getPost(index: number): Promise<Post|null> {
|
|
||||||
// Validate parameters
|
|
||||||
if (index < 1) throw new ParameterError("Index must be greater or equal than 1");
|
|
||||||
|
|
||||||
// Local variables
|
|
||||||
let returnValue = null;
|
|
||||||
|
|
||||||
// Get the page number of the post
|
|
||||||
const page = Math.ceil(index / this.POST_FOR_PAGE);
|
|
||||||
|
|
||||||
// Fetch the page
|
|
||||||
const url = new URL(`page-${page}`, `${this.url}/`).toString();
|
|
||||||
const htmlResponse = await fetchHTML(url);
|
|
||||||
|
|
||||||
if (htmlResponse.isSuccess()) {
|
|
||||||
// Parse the post
|
|
||||||
const posts = this.parsePostsInPage(htmlResponse.value);
|
|
||||||
|
|
||||||
// Find the searched post
|
|
||||||
for (const p of posts) {
|
|
||||||
await p.fetch();
|
|
||||||
|
|
||||||
if (p.number === index) {
|
|
||||||
returnValue = p;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return returnValue;
|
|
||||||
} else throw htmlResponse.value;
|
|
||||||
}
|
|
||||||
|
|
||||||
//#endregion Public methods
|
|
||||||
|
|
||||||
|
//#endregion Public methods
|
||||||
}
|
}
|
||||||
|
|
|
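
A minimal usage sketch of the thread methods above. The export name `Thread` and the ID-based constructor are assumptions (they are not shown in this hunk, but mirror how `Post` is built from a numeric ID); an authenticated session is also assumed so that `Shared.session.token` is populated.

// Hypothetical sketch: "Thread" export name and constructor signature are assumed.
async function threadExample(): Promise<void> {
  const thread = new Thread(12345); // assumed: built from a numeric thread ID
  await thread.fetch(); // scrapes title, tags, prefixes, owner, rating, dates

  // getPost() is 1-based and resolves to null when the index is not found
  const firstPost = await thread.getPost(1);
  if (firstPost) console.log(firstPost.number);
}
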
@ -14,164 +14,184 @@ import { Result } from "../result.js";
// Interfaces
interface IWatchedThread {
  /**
   * URL of the thread
   */
  url: string;
  /**
   * Indicates whether the thread has any unread posts.
   */
  unread: boolean;
  /**
   * Specifies the forum to which the thread belongs.
   */
  forum: string;
}

// Types
type TFetchResult = Result<
  GenericAxiosError | UnexpectedResponseContentType,
  string
>;

/**
 * Class containing the data of the user currently connected to the F95Zone platform.
 */
export default class UserProfile extends PlatformUser {
  //#region Fields

  private _watched: IWatchedThread[] = [];
  private _bookmarks: Post[] = [];
  private _alerts: string[] = [];
  private _conversations: string[];

  //#endregion Fields

  //#region Getters

  /**
   * List of followed thread data.
   */
  public get watched() {
    return this._watched;
  }
  /**
   * List of bookmarked posts.
   * @todo
   */
  public get bookmarks() {
    return this._bookmarks;
  }
  /**
   * List of alerts.
   * @todo
   */
  public get alerts() {
    return this._alerts;
  }
  /**
   * List of conversations.
   * @todo
   */
  public get conversation() {
    return this._conversations;
  }

  //#endregion Getters

  constructor() {
    super();
  }

  //#region Public methods

  public async fetch() {
    // First get the user ID and set it
    const id = await this.fetchUserID();
    super.setID(id);

    // Then fetch the basic data
    await super.fetch();

    // Now fetch the watched threads
    this._watched = await this.fetchWatchedThread();
  }

  //#endregion Public methods

  //#region Private methods

  private async fetchUserID(): Promise<number> {
    // Local variables
    const url = new URL(urls.F95_BASE_URL).toString();

    // Fetch and parse page
    const htmlResponse = await fetchHTML(url);
    if (htmlResponse.isSuccess()) {
      // Load page with cheerio
      const $ = cheerio.load(htmlResponse.value);

      const sid = $(GENERIC.CURRENT_USER_ID).attr("data-user-id").trim();
      return parseInt(sid, 10);
    } else throw htmlResponse.value;
  }

  private async fetchWatchedThread(): Promise<IWatchedThread[]> {
    // Prepare and fetch URL
    const url = new URL(urls.F95_WATCHED_THREADS);
    url.searchParams.set("unread", "0");

    const htmlResponse = await fetchHTML(url.toString());

    if (htmlResponse.isSuccess()) {
      // Load page in cheerio
      const $ = cheerio.load(htmlResponse.value);

      // Fetch the pages
      const lastPage = parseInt($(WATCHED_THREAD.LAST_PAGE).text().trim(), 10);
      const pages = await this.fetchPages(url, lastPage);

      const watchedThreads = pages.map((r, idx) => {
        const elements = r.applyOnSuccess(this.fetchPageThreadElements);
        if (elements.isSuccess()) return elements.value;
      });

      return [].concat(...watchedThreads);
    } else throw htmlResponse.value;
  }

  /**
   * Gets the pages containing the thread data.
   * @param url Base URL to use for scraping a page
   * @param n Total number of pages
   * @param s Page to start from
   */
  private async fetchPages(
    url: URL,
    n: number,
    s = 1
  ): Promise<TFetchResult[]> {
    // Local variables
    const responsePromiseList: Promise<TFetchResult>[] = [];

    // Fetch the pages' HTML
    for (let page = s; page <= n; page++) {
      // Set the page URL
      url.searchParams.set("page", page.toString());

      // Fetch HTML but do not wait for it
      const promise = fetchHTML(url.toString());
      responsePromiseList.push(promise);
    }

    // Wait for the promises to resolve
    return Promise.all(responsePromiseList);
  }

  /**
   * Gets thread data starting from the source code of the page passed by parameter.
   */
  private fetchPageThreadElements(html: string): IWatchedThread[] {
    // Local variables
    const $ = cheerio.load(html);

    return $(WATCHED_THREAD.BODIES)
      .map((idx, el) => {
        // Parse the URL
        const partialURL = $(el).find(WATCHED_THREAD.URL).attr("href");
        const url = new URL(
          partialURL.replace("unread", ""),
          `${urls.F95_BASE_URL}`
        ).toString();

        return {
          url: url.toString(),
          unread: partialURL.endsWith("unread"),
          forum: $(el).find(WATCHED_THREAD.FORUM).text().trim()
        } as IWatchedThread;
      })
      .get();
  }

  //#endregion Private methods
}
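
A short sketch of how the class above is meant to be driven. It assumes an already authenticated session, since `fetchUserID()` scrapes the current user ID from the logged-in base page.

async function showWatched(): Promise<void> {
  const profile = new UserProfile();
  await profile.fetch(); // resolves the user ID, base data and watched threads

  // The "watched" getter exposes IWatchedThread objects (url, unread, forum)
  const unread = profile.watched.filter((t) => t.unread);
  console.log(`${unread.length} watched threads have unread posts`);
}
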
@ -7,102 +7,110 @@ import shared, { TPrefixDict } from "../shared.js";
 * Convert prefixes and platform tags from string to ID and vice versa.
 */
export default class PrefixParser {
  //#region Private methods

  /**
   * Gets the key associated with a given value from a dictionary.
   * @param {Object} object Dictionary to search
   * @param {Any} value Value associated with the key
   * @returns {String|undefined} Key found or undefined
   */
  private getKeyByValue(
    object: TPrefixDict,
    value: string
  ): string | undefined {
    return Object.keys(object).find((key) => object[key] === value);
  }

  /**
   * Makes an array of strings uppercase.
   */
  private toUpperCaseArray(a: string[]): string[] {
    /**
     * Makes a string uppercase.
     */
    function toUpper(s: string): string {
      return s.toUpperCase();
    }
    return a.map(toUpper);
  }

  /**
   * Check if `dict` contains `value` as a value.
   */
  private valueInDict(dict: TPrefixDict, value: string): boolean {
    const array = Object.values(dict);
    const upperArr = this.toUpperCaseArray(array);
    const element = value.toUpperCase();
    return upperArr.includes(element);
  }

  /**
   * Search within the platform prefixes for the
   * desired element and return the dictionary that contains it.
   * @param element Element to search in the prefixes as a key or as a value
   */
  private searchElementInPrefixes(
    element: string | number
  ): TPrefixDict | null {
    // Local variables
    let dictName = null;

    // Iterate the key/value pairs in order to find the element
    for (const [key, subdict] of Object.entries(shared.prefixes)) {
      // Check if the element is a value in the sub-dict
      const valueInDict =
        typeof element === "string" &&
        this.valueInDict(subdict, element as string);

      // Check if the element is a key in the subdict
      const keyInDict =
        typeof element === "number" &&
        Object.keys(subdict).includes(element.toString());

      if (valueInDict || keyInDict) {
        dictName = key;
        break;
      }
    }

    return shared.prefixes[dictName] ?? null;
  }

  //#endregion Private methods

  /**
   * Convert a list of prefixes to their respective IDs.
   */
  public prefixesToIDs(prefixes: string[]): number[] {
    const ids: number[] = [];

    for (const p of prefixes) {
      // Check what dict contains the value
      const dict = this.searchElementInPrefixes(p);

      if (dict) {
        // Extract the key from the dict
        const key = this.getKeyByValue(dict, p);
        ids.push(parseInt(key, 10));
      }
    }
    return ids;
  }

  /**
   * It converts a list of IDs into their respective prefixes.
   */
  public idsToPrefixes(ids: number[]): string[] {
    const prefixes: string[] = [];

    for (const id of ids) {
      // Check what dict contains the key
      const dict = this.searchElementInPrefixes(id);

      // Add the key to the list
      if (dict) {
        prefixes.push(dict[id]);
      }
    }
    return prefixes;
  }
}
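
The two public methods above are inverses of each other over the platform dictionaries. A small round-trip sketch, assuming `shared.prefixes` has already been populated (for instance by the platform login/cache step); the tag names are placeholders.

const parser = new PrefixParser();

// String prefixes/tags -> numeric IDs (unknown names are silently skipped)
const ids = parser.prefixesToIDs(["2d game", "animated"]);

// Numeric IDs -> string prefixes/tags
const names = parser.idsToPrefixes(ids);
console.log(ids, names); // names should match the valid inputs above
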
@ -1,15 +1,15 @@
"use strict";

import { AxiosResponse } from "axios";
// Public modules from npm
import validator from "class-validator";

// Module from files
import { IQuery, TCategory, TQueryInterface } from "../../interfaces.js";
import { GenericAxiosError, UnexpectedResponseContentType } from "../errors.js";
import { Result } from "../result.js";
import LatestSearchQuery, { TLatestOrder } from "./latest-search-query.js";
import ThreadSearchQuery, { TThreadOrder } from "./thread-search-query.js";

// Type definitions
/**
@ -30,149 +30,165 @@ import ThreadSearchQuery, { TThreadOrder } from './thread-search-query.js';
 *
 * `views`: Order based on the number of visits. Replacement: `replies`.
 */
type THandiworkOrder =
  | "date"
  | "likes"
  | "relevance"
  | "replies"
  | "title"
  | "views";
type TExecuteResult = Result<GenericAxiosError, AxiosResponse<any>>;

export default class HandiworkSearchQuery implements IQuery {
  //#region Private fields

  static MIN_PAGE = 1;

  //#endregion Private fields

  //#region Properties

  /**
   * Keywords to use in the search.
   */
  public keywords = "";
  /**
   * The results must be more recent than the date indicated.
   */
  public newerThan: Date = null;
  /**
   * The results must be older than the date indicated.
   */
  public olderThan: Date = null;
  public includedTags: string[] = [];
  /**
   * Tags to exclude from the search.
   */
  public excludedTags: string[] = [];
  public includedPrefixes: string[] = [];
  public category: TCategory = null;
  /**
   * Results presentation order.
   */
  public order: THandiworkOrder = "relevance";
  @validator.IsInt({
    message: "$property expect an integer, received $value"
  })
  @validator.Min(HandiworkSearchQuery.MIN_PAGE, {
    message: "The minimum $property value must be $constraint1, received $value"
  })
  public page = 1;
  itype: TQueryInterface = "HandiworkSearchQuery";

  //#endregion Properties

  //#region Public methods

  /**
   * Select what kind of search should be
   * performed based on the properties of
   * the query.
   */
  public selectSearchType(): "latest" | "thread" {
    // Local variables
    const MAX_TAGS_LATEST_SEARCH = 5;
    const DEFAULT_SEARCH_TYPE = "latest";

    // If the keywords are set or the number
    // of included tags is greater than 5,
    // we must perform a thread search
    if (this.keywords || this.includedTags.length > MAX_TAGS_LATEST_SEARCH)
      return "thread";

    return DEFAULT_SEARCH_TYPE;
  }

  public validate(): boolean {
    return validator.validateSync(this).length === 0;
  }

  public async execute(): Promise<TExecuteResult> {
    // Local variables
    let response: TExecuteResult = null;

    // Check if the query is valid
    if (!this.validate()) {
      throw new Error(
        `Invalid query: ${validator.validateSync(this).join("\n")}`
      );
    }

    // Convert the query
    if (this.selectSearchType() === "latest")
      response = await this.cast<LatestSearchQuery>(
        "LatestSearchQuery"
      ).execute();
    else
      response = await this.cast<ThreadSearchQuery>(
        "ThreadSearchQuery"
      ).execute();

    return response;
  }

  public cast<T extends IQuery>(type: TQueryInterface): T {
    // Local variables
    let returnValue = null;

    // Convert the query
    if (type === "LatestSearchQuery") returnValue = this.castToLatest();
    else if (type === "ThreadSearchQuery") returnValue = this.castToThread();
    else returnValue = this as HandiworkSearchQuery;

    // Cast the result to T
    return returnValue as T;
  }

  //#endregion Public methods

  //#region Private methods

  private castToLatest(): LatestSearchQuery {
    // Cast the basic query object and copy common values
    const query: LatestSearchQuery = new LatestSearchQuery();
    Object.keys(this).forEach((key) => {
      if (query.hasOwnProperty(key)) {
        query[key] = this[key];
      }
    });

    // Adapt order filter
    let orderFilter = this.order as string;
    if (orderFilter === "relevance") orderFilter = "rating";
    else if (orderFilter === "replies") orderFilter = "views";
    query.order = orderFilter as TLatestOrder;

    // Adapt date
    if (this.newerThan) query.date = query.findNearestDate(this.newerThan);

    return query;
  }

  private castToThread(): ThreadSearchQuery {
    // Cast the basic query object and copy common values
    const query: ThreadSearchQuery = new ThreadSearchQuery();
    Object.keys(this).forEach((key) => {
      if (query.hasOwnProperty(key)) {
        query[key] = this[key];
      }
    });

    // Set uncommon values
    query.onlyTitles = true;

    // Adapt order filter
    let orderFilter = this.order as string;
    if (orderFilter === "title") orderFilter = "relevance";
    else if (orderFilter === "likes") orderFilter = "replies";
    query.order = orderFilter as TThreadOrder;

    return query;
  }

  //#endregion
}
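
A sketch of the dispatch logic above: any non-empty `keywords` value makes `selectSearchType()` resolve to a thread search, and `execute()` casts the query internally before running it. The property values here are placeholders.

const query = new HandiworkSearchQuery();
query.category = "games";
query.keywords = "some title"; // non-empty keywords force a thread search
console.log(query.selectSearchType()); // "thread"

// cast() can also be used directly to inspect the converted query
const threadQuery = query.cast<ThreadSearchQuery>("ThreadSearchQuery");
console.log(threadQuery.onlyTitles); // true, set by castToThread()
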
@ -1,13 +1,13 @@
"use strict";

// Public modules from npm
import validator from "class-validator";

// Modules from file
import { urls } from "../../constants/url.js";
import PrefixParser from "../prefix-parser.js";
import { IQuery, TCategory, TQueryInterface } from "../../interfaces.js";
import { fetchGETResponse } from "../../network-helper.js";

// Type definitions
export type TLatestOrder = "date" | "likes" | "views" | "title" | "rating";
@ -17,128 +17,133 @@ type TDate = 365 | 180 | 90 | 30 | 14 | 7 | 3 | 1;
 * Query used to search handiwork in the "Latest" tab.
 */
export default class LatestSearchQuery implements IQuery {
  //#region Private fields

  private static MAX_TAGS = 5;
  private static MIN_PAGE = 1;

  //#endregion Private fields

  //#region Properties

  public category: TCategory = "games";
  /**
   * Ordering type.
   *
   * Default: `date`.
   */
  public order: TLatestOrder = "date";
  /**
   * Date limit in days, to be understood as "less than".
   * Use `1` to indicate "today" or `null` to indicate "anytime".
   *
   * Default: `null`
   */
  public date: TDate = null;

  @validator.ArrayMaxSize(LatestSearchQuery.MAX_TAGS, {
    message: "Too many tags: $value instead of $constraint1"
  })
  public includedTags: string[] = [];
  public includedPrefixes: string[] = [];

  @validator.IsInt({
    message: "$property expect an integer, received $value"
  })
  @validator.Min(LatestSearchQuery.MIN_PAGE, {
    message: "The minimum $property value must be $constraint1, received $value"
  })
  public page = LatestSearchQuery.MIN_PAGE;
  itype: TQueryInterface = "LatestSearchQuery";

  //#endregion Properties

  //#region Public methods

  public validate(): boolean {
    return validator.validateSync(this).length === 0;
  }

  public async execute() {
    // Check if the query is valid
    if (!this.validate()) {
      throw new Error(
        `Invalid query: ${validator.validateSync(this).join("\n")}`
      );
    }

    // Prepare the URL
    const url = this.prepareGETurl();
    const decoded = decodeURIComponent(url.toString());

    // Fetch the result
    return fetchGETResponse(decoded);
  }

  /**
   * Gets the value (in days) acceptable in the query starting from a generic date.
   */
  public findNearestDate(d: Date): TDate {
    // Find the difference between today and the passed date
    const diff = this.dateDiffInDays(new Date(), d);

    // Find the closest valid value in the array
    const closest = [365, 180, 90, 30, 14, 7, 3, 1].reduce(function (
      prev,
      curr
    ) {
      return Math.abs(curr - diff) < Math.abs(prev - diff) ? curr : prev;
    });

    return closest as TDate;
  }

  //#endregion Public methods

  //#region Private methods

  /**
   * Prepare the URL by filling out the GET parameters with the data in the query.
   */
  private prepareGETurl(): URL {
    // Create the URL
    const url = new URL(urls.F95_LATEST_PHP);
    url.searchParams.set("cmd", "list");

    // Set the category
    const cat: TCategory = this.category === "mods" ? "games" : this.category;
    url.searchParams.set("cat", cat);

    // Add tags and prefixes
    const parser = new PrefixParser();
    for (const tag of parser.prefixesToIDs(this.includedTags)) {
      url.searchParams.append("tags[]", tag.toString());
    }

    for (const p of parser.prefixesToIDs(this.includedPrefixes)) {
      url.searchParams.append("prefixes[]", p.toString());
    }

    // Set the other values
    url.searchParams.set("sort", this.order.toString());
    url.searchParams.set("page", this.page.toString());
    if (this.date) url.searchParams.set("date", this.date.toString());

    return url;
  }

  /**
   * Gets the difference in days between two dates.
   */
  private dateDiffInDays(a: Date, b: Date) {
    const MS_PER_DAY = 1000 * 60 * 60 * 24;
    // Discard the time and time-zone information.
    const utc1 = Date.UTC(a.getFullYear(), a.getMonth(), a.getDate());
    const utc2 = Date.UTC(b.getFullYear(), b.getMonth(), b.getDate());

    return Math.floor((utc2 - utc1) / MS_PER_DAY);
  }

  //#endregion Private methods
}
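
A usage sketch for the class above, showing how the query turns into a GET request against `urls.F95_LATEST_PHP`; the category, order and date values are placeholders chosen from the types defined in this file.

async function latestExample(): Promise<void> {
  const latest = new LatestSearchQuery();
  latest.category = "comics";
  latest.order = "likes";
  latest.date = 7; // one of the allowed TDate buckets

  // findNearestDate() snaps an arbitrary Date onto one of those buckets
  const bucket = latest.findNearestDate(new Date()); // 1, the "today" bucket

  const result = await latest.execute(); // GET against urls.F95_LATEST_PHP
  console.log(bucket, result);
}
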
@ -1,177 +1,183 @@
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
// Public modules from npm
|
// Public modules from npm
|
||||||
import validator from 'class-validator';
|
import validator from "class-validator";
|
||||||
|
|
||||||
// Module from files
|
// Module from files
|
||||||
import { IQuery, TCategory, TQueryInterface } from "../../interfaces.js";
|
import { IQuery, TCategory, TQueryInterface } from "../../interfaces.js";
|
||||||
import { urls } from "../../constants/url.js";
|
import { urls } from "../../constants/url.js";
|
||||||
import PrefixParser from "./../prefix-parser.js";
|
import PrefixParser from "./../prefix-parser.js";
|
||||||
import { fetchPOSTResponse } from '../../network-helper.js';
|
import { fetchPOSTResponse } from "../../network-helper.js";
|
||||||
import { AxiosResponse } from 'axios';
|
import { AxiosResponse } from "axios";
|
||||||
import { GenericAxiosError } from '../errors.js';
|
import { GenericAxiosError } from "../errors.js";
|
||||||
import { Result } from '../result.js';
|
import { Result } from "../result.js";
|
||||||
import Shared from '../../shared.js';
|
import Shared from "../../shared.js";
|
||||||
|
|
||||||
// Type definitions
|
// Type definitions
|
||||||
export type TThreadOrder = "relevance" | "date" | "last_update" | "replies";
|
export type TThreadOrder = "relevance" | "date" | "last_update" | "replies";
|
||||||
|
|
||||||
export default class ThreadSearchQuery implements IQuery {
|
export default class ThreadSearchQuery implements IQuery {
|
||||||
|
//#region Private fields
|
||||||
|
|
||||||
//#region Private fields
|
static MIN_PAGE = 1;
|
||||||
|
|
||||||
static MIN_PAGE = 1;
|
//#endregion Private fields
|
||||||
|
|
||||||
//#endregion Private fields
|
//#region Properties
|
||||||
|
|
||||||
//#region Properties
|
/**
|
||||||
|
* Keywords to use in the search.
|
||||||
|
*/
|
||||||
|
public keywords = "";
|
||||||
|
/**
|
||||||
|
* Indicates to search by checking only the thread titles and not the content.
|
||||||
|
*/
|
||||||
|
public onlyTitles = false;
|
||||||
|
/**
|
||||||
|
* The results must be more recent than the date indicated.
|
||||||
|
*/
|
||||||
|
public newerThan: Date = null;
|
||||||
|
/**
|
||||||
|
* The results must be older than the date indicated.
|
||||||
|
*/
|
||||||
|
public olderThan: Date = null;
|
||||||
|
public includedTags: string[] = [];
|
||||||
|
/**
|
||||||
|
* Tags to exclude from the search.
|
||||||
|
*/
|
||||||
|
public excludedTags: string[] = [];
|
||||||
|
/**
|
||||||
|
* Minimum number of answers that the thread must possess.
|
||||||
|
*/
|
||||||
|
public minimumReplies = 0;
|
||||||
|
public includedPrefixes: string[] = [];
|
||||||
|
public category: TCategory = null;
|
||||||
|
/**
|
||||||
|
* Results presentation order.
|
||||||
|
*/
|
||||||
|
public order: TThreadOrder = "relevance";
|
||||||
|
@validator.IsInt({
|
||||||
|
message: "$property expect an integer, received $value"
|
||||||
|
})
|
||||||
|
@validator.Min(ThreadSearchQuery.MIN_PAGE, {
|
||||||
|
message: "The minimum $property value must be $constraint1, received $value"
|
||||||
|
})
|
||||||
|
public page = 1;
|
||||||
|
itype: TQueryInterface = "ThreadSearchQuery";
|
||||||
|
|
||||||
/**
|
//#endregion Properties
|
||||||
* Keywords to use in the search.
|
|
||||||
*/
|
|
||||||
public keywords: string = "";
|
|
||||||
/**
|
|
||||||
* Indicates to search by checking only the thread titles and not the content.
|
|
||||||
*/
|
|
||||||
public onlyTitles: boolean = false;
|
|
||||||
/**
|
|
||||||
* The results must be more recent than the date indicated.
|
|
||||||
*/
|
|
||||||
public newerThan: Date = null;
|
|
||||||
/**
|
|
||||||
* The results must be older than the date indicated.
|
|
||||||
*/
|
|
||||||
public olderThan: Date = null;
|
|
||||||
public includedTags: string[] = [];
|
|
||||||
/**
|
|
||||||
* Tags to exclude from the search.
|
|
||||||
*/
|
|
||||||
public excludedTags: string[] = [];
|
|
||||||
/**
|
|
||||||
* Minimum number of answers that the thread must possess.
|
|
||||||
*/
|
|
||||||
public minimumReplies: number = 0;
|
|
||||||
public includedPrefixes: string[] = [];
|
|
||||||
public category: TCategory = null;
|
|
||||||
/**
|
|
||||||
* Results presentation order.
|
|
||||||
*/
|
|
||||||
public order: TThreadOrder = "relevance";
|
|
||||||
@validator.IsInt({
|
|
||||||
message: "$property expect an integer, received $value"
|
|
||||||
})
|
|
||||||
@validator.Min(ThreadSearchQuery.MIN_PAGE, {
|
|
||||||
message: "The minimum $property value must be $constraint1, received $value"
|
|
||||||
})
|
|
||||||
public page: number = 1;
|
|
||||||
  itype: TQueryInterface = "ThreadSearchQuery";

  //#endregion Properties

  //#region Public methods

  public validate(): boolean {
    return validator.validateSync(this).length === 0;
  }

  public async execute(): Promise<
    Result<GenericAxiosError, AxiosResponse<any>>
  > {
    // Check if the query is valid
    if (!this.validate()) {
      throw new Error(
        `Invalid query: ${validator.validateSync(this).join("\n")}`
      );
    }

    // Define the POST parameters
    const params = this.preparePOSTParameters();

    // Return the POST response
    return fetchPOSTResponse(urls.F95_SEARCH_URL, params);
  }

  //#endregion Public methods

  //#region Private methods

  /**
   * Prepare the parameters for the POST request with the data in the query.
   */
  private preparePOSTParameters(): { [s: string]: string } {
    // Local variables
    const params = {};

    // Add the session token
    params["_xfToken"] = Shared.session.token;

    // Specify if only the title should be searched
    if (this.onlyTitles) params["c[title_only]"] = "1";

    // Add keywords
    params["keywords"] = this.keywords ?? "*";

    // Specify the scope of the search (only "threads/post")
    params["search_type"] = "post";

    // Set the dates
    if (this.newerThan) {
      const date = this.convertShortDate(this.newerThan);
      params["c[newer_than]"] = date;
    }

    if (this.olderThan) {
      const date = this.convertShortDate(this.olderThan);
      params["c[older_than]"] = date;
    }

    // Set included and excluded tags (joined with a comma)
    if (this.includedTags) params["c[tags]"] = this.includedTags.join(",");
    if (this.excludedTags)
      params["c[excludeTags]"] = this.excludedTags.join(",");

    // Set minimum reply number
    if (this.minimumReplies > 0)
      params["c[min_reply_count]"] = this.minimumReplies.toString();

    // Add prefixes
    const parser = new PrefixParser();
    const ids = parser.prefixesToIDs(this.includedPrefixes);
    for (let i = 0; i < ids.length; i++) {
      const name = `c[prefixes][${i}]`;
      params[name] = ids[i].toString();
    }

    // Set the category
    params["c[child_nodes]"] = "1"; // Always set
    if (this.category) {
      const catID = this.categoryToID(this.category).toString();
      params["c[nodes][0]"] = catID;
    }

    // Set the other values
    params["order"] = this.order.toString();
    params["page"] = this.page.toString();

    return params;
  }

  /**
   * Convert a date to the YYYY-MM-DD format, taking into account the time zone.
   */
  private convertShortDate(d: Date): string {
    const offset = d.getTimezoneOffset();
    d = new Date(d.getTime() - offset * 60 * 1000);
    return d.toISOString().split("T")[0];
  }

  /**
   * Gets the unique ID of the selected category.
   */
  private categoryToID(category: TCategory): number {
    const catMap = {
      games: 2,
      mods: 41,
      comics: 40,
      animations: 94,
      assets: 95
    };

    return catMap[category as string];
  }

  //#endregion Private methods
}
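A usage sketch for the query class above (not part of the commit; it assumes a session has already been established via login(), which populates Shared.session):

async function threadSearchExample(): Promise<void> {
  const query = new ThreadSearchQuery();
  query.category = "games";
  query.includedTags = ["3d game"];
  query.minimumReplies = 10;

  const result = await query.execute();
  if (result.isSuccess()) {
    // result.value is the raw AxiosResponse of the POST to F95_SEARCH_URL
    console.log(`Search returned HTTP ${result.value.status}`);
  } else {
    console.error("Search failed:", result.value);
  }
}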
@ -1,49 +1,49 @@
export type Result<L, A> = Failure<L, A> | Success<L, A>;

export class Failure<L, A> {
  readonly value: L;

  constructor(value: L) {
    this.value = value;
  }

  isFailure(): this is Failure<L, A> {
    return true;
  }

  isSuccess(): this is Success<L, A> {
    return false;
  }

  applyOnSuccess<B>(_: (a: A) => B): Result<L, B> {
    return this as any;
  }
}

export class Success<L, A> {
  readonly value: A;

  constructor(value: A) {
    this.value = value;
  }

  isFailure(): this is Failure<L, A> {
    return false;
  }

  isSuccess(): this is Success<L, A> {
    return true;
  }

  applyOnSuccess<B>(func: (a: A) => B): Result<L, B> {
    return new Success(func(this.value));
  }
}

export const failure = <L, A>(l: L): Result<L, A> => {
  return new Failure(l);
};

export const success = <L, A>(a: A): Result<L, A> => {
  return new Success<L, A>(a);
};
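A small self-contained sketch of how this Result type is meant to be consumed (the parsing function is illustrative, not part of the library):

// Sketch: wrap a fallible computation and transform it only on success.
function parsePort(raw: string): Result<Error, number> {
  const port = Number(raw);
  return Number.isInteger(port) && port > 0
    ? success<Error, number>(port)
    : failure<Error, number>(new Error(`Invalid port: ${raw}`));
}

const outcome = parsePort("8080").applyOnSuccess((p) => `localhost:${p}`);
if (outcome.isSuccess()) console.log(outcome.value); // "localhost:8080"
else console.error(outcome.value.message);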
@ -15,189 +15,202 @@ const awritefile = promisify(fs.writeFile);
const aunlinkfile = promisify(fs.unlink);

export default class Session {
  //#region Fields

  /**
   * Max number of days the session is valid.
   */
  private readonly SESSION_TIME: number = 3;
  private readonly COOKIEJAR_FILENAME: string = "f95cookiejar.json";
  private _path: string;
  private _isMapped: boolean;
  private _created: Date;
  private _hash: string;
  private _token: string;
  private _cookieJar: CookieJar;
  private _cookieJarPath: string;

  //#endregion Fields

  //#region Getters

  /**
   * Path of the session map file on disk.
   */
  public get path() {
    return this._path;
  }
  /**
   * Indicates if the session is mapped on disk.
   */
  public get isMapped() {
    return this._isMapped;
  }
  /**
   * Date of creation of the session.
   */
  public get created() {
    return this._created;
  }
  /**
   * SHA-256 hash of the username and the password.
   */
  public get hash() {
    return this._hash;
  }
  /**
   * Token used to login to F95Zone.
   */
  public get token() {
    return this._token;
  }
  /**
   * Cookie holder.
   */
  public get cookieJar() {
    return this._cookieJar;
  }

  //#endregion Getters

  /**
   * Initializes the session by setting the path for saving information to disk.
   */
  constructor(p: string) {
    this._path = p;
    this._isMapped = fs.existsSync(this.path);
    this._created = new Date(Date.now());
    this._hash = null;
    this._token = null;
    this._cookieJar = new tough.CookieJar();

    // Define the path for the cookiejar
    const basedir = path.dirname(p);
    this._cookieJarPath = path.join(basedir, this.COOKIEJAR_FILENAME);
  }

  //#region Private Methods

  /**
   * Get the difference in days between two dates.
   */
  private dateDiffInDays(a: Date, b: Date) {
    const MS_PER_DAY = 1000 * 60 * 60 * 24;

    // Discard the time and time-zone information.
    const utc1 = Date.UTC(a.getFullYear(), a.getMonth(), a.getDate());
    const utc2 = Date.UTC(b.getFullYear(), b.getMonth(), b.getDate());

    return Math.floor((utc2 - utc1) / MS_PER_DAY);
  }

  /**
   * Convert the object to a dictionary serializable in JSON.
   */
  private toJSON(): Record<string, unknown> {
    return {
      _created: this._created,
      _hash: this._hash,
      _token: this._token
    };
  }

  //#endregion Private Methods

  //#region Public Methods

  /**
   * Create a new session.
   */
  create(username: string, password: string, token: string): void {
    // First, create the _hash of the credentials
    const value = `${username}%%%${password}`;
    this._hash = sha256(value);

    // Set the token
    this._token = token;

    // Update the creation date
    this._created = new Date(Date.now());
  }

  /**
   * Save the session to disk.
   */
  async save(): Promise<void> {
    // Update the creation date
    this._created = new Date(Date.now());

    // Convert data
    const json = this.toJSON();
    const data = JSON.stringify(json);

    // Write data
    await awritefile(this.path, data);

    // Write cookiejar
    const serializedJar = await this._cookieJar.serialize();
    await awritefile(this._cookieJarPath, JSON.stringify(serializedJar));
  }

  /**
   * Load the session from disk.
   */
  async load(): Promise<void> {
    if (this.isMapped) {
      // Read data
      const data = await areadfile(this.path, { encoding: "utf-8", flag: "r" });
      const json = JSON.parse(data);

      // Assign values
      this._created = new Date(json._created);
      this._hash = json._hash;
      this._token = json._token;

      // Load cookiejar
      const serializedJar = await areadfile(this._cookieJarPath, {
        encoding: "utf-8",
        flag: "r"
      });
      this._cookieJar = await CookieJar.deserialize(JSON.parse(serializedJar));
    }
  }

  /**
   * Delete the session from disk.
   */
  async delete(): Promise<void> {
    if (this.isMapped) {
      // Delete the session data
      await aunlinkfile(this.path);

      // Delete the cookiejar
      await aunlinkfile(this._cookieJarPath);
    }
  }

  /**
   * Check if the session is valid.
   */
  isValid(username: string, password: string): boolean {
    // Get the number of days from the file creation
    const diff = this.dateDiffInDays(new Date(Date.now()), this.created);

    // The session is valid if the number of days is less than SESSION_TIME
    const dateValid = diff < this.SESSION_TIME;

    // Check the hash
    const value = `${username}%%%${password}`;
    const hashValid = sha256(value) === this._hash;

    // Search for expired cookies
    const jarValid =
      this._cookieJar
        .getCookiesSync("https://f95zone.to")
        .filter((el) => el.TTL() === 0).length === 0;

    return dateValid && hashValid && jarValid;
  }

  //#endregion Public Methods
}
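A sketch of the intended lifecycle of this class (the path, credentials and token below are placeholders):

async function sessionExample(): Promise<void> {
  const session = new Session("./data/f95session.json");

  // Restore whatever was persisted on a previous run (no-op on first use).
  await session.load();

  if (!session.isValid("username", "password")) {
    // The token would normally come from the login response.
    session.create("username", "password", "xf-token-placeholder");
    await session.save();
  }
}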
@ -1,208 +1,216 @@
export const selectors = {
  WT_FILTER_POPUP_BUTTON: "a.filterBar-menuTrigger",
  WT_NEXT_PAGE: "a.pageNav-jump--next",
  WT_URLS: 'a[href^="/threads/"][data-tp-primary]',
  WT_UNREAD_THREAD_CHECKBOX: 'input[type="checkbox"][name="unread"]',
  GS_POSTS: "article.message-body:first-child > div.bbWrapper:first-of-type",
  GS_RESULT_THREAD_TITLE: "h3.contentRow-title > a",
  GS_RESULT_BODY: "div.contentRow-main",
  GS_MEMBERSHIP: "li > a:not(.username)",
  GET_REQUEST_TOKEN: 'input[name="_xfToken"]',
  UD_USERNAME_ELEMENT: 'a[href="/account/"] > span.p-navgroup-linkText',
  UD_AVATAR_PIC: 'a[href="/account/"] > span.avatar > img[class^="avatar"]',
  LOGIN_MESSAGE_ERROR:
    "div.blockMessage.blockMessage--error.blockMessage--iconic",
  LU_TAGS_SCRIPT: "script:contains('latestUpdates')",
  BK_RESULTS: "ol.listPlain > * div.contentRow-main",
  BK_POST_URL: "div.contentRow-title > a",
  BK_DESCRIPTION: "div.contentRow-snippet",
  BK_POST_OWNER: "div.contentRow-minor > * a.username",
  BK_TAGS: "div.contentRow-minor > * a.tagItem",
  /**
   * Attribute `datetime` contains an ISO date.
   */
  BK_TIME: "div.contentRow-minor > * time"
};

export const GENERIC = {
  /**
   * The ID of the user currently logged into
   * the platform in the attribute `data-user-id`.
   */
  CURRENT_USER_ID: "span.avatar[data-user-id]",
  /**
   * Banner containing any error messages as text.
   */
  ERROR_BANNER: "div.p-body-pageContent > div.blockMessage"
};

export const WATCHED_THREAD = {
  /**
   * List of elements containing the data of the watched threads.
   */
  BODIES: "div.structItem-cell--main",
  /**
   * Link element containing the partial URL
   * of the thread in the `href` attribute.
   *
   * It may be followed by the `/unread` segment.
   *
   * For use within a `WATCHED_THREAD.BODIES` selector.
   */
  URL: "div > a[data-tp-primary]",
  /**
   * Name of the forum to which the thread belongs as text.
   *
   * For use within a `WATCHED_THREAD.BODIES` selector.
   */
  FORUM:
    "div.structItem-cell--main > div.structItem-minor > ul.structItem-parts > li:last-of-type > a",
  /**
   * Index of the last page available as text.
   */
  LAST_PAGE: "ul.pageNav-main > li:last-child > a"
};

export const THREAD = {
  /**
   * Number of pages in the thread (as text of the element).
   *
   * Two identical elements are identified.
   */
  LAST_PAGE: "ul.pageNav-main > li:last-child > a",
  /**
   * Identify the creator of the thread.
   *
   * The ID is contained in the `data-user-id` attribute.
   */
  OWNER_ID: "div.uix_headerInner > * a.username[data-user-id]",
  /**
   * Contains the creation date of the thread.
   *
   * The date is contained in the `datetime` attribute as an ISO string.
   */
  CREATION: "div.uix_headerInner > * time",
  /**
   * List of tags assigned to the thread.
   */
  TAGS: "a.tagItem",
  /**
   * List of prefixes assigned to the thread.
   */
  PREFIXES: 'h1.p-title-value > a.labelLink > span[dir="auto"]',
  /**
   * Thread title.
   */
  TITLE: "h1.p-title-value",
  /**
   * JSON containing thread information.
   *
   * Two different elements are found.
   */
  JSONLD: 'script[type="application/ld+json"]',
  /**
   * Posts on the current page.
   */
  POSTS_IN_PAGE: "article.message"
};

export const POST = {
  /**
   * Unique post number for the current thread.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  NUMBER:
    '* ul.message-attribution-opposite > li > a:not([id])[rel="nofollow"]',
  /**
   * Unique ID of the post in the F95Zone platform in the `id` attribute.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  ID: 'span[id^="post"]',
  /**
   * Unique ID of the post author in the `data-user-id` attribute.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  OWNER_ID: "* div.message-cell--user > * a[data-user-id]",
  /**
   * Main body of the post where the message written by the user is contained.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  BODY: "* article.message-body > div.bbWrapper",
  /**
   * Publication date of the post contained in the `datetime` attribute as an ISO date.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  PUBLISH_DATE: "* div.message-attribution-main > a > time",
  /**
   * Last modified date of the post contained in the `datetime` attribute as the ISO date.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  LAST_EDIT: "* div.message-lastEdit > time",
  /**
   * Gets the element only if the post has been bookmarked.
   *
   * For use within a `THREAD.POSTS_IN_PAGE` selector.
   */
  BOOKMARKED:
    '* ul.message-attribution-opposite >li > a[title="Bookmark"].is-bookmarked'
};

export const MEMBER = {
  /**
   * Name of the user.
   *
   * It also contains the unique ID of the user in the `data-user-id` attribute.
   */
  NAME: 'span[class^="username"]',
  /**
   * Title of the user in the platform.
   *
   * i.e.: Member
   */
  TITLE: "span.userTitle",
  /**
   * Avatar used by the user.
   *
   * Source in the attribute `src`.
   */
  AVATAR: "span.avatarWrapper > a.avatar > img",
  /**
   * User assigned banners.
   *
   * The last element is always empty and can be ignored.
   */
  BANNERS: "em.userBanner > strong",
  /**
   * Date the user joined the platform.
   *
   * The date is contained in the `datetime` attribute as an ISO string.
   */
  JOINED:
    "div.uix_memberHeader__extra > div.memberHeader-blurb:nth-child(1) > * time",
  /**
   * Last time the user connected to the platform.
   *
   * The date is contained in the `datetime` attribute as an ISO string.
   */
  LAST_SEEN:
    "div.uix_memberHeader__extra > div.memberHeader-blurb:nth-child(2) > * time",
  MESSAGES: "div.pairJustifier > dl:nth-child(1) > * a",
  REACTION_SCORE: "div.pairJustifier > dl:nth-child(2) > dd",
  POINTS: "div.pairJustifier > dl:nth-child(3) > * a",
  RATINGS_RECEIVED: "div.pairJustifier > dl:nth-child(4) > dd",
  AMOUNT_DONATED: "div.pairJustifier > dl:nth-child(5) > dd",
  /**
   * Button used to follow/unfollow the user.
   *
   * If the text is `Unfollow` then the user is followed.
   * If the text is `Follow` then the user is not followed.
   */
  FOLLOWED:
    "div.memberHeader-buttons > div.buttonGroup:first-child > a[data-sk-follow] > span",
  /**
   * Button used to ignore/unignore the user.
   *
   * If the text is `Unignore` then the user is ignored.
   * If the text is `Ignore` then the user is not ignored.
   */
  IGNORED:
    "div.memberHeader-buttons > div.buttonGroup:first-child > a[data-sk-ignore]"
};
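A minimal sketch of how these selectors are typically consumed with cheerio elsewhere in the library (the helper name and HTML string are placeholders, not part of this commit):

import cheerio from "cheerio";

// Sketch: extract the logged-in username from an already fetched page.
function readUsername(html: string): string {
  const $ = cheerio.load(html);
  return $(selectors.UD_USERNAME_ELEMENT).text().trim();
}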
@ -1,26 +1,26 @@
export const urls = {
  F95_BASE_URL: "https://f95zone.to",
  F95_SEARCH_URL: "https://f95zone.to/search/search/",
  F95_LATEST_UPDATES: "https://f95zone.to/latest",
  F95_THREADS: "https://f95zone.to/threads/",
  F95_LOGIN_URL: "https://f95zone.to/login/login",
  F95_WATCHED_THREADS: "https://f95zone.to/watched/threads",
  F95_LATEST_PHP: "https://f95zone.to/new_latest.php",
  F95_BOOKMARKS: "https://f95zone.to/account/bookmarks",
  /**
   * Add the unique ID of the post to
   * get the thread page where the post
   * is present.
   */
  F95_POSTS: "https://f95zone.to/posts/",
  /**
   * @todo
   */
  F95_CONVERSATIONS: "https://f95zone.to/conversations/",
  /**
   * @todo
   */
  F95_ALERTS: "https://f95zone.to/account/alerts",
  F95_POSTS_NUMBER: "https://f95zone.to/account/dpp-update",
  F95_MEMBERS: "https://f95zone.to/members"
};
@ -16,26 +16,28 @@ import fetchThreadHandiworkURLs from "./fetch-thread.js";
 * Maximum number of items to get. Default: 30
 * @returns {Promise<String[]>} URLs of the handiworks
 */
export default async function fetchHandiworkURLs(
  query: HandiworkSearchQuery,
  limit = 30
): Promise<string[]> {
  // Local variables
  let urls: string[] = null;
  const searchType = query.selectSearchType();

  // Convert the query
  if (searchType === "latest") {
    // Cast the query
    const castedQuery = query.cast<LatestSearchQuery>("LatestSearchQuery");

    // Fetch the urls
    urls = await fetchLatestHandiworkURLs(castedQuery, limit);
  } else {
    // Cast the query
    const castedQuery = query.cast<ThreadSearchQuery>("ThreadSearchQuery");

    // Fetch the urls
    urls = await fetchThreadHandiworkURLs(castedQuery, limit);
  }

  return urls;
}
@ -4,7 +4,6 @@
import LatestSearchQuery from "../classes/query/latest-search-query.js";
import { urls } from "../constants/url.js";

/**
 * Gets the URLs of the latest handiworks that match the passed parameters.
 *
@ -15,39 +14,44 @@ import { urls } from "../constants/url.js";
 * Maximum number of items to get. Default: 30
 * @returns {Promise<String[]>} URLs of the handiworks
 */
export default async function fetchLatestHandiworkURLs(
  query: LatestSearchQuery,
  limit = 30
): Promise<string[]> {
  // Local variables
  const shallowQuery: LatestSearchQuery = Object.assign(
    new LatestSearchQuery(),
    query
  );
  const resultURLs = [];
  let fetchedResults = 0;
  let noMorePages = false;

  do {
    // Fetch the response (application/json)
    const response = await shallowQuery.execute();

    // Save the URLs
    if (response.isSuccess()) {
      // In-loop variables
      const data: [{ thread_id: number }] = response.value.data.msg.data;
      const totalPages: number = response.value.data.msg.pagination.total;

      data.map((e, idx) => {
        if (fetchedResults < limit) {
          const gameURL = new URL(e.thread_id.toString(), urls.F95_THREADS)
            .href;
          resultURLs.push(gameURL);

          fetchedResults += 1;
        }
      });

      // Increment page and check for its existence
      shallowQuery.page += 1;
      noMorePages = shallowQuery.page > totalPages;
    } else throw response.value;
  } while (fetchedResults < limit && !noMorePages);

  return resultURLs;
}
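A usage sketch for the pagination loop above (the tag and limit are illustrative; it assumes authentication and platform data have already been set up):

async function latestExample(): Promise<void> {
  const query = new LatestSearchQuery();
  query.category = "games";
  query.includedTags = ["3d game"];

  // Request up to 60 URLs; the do/while above keeps advancing `page`
  // until the limit is reached or the endpoint reports no more pages.
  const urls = await fetchLatestHandiworkURLs(query, 60);
  console.log(`Collected ${urls.length} thread URLs`);
}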
@ -9,7 +9,7 @@ import cheerio from "cheerio";
// Modules from file
import shared, { TPrefixDict } from "../shared.js";
import { urls as f95url } from "../constants/url.js";
import { selectors as f95selector } from "../constants/css-selector.js";
import { fetchHTML } from "../network-helper.js";

//#region Interface definitions
@ -17,27 +17,27 @@ import { fetchHTML } from "../network-helper.js";
 * Represents the single element contained in the data categories.
 */
interface ISingleOption {
  id: number;
  name: string;
  class: string;
}

/**
 * Represents the set of values associated with a specific category of data.
 */
interface ICategoryResource {
  id: number;
  name: string;
  prefixes: ISingleOption[];
}

/**
 * Represents the set of tags present on the platform.
 */
interface ILatestResource {
  prefixes: { [s: string]: ICategoryResource[] };
  tags: TPrefixDict;
  options: string;
}
//#endregion Interface definitions
@ -47,22 +47,22 @@ interface ILatestResource {
 * (such as graphics engines and progress statuses)
 */
export default async function fetchPlatformData(): Promise<void> {
  // Check if the data are cached
  if (!readCache(shared.cachePath)) {
    // Load the HTML
    const html = await fetchHTML(f95url.F95_LATEST_UPDATES);

    // Parse data
    if (html.isSuccess()) {
      const data = parseLatestPlatformHTML(html.value);

      // Assign data
      assignLatestPlatformData(data);

      // Cache data
      saveCache(shared.cachePath);
    } else throw html.value;
  }
}
//#endregion Public methods
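For orientation, a sketch of the JSON shape this cache round-trips through readCache() and saveCache() in the following hunks (the IDs and labels are illustrative placeholders, not values from the platform):

// Illustrative cache content: each top-level key maps numeric prefix IDs
// to their display names (the TPrefixDict type imported from shared.js).
const exampleCache = {
  engines: { 2: "Ren'Py", 3: "RPGM" },
  statuses: { 18: "Completed" },
  tags: { 103: "3d game" },
  others: { 22: "VR" }
};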
@ -72,21 +72,21 @@ export default async function fetchPlatformData(): Promise<void> {
 * Read the platform cache (if available)
 */
function readCache(path: string) {
  // Local variables
  let returnValue = false;

  if (existsSync(path)) {
    const data = readFileSync(path, { encoding: "utf-8", flag: "r" });
    const json: { [s: string]: TPrefixDict } = JSON.parse(data);

    shared.setPrefixPair("engines", json.engines);
    shared.setPrefixPair("statuses", json.statuses);
    shared.setPrefixPair("tags", json.tags);
    shared.setPrefixPair("others", json.others);

    returnValue = true;
  }
  return returnValue;
}

/**
@ -94,14 +94,14 @@ function readCache(path: string) {
 * Save the current platform variables to disk.
 */
function saveCache(path: string): void {
  const saveDict = {
    engines: shared.prefixes["engines"],
    statuses: shared.prefixes["statuses"],
    tags: shared.prefixes["tags"],
    others: shared.prefixes["others"]
  };
  const json = JSON.stringify(saveDict);
  writeFileSync(path, json);
}

/**
@ -109,15 +109,15 @@ function saveCache(path: string): void {
 * Given the HTML code of the response from the F95Zone,
 * parse it and return the result.
 */
function parseLatestPlatformHTML(html: string): ILatestResource {
  const $ = cheerio.load(html);

  // Clean the JSON string
  const unparsedText = $(f95selector.LU_TAGS_SCRIPT).html().trim();
  const startIndex = unparsedText.indexOf("{");
  const endIndex = unparsedText.lastIndexOf("}");
  const parsedText = unparsedText.substring(startIndex, endIndex + 1);
  return JSON.parse(parsedText);
}

/**
@ -125,28 +125,28 @@ function parseLatestPlatformHTML(html: string): ILatestResource{
 * Assign to the local variables the values from the F95Zone.
 */
function assignLatestPlatformData(data: ILatestResource): void {
  // Local variables
  const scrapedData = {};

  // Parse and assign the values that are NOT tags
  for (const [key, value] of Object.entries(data.prefixes)) {
    for (const res of value) {
      // Prepare the dict
      const dict: TPrefixDict = {};

      for (const e of res.prefixes) {
        dict[e.id] = e.name.replace("&#039;", "'");
      }

      // Save the property
      scrapedData[res.name] = dict;
    }
  }

  // Save the values
  shared.setPrefixPair("engines", Object.assign({}, scrapedData["Engine"]));
  shared.setPrefixPair("statuses", Object.assign({}, scrapedData["Status"]));
  shared.setPrefixPair("others", Object.assign({}, scrapedData["Other"]));
  shared.setPrefixPair("tags", data.tags);
}
//#endregion
@ -16,17 +16,20 @@ import { IQuery } from "../interfaces.js";
 * @param limit Maximum number of items to get. Default: 30
 * @returns URLs of the fetched games
 */
export default async function getURLsFromQuery(
  query: IQuery,
  limit = 30
): Promise<string[]> {
  switch (query.itype) {
    case "HandiworkSearchQuery":
      return fetchHandiworkURLs(query as HandiworkSearchQuery, limit);
    case "LatestSearchQuery":
      return fetchLatestHandiworkURLs(query as LatestSearchQuery, limit);
    case "ThreadSearchQuery":
      return fetchThreadHandiworkURLs(query as ThreadSearchQuery, limit);
    default:
      throw Error(`Invalid query type: ${query.itype}`);
  }
}

//#endregion
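A sketch of calling the dispatcher above; the query configuration is illustrative and assumes the usual login()/fetchPlatformData() bootstrap has already run:

async function dispatchExample(): Promise<void> {
  // itype acts as a discriminator, so the same entry point
  // serves handiwork, latest and thread queries alike.
  const query = new HandiworkSearchQuery();
  query.category = "games";

  const urls = await getURLsFromQuery(query, 10);
  console.log(`Fetched ${urls.length} thread URLs`);
}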
@ -21,13 +21,17 @@ import ThreadSearchQuery from "../classes/query/thread-search-query.js";
 * Maximum number of items to get. Default: 30
 * @returns {Promise<String[]>} URLs of the handiworks
 */
export default async function fetchThreadHandiworkURLs(
  query: ThreadSearchQuery,
  limit = 30
): Promise<string[]> {
  // Execute the query
  const response = await query.execute();

  // Fetch the results from F95 and return the handiwork urls
  if (response.isSuccess())
    return fetchResultURLs(response.value.data as string, limit);
  else throw response.value;
}

//#endregion Public methods
@ -39,20 +43,23 @@ export default async function fetchThreadHandiworkURLs(query: ThreadSearchQuery,
 * @param {number} limit
 * Maximum number of items to get. Default: 30
 */
async function fetchResultURLs(html: string, limit = 30): Promise<string[]> {
  // Prepare cheerio
  const $ = cheerio.load(html);

  // Here we get all the DIV that are the body of the various query results
  const results = $("body").find(f95Selector.GS_RESULT_BODY);

  // Then we extract the URLs
  const urls = results
    .slice(0, limit)
    .map((idx, el) => {
      const elementSelector = $(el);
      return extractLinkFromResult(elementSelector);
    })
    .get();

  return urls;
}

/**
@ -61,15 +68,15 @@ async function fetchResultURLs(html: string, limit: number = 30): Promise<string
 * @returns {String} URL to thread
 */
function extractLinkFromResult(selector: cheerio.Cheerio): string {
  shared.logger.trace("Extracting thread link from result...");

  const partialLink = selector
    .find(f95Selector.GS_RESULT_THREAD_TITLE)
    .attr("href")
    .trim();

  // Compose and return the URL
  return new URL(partialLink, f95urls.F95_BASE_URL).toString();
}

//#endregion Private methods
@ -2,52 +2,66 @@
 * Data relating to an external platform (i.e. Patreon).
 */
export type TExternalPlatform = {
  /**
   * name of the platform.
   */
  name: string;
  /**
   * link to the platform.
   */
  link: string;
};

/**
 * Information about the author of a work.
 */
export type TAuthor = {
  /**
   * Plain name or username of the author.
   */
  name: string;
  /**
   *
   */
  platforms: TExternalPlatform[];
};

/**
 * Information on the evaluation of a work.
 */
export type TRating = {
  /**
   * average value of evaluations.
   */
  average: number;
  /**
   * Best rating received.
   */
  best: number;
  /**
   * Number of ratings made by users.
   */
  count: number;
};

/**
 * List of possible graphics engines used for game development.
 */
export type TEngine =
  | "QSP"
  | "RPGM"
  | "Unity"
  | "HTML"
  | "RAGS"
  | "Java"
  | "Ren'Py"
  | "Flash"
  | "ADRIFT"
  | "Others"
  | "Tads"
  | "Wolf RPG"
  | "Unreal Engine"
  | "WebGL";

/**
 * List of possible progress states associated with a game.
@@ -62,235 +76,238 @@ export type TCategory = "games" | "mods" | "comics" | "animations" | "assets";
/**
 * Valid names of classes that implement the IQuery interface.
 */
export type TQueryInterface =
  | "LatestSearchQuery"
  | "ThreadSearchQuery"
  | "HandiworkSearchQuery";

/**
 * Collection of values defined for each
 * handiwork on the F95Zone platform.
 */
export interface IBasic {
  /**
   * Authors of the work.
   */
  authors: TAuthor[];
  /**
   * Category of the work.
   */
  category: TCategory;
  /**
   * List of changes of the work for each version.
   */
  changelog: string[];
  /**
   * Link to the cover image of the work.
   */
  cover: string;
  /**
   * Unique ID of the work on the platform.
   */
  id: number;
  /**
   * Last update of the work's thread.
   */
  lastThreadUpdate: Date;
  /**
   * Plain name of the work (without tags and/or prefixes).
   */
  name: string;
  /**
   * Work description.
   */
  overview: string;
  /**
   * List of prefixes associated with the work.
   */
  prefixes: string[];
  /**
   * Evaluation of the work by the users of the platform.
   */
  rating: TRating;
  /**
   * List of tags associated with the work.
   */
  tags: string[];
  /**
   * Date of publication of the thread associated with the work.
   */
  threadPublishingDate: Date;
  /**
   * URL to the work's official conversation on the F95Zone portal.
   */
  url: string;
}

/**
 * Collection of values representing a game present on the F95Zone platform.
 */
export interface IGame extends IBasic {
  /**
   * Specify whether the work has censorship
   * measures regarding NSFW scenes.
   */
  censored: boolean;
  /**
   * Graphics engine used for game development.
   */
  engine: TEngine;
  /**
   * List of genres associated with the work.
   */
  genre: string[];
  /**
   * Author's guide to installation.
   */
  installation: string;
  /**
   * List of available languages.
   */
  language: string[];
  /**
   * Last time the work underwent updates.
   */
  lastRelease: Date;
  /**
   * Indicates that this item represents a mod.
   */
  mod: boolean;
  /**
   * List of OS for which the work is compatible.
   */
  os: string[];
  /**
   * Indicates the progress of a game.
   */
  status: TStatus;
  /**
   * Version of the work.
   */
  version: string;
}

/**
 * Collection of values representing a comic present on the F95Zone platform.
 */
export interface IComic extends IBasic {
  /**
   * List of genres associated with the work.
   */
  genre: string[];
  /**
   * Number of pages or elements that make up the work.
   */
  pages: string;
  /**
   * List of resolutions available for the work.
   */
  resolution: string[];
}

/**
 * Collection of values representing an animation present on the F95Zone platform.
 */
export interface IAnimation extends IBasic {
  /**
   * Specify whether the work has censorship
   * measures regarding NSFW scenes.
   */
  censored: boolean;
  /**
   * List of genres associated with the work.
   */
  genre: string[];
  /**
   * Author's guide to installation.
   */
  installation: string;
  /**
   * List of available languages.
   */
  language: string[];
  /**
   * Length of the animation.
   */
  lenght: string;
  /**
   * Number of pages or elements that make up the work.
   */
  pages: string;
  /**
   * List of resolutions available for the work.
   */
  resolution: string[];
}

/**
 * Collection of values representing an asset present on the F95Zone platform.
 */
export interface IAsset extends IBasic {
  /**
   * External URL of the asset.
   */
  assetLink: string;
  /**
   * List of URLs of assets associated with the work
   * (for example same collection).
   */
  associatedAssets: string[];
  /**
   * Software compatible with the work.
   */
  compatibleSoftware: string;
  /**
   * List of asset URLs included in the work or used to develop it.
   */
  includedAssets: string[];
  /**
   * List of official links of the work, external to the platform.
   */
  officialLinks: string[];
  /**
   * Unique SKU value of the work.
   */
  sku: string;
}

/**
 * Collection of values extrapolated from the
 * F95 platform representing a particular work.
 */
export interface IHandiwork extends IGame, IComic, IAnimation, IAsset {}

export interface IQuery {
  /**
   * Name of the implemented interface.
   */
  itype: TQueryInterface;
  /**
   * Category of items to search among.
   */
  category: TCategory;
  /**
   * Tags to be included in the search.
   * Max. 5 tags.
   */
  includedTags: string[];
  /**
   * Prefixes to include in the search.
   */
  includedPrefixes: string[];
  /**
   * Index of the page to be obtained.
   * Between 1 and infinity.
   */
  page: number;
  /**
   * Verify that the query values are valid.
   */
  validate(): boolean;
  /**
   * Searches with the data in the query and returns the result.
   *
   * If the query is invalid it throws an exception.
   */
  execute(): any;
}

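// Illustrative sketch (not from the original source): literal values that satisfy
// the types above. Names, links and numbers are invented for the example.
const examplePlatform: TExternalPlatform = {
  name: "Patreon",
  link: "https://www.patreon.com/some-author" // hypothetical link
};

const exampleAuthor: TAuthor = {
  name: "SomeAuthor", // hypothetical author name
  platforms: [examplePlatform]
};

const exampleRating: TRating = { average: 4.5, best: 5, count: 120 }; // made-up values

// A small helper that relies only on the IBasic contract defined above.
function describeWork(work: IBasic): string {
  const authors = work.authors.map((a) => a.name).join(", ");
  return `${work.name} [${work.category}] by ${authors}, rated ${work.rating.average}/${work.rating.best}`;
}
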
@@ -12,11 +12,16 @@ import { selectors as f95selector } from "./constants/css-selector.js";
import LoginResult from "./classes/login-result.js";
import credentials from "./classes/credentials.js";
import { failure, Result, success } from "./classes/result.js";
import {
  GenericAxiosError,
  InvalidF95Token,
  UnexpectedResponseContentType
} from "./classes/errors.js";

// Global variables
const userAgent =
  "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15) " +
  "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0 Safari/605.1.15";
// @ts-ignore
axiosCookieJarSupport.default(axios);

@@ -24,47 +29,49 @@ axiosCookieJarSupport.default(axios);
 * Common configuration used to send requests via Axios.
 */
const commonConfig = {
  /**
   * Headers to add to the request.
   */
  headers: {
    "User-Agent": userAgent,
    Connection: "keep-alive"
  },
  /**
   * Specify whether to send credentials along with the request.
   */
  withCredentials: true,
  /**
   * Jar of cookies to send along with the request.
   */
  jar: shared.session.cookieJar,
  validateStatus: function (status: number) {
    return status < 500; // Resolve only if the status code is less than 500
  }
};

/**
 * Gets the HTML code of a page.
 */
export async function fetchHTML(
  url: string
): Promise<Result<GenericAxiosError | UnexpectedResponseContentType, string>> {
  // Fetch the response of the platform
  const response = await fetchGETResponse(url);

  if (response.isSuccess()) {
    // Check if the response is HTML source code
    const isHTML = response.value.headers["content-type"].includes("text/html");

    const unexpectedResponseError = new UnexpectedResponseContentType({
      id: 2,
      message: `Expected HTML but received ${response.value["content-type"]}`,
      error: null
    });

    return isHTML
      ? success(response.value.data as string)
      : failure(unexpectedResponseError);
  } else return failure(response.value as GenericAxiosError);
}

/**
@@ -75,102 +82,118 @@ export async function fetchHTML(url: string): Promise<Result<GenericAxiosError |
 * @param {Boolean} force Specifies whether the request should be forced, ignoring any saved cookies
 * @returns {Promise<LoginResult>} Result of the operation
 */
export async function authenticate(
  credentials: credentials,
  force = false
): Promise<LoginResult> {
  shared.logger.info(`Authenticating with user ${credentials.username}`);
  if (!credentials.token)
    throw new InvalidF95Token(`Invalid token for auth: ${credentials.token}`);

  // Secure the URL
  const secureURL = enforceHttpsUrl(f95url.F95_LOGIN_URL);

  // Prepare the parameters to send to the platform to authenticate
  const params = {
    login: credentials.username,
    url: "",
    password: credentials.password,
    password_confirm: "",
    additional_security: "",
    remember: "1",
    _xfRedirect: "https://f95zone.to/",
    website_code: "",
    _xfToken: credentials.token
  };

  try {
    // Try to log-in
    const response = await fetchPOSTResponse(
      f95url.F95_LOGIN_URL,
      params,
      force
    );

    if (response.isSuccess()) {
      // Parse the response HTML
      const $ = cheerio.load(response.value.data as string);

      // Get the error message (if any) and remove the new line chars
      const errorMessage = $("body")
        .find(f95selector.LOGIN_MESSAGE_ERROR)
        .text()
        .replace(/\n/g, "");

      // Return the result of the authentication
      const result = errorMessage.trim() === "";
      const message = result ? "Authentication successful" : errorMessage;
      return new LoginResult(result, message);
    } else throw response.value;
  } catch (e) {
    shared.logger.error(
      `Error ${e.message} occurred while authenticating to ${secureURL}`
    );
    return new LoginResult(false, `Error ${e.message} while authenticating`);
  }
}

/**
 * Obtain the token used to authenticate the user to the platform.
 */
export async function getF95Token() {
  // Fetch the response of the platform
  const response = await fetchGETResponse(f95url.F95_LOGIN_URL);

  if (response.isSuccess()) {
    // The response is a HTML page, we need to find the <input> with name "_xfToken"
    const $ = cheerio.load(response.value.data as string);
    return $("body").find(f95selector.GET_REQUEST_TOKEN).attr("value");
  } else throw response.value;
}

//#region Utility methods

/**
 * Performs a GET request to a specific URL and returns the response.
 */
export async function fetchGETResponse(
  url: string
): Promise<Result<GenericAxiosError, AxiosResponse<any>>> {
  // Secure the URL
  const secureURL = enforceHttpsUrl(url);

  try {
    // Fetch and return the response
    commonConfig.jar = shared.session.cookieJar;
    const response = await axios.get(secureURL, commonConfig);
    return success(response);
  } catch (e) {
    console.log(e.response);
    shared.logger.error(
      `(GET) Error ${e.message} occurred while trying to fetch ${secureURL}`
    );
    const genericError = new GenericAxiosError({
      id: 1,
      message: `(GET) Error ${e.message} occurred while trying to fetch ${secureURL}`,
      error: e
    });
    return failure(genericError);
  }
}

/**
 * Enforces that the URL scheme is https and returns the new URL.
 */
export function enforceHttpsUrl(url: string): string {
  if (isStringAValidURL(url)) return url.replace(/^(https?:)?\/\//, "https://");
  else throw new Error(`${url} is not a valid URL`);
}

/**
 * Check if the URL belongs to the domain of the F95 platform.
 */
export function isF95URL(url: string): boolean {
  return url.toString().startsWith(f95url.F95_BASE_URL);
}

/**
 * Checks if the string passed by parameter has a
@@ -178,11 +201,11 @@ export function isF95URL(url: string): boolean {
 * @param {String} url String to check for correctness
 */
export function isStringAValidURL(url: string): boolean {
  // Many thanks to Daveo at StackOverflow (https://preview.tinyurl.com/y2f2e2pc)
  const expression = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/;
  const regex = new RegExp(expression);
  return url.match(regex).length > 0;
}

/**
 * Check if a particular URL is valid and reachable on the web.
@@ -192,20 +215,23 @@ export function isStringAValidURL(url: string): boolean {
 * Default: false
 * @returns {Promise<Boolean>} true if the URL exists, false otherwise
 */
export async function urlExists(
  url: string,
  checkRedirect = false
): Promise<boolean> {
  // Local variables
  let valid = false;

  if (isStringAValidURL(url)) {
    valid = await axiosUrlExists(url);

    if (valid && checkRedirect) {
      const redirectUrl = await getUrlRedirect(url);
      valid = redirectUrl === url;
    }
  }

  return valid;
}

/**
@@ -214,8 +240,8 @@ export async function urlExists(url: string, checkRedirect: boolean = false): Pr
 * @returns {Promise<String>} Redirect URL or the passed URL
 */
export async function getUrlRedirect(url: string): Promise<string> {
  const response = await axios.head(url);
  return response.config.url;
}

/**
@@ -224,34 +250,41 @@ export async function getUrlRedirect(url: string): Promise<string> {
 * @param params List of value pairs to send with the request
 * @param force If `true`, the request ignores the sending of cookies already present on the device.
 */
export async function fetchPOSTResponse(
  url: string,
  params: { [s: string]: string },
  force = false
): Promise<Result<GenericAxiosError, AxiosResponse<any>>> {
  // Secure the URL
  const secureURL = enforceHttpsUrl(url);

  // Prepare the parameters for the POST request
  const urlParams = new URLSearchParams();
  for (const [key, value] of Object.entries(params))
    urlParams.append(key, value);

  // Shallow copy of the common configuration object
  commonConfig.jar = shared.session.cookieJar;
  const config = Object.assign({}, commonConfig);

  // Remove the cookies if forced
  if (force) delete config.jar;

  // Send the POST request and await the response
  try {
    const response = await axios.post(secureURL, urlParams, config);
    return success(response);
  } catch (e) {
    shared.logger.error(
      `(POST) Error ${e.message} occurred while trying to fetch ${secureURL}`
    );
    const genericError = new GenericAxiosError({
      id: 3,
      message: `(POST) Error ${e.message} occurred while trying to fetch ${secureURL}`,
      error: e
    });
    return failure(genericError);
  }
}

//#endregion Utility methods

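// Illustrative usage sketch (not from the original source) of the Result-returning
// helpers above. The URL is a placeholder and the `message` field on the error
// classes is assumed from how they are constructed in this module.
async function exampleFetchUsage(): Promise<void> {
  const html = await fetchHTML("https://f95zone.to/sam/latest_alpha/"); // placeholder URL
  if (html.isSuccess()) {
    console.log(`Received ${(html.value as string).length} characters of HTML`);
  } else {
    const error = html.value as GenericAxiosError | UnexpectedResponseContentType;
    console.log(`Request failed: ${error.message}`);
  }
}
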
@@ -261,20 +294,20 @@ export async function fetchPOSTResponse(url: string, params: { [s: string]: stri
 * Check with Axios if a URL exists.
 */
async function axiosUrlExists(url: string): Promise<boolean> {
  // Local variables
  const ERROR_CODES = ["ENOTFOUND", "ETIMEDOUT"];
  let valid = false;

  try {
    const response = await axios.head(url, {
      timeout: 3000
    });
    valid = response && !/4\d\d/.test(response.status.toString());
  } catch (error) {
    // Throw error only if the error is unknown
    if (!ERROR_CODES.includes(error.code)) throw error;
  }

  return valid;
}
//#endregion

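// Illustrative usage sketch (not from the original source) of the URL helpers above.
async function exampleUrlChecks(): Promise<void> {
  const secure = enforceHttpsUrl("http://f95zone.to"); // -> "https://f95zone.to"
  const reachable = await urlExists(secure, true);
  if (reachable) {
    const finalUrl = await getUrlRedirect(secure);
    console.log(`${secure} is reachable and resolves to ${finalUrl}`);
  }
}
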
@@ -6,13 +6,23 @@ import luxon from "luxon";
// Modules from files
import HandiWork from "../classes/handiwork/handiwork.js";
import Thread from "../classes/mapping/thread.js";
import {
  IBasic,
  TAuthor,
  TEngine,
  TExternalPlatform,
  TStatus
} from "../interfaces.js";
import shared, { TPrefixDict } from "../shared.js";
import { ILink, IPostElement } from "./post-parse.js";

export async function getHandiworkInformation<T extends IBasic>(
  url: string
): Promise<T>;

export async function getHandiworkInformation<T extends IBasic>(
  url: string
): Promise<T>;

/**
 * Gets information of a particular handiwork from its thread.
@@ -21,36 +31,38 @@ export async function getHandiworkInformation<T extends IBasic>(url: string): Pr
 *
 * @todo It does not currently support assets.
 */
export default async function getHandiworkInformation<T extends IBasic>(
  arg: string | Thread
): Promise<T> {
  // Local variables
  let thread: Thread = null;

  if (typeof arg === "string") {
    // Fetch thread data
    const id = extractIDFromURL(arg);
    thread = new Thread(id);
    await thread.fetch();
  } else thread = arg;

  shared.logger.info(`Obtaining handiwork from ${thread.url}`);

  // Convert the info from thread to handiwork
  const hw: HandiWork = {} as HandiWork;
  hw.id = thread.id;
  hw.url = thread.url;
  hw.name = thread.title;
  hw.category = thread.category;
  hw.threadPublishingDate = thread.publication;
  hw.lastThreadUpdate = thread.modified;
  hw.tags = thread.tags;
  hw.rating = thread.rating;
  fillWithPrefixes(hw, thread.prefixes);

  // Fetch info from first post
  const post = await thread.getPost(1);
  fillWithPostData(hw, post.body);

  return <T>(<unknown>hw);
}

//#region Private methods

@@ -61,28 +73,28 @@ export default async function getHandiworkInformation<T extends IBasic>(arg: str
 * Extracts the work's unique ID from its URL.
 */
function extractIDFromURL(url: string): number {
  shared.logger.trace("Extracting ID from URL...");

  // URLs are in the format https://f95zone.to/threads/GAMENAME-VERSION-DEVELOPER.ID/
  // or https://f95zone.to/threads/ID/
  const match = url.match(/([0-9]+)(?=\/|\b)(?!-|\.)/);
  if (!match) return -1;

  // Parse and return number
  return parseInt(match[0], 10);
}

/**
 * Makes an array of strings uppercase.
 */
function toUpperCaseArray(a: string[]): string[] {
  /**
   * Makes a string uppercase.
   */
  function toUpper(s: string): string {
    return s.toUpperCase();
  }
  return a.map(toUpper);
}

/**
@@ -91,10 +103,10 @@ function toUpperCaseArray(a: string[]): string[] {
 * Case insensitive.
 */
function stringInDict(s: string, a: TPrefixDict): boolean {
  // Make uppercase all the strings in the array
  const values = toUpperCaseArray(Object.values(a));

  return values.includes(s.toUpperCase());
}

/**
@@ -103,15 +115,15 @@ function stringInDict(s: string, a: TPrefixDict): boolean {
 * Check also for `yes`/`no` and `1`/`0`.
 */
function stringToBoolean(s: string): boolean {
  // Local variables
  const positiveTerms = ["true", "yes", "1"];
  const negativeTerms = ["false", "no", "0"];
  const cleanString = s.toLowerCase().trim();
  let result = Boolean(s);

  if (positiveTerms.includes(cleanString)) result = true;
  else if (negativeTerms.includes(cleanString)) result = false;
  return result;
}

/**
@@ -119,8 +131,11 @@ function stringToBoolean(s: string): boolean {
 *
 * Case-insensitive.
 */
function getPostElementByName(
  elements: IPostElement[],
  name: string
): IPostElement | undefined {
  return elements.find((el) => el.name.toUpperCase() === name.toUpperCase());
}

//#endregion Utilities

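// Illustrative sketch (not from the original source): how the helpers above behave
// on a hand-made post element. The sample data is invented.
const sampleElements: IPostElement[] = [
  { type: "Text", name: "Censored", text: "No", content: [] },
  { type: "Text", name: "Version", text: "0.4.2", content: [] } // made-up version
];

// Case-insensitive lookup, then conversion of the textual flag to a boolean.
const censoredSample = getPostElementByName(sampleElements, "censored");
const isCensoredSample = censoredSample ? stringToBoolean(censoredSample.text) : false; // -> false
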
@@ -132,43 +147,45 @@ function getPostElementByName(elements: IPostElement[], name: string): IPostElem
 * `Engine`, `Status`, `Mod`.
 */
function fillWithPrefixes(hw: HandiWork, prefixes: string[]) {
  shared.logger.trace("Parsing prefixes...");

  // Local variables
  let mod = false;
  let engine: TEngine = null;
  let status: TStatus = null;

  /**
   * Emulated dictionary of mod prefixes.
   */
  const fakeModDict: TPrefixDict = {
    0: "MOD",
    1: "CHEAT MOD"
  };

  // Initialize the array
  hw.prefixes = [];

  prefixes.map((item, idx) => {
    // Remove the square brackets
    const prefix = item.replace("[", "").replace("]", "");

    // Check what the prefix indicates
    if (stringInDict(prefix, shared.prefixes["engines"]))
      engine = prefix as TEngine;
    else if (stringInDict(prefix, shared.prefixes["statuses"]))
      status = prefix as TStatus;
    else if (stringInDict(prefix, fakeModDict)) mod = true;

    // In any case, add the prefix to the list
    hw.prefixes.push(prefix);
  });

  // If the status is not set, then the game is in development (Ongoing)
  status = !status && hw.category === "games" ? status : "Ongoing";

  hw.engine = engine;
  hw.status = status;
  hw.mod = mod;
}

/**
@@ -181,70 +198,87 @@ function fillWithPrefixes(hw: HandiWork, prefixes: string[]) {
 * `LastRelease`, `Authors`, `Changelog`, `Cover`.
 */
function fillWithPostData(hw: HandiWork, elements: IPostElement[]) {
  // First fill the "simple" elements
  hw.overview = getPostElementByName(elements, "overview")?.text;
  hw.os = getPostElementByName(elements, "os")
    ?.text?.split(",")
    .map((s) => s.trim());
  hw.language = getPostElementByName(elements, "language")
    ?.text?.split(",")
    .map((s) => s.trim());
  hw.version = getPostElementByName(elements, "version")?.text;
  hw.installation = getPostElementByName(
    elements,
    "installation"
  )?.content.shift()?.text;
  hw.pages = getPostElementByName(elements, "pages")?.text;
  hw.resolution = getPostElementByName(elements, "resolution")
    ?.text?.split(",")
    .map((s) => s.trim());
  hw.lenght = getPostElementByName(elements, "lenght")?.text;

  // Parse the censorship
  const censored =
    getPostElementByName(elements, "censored") ||
    getPostElementByName(elements, "censorship");
  if (censored) hw.censored = stringToBoolean(censored.text);

  // Get the genres
  const genre = getPostElementByName(elements, "genre")?.content.shift()?.text;
  hw.genre = genre?.split(",").map((s) => s.trim());

  // Get the cover
  const cover = getPostElementByName(elements, "overview")?.content.find(
    (el) => el.type === "Image"
  ) as ILink;
  hw.cover = cover?.href;

  // Fill the dates
  const releaseDate = getPostElementByName(elements, "release date")?.text;
  if (luxon.DateTime.fromISO(releaseDate).isValid)
    hw.lastRelease = new Date(releaseDate);

  //#region Convert the author
  const authorElement =
    getPostElementByName(elements, "developer") ||
    getPostElementByName(elements, "developer/publisher") ||
    getPostElementByName(elements, "artist");
  const author: TAuthor = {
    name: authorElement?.text,
    platforms: []
  };

  // Add the found platforms
  authorElement?.content.forEach((el: ILink, idx) => {
    const platform: TExternalPlatform = {
      name: el.text,
      link: el.href
    };

    author.platforms.push(platform);
  });
  hw.authors = [author];
  //#endregion Convert the author

  //#region Get the changelog
  hw.changelog = [];
  const changelogElement =
    getPostElementByName(elements, "changelog") ||
    getPostElementByName(elements, "change-log");
  if (changelogElement) {
    const changelogSpoiler = changelogElement?.content.find((el) => {
      return el.type === "Spoiler" && el.content.length > 0;
    });

    // Add the single spoilers to the changelog
    changelogSpoiler?.content.forEach((el) => {
      if (el.text.trim()) hw.changelog.push(el.text);
    });

    // Also add the text of the "changelog" element at the end
    hw.changelog.push(changelogSpoiler.text);
  }
  //#endregion Get the changelog
}

//#endregion Private methods

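// Illustrative usage sketch (not from the original source): fetching a handiwork
// from a thread URL. The URL is hypothetical and an authenticated session is assumed.
async function exampleGetHandiwork(): Promise<void> {
  const work = await getHandiworkInformation<IBasic>(
    "https://f95zone.to/threads/example-thread.12345/" // hypothetical thread URL
  );
  console.log(`${work.name} - last thread update: ${work.lastThreadUpdate}`);
}
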
@@ -10,7 +10,7 @@ import { THREAD } from "../constants/css-selector.js";
/**
 * Represents information contained in a JSON+LD tag.
 */
export type TJsonLD = { [s: string]: string | TJsonLD };

/**
 * Extracts and processes the JSON-LD values of the page.
@@ -18,16 +18,16 @@ export type TJsonLD = { [s: string]: string | TJsonLD }
 * @returns {TJsonLD[]} List of data obtained from the page
 */
export function getJSONLD(body: cheerio.Cheerio): TJsonLD {
  shared.logger.trace("Extracting JSON-LD data...");

  // Fetch the JSON-LD data
  const structuredDataElements = body.find(THREAD.JSONLD);

  // Parse the data
  const values = structuredDataElements.map((idx, el) => parseJSONLD(el)).get();

  // Merge the data and return a single value
  return mergeJSONLD(values);
}

//#region Private methods

@@ -36,29 +36,29 @@ export function getJSONLD(body: cheerio.Cheerio): TJsonLD {
 * @param data List of JSON+LD tags
 */
function mergeJSONLD(data: TJsonLD[]): TJsonLD {
  // Local variables
  let merged: TJsonLD = {};

  for (const value of data) {
    merged = Object.assign(merged, value);
  }

  return merged;
}

/**
 * Parse a JSON-LD element source code.
 */
function parseJSONLD(element: cheerio.Element): TJsonLD {
  // Get the element HTML
  const html = cheerio(element).html().trim();

  // Obtain the JSON-LD
  const data = html
    .replace('<script type="application/ld+json">', "")
    .replace("</script>", "");

  // Convert the string to an object
  return JSON.parse(data);
}
//#endregion Private methods

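// Illustrative sketch (not from the original source): the kind of tag handled by
// parseJSONLD above. The snippet and its values are invented.
const exampleJsonLdTag =
  '<script type="application/ld+json">{"@type": "Book", "name": "Example work"}</script>';
const exampleJsonLd: TJsonLD = JSON.parse(
  exampleJsonLdTag
    .replace('<script type="application/ld+json">', "")
    .replace("</script>", "")
); // -> { "@type": "Book", "name": "Example work" }
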
@@ -3,15 +3,15 @@
//#region Interfaces

export interface IPostElement {
  type: "Empty" | "Text" | "Link" | "Image" | "Spoiler";
  name: string;
  text: string;
  content: IPostElement[];
}

export interface ILink extends IPostElement {
  type: "Image" | "Link";
  href: string;
}

//#endregion Interfaces

|
||||||
/**
|
/**
|
||||||
* Given a post of a thread page it extracts the information contained in the body.
|
* Given a post of a thread page it extracts the information contained in the body.
|
||||||
*/
|
*/
|
||||||
export function parseF95ThreadPost($: cheerio.Root, post: cheerio.Cheerio): IPostElement[] {
|
export function parseF95ThreadPost(
|
||||||
// The data is divided between "tag" and "text" elements.
|
$: cheerio.Root,
|
||||||
// Simple data is composed of a "tag" element followed
|
post: cheerio.Cheerio
|
||||||
// by a "text" element, while more complex data (contained
|
): IPostElement[] {
|
||||||
// in spoilers) is composed of a "tag" element, followed
|
// The data is divided between "tag" and "text" elements.
|
||||||
// by a text containing only ":" and then by an additional
|
// Simple data is composed of a "tag" element followed
|
||||||
// "tag" element having as the first term "Spoiler"
|
// by a "text" element, while more complex data (contained
|
||||||
|
// in spoilers) is composed of a "tag" element, followed
|
||||||
|
// by a text containing only ":" and then by an additional
|
||||||
|
// "tag" element having as the first term "Spoiler"
|
||||||
|
|
||||||
// First fetch all the elements in the post
|
// First fetch all the elements in the post
|
||||||
const elements = post.contents().toArray().map(el => {
|
const elements = post
|
||||||
const node = parseCheerioNode($, el);
|
.contents()
|
||||||
if (node.name || node.text || node.content.length != 0) {
|
.toArray()
|
||||||
return node;
|
.map((el) => {
|
||||||
}
|
const node = parseCheerioNode($, el);
|
||||||
}).filter(el => el);
|
if (node.name || node.text || node.content.length != 0) {
|
||||||
|
return node;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter((el) => el);
|
||||||
|
|
||||||
// ... then parse the elements to create the pairs of title/data
|
// ... then parse the elements to create the pairs of title/data
|
||||||
return parsePostElements(elements);
|
return parsePostElements(elements);
|
||||||
}
|
}
|
||||||
//#endregion Public methods
|
//#endregion Public methods
|
||||||
|
|
||||||
|
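// Illustrative usage sketch (not from the original source). The HTML snippet and
// the "article.message-body" selector are assumptions made for the example, and
// cheerio is assumed to be imported in this module.
const $example = cheerio.load(
  '<article class="message-body"><b>Overview</b>: A short description</article>'
);
const parsedExample = parseF95ThreadPost($example, $example("article.message-body"));
// `parsedExample` is an IPostElement[] pairing each title with its data.
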
@@ -47,50 +54,57 @@ export function parseF95ThreadPost($: cheerio.Root, post: cheerio.Cheerio): IPos
 * Process a spoiler element by getting its text broken
 * down by any other spoiler elements present.
 */
function parseCheerioSpoilerNode(
  $: cheerio.Root,
  spoiler: cheerio.Cheerio
): IPostElement {
  // A spoiler block is composed of a div with class "bbCodeSpoiler",
  // containing a div "bbCodeSpoiler-content" containing, in cascade,
  // a div with class "bbCodeBlock--spoiler" and a div with class "bbCodeBlock-content".
  // This last tag contains the required data.

  // Local variables
  const BUTTON_CLASS = "button.bbCodeSpoiler-button";
  const SPOILER_CONTENT_CLASS =
    "div.bbCodeSpoiler-content > div.bbCodeBlock--spoiler > div.bbCodeBlock-content";
  const content: IPostElement = {
    type: "Spoiler",
    name: "",
    text: "",
    content: []
  };

  // Find the title of the spoiler (contained in the button)
  const button = spoiler.find(BUTTON_CLASS).toArray().shift();
  content.name = $(button).text().trim();

  // Parse the content of the spoiler
  spoiler
    .find(SPOILER_CONTENT_CLASS)
    .contents()
    .map((idx, el) => {
      // Convert the element
      const element = $(el);

      // Parse nested spoiler
      if (element.attr("class") === "bbCodeSpoiler") {
        const spoiler = parseCheerioSpoilerNode($, element);
        content.content.push(spoiler);
      }
      //@ts-ignore
      // else if (el.name === "br") {
      //   // Add new line
      //   content.text += "\n";
      // }
      else if (el.type === "text") {
        // Append text
        content.text += element.text();
      }
    });

  // Clean text
  content.text = content.text.replace(/\s\s+/g, " ").trim();
  return content;
}

/**
@@ -98,11 +112,11 @@ function parseCheerioSpoilerNode($: cheerio.Root, spoiler: cheerio.Cheerio): IPo
 * This also includes formatted nodes (i.e. `<b>`).
 */
function isTextNode(node: cheerio.Element): boolean {
  const formattedTags = ["b", "i"];
  const isText = node.type === "text";
  const isFormatted = node.type === "tag" && formattedTags.includes(node.name);

  return isText || isFormatted;
}

/**

@ -110,13 +124,17 @@ function isTextNode(node: cheerio.Element): boolean {
 * Also includes formatted text elements (i.e. `<b>`).
 */
function getCheerioNonChildrenText(node: cheerio.Cheerio): string {
  // Find all the text nodes in the node
  const text = node
    .first()
    .contents()
    .filter((idx, el) => {
      return isTextNode(el);
    })
    .text();

  // Clean and return the text
  return text.replace(/\s\s+/g, " ").trim();
}
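
A quick sketch of why filtering the direct contents matters: `.text()` flattens every descendant, while the approach above keeps only the node's own text plus formatted tags. The markup is invented for illustration.

// Illustrative sketch only (hypothetical markup).
import cheerio from "cheerio";

const $ = cheerio.load("<p>Genre: <b>RPG</b> <span>ignored child text</span></p>");
const paragraph = $("p");

// .text() includes every descendant
console.log(paragraph.text()); // "Genre: RPG ignored child text"

// Filtering the direct contents keeps only own text plus <b>/<i> nodes
const ownText = paragraph
  .first()
  .contents()
  .filter(
    (idx, el) =>
      el.type === "text" || (el.type === "tag" && ["b", "i"].includes(el.name))
  )
  .text();
console.log(ownText.replace(/\s\s+/g, " ").trim()); // "Genre: RPG"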

@ -124,28 +142,27 @@ function getCheerioNonChildrenText(node: cheerio.Cheerio): string {
 * link or image. If not, it returns `null`.
 */
function parseCheerioLinkNode(element: cheerio.Cheerio): ILink | null {
  //@ts-ignore
  const name = element[0]?.name;
  const link: ILink = {
    name: "",
    type: "Link",
    text: "",
    href: "",
    content: []
  };

  if (name === "img") {
    link.type = "Image";
    link.text = element.attr("alt");
    link.href = element.attr("data-src");
  } else if (name === "a") {
    link.type = "Link";
    link.text = element.text().replace(/\s\s+/g, " ").trim();
    link.href = element.attr("href");
  }

  return link.href ? link : null;
}
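
A brief sketch of the two cases the helper distinguishes; the markup and URLs are invented for illustration.

// Illustrative sketch only (hypothetical markup and URLs).
import cheerio from "cheerio";

const $ = cheerio.load(
  '<a href="https://example.org/thread">Thread  link</a>' +
    '<img alt="Cover" data-src="https://example.org/cover.png" />'
);

// <a> nodes become "Link" elements: text from the node, href from the attribute
console.log($("a").attr("href")); // "https://example.org/thread"
console.log($("a").text().replace(/\s\s+/g, " ").trim()); // "Thread link"

// <img> nodes become "Image" elements: text from alt, href from data-src
console.log($("img").attr("alt")); // "Cover"
console.log($("img").attr("data-src")); // "https://example.org/cover.png"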

@ -153,84 +170,93 @@ function parseCheerioLinkNode(element: cheerio.Cheerio): ILink | null {
 * in the `Content` field in case it has no information.
 */
function reducePostElement(element: IPostElement): IPostElement {
  if (element.content.length === 1) {
    const content = element.content[0] as IPostElement;
    const nullValues =
      (!element.name || !content.name) && (!element.text || !content.text);
    const sameValues =
      element.name === content.name || element.text === content.text;

    if (nullValues || sameValues) {
      element.name = element.name || content.name;
      element.text = element.text || content.text;
      element.content.push(...content.content);
      element.type = content.type;

      // If the content is a link, add the HREF to the element
      const contentILink = content as ILink;
      const elementILink = element as ILink;
      if (contentILink.href) elementILink.href = contentILink.href;
    }
  }

  return element;
}
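
To make the reduction concrete, a minimal data sketch with a locally defined stand-in type; the values are invented and the trailing comment only approximates the result.

// Simplified stand-in type, local to this sketch (the real interface is IPostElement).
interface SketchElement {
  type: string;
  name: string;
  text: string;
  content: SketchElement[];
}

// A wrapper whose single child holds all the information...
const wrapper: SketchElement = {
  type: "Empty",
  name: "",
  text: "",
  content: [{ type: "Text", name: "", text: "A short overview.", content: [] }]
};

// ...passes the "null or identical values" check above, so after reduction the
// wrapper itself ends up roughly as:
// { type: "Text", name: "", text: "A short overview.", content: [...] }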

/**
 * Transform a `cheerio.Cheerio` node into an `IPostElement` element with its subnodes.
 * @param reduce Compress subsequent subnodes if they contain no information. Default: `true`.
 */
function parseCheerioNode(
  $: cheerio.Root,
  node: cheerio.Element,
  reduce = true
): IPostElement {
  // Local variables
  const content: IPostElement = {
    type: "Empty",
    name: "",
    text: "",
    content: []
  };
  const cheerioNode = $(node);

  if (isTextNode(node)) {
    content.text = cheerioNode.text().replace(/\s\s+/g, " ").trim();
    content.type = "Text";
  } else {
    // Get the number of children that the element owns
    const nChildren = cheerioNode.children().length;

    // Get the text of the element without children
    content.text = getCheerioNonChildrenText(cheerioNode);

    // Parse spoilers
    if (cheerioNode.attr("class") === "bbCodeSpoiler") {
      const spoiler = parseCheerioSpoilerNode($, cheerioNode);

      // Add element if not null
      if (spoiler) {
        content.content.push(spoiler);
        content.type = "Spoiler";
      }
    }
    // Parse links
    else if (nChildren === 0 && cheerioNode.length != 0) {
      const link = parseCheerioLinkNode(cheerioNode);

      // Add element if not null
      if (link) {
        content.content.push(link);
        content.type = "Link";
      }
    } else {
      cheerioNode.children().map((idx, el) => {
        // Parse the children of the element passed as parameter
        const childElement = parseCheerioNode($, el);

        // If the child is valid (not empty), push it
        if (
          (childElement.text || childElement.content.length !== 0) &&
          !isTextNode(el)
        ) {
          content.content.push(childElement);
        }
      });
    }
  }

  return reduce ? reducePostElement(content) : content;
}
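
As a rough illustration of the recursive dispatch above (text node, childless link, recursion into children), here is a simplified, self-contained walker; it sketches the idea and is not the library implementation.

// Simplified sketch of the dispatch pattern (hypothetical markup, not library code).
import cheerio from "cheerio";

function sketchWalk($: cheerio.Root, node: cheerio.Element, depth = 0): void {
  const el = $(node);
  const pad = "  ".repeat(depth);

  if (node.type === "text") {
    const text = el.text().trim();
    if (text) console.log(`${pad}Text: "${text}"`);
  } else if (el.children().length === 0 && el.is("a, img")) {
    console.log(`${pad}Link: ${el.attr("href") ?? el.attr("data-src")}`);
  } else {
    console.log(`${pad}Container (${el.children().length} child elements)`);
    el.contents().each((idx, child) => sketchWalk($, child, depth + 1));
  }
}

const $ = cheerio.load(
  '<div>Genre: <b>RPG</b> <a href="https://example.org">link</a></div>'
);
sketchWalk($, $("div")[0]);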

@ -238,49 +264,49 @@ function parseCheerioNode($: cheerio.Root, node: cheerio.Element, reduce = true)
 * the corresponding value to each characterizing element (i.e. author).
 */
function parsePostElements(elements: IPostElement[]): IPostElement[] {
  // Local variables
  const pairs: IPostElement[] = [];
  const specialCharsRegex = /^[-!$%^&*()_+|~=`{}\[\]:";'<>?,.\/]/;
  const specialRegex = new RegExp(specialCharsRegex);

  for (let i = 0; i < elements.length; i++) {
    // If the text starts with a special char, clean it
    const startWithSpecial = specialRegex.test(elements[i].text);

    // Get the latest IPostElement in "pairs"
    const lastIndex = pairs.length - 1;
    const lastPair = pairs[lastIndex];

    // If this statement is valid, we have a "data"
    if (elements[i].type === "Text" && startWithSpecial && pairs.length > 0) {
      // We merge this element with the last element appended to 'pairs'
      const cleanText = elements[i].text.replace(specialCharsRegex, "").trim();
      lastPair.text = lastPair.text || cleanText;
      lastPair.content.push(...elements[i].content);
    }
    // This is a special case
    else if (elements[i].text.startsWith("Overview:\n")) {
      // We add the overview to the pairs as a text element
      elements[i].type = "Text";
      elements[i].name = "Overview";
      elements[i].text = elements[i].text.replace("Overview:\n", "");
      pairs.push(elements[i]);
    }
    // We have an element that refers to the previous "title"
    else if (elements[i].type != "Text" && pairs.length > 0) {
      // We append this element to the content of the last title
      lastPair.content.push(elements[i]);
    }
    // ... else we have a "title" (we need to swap the text to the name because it is a title)
    else {
      const swap: IPostElement = Object.assign({}, elements[i]);
      swap.name = elements[i].text;
      swap.text = "";
      pairs.push(swap);
    }
  }

  return pairs;
}

//#endregion Private methods
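
To make the title/value pairing performed by parsePostElements concrete, a minimal data sketch; the element values are invented for illustration.

// Hand-written input of the kind parsePostElements receives (illustration only).
const sketchInput = [
  { type: "Text", name: "", text: "Developer", content: [] },
  { type: "Text", name: "", text: ": Example Studio", content: [] }
];

// The first element has no leading special character, so it becomes a "title"
// (its text is swapped into name); the second starts with ":" and is merged
// into it, producing roughly:
// [{ type: "Text", name: "Developer", text: "Example Studio", content: [] }]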

@ -11,14 +11,17 @@ import getURLsFromQuery from "./fetch-data/fetch-query.js";
 * @param {Number} limit
 * Maximum number of items to get. Default: 30
 */
export default async function search<T extends IBasic>(
  query: IQuery,
  limit = 30
): Promise<T[]> {
  // Fetch the URLs
  const urls: string[] = await getURLsFromQuery(query, limit);

  // Fetch the data
  const results = urls.map((url, idx) => {
    return getHandiworkInformation<T>(url);
  });

  return Promise.all(results);
}
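
A hedged usage sketch for this function: the import paths below are assumptions, only the search<T>(query, limit) contract comes from the code above.

// Usage sketch; module paths are hypothetical.
import search from "./search.js"; // hypothetical path
import { IBasic, IQuery } from "./interfaces.js"; // hypothetical path

async function firstTen(query: IQuery): Promise<IBasic[]> {
  // URLs matching the query are resolved first, then every thread is parsed
  // concurrently; the returned promise settles when all of them are done.
  return search<IBasic>(query, 10);
}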

@ -12,53 +12,68 @@ import log4js from "log4js";
import Session from "./classes/session.js";

// Types declaration
export type TPrefixDict = { [n: number]: string };
type TPrefixKey = "engines" | "statuses" | "tags" | "others";

/**
 * Class containing variables shared between modules.
 */
export default abstract class Shared {
  //#region Fields

  private static _isLogged = false;
  private static _prefixes: { [key in TPrefixKey]: TPrefixDict } = {} as {
    [key in TPrefixKey]: TPrefixDict;
  };
  private static _logger: log4js.Logger = log4js.getLogger();
  private static _session = new Session(join(tmpdir(), "f95session.json"));

  //#endregion Fields

  //#region Getters

  /**
   * Indicates whether a user is logged in to the F95Zone platform or not.
   */
  static get isLogged(): boolean {
    return this._isLogged;
  }
  /**
   * List of platform prefixes and tags.
   */
  static get prefixes(): { [s: string]: TPrefixDict } {
    return this._prefixes;
  }
  /**
   * Logger object used to write to both file and console.
   */
  static get logger(): log4js.Logger {
    return this._logger;
  }
  /**
   * Path to the cache used by this module which contains engines, statuses, tags...
   */
  static get cachePath(): string {
    return join(tmpdir(), "f95cache.json");
  }
  /**
   * Session on the F95Zone platform.
   */
  static get session(): Session {
    return this._session;
  }

  //#endregion Getters

  //#region Setters

  static setPrefixPair(key: TPrefixKey, val: TPrefixDict): void {
    this._prefixes[key] = val;
  }

  static setIsLogged(val: boolean): void {
    this._isLogged = val;
  }

  //#endregion Setters
}
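
A short sketch of how another module might consume these statics; the relative import path is an assumption.

// Illustrative usage only; the import path is hypothetical.
import Shared from "./shared.js";

Shared.setIsLogged(true);

if (Shared.isLogged) {
  // The shared log4js logger and cache path are available from anywhere
  Shared.logger.info(`Cache stored at ${Shared.cachePath}`);
}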