aboutsummaryrefslogtreecommitdiff
path: root/pages/api/v2
diff options
context:
space:
mode:
Diffstat (limited to 'pages/api/v2')
-rw-r--r--pages/api/v2/episode/[id].js111
-rw-r--r--pages/api/v2/etc/recent/[page].js26
-rw-r--r--pages/api/v2/etc/schedule/index.js71
-rw-r--r--pages/api/v2/info/[id].js39
-rw-r--r--pages/api/v2/source/index.js47
5 files changed, 294 insertions, 0 deletions
diff --git a/pages/api/v2/episode/[id].js b/pages/api/v2/episode/[id].js
new file mode 100644
index 0000000..1d328f6
--- /dev/null
+++ b/pages/api/v2/episode/[id].js
@@ -0,0 +1,111 @@
+import axios from "axios";
+import redis from "../../../../lib/redis";
+
+const CONSUMET_URI = process.env.API_URI;
+const API_KEY = process.env.API_KEY;
+
/**
 * Fetch the episode list for an AniList id from the Consumet API.
 *
 * @param {string} id - AniList anime id.
 * @param {boolean} dub - When true, skip Consumet entirely (this route has no
 *   dub metadata) and return an empty list.
 * @returns {Promise<Array>} Single-provider array in the client's expected
 *   shape, or [] on error / no episodes.
 */
async function fetchConsumet(id, dub) {
  try {
    // Consumet results carry no dub information, so a dub request gets nothing.
    if (dub) {
      return [];
    }

    const { data } = await axios.get(`${CONSUMET_URI}/meta/anilist/info/${id}`);

    // The original `!data?.episodes?.length > 0` only worked by accident
    // (`!x` binds before `>`); this is the intended "no episodes" check.
    if (!data?.episodes?.length) {
      return [];
    }

    return [
      {
        map: true,
        providerId: "gogoanime",
        // Copy before reversing so the axios response object is not mutated.
        episodes: [...data.episodes].reverse(),
      },
    ];
  } catch (error) {
    console.error(error);
    return [];
  }
}
+
/**
 * Fetch provider episode lists for an AniList id from the Anify API.
 *
 * Drops unsupported providers (animepahe, kass) and normalizes gogoanime
 * episodes to ascending order.
 *
 * @param {string} id - AniList anime id.
 * @returns {Promise<Array>} Provider entries, or [] on error / missing key.
 */
async function fetchAnify(id) {
  try {
    // Use the module-level constant (the original re-read process.env here,
    // inconsistently with the request below).
    if (!API_KEY) {
      return [];
    }

    const { data } = await axios.get(
      `https://api.anify.tv/episodes/${id}?apikey=${API_KEY}`
    );

    if (!data) {
      return [];
    }

    // Remove providers the player does not support.
    const filtered = data.filter(
      (item) => item.providerId !== "animepahe" && item.providerId !== "kass"
    );

    // gogoanime episodes arrive newest-first; reverse a copy so the response
    // object is not mutated.
    return filtered.map((provider) =>
      provider.providerId === "gogoanime"
        ? { ...provider, episodes: [...provider.episodes].reverse() }
        : provider
    );
  } catch (error) {
    console.error(error);
    return [];
  }
}
+
/**
 * GET /api/v2/episode/[id]
 * Aggregate episode lists from Consumet and Anify, cached in Redis.
 *
 * Query params (all strings): id, releasing ("true"/"false"), dub.
 */
export default async function handler(req, res) {
  const { id, releasing = "false", dub = "false" } = req.query;

  // Query params are strings: previously `dub=false` was truthy and wrongly
  // activated the dub filter. Normalize to a real boolean once.
  const wantDub = dub === true || dub === "true";

  // Releasing shows change often: cache 10 minutes; finished shows: 1 week.
  const cacheTime = releasing === "true" ? 60 * 10 : 60 * 60 * 24 * 7;

  // Keep only providers that have at least one dubbed episode.
  const onlyDubbed = (list) =>
    list.filter((item) => item.episodes.some((epi) => epi.hasDub === true));

  let cached;
  if (redis) {
    cached = await redis.get(id);
    console.log("using redis");
  }

  if (cached) {
    const parsed = JSON.parse(cached);
    return res.status(200).json(wantDub ? onlyDubbed(parsed) : parsed);
  }

  const [consumet, anify] = await Promise.all([
    fetchConsumet(id, wantDub),
    fetchAnify(id),
  ]);

  const data = [...consumet, ...anify];

  // Cache the unfiltered aggregate; dub filtering happens per-request.
  if (redis) {
    await redis.set(id, JSON.stringify(data), "EX", cacheTime);
  }

  if (wantDub) {
    return res.status(200).json(onlyDubbed(data));
  }

  console.log("fresh data");

  return res.status(200).json(data);
}
diff --git a/pages/api/v2/etc/recent/[page].js b/pages/api/v2/etc/recent/[page].js
new file mode 100644
index 0000000..19495c1
--- /dev/null
+++ b/pages/api/v2/etc/recent/[page].js
@@ -0,0 +1,26 @@
+const API_URL = process.env.API_URI;
+
/**
 * GET /api/v2/etc/recent/[page]
 * Proxy Consumet's recent-episodes feed, with ONA entries removed.
 */
export default async function handler(req, res) {
  try {
    const page = req.query.page || 1;

    const response = await fetch(
      `${API_URL}/meta/anilist/recent-episodes?page=${page}&perPage=30&provider=gogoanime`
    );
    const data = await response.json();

    // The client grid only shows TV-style entries, so drop ONAs.
    const results = data?.results?.filter((i) => i.type !== "ONA") ?? [];
    const hasNextPage = data?.hasNextPage ?? false;

    return res.status(200).json({ hasNextPage, results });
  } catch (error) {
    // Error instances serialize to {} via JSON.stringify; send the message.
    return res
      .status(500)
      .json({ error: error?.message ?? "Internal server error" });
  }
}
diff --git a/pages/api/v2/etc/schedule/index.js b/pages/api/v2/etc/schedule/index.js
new file mode 100644
index 0000000..7a13fff
--- /dev/null
+++ b/pages/api/v2/etc/schedule/index.js
@@ -0,0 +1,71 @@
+import axios from "axios";
+import cron from "cron";
+import redis from "../../../../../lib/redis";
+
+const API_KEY = process.env.API_KEY;
+
+// Function to fetch new data
// Retrieve the weekly airing schedule from the Anify API.
// Resolves to the parsed payload, or null when the request fails.
async function fetchData() {
  try {
    const response = await axios.get(
      `https://api.anify.tv/schedule?apikey=${API_KEY}`
    );
    return response.data;
  } catch (error) {
    console.error("Error fetching data:", error);
    return null;
  }
}
+
+// Function to refresh the cache with new data
+async function refreshCache() {
+ const newData = await fetchData();
+ if (newData) {
+ if (redis) {
+ await redis.set(
+ "schedule",
+ JSON.stringify(newData),
+ "EX",
+ 60 * 60 * 24 * 7
+ );
+ }
+ console.log("Cache refreshed successfully.");
+ }
+}
+
// Schedule cache refresh every Monday at 00:00 AM (local time)
// NOTE(review): this runs at module load of a Next.js API route, so every
// server instance (or serverless cold start) creates its own cron job —
// confirm this is intended for the deployment environment.
const job = new cron.CronJob("0 0 * * 1", () => {
  refreshCache();
});
job.start();
+
/**
 * GET /api/v2/etc/schedule
 * Serve the airing schedule, preferring the Redis cache (1-week TTL).
 */
export default async function handler(req, res) {
  try {
    let cached;
    if (redis) {
      cached = await redis.get("schedule");
    }
    if (cached) {
      return res.status(200).json(JSON.parse(cached));
    }

    const data = await fetchData();
    if (!data) {
      return res.status(404).json({ message: "Schedule not found" });
    }

    if (redis) {
      await redis.set("schedule", JSON.stringify(data), "EX", 60 * 60 * 24 * 7);
    }
    return res.status(200).json(data);
  } catch (error) {
    // Error instances serialize to {} via JSON.stringify; send the message.
    return res
      .status(500)
      .json({ error: error?.message ?? "Internal server error" });
  }
}
diff --git a/pages/api/v2/info/[id].js b/pages/api/v2/info/[id].js
new file mode 100644
index 0000000..41daa6e
--- /dev/null
+++ b/pages/api/v2/info/[id].js
@@ -0,0 +1,39 @@
+import axios from "axios";
+import redis from "../../../../lib/redis";
+
+const API_KEY = process.env.API_KEY;
+
/**
 * Fetch full media info for the given id from the Anify API.
 *
 * @param {string} id - AniList anime id.
 * @returns {Promise<object|null>} Info payload, or null when the request fails.
 */
export async function fetchInfo(id) {
  try {
    const response = await axios.get(
      `https://api.anify.tv/info/${id}?apikey=${API_KEY}`
    );
    return response.data;
  } catch (error) {
    console.error("Error fetching data:", error);
    return null;
  }
}
+
/**
 * GET /api/v2/info/[id]
 * Serve media info, cached in Redis for 10 minutes.
 */
export default async function handler(req, res) {
  const id = req.query.id;

  // Namespace the key: the episode route caches under the bare id with a
  // different TTL and payload shape, so sharing the key let the two routes
  // overwrite each other's entries.
  const cacheKey = `info:${id}`;

  let cached;
  if (redis) {
    cached = await redis.get(cacheKey);
  }
  if (cached) {
    return res.status(200).json(JSON.parse(cached));
  }

  const data = await fetchInfo(id);
  if (!data) {
    // Was "Schedule not found" — a copy-paste from the schedule route.
    return res.status(404).json({ message: "Info not found" });
  }

  if (redis) {
    // Short TTL: info (airing status etc.) changes frequently.
    await redis.set(cacheKey, JSON.stringify(data), "EX", 60 * 10);
  }
  return res.status(200).json(data);
}
diff --git a/pages/api/v2/source/index.js b/pages/api/v2/source/index.js
new file mode 100644
index 0000000..51ac5ec
--- /dev/null
+++ b/pages/api/v2/source/index.js
@@ -0,0 +1,47 @@
+import axios from "axios";
+
+const CONSUMET_URI = process.env.API_URI;
+const API_KEY = process.env.API_KEY;
+
// Resolve streaming sources for a watch id via Consumet's AniList meta route.
// Resolves to the watch payload, or null when the request fails.
async function consumetSource(id) {
  try {
    const response = await axios.get(
      `${CONSUMET_URI}/meta/anilist/watch/${id}`
    );
    return response.data;
  } catch (error) {
    console.error(error);
    return null;
  }
}
+
/**
 * Resolve streaming sources from the Anify API.
 *
 * @param {string} providerId - Upstream provider (e.g. "gogoanime").
 * @param {string} watchId - Provider-specific episode id.
 * @param {string|number} episode - Episode number.
 * @param {string} id - AniList anime id.
 * @param {string} sub - "sub" or "dub".
 * @returns {Promise<object|null>} Source payload, or null on failure.
 */
async function anifySource(providerId, watchId, episode, id, sub) {
  try {
    // URLSearchParams encodes every value; the original only encoded watchId.
    const params = new URLSearchParams({
      providerId,
      watchId,
      episode,
      id,
      subType: sub,
      apikey: API_KEY,
    });
    const { data } = await axios.get(
      `https://api.anify.tv/sources?${params.toString()}`
    );
    return data;
  } catch (error) {
    // The original swallowed errors silently; log so failures are visible.
    console.error(error);
    return null;
  }
}
+
/**
 * POST /api/v2/source
 * Resolve streaming sources from either "anify" or "consumet".
 *
 * Body: { source, providerId, watchId, episode, id, sub }.
 */
export default async function handler(req, res) {
  if (req.method !== "POST") {
    return res.status(405).json({ message: "Method not allowed" });
  }

  const { source, providerId, watchId, episode, id, sub = "sub" } = req.body;

  if (source === "anify") {
    const data = await anifySource(providerId, watchId, episode, id, sub);
    return res.status(200).json(data);
  }

  if (source === "consumet") {
    const data = await consumetSource(watchId);
    return res.status(200).json(data);
  }

  // Previously an unrecognized source sent no response and the request hung.
  return res.status(400).json({ message: "Unknown source" });
}