aboutsummaryrefslogtreecommitdiff
path: root/pages/api/v2/etc
diff options
context:
space:
mode:
Diffstat (limited to 'pages/api/v2/etc')
-rw-r--r--pages/api/v2/etc/recent/[page].js26
-rw-r--r--pages/api/v2/etc/schedule/index.js71
2 files changed, 97 insertions, 0 deletions
diff --git a/pages/api/v2/etc/recent/[page].js b/pages/api/v2/etc/recent/[page].js
new file mode 100644
index 0000000..19495c1
--- /dev/null
+++ b/pages/api/v2/etc/recent/[page].js
@@ -0,0 +1,26 @@
const API_URL = process.env.API_URI;

/**
 * GET /api/v2/etc/recent/[page]
 *
 * Proxies the upstream "recent episodes" endpoint (AniList meta via the
 * configured provider), filtering out entries of type "ONA".
 *
 * Query params:
 *   page - 1-based page number; defaults to 1 when absent.
 *
 * Responds 200 with { hasNextPage, results } or 500 with { error }.
 */
export default async function handler(req, res) {
  try {
    const page = req.query.page || 1;

    const response = await fetch(
      `${API_URL}/meta/anilist/recent-episodes?page=${page}&perPage=30&provider=gogoanime`
    );
    const data = await response.json();

    // Upstream may omit `results`; never return undefined to clients.
    const results = data?.results?.filter((i) => i.type !== "ONA") ?? [];
    const hasNextPage = data?.hasNextPage ?? false;

    return res.status(200).json({ hasNextPage, results });
  } catch (error) {
    // Error instances stringify to "{}" under JSON.stringify, so send
    // the message rather than the raw object.
    return res.status(500).json({ error: error?.message ?? String(error) });
  }
}
diff --git a/pages/api/v2/etc/schedule/index.js b/pages/api/v2/etc/schedule/index.js
new file mode 100644
index 0000000..7a13fff
--- /dev/null
+++ b/pages/api/v2/etc/schedule/index.js
@@ -0,0 +1,71 @@
+import axios from "axios";
+import cron from "cron";
+import redis from "../../../../../lib/redis";
+
const API_KEY = process.env.API_KEY;

// Fetch the current schedule from the upstream Anify API.
// Returns the parsed payload, or null when the request fails
// (the failure is logged; callers treat null as "no data").
async function fetchData() {
  try {
    const response = await axios.get(
      `https://api.anify.tv/schedule?apikey=${API_KEY}`
    );
    return response.data;
  } catch (err) {
    console.error("Error fetching data:", err);
    return null;
  }
}
+
// Re-fetch the schedule and store it in the redis cache (one-week TTL).
// A failed fetch leaves the existing cache entry untouched.
async function refreshCache() {
  const latest = await fetchData();
  if (!latest) {
    return;
  }
  if (redis) {
    const oneWeekSeconds = 60 * 60 * 24 * 7;
    await redis.set("schedule", JSON.stringify(latest), "EX", oneWeekSeconds);
  }
  console.log("Cache refreshed successfully.");
}
+
// Refresh the schedule cache every Monday at 00:00 (local time).
// refreshCache() is async; catch rejections here (e.g. redis.set
// throwing) so a failed refresh cannot surface as an unhandled
// promise rejection and crash the process.
const job = new cron.CronJob("0 0 * * 1", () => {
  refreshCache().catch((err) =>
    console.error("Scheduled cache refresh failed:", err)
  );
});
job.start();
+
/**
 * GET /api/v2/etc/schedule
 *
 * Serves the schedule from the redis cache when available; otherwise
 * fetches it upstream, caches it for one week (the Monday cron job
 * refreshes it earlier), and returns it.
 *
 * Responds 200 with the schedule, 404 when the upstream fetch yields
 * nothing, or 500 with { error } on unexpected failure.
 */
export default async function handler(req, res) {
  try {
    let cached;
    if (redis) {
      cached = await redis.get("schedule");
    }
    if (cached) {
      return res.status(200).json(JSON.parse(cached));
    }

    const data = await fetchData();
    if (!data) {
      return res.status(404).json({ message: "Schedule not found" });
    }

    if (redis) {
      // One-week TTL, matching the cron refresh cadence.
      await redis.set("schedule", JSON.stringify(data), "EX", 60 * 60 * 24 * 7);
    }
    return res.status(200).json(data);
  } catch (error) {
    // Error instances stringify to "{}" under JSON.stringify, so send
    // the message rather than the raw object.
    return res.status(500).json({ error: error?.message ?? String(error) });
  }
}