// Redirector for AO3 that adds OpenGraph metadata.
1import DOM from "fauxdom"
2import { readFile } from 'node:fs/promises'
3import querystring from 'node:querystring'
4import { join } from 'node:path'
5import themes from '@/lib/themes.js'
6import baseFonts from '@/lib/baseFonts.js'
7import titleFonts from '@/lib/titleFonts.js'
8import { checkItem, checkToggle } from '@/lib/propUtils.js'
9
// Fetch a single work's metadata from the internal works API.
// `archive` overrides the default archive host and is only forwarded as a
// query parameter when it differs from the configured default.
// Throws if the API responds with a non-2xx status (the original silently
// tried to JSON-parse error pages, producing obscure failures downstream).
const getWork = async (workId, archive = null) => {
  const domainParam = (archive && archive !== process.env.ARCHIVE)
    ? `?archive=${encodeURIComponent(archive)}`
    : ''
  const res = await fetch(`http://${process.env.DOMAIN}/api/works/${workId}${domainParam}`)
  if (!res.ok) {
    throw new Error(`Failed to fetch work ${workId}: ${res.status} ${res.statusText}`)
  }
  return res.json()
}
16
// Collapse the ratings of a list of works into a single short code for the
// most restrictive one. Severity order (highest first):
// Not Rated > Explicit > Mature > Teen > General.
const getHighestRating = async (works, archive = null) => {
  const ratings = await Promise.all(
    works.map(async (w) => (await getWork(w.id, archive)).rating)
  )
  const severity = [
    ["Not Rated", "NR"],
    ["Explicit", "E"],
    ["Mature", "M"],
    ["Teen", "T"]
  ]
  for (const [label, code] of severity) {
    if (ratings.includes(label)) {
      return code
    }
  }
  // Nothing restrictive found: treat as General Audiences.
  return "G"
}
33
// Summarize archive warnings across a list of works:
//   "CNTW" — every warning is "Creator Chose Not To Use Archive Warnings"
//   "NW"   — every warning is "No Archive Warnings Apply"
//   "W"    — anything else (a concrete warning, or a mix)
const getHighestWarning = async (works, archive = null) => {
  const warningLists = await Promise.all(works.map(async (w) => {
    const work = await getWork(w.id, archive)
    return work.tags.warnings
  }))
  // BUG FIX: the original de-duplicated with `warnings.indexOf(w)` against
  // the *nested* array of lists, which never matches an individual warning
  // string, so `warningsUnique` was always empty and this function always
  // returned "W". Flatten first, then de-duplicate with a Set.
  const warningsUnique = [...new Set(warningLists.flat())]
  if (warningsUnique.length === 1 && warningsUnique[0] === "Creator Chose Not To Use Archive Warnings") {
    return "CNTW"
  } else if (warningsUnique.length === 1 && warningsUnique[0] === "No Archive Warnings Apply") {
    return "NW"
  }
  return "W"
}
47
// Collapse the categories of a list of works into a single short code:
// a unique single category maps to its code, anything mixed or unknown
// maps to "MX".
const getCategory = async (works, archive = null) => {
  const perWork = await Promise.all(works.map(async (w) => {
    const work = await getWork(w.id, archive)
    return work.category
  }))
  // BUG FIX: the original used reduce() without an initial value, which
  // throws a TypeError when `works` is empty. flat() + Set handles the
  // empty case and de-duplicates in one pass.
  const categoriesUnique = [...new Set(perWork.flat())]
  const codes = {
    "F/F": "F",
    "M/M": "M",
    "F/M": "FM",
    "Gen": "G",
    "Multi": "MX",
    "Other": "O"
  }
  if (categoriesUnique.length === 1 && Object.hasOwn(codes, categoriesUnique[0])) {
    return codes[categoriesUnique[0]]
  }
  return "MX"
}
66
// Coerce string-valued props (e.g. parsed from a query string) into their
// natural JS types: "true"/"false" become booleans and fully-numeric integer
// strings become numbers; everything else passes through unchanged.
// Returns a new object; the input is not mutated.
const sanitizeProps = (props) => {
  const propsParsed = {}
  Object.keys(props).forEach((pr) => {
    const value = props[pr]
    if (value === 'true') {
      propsParsed[pr] = true
    } else if (value === 'false') {
      propsParsed[pr] = false
    } else if (typeof value === 'string' && /^-?\d+$/.test(value)) {
      // BUG FIX: the original tested `typeof parseInt(...) === 'Number'`,
      // but typeof yields lowercase 'number', so numeric strings were
      // never converted. Only convert strings that are entirely integers
      // (parseInt would silently truncate things like "12px" or "4.5").
      propsParsed[pr] = parseInt(value, 10)
    } else {
      propsParsed[pr] = value
    }
  })
  return propsParsed
}
84
/**
 * Flatten a work or series payload plus render props into the fields the
 * OpenGraph card renderer consumes: formatted strings, tag lists, theme
 * data, and loaded font buffers.
 *
 * @param {Object} arg
 * @param {'work'|string} arg.type - 'work' for a single work; anything else
 *   is treated as a series-like object carrying a `.works` array and `.notes`
 * @param {Object} arg.data - raw payload from the works/series API
 * @param {Map} arg.props - render options, read via checkItem()/props.get()
 * @returns {Promise<Object>} display-ready fields plus font data for the renderer
 */
export default async function sanitizeData ({ type, data, props }) {
  const archive = props && checkItem('archive', props) ? props.get('archive') : process.env.ARCHIVE
  const baseFont = checkItem('baseFont', props) ? props.get('baseFont') : process.env.DEFAULT_BASE_FONT
  const baseFontData = baseFonts[baseFont]
  const titleFont = checkItem('titleFont', props) ? props.get('titleFont') : process.env.DEFAULT_TITLE_FONT
  const titleFontData = titleFonts[titleFont]
  // Strip scheme and slash so the archive prop compares against bare hostnames.
  const archClean = checkItem('archive', props) ? props.get('archive').replace("https://", '').replace('/', '') : null
  // NOTE(review): `siteMap` is referenced below but never imported in this
  // file — this branch throws a ReferenceError whenever a non-default archive
  // is passed without an explicit theme. Confirm the missing import (likely a
  // '@/lib' module).
  const theme = checkItem('theme', props) ? props.get('theme') : (checkItem('archive', props) && !["ao3.org", "archiveofourown.org", "archive.transformativeworks.org"].includes(archClean) && Object.values(siteMap).includes(archClean) ? Object.keys(siteMap)[Object.values(siteMap).indexOf(archClean)] : process.env.DEFAULT_THEME)
  const themeData = themes[theme]
  // A chaptered work may need its parent work's summary as a fallback.
  const parentWork = type === 'work' && data.chapterInfo ? await getWork(data.id, archive) : null
  // Load every variant (style/weight) of a font family from disk.
  // BUG FIX (minor): dropped the no-op `.then(x => x)` the original chained
  // onto both Promise.all calls; the two families now load in parallel.
  const loadFonts = (fontData) => Promise.all(fontData.defs.map(async (def) => ({
    name: fontData.displayName,
    data: await readFile(join(process.cwd(), def.path)),
    style: def.style,
    weight: def.weight
  })))
  const [bfs, tfs] = await Promise.all([loadFonts(baseFontData), loadFonts(titleFontData)])
  const authorsFormatted = data.authors
    ? data.authors.map((a) => {
      if (a.anonymous) return "Anonymous"
      if (a.pseud !== a.username) return `${a.pseud} (${a.username})`
      return a.username
    })
    : []
  // rating/warning/category fetches are independent — run them in parallel
  // (the original awaited them one after another).
  const [rating, warning, category] = await Promise.all([
    type === 'work' ? data.rating : getHighestRating(data.works, archive),
    getHighestWarning(type === 'work' ? [data] : data.works, archive),
    getCategory(type === 'work' ? [data] : data.works, archive)
  ])
  // "A, B & C" for multiple authors; undefined when there are none.
  const authorString = (authorsFormatted.length > 1
    ? authorsFormatted.slice(0, -1).join(", ") + " & " +
      authorsFormatted.slice(-1)[0]
    : authorsFormatted[0])
  // Summary precedence — work: chapter summary → custom → work summary →
  // parent work summary; series: custom → series notes.
  const summaryContent = type === 'work'
    ? (props.get('summaryType') === 'chapter' && data.chapterInfo && data.chapterInfo.summary ? data.chapterInfo.summary : (props.get('summaryType') === 'custom' && props.get('customSummary') !== '' ? props.get('customSummary') : (data.summary ? data.summary : (parentWork ? parentWork.summary : ''))))
    : (props.get('summaryType') === 'custom' && props.get('customSummary') !== '' ? props.get('customSummary') : data.notes)
  const formatter = new Intl.NumberFormat('en-US')
  // NOTE(review): series payloads may not carry `words`; formatter.format(undefined)
  // yields "NaN" — confirm upstream always supplies a count.
  const words = formatter.format(data.words)
  // Parse the summary HTML, turn <br> into newlines, strip all remaining
  // tags, and split into lines for the renderer.
  const summaryDOM = new DOM(summaryContent, {decodeEntities: true})
  const summaryFormatted = summaryDOM.innerHTML
    .replace(/\<br(?: \/)?\>/g, "\n")
    .replace(/(<([^>]+)>)/ig, "")
    .split("\n")
  const titleString = type === 'work' ? data.title : data.name
  const chapterString = data.chapterInfo
    ? (data.chapterInfo.name ? data.chapterInfo.name : "Chapter " + data.chapterInfo.index)
    : null
  // "published/total", with "?" for open-ended works.
  const chapterCountString = type === 'work'
    ? (data.chapters ? data.chapters.published + '/' + (data.chapters.total ? data.chapters.total : '?') : '')
    : null
  // Fandom line: one fandom verbatim, two joined with "&", more than two
  // truncated with a "(+n)" overflow count.
  // BUG FIX: the original joined *all* fandoms and still appended the
  // "(+n-2)" count; truncate to the first two as the count implies.
  const fandomString = type === 'work'
    ? (data.fandoms.length > 1
      ? (data.fandoms.length <= 2
        ? data.fandoms.slice(0, -1).join(", ") + " & " + data.fandoms.slice(-1)
        : data.fandoms.slice(0, 2).join(", ") + " (+" + (data.fandoms.length - 2) + ")")
      : data.fandoms[0])
    : ''
  // For series, merge each member work's tag list and de-duplicate.
  // BUG FIX: the original de-duplicated with `data.works.indexOf(tag)`, which
  // never matches a tag string against the array of work objects, so series
  // tag lists always came out empty. (The unused `warnings` local computed the
  // same way was removed.)
  const collectTags = (pick) => [...new Set(data.works.flatMap((w) => pick(w) || []))]
  const charTags = type === 'work' ? data.tags.characters : collectTags((w) => w.tags.characters)
  const relTags = type === 'work' ? data.tags.relationships : collectTags((w) => w.tags.relationships)
  const freeTags = type === 'work' ? data.tags.additional : collectTags((w) => w.tags.additional)

  return {
    topLine: fandomString,
    titleLine: titleString,
    authorLine: authorString,
    chapterLine: chapterString,
    chapterCount: chapterCountString,
    words: words,
    rating: rating,
    warning: warning,
    category: category,
    summary: summaryFormatted,
    theme: themeData,
    charTags: charTags,
    relTags: relTags,
    freeTags: freeTags,
    postedAt: type === 'work' ? data.publishedAt : data.startedAt,
    updatedAt: data.updatedAt,
    baseFont: baseFont,
    titleFont: titleFont,
    props: props,
    opts: {
      fonts: bfs.concat(tfs)
    }
  }
}
191}