forked from lolcat/4get
Compare commits
2 Commits
instances- ... master
Author | SHA1 | Date
---|---|---
lolcat | 13dfa9240c |
Fijxu | 0a53c3605a |
Dockerfile

@@ -1,8 +1,8 @@
-FROM alpine:latest
+FROM alpine:3.21

WORKDIR /var/www/html/4get

RUN apk update && apk upgrade
-RUN apk add php apache2-ssl php83-fileinfo php83-openssl php83-iconv php83-common php83-dom php83-sodium php83-curl curl php83-pecl-apcu php83-apache2 imagemagick php83-pecl-imagick php-mbstring imagemagick-webp imagemagick-jpeg
+RUN apk add php apache2-ssl php84-fileinfo php84-openssl php84-iconv php84-common php84-dom php84-sodium php84-curl curl php84-pecl-apcu php84-apache2 imagemagick php84-pecl-imagick php84-mbstring imagemagick-webp imagemagick-jpeg

COPY . .
@@ -14,4 +14,4 @@ EXPOSE 443
ENV FOURGET_PROTO=http

ENTRYPOINT ["./docker/docker-entrypoint.sh"]
-CMD ["start"]
+CMD ["start"]
data/config.php

@@ -90,15 +90,31 @@ class config{
	// eachother your serber should appear everywhere.
	const INSTANCES = [
		"https://4get.ca",
		"https://4get.nadeko.net",
		"https://4get.zzls.xyz",
		"https://4getus.zzls.xyz",
		"https://4get.silly.computer",
		"https://4get.konakona.moe",
		"https://4get.lvkaszus.pl",
		"https://4g.ggtyler.dev",
		"https://4get.perennialte.ch",
		"https://4get.sijh.net",
		"https://4get.hbubli.cc",
		"https://4get.plunked.party",
		"https://4get.seitan-ayoub.lol",
		"https://4get.etenie.pl",
		"https://4get.lunar.icu",
		"https://4get.dcs0.hu",
		"https://4get.kizuki.lol",
		"https://4get.psily.garden",
		"https://search.milivojevic.in.rs",
		"https://4get.snine.nl",
		"https://4get.datura.network",
		"https://4get.neco.lol",
		"https://4get.lol",
		"https://4get.ch",
		"https://4get.edmateo.site",
		"https://4get.sudovanilla.org",
		"https://search.mint.lgbt"
	];

	// Default user agent to use for scraper requests. Sometimes ignored to get specific webpages
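The surrounding comment suggests this INSTANCES list is the federation list that peer servers are expected to mirror. As a minimal sketch (not 4get's actual code) of how such a list can be consumed, the snippet below picks a random peer other than the host currently being browsed; the stub class only carries a few of the URLs from the diff above.

```php
<?php
// Minimal sketch, not 4get's actual code: one way a federation list like
// config::INSTANCES can be consumed. The stub class holds a subset of the
// URLs added in the diff above.
class config{
	const INSTANCES = [
		"https://4get.ca",
		"https://4get.nadeko.net",
		"https://4get.zzls.xyz"
	];
}

// Return a random peer instance, excluding the host currently being browsed.
function random_peer(string $self): ?string{
	$peers = array_values(array_diff(config::INSTANCES, [$self]));

	return $peers === [] ? null : $peers[array_rand($peers)];
}

echo (random_peer("https://4get.ca") ?? "no peers") . "\n";
```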