Yahya Alnwsany committed
Commit 83bdf43 · 1 Parent(s): 232f377

first commit

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ src/database/webui.db filter=lfs diff=lfs merge=lfs -text
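
The added attribute line routes the SQLite database through Git LFS instead of storing it directly in the repository. A minimal sketch of how such an entry is typically produced, assuming Git LFS is already installed in the local clone:

    git lfs install
    git lfs track "src/database/webui.db"
    git add .gitattributes src/database/webui.db
    git commit -m "Track webui.db with Git LFS"

git lfs track appends the matching filter line to .gitattributes, which is what this hunk records.
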
Dockerfile ADDED
@@ -0,0 +1,51 @@
+ #
+ # SPDX-FileCopyrightText: Hadad <[email protected]>
+ # SPDX-License-Identifier: MIT
+ #
+
+ # Use a specific container image for the application
+ FROM hadadrjt/ai:latest
+
+ # Set the main working directory inside the container
+ WORKDIR /app/backend
+
+ # Copy the database file into the container
+ # This database is a placeholder or dummy,
+ # and the core configuration is located in the Environment
+ # and Secret Environment settings of Hugging Face Spaces.
+ COPY --chown=$UID:$GID src/database/webui.db /app/backend/data/
+
+ # Set the database permission
+ RUN chmod 777 /app/backend/data/webui.db
+
+ # Search Engine Optimization (SEO)
+ # Robots Exclusion Protocol
+ RUN search='<meta name="robots" content="noindex,nofollow"' && \
+ replace='<meta name="robots" content="index,follow"' && \
+ find /app -type f -name '*.html' -exec grep -l "$search" {} \; | \
+ while IFS= read -r file; do \
+ echo "Processing: $file" && \
+ if sed -i "s|$search|$replace|g" "$file"; then \
+ echo "Success: $file updated"; \
+ else \
+ echo "Error: failed to update $file"; \
+ exit 1; \
+ fi; \
+ done
+ # https://umint-ai.hf.space/robots.txt
+ COPY --chown=$UID:$GID src/crawlers/robots.txt /app/build/
+ # Sitemaps
+ # https://umint-ai.hf.space/sitemap.xml
+ COPY --chown=$UID:$GID src/crawlers/sitemap.xml /app/build/
+ # Google Search Console Tools
+ # https://umint-ai.hf.space/google15aba15fe250d693.html
+ COPY --chown=$UID:$GID src/webmasters/google.html /app/build/google15aba15fe250d693.html
+ # Bing Webmaster Tools
+ # https://umint-ai.hf.space/BingSiteAuth.xml
+ COPY --chown=$UID:$GID src/webmasters/bing.xml /app/build/BingSiteAuth.xml
+
+ # Open the port so the application can be accessed
+ EXPOSE 8000
+
+ # Start the application using the startup script
+ CMD ["bash", "start.sh"]
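
Before pushing, the Dockerfile can be sanity-checked locally. A minimal sketch, assuming Docker is available and the commands run from the repository root; the tag umint-ai-test is an arbitrary local name, not part of this commit:

    docker build -t umint-ai-test .
    docker run --rm -p 8000:8000 umint-ai-test

The container should then answer on http://localhost:8000, matching the EXPOSE 8000 declaration above.
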
LICENSE ADDED
@@ -0,0 +1 @@
+ MIT License
src/crawlers/robots.txt ADDED
@@ -0,0 +1,4 @@
+ User-agent: *
+ Allow: /
+
+ Sitemap: https://umint-ai.hf.space/sitemap.xml
src/crawlers/sitemap.xml ADDED
@@ -0,0 +1,12 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <!--
+ SPDX-FileCopyrightText: Hadad <[email protected]>
+ SPDX-License-Identifier: MIT
+ -->
+ <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+ <url>
+ <loc>https://umint-ai.hf.space</loc>
+ <changefreq>daily</changefreq>
+ <priority>1.0</priority>
+ </url>
+ </urlset>
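
Once the Space rebuilds, the crawler files copied into /app/build should be served from the site root. A minimal sketch of spot-checking them, assuming the deployed URL used throughout these files:

    curl -fsS https://umint-ai.hf.space/robots.txt
    curl -fsS https://umint-ai.hf.space/sitemap.xml
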
src/database/webui.db ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6aaff0e04e621ba56703c8f641d4cbb6f0e6ad6d0c48e980827d3e07f1b074b
+ size 9211904
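
Only this LFS pointer is stored in Git; the actual ~9 MB database lives in LFS storage. A minimal sketch of materializing the real file in a fresh clone, assuming Git LFS is installed:

    git lfs install
    git lfs pull --include="src/database/webui.db"
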
src/webmasters/bing.xml ADDED
@@ -0,0 +1,4 @@
+ <?xml version="1.0"?>
+ <users>
+ <user>6F0E69E47393F89FAEEEE8B9212EBAE4</user>
+ </users>
src/webmasters/google.html ADDED
@@ -0,0 +1 @@
+ google-site-verification: google15aba15fe250d693.html