Compare commits

...

112 commits

Author SHA1 Message Date
sdomi
ec6a0d81a9 notORM: fix spurious missing backslashes 2025-02-20 15:45:44 +01:00
sdomi
9c403fbc3b notORM: data_add now supports auto-increment IDs 2025-02-19 23:05:57 +01:00
sdomi
affe9e4fbe notORM: fix a few leaky variables 2025-02-19 22:42:42 +01:00
sdomi
febb4087e4 misc: sync up immediate cookie changes with cookies array 2025-02-14 17:41:26 +01:00
sdomi
993941680a notORM: hotfix the off-by-one bug until I can find a better solution 2025-02-13 23:21:02 +01:00
sdomi
4b59b3d257 template: add -uri-num tags for automatic URL manipulation 2025-02-12 04:47:37 +01:00
sdomi
f7627c7af6 template: new array copy solution (... not happy about it, but what can I do) 2025-01-14 16:43:42 +00:00
sdomi
23e85fc7d0 main: add a $run_once variable, to discern between startup and normal operation 2025-01-14 13:40:57 +00:00
sdomi
55814d4427 server: generic r[url_clean] for just getting the current URL w/o params 2025-01-08 21:32:34 +01:00
sdomi
d39956b815 readme: update 2025-01-06 00:33:20 +01:00
Merlin Scholz
1b085fbbdb
Fix accidental session_cookie loss caused by missing IFS unset 2025-01-05 19:37:18 +01:00
sdomi
eaabcc0da2 notORM: fix matching } 2025-01-01 01:37:00 +01:00
sdomi
a66a74208e account: add a config toggle for register behavior 2024-12-26 12:54:28 +01:00
sdomi
284e1c0e70 relicense to BSD 3-Clause 2024-12-23 19:36:03 +01:00
sdomi
53dbaadc6c docker: rewrite 2024-12-23 19:35:02 +01:00
sdomi
b5b44aa4ca dockerfile: revert f938165518 2024-12-23 19:14:23 +01:00
sdomi
fbcdd76b14 account: propagate user from user_reset_password 2024-12-22 05:13:45 +01:00
sdomi
bd445181ee mail: propagate errors on mailsend 2024-12-16 21:19:47 +01:00
sdomi
cc1619e797 notORM: fix an off-by-one affecting ops on the second-last column 2024-12-16 20:06:14 +01:00
sdomi
9d62173cfe misc: fix stripping garbage from url_decode 2024-12-16 19:34:42 +01:00
sdomi
b251e2736c notORM: fix data_replace_value due to sed quirk 2024-12-16 19:34:23 +01:00
sdomi
a2413d7062 notORM: remove debug echo 2024-12-16 17:57:56 +01:00
sdomi
5c099c1472 sec-fixes: add information about the notORM bug 2024-12-15 16:28:24 +01:00
sdomi
a00b1b00ee notORM: secfix for sed inconsistently parsing escaped characters 2024-12-15 15:57:44 +01:00
sdomi
e64bdbb0d9 notORM: temporarily disable new parameter parsing on data_replace 2024-12-10 03:19:50 +01:00
sdomi
b0d76ecc9a notORM: split expr generation into _data_gen_expr 2024-12-06 05:19:45 +01:00
sdomi
4ca9c99b14 tests: add a few notORM tests 2024-12-06 05:19:07 +01:00
sdomi
62e7a9edd9 notORM: implement new syntax for data_yeet + more fixes 2024-12-06 04:04:54 +01:00
sdomi
6d91d057e8 notORM: implement new syntax for data_iter + some fixes 2024-12-06 03:00:25 +01:00
sdomi
a2daafe89a notORM: split argv parsing code into an alias 2024-12-06 01:37:15 +01:00
sdomi
45dc428576 notORM: impl searching for more than one constraint in data_get 2024-12-05 23:50:25 +01:00
sdomi
1c144612de server: normalize x-forwarded-for (somewhat) 2024-12-03 23:48:05 +01:00
sdomi
60b40019aa docs: add info about the router 2024-12-03 20:29:10 +01:00
sdomi
5ee00c6ead server: fix directory traversal
In select cases, if the attacker asked for an URL not starting with a slash (/),
a directory traversal bug could have been triggered. The attack is limited to
directories within `${cfg[namespace]}` (default: `app`) which begin with
`${cfg[root]}` (default: `webroot`).

This means that an adversary could traverse to `app/webroot*`. We never
encouraged / suggested keeping multiple webroots in one namespace, thus it's
doubtful whether any HTTP.sh deployment met the criteria.
2024-12-01 22:52:11 +01:00
sdomi
403ef2b4ee server: normalize a few other things 2024-12-01 22:51:56 +01:00
sdomi
c943b7897e template: prevent expansion on keys (which shouldn't happen anyways) 2024-12-01 22:39:30 +01:00
sdomi
3d8dd9879e server: url_decode all GET params, (we don't care about binary data there) 2024-10-22 16:33:02 +01:00
sdomi
126de1e396 main: fix edge case with cloned app repo 2024-10-11 20:41:37 +02:00
sdomi
da54143a3f server: remove PHP/Python execution handlers, to be replaced with CGI 2024-10-07 18:09:17 +02:00
sdomi
951517b30d *: better versioning, split off various resources into .resources/ 2024-10-07 17:58:15 +02:00
sdomi
56d2af2cd8 tests: more header parsing tests 2024-10-05 04:36:03 +02:00
sdomi
3c8f848a9a proxy: remove
While a proxy function is useful, this implementation was unsalvageable.
2024-10-05 03:17:39 +02:00
famfo
510c372f1d server: add support for getting IP address behind a proxy 2024-10-05 03:05:16 +02:00
famfo
2cc067fc93 server: simplify header parsing code 2024-10-05 03:05:09 +02:00
famfo
a1323dc07e Test: fix bash path 2024-10-05 03:04:11 +02:00
TheresNoTime
9adbf34fce Add xxd as a required dependency 2024-09-26 23:18:24 +02:00
famfo
b4ea5954ec
mime: fix ico/favicon.ico mimetype 2024-09-26 14:04:19 +02:00
sdomi
2fd62dbbba server: fixup non-urlencoded post_data parsing 2024-09-06 00:50:47 +02:00
sdomi
fb8ae0eabc notORM: fix erroneous row return on no match 2024-09-03 22:56:34 +02:00
sdomi
cd0fe42879 cookie: add Path attribute 2024-08-18 00:05:58 +02:00
sdomi
d8a475e11b notORM, account: revert previous fix and employ a workaround for a bash bug 2024-08-17 23:10:10 +02:00
sdomi
11828198ce account: fix bug with empty fields getting omitted 2024-08-17 22:36:47 +02:00
sdomi
12011e5991 notORM: fix not enough delims 2024-08-17 22:30:07 +02:00
sdomi
4b9d4a5fda notORM: fix spurious $delim 2024-08-17 21:57:17 +02:00
sdomi
3db39c4948 notORM: fix typo 2024-08-17 21:35:57 +02:00
sdomi
23de1448e0 worker: setup the runtime 2024-08-17 21:28:23 +02:00
sdomi
11d3c0a899 notORM: replace some tr calls with bashisms 2024-08-17 21:27:32 +02:00
sdomi
1059be1618 server: fixup the parameter and cookie parsing 2024-08-17 05:14:53 +02:00
sdomi
4728b1651f server: rewrite slow sed calls into fast(er) bashisms 2024-08-17 04:51:17 +02:00
sdomi
5fc46f0f53 misc: improve url_decode 2024-08-17 04:14:06 +02:00
sdomi
6a22be0b4d server: fix parameter parsing on very long chains (in GET) 2024-08-17 02:55:54 +02:00
sdomi
dabd8bdb0c main: interactive shell mode 2024-08-17 02:33:49 +02:00
sdomi
69751adc52 server: fix parameter parsing on very long chains 2024-08-15 23:44:49 +02:00
sdomi
691ff46e75 response: fixup the HTTP response code if it got emptied by downstream 2024-08-15 19:50:15 +02:00
sdomi
fb247a6092 server: try to drop invalid connections 2024-08-15 19:45:56 +02:00
sdomi
62fb5556f4 notORM: iter now returns 255 on loop break 2024-08-11 21:58:34 +02:00
sdomi
ef933dd603 account: more checks 2024-08-11 18:39:54 +02:00
sdomi
c595acf6b9 router: more bashisms, less subshells 2024-08-11 03:02:14 +02:00
sdomi
8c29f4ad4e notORM: change iter failure state from 1 to 255 to prevent spontaneous fatals 2024-08-11 02:17:11 +02:00
sdomi
9a8f1dc2e4 response: migrate from printf to 'echo' and 'echo -e' (to prevent % injections) 2024-08-11 01:07:19 +02:00
sdomi
0f6233bbd3 core: better debuggier output, somewhat 2024-08-11 00:39:17 +02:00
sdomi
30c494c8e9 notORM: break out of data_iter after first failure 2024-08-10 23:24:04 +02:00
sdomi
b38860ec9c tests: align tst.sh with the docs 2024-08-07 00:02:31 +02:00
sdomi
5425a8ff14 misc: fix url_encode inconsistencies 2024-08-07 00:02:03 +02:00
sdomi
d6f46b949d tests: add match_not; add html/url encode tests 2024-08-06 23:42:44 +02:00
sdomi
358a8737ab tests: basic template tests 2024-08-06 22:50:57 +02:00
sdomi
46530b9f17 docs: quick start 2024-08-05 19:17:45 +02:00
sdomi
1c48d95d41 docs: wrote some info about tst.sh 2024-08-05 18:13:03 +02:00
sdomi
332c256d6c tests: crude testing framework 2024-08-04 21:37:51 +02:00
sdomi
5b8d492898 account: remember me flag 2024-08-04 01:59:21 +02:00
sdomi
93d02b4295 account: more middleware functions 2024-08-04 01:33:10 +02:00
sdomi
f16005fa0b notORM: fix the repeat function 2024-08-04 00:20:25 +02:00
sdomi
5ef931ca9d account: finally, proper sessions 2024-08-04 00:19:47 +02:00
sdomi
3e50cc8737 account: optional extra fields 2024-08-03 20:13:27 +02:00
famfo
343da427a1 Modify get_mime to allow text/html 2024-08-02 21:16:33 +02:00
famfo
d93323597d Use file directly to set mime type 2024-08-02 21:16:33 +02:00
famfo
317c827a1d Set content type for listing 2024-08-02 21:16:33 +02:00
famfo
9adc827018 Set content type for 403 and 404 pages 2024-08-02 21:16:33 +02:00
famfo
cb2acacc32 Set content type when running in buffered mode 2024-08-02 21:16:33 +02:00
Linus Groh
640baa8e7b docker: bump to alpine:3.20 2024-07-31 20:03:04 +02:00
sdomi
eb80d42711 server: fix slight parameter decode mangling 2024-07-30 16:48:30 +02:00
sdomi
0dbd85f9ec server: better handle custom statuses, fix some string escapes 2024-07-26 23:11:49 +02:00
sdomi
7a6e6c2f38 server: fix "typo" 2024-07-26 20:56:23 +02:00
sdomi
b0f23c01e5 notORM: fix typo 2024-07-26 20:52:52 +02:00
sdomi
ee1a540120 server: remove some of the parsing crimes 2024-07-26 20:52:42 +02:00
sdomi
2c1dfa20f1 meta: version bump, since i'm breaking compat anyways 2024-07-26 03:24:14 +02:00
sdomi
bb8526a752 account: migrate to notORM for data storage 2024-07-26 03:20:50 +02:00
sdomi
1df5fb17ca notORM: new generic data I/O interface, currently backed by CSV-esque files 2024-07-26 03:20:34 +02:00
sdomi
efbeca0498 template: normalize IFS 2024-07-24 03:01:43 +02:00
sdomi
00f9432b29 misc: fix decoding spaces in url_decode (oops?) 2024-07-19 23:11:27 +02:00
sdomi
ac89f028d0 template: fix misrenders due to unsorted key array 2024-07-19 22:24:51 +02:00
sdomi
ccc1ce3273 account: typo fix + minor flow changes 2024-07-19 18:03:27 +02:00
sdomi
1059fcf177 template: fix edge case with newline splitting sed arguments 2024-07-19 17:36:41 +02:00
sdomi
b28e1d9fcd account: fix poor checking that could lead to privilege escalation 2024-07-19 17:23:10 +02:00
famfo
61fea4b849
Add option to show call trace, basic cli docs 2024-07-19 02:25:33 +02:00
sdomi
f5eebc109d account: added argon2id as a preferred (default) password hash 2024-07-19 00:58:13 +02:00
sdomi
4e6c5c0ba3 template: tpl includes with {{#PATH}} 2024-07-17 22:09:01 +02:00
Linus Groh
a65b600952 Don't run ncat within background loop in the background too 2024-06-07 13:39:44 +02:00
Linus Groh
10342035a4 Match headers on beginning of line 2024-05-19 19:52:10 +01:00
Linus Groh
c459a405b2 Ignore query when parsing URL params 2024-05-19 16:21:40 +01:00
sdomi
231b52f171 * fix router parameter clobbering
if a route contained a static string with the same name as one
of the named params, said string would overwrite the payload from
the previous named param. this commit adds a check for `:` in the
template to prevent this
2024-04-21 21:54:06 +02:00
sdomi
a94d7b7c24 * fixes of some ugly sed hacks from 4 years ago 2024-04-21 19:27:23 +02:00
41 changed files with 2002 additions and 595 deletions

5
.resources/README.md Normal file
View file

@ -0,0 +1,5 @@
# .resources
this directory contains internal "template" files, copied by HTTP.sh during first run/init.
editing them directly here won't do much :p

10
.resources/config.sh Normal file
View file

@ -0,0 +1,10 @@
## app config
## your application-specific config goes here!
# worker_add example 5
cfg[enable_multipart]=false # by default, uploading files is disabled
if [[ "$run_once" ]]; then
# the following will only run once at startup, not with every request
:
fi

View file

@ -0,0 +1,15 @@
#!/usr/bin/env bash
source templates/head.sh
echo "<h1>Hello from HTTP.sh!</h1><br>To get started with your app, check out $(pwd)/${cfg[namespace]}/
<ul><li>$(pwd)/${cfg[namespace]}/${cfg[root]} - your (public) files go here</li>
<li>$(pwd)/${cfg[namespace]}/workers/ - worker directory, with an example one ready to go</li>
<li>$(pwd)/${cfg[namespace]}/views/ - individual views can be stored there, to be later referenced by routes.sh</li>
<li>$(pwd)/${cfg[namespace]}/templates/ - template files live over there</li>
<li>$(pwd)/${cfg[namespace]}/config.sh - config for everything specific to your app AND workers</li>
<li>$(pwd)/${cfg[namespace]}/routes.sh - config for the HTTP.sh router</li></ul>
Fun things outside of the app directory:
<ul><li>$(pwd)/config/master.sh - master server config</li>
<li>$(pwd)/config/<hostname> - config loaded if a request is made to a specific hostname</li>
<li>$(pwd)/storage/ - directory for storing all and any data your app may produce</li>
<li>$(pwd)/secret/ - user accounts and other secret tokens live here</li>
<li>$(pwd)/src/ - HTTP.sh src, feel free to poke around ;P</li></ul>"

View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
date

View file

@ -1,6 +1,6 @@
declare -A cfg
cfg[ip]=127.0.0.1 # IP address to bind to - use 0.0.0.0 to bind to all
cfg[ip]=[::] # IP address to bind to - use [::] to bind to all
cfg[http]=true # enables/disables listening on HTTP
cfg[port]=1337 # HTTP port
@ -13,7 +13,7 @@ cfg[index]='index.shs'
cfg[autoindex]=true
cfg[auth_required]=false
cfg[auth_realm]="Laura is cute <3"
cfg[auth_realm]="asdf"
cfg[ssl]=false # enables/disables listening on HTTPS
cfg[ssl_port]=8443
@ -21,22 +21,25 @@ cfg[ssl_cert]=''
cfg[ssl_key]=''
cfg[extension]='shs'
cfg[extra_headers]='server: HTTP.sh/0.95 (devel)'
#cfg[encoding]='UTF-8' # UTF-8 by default, used by iconv
cfg[extra_headers]="server: HTTP.sh/$HTTPSH_VERSION (devel)"
cfg[title]='HTTP.sh 0.95'
cfg[php_enabled]=false # enable PHP script evaluation (requires PHP)
cfg[python_enabled]=false # enable Python script evaluation (requires Python)
cfg[title]="HTTP.sh $HTTPSH_VERSION"
cfg[log]='log' # filename
# proxy functionality is very WiP
cfg[proxy]=false
cfg[proxy_url]='http://example.com/'
# mail handler config
cfg[mail]=""
cfg[mail_server]=""
cfg[mail_password]=""
cfg[mail_ssl]=true
cfg[mail_ignore_bad_cert]=false
# unset for legacy sha256sum hashing (not recommended)
cfg[hash]="argon2id"
cfg[cookie_path]="/"
# should registering automatically login the user?
# useful for flows involving a confirmation e-mail
cfg[register_should_login]=true

14
.resources/routes.sh Normal file
View file

@ -0,0 +1,14 @@
## routes - application-specific routes
##
## HTTP.sh supports both serving files using a directory structure (webroot),
## and using routes. The latter may come in handy if you want to create nicer
## paths, e.g.
##
## (webroot) https://example.com/profile.shs?name=asdf
## ... may become ...
## (routes) https://example.com/profile/asdf
##
## To set up routes, define rules in this file (see below for examples)
# router "/test" "app/views/test.shs"
# router "/profile/:user" "app/views/user.shs"

View file

@ -1,15 +1,14 @@
FROM alpine:3.14
FROM alpine:3.21
RUN apk update \
&& apk add sed xxd grep findutils file nmap-ncat socat jq bash file curl
RUN apk upgrade -U && apk add bash sed grep nmap-ncat socat file findutils jq curl argon2
WORKDIR /httpsh
WORKDIR /app
COPY . .
EXPOSE 1337
VOLUME /httpsh/config
VOLUME /httpsh/app
VOLUME /httpsh/storage
VOLUME /httpsh/secret
VOLUME /app/app
VOLUME /app/config
VOLUME /app/storage
VOLUME /app/secret
CMD ["/httpsh/http.sh"]
CMD ["/app/http.sh"]

View file

@ -1,157 +1,11 @@
### GNU LESSER GENERAL PUBLIC LICENSE
Copyright 2020-2024, sdomi et al.
Version 3, 29 June 2007
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Copyright (C) 2007 Free Software Foundation, Inc.
<https://fsf.org/>
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
This version of the GNU Lesser General Public License incorporates the
terms and conditions of version 3 of the GNU General Public License,
supplemented by the additional permissions listed below.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#### 0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the
GNU General Public License.
"The Library" refers to a covered work governed by this License, other
than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
#### 1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
#### 2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
- a) under this License, provided that you make a good faith effort
to ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
- b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
#### 3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from a
header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
- a) Give prominent notice with each copy of the object code that
the Library is used in it and that the Library and its use are
covered by this License.
- b) Accompany the object code with a copy of the GNU GPL and this
license document.
#### 4. Combined Works.
You may convey a Combined Work under terms of your choice that, taken
together, effectively do not restrict modification of the portions of
the Library contained in the Combined Work and reverse engineering for
debugging such modifications, if you also do each of the following:
- a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
- b) Accompany the Combined Work with a copy of the GNU GPL and this
license document.
- c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
- d) Do one of the following:
- 0) Convey the Minimal Corresponding Source under the terms of
this License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
- 1) Use a suitable shared library mechanism for linking with
the Library. A suitable mechanism is one that (a) uses at run
time a copy of the Library already present on the user's
computer system, and (b) will operate properly with a modified
version of the Library that is interface-compatible with the
Linked Version.
- e) Provide Installation Information, but only if you would
otherwise be required to provide such information under section 6
of the GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the Application
with a modified version of the Linked Version. (If you use option
4d0, the Installation Information must accompany the Minimal
Corresponding Source and Corresponding Application Code. If you
use option 4d1, you must provide the Installation Information in
the manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.)
#### 5. Combined Libraries.
You may place library facilities that are a work based on the Library
side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
- a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities, conveyed under the terms of this License.
- b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
#### 6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
as you received it specifies that a certain numbered version of the
GNU Lesser General Public License "or any later version" applies to
it, you have the option of following the terms and conditions either
of that published version or of any later version published by the
Free Software Foundation. If the Library as you received it does not
specify a version number of the GNU Lesser General Public License, you
may choose any version of the GNU Lesser General Public License ever
published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -1,68 +1,43 @@
# HTTP.sh
Node.js, but `| sed 's/Node/HTTP/;s/js/sh/'`.
HTTP.sh is (by far) the most extensible attempt at creating a web framework in Bash, and (AFAIK) the only one that's actively maintained. Although I strive for code quality, this is still rather experimental and may contain bugs.
the *coolest* web framework (in Bash) to date.
Originally made for Junction Stupidhack 2020; Created by [sdomi](https://sakamoto.pl/), [ptrcnull](https://ptrcnull.me/) and [selfisekai](https://selfisekai.rocks/).
We now have an IRC channel! Join #http.sh @ irc.libera.chat
## Quick Start
## Documentation
If you want to build a new webapp from scratch:
We have some guides and general documentation in the [docs](docs/) directory. Among them:
```
./http.sh init
./http.sh
```
If you're setting up HTTP.sh for an existing application:
```
git clone https://git.sakamoto.pl/laudom/ocw/ app # example repo :P
./http.sh
```
We also support Docker! Both a Dockerfile and an example docker-compose.yml are included for your convenience. Containerizing your webapp is as easy as `docker-compose up -d`
- A [quick start](docs/quick-start.md) guide
- General [directory structure](docs/directory-structure.md)
- [CLI usage](docs/running.md)
- [Tests](docs/tests.md)
- [HTTP Router](docs/router.md)
- [List of security fixes](docs/sec-fixes/)
## Dependencies
- Bash (4.x should work, but we'll need 5.0 soon)
- [Ncat](https://nmap.org/ncat), not openbsd-nc, not netcat, not nc
- socat (because the above is slightly broken)
- pkill
- mktemp
- jq (probably not needed just yet, but it will be in 1.0)
- dd (for accounts, multipart/form-data and websockets)
- sha1sum, sha256sum, base64 (for accounts and simple auth)
- curl (for some demos)
Absolutely necessary:
- Bash (5.x, not interested in backwards compat)
- either [Ncat](https://nmap.org/ncat) (not openbsd-nc, not netcat, not nc) or socat, or a combo of both
- GNU grep/sed
Full list of dependencies: [required](src/dependencies.required), [optional](src/dependencies.optional).
## Known faults
- can't change the HTTP status code from Shell Server scripts. This could theoretically be done with custom vhost configs and some `if` statements, but this would be a rather nasty solution to that problem.
- if ncat fails to bind to `[::]`, change the bind to `127.0.0.1` or `0` in `config/master.sh`
- `$post_multipart` doesn't keep original names - could be fixed by parsing individual headers from the multipart request instead of skipping them all
- it won't ever throw a 500, thus it fails silently
## Directory structure
- ${cfg[namespace]} (`app` by default)
- ${cfg[root]} (`webroot` by default) - public application root
- workers/ - scripts that execute periodically live there (see examples)
- views/ - for use with HTTP.sh router
- config.sh - application-level config file
- config
- master.sh - main server config file - loaded on boot and with every request
- host:port - if a file matching the Host header is found, HTTP.sh will load it request-wide
- src
- server source files and modules
- response
- files corresponding to specific HTTP status codes
- listing.sh (code 210) is actually HTTP 200, but triggered in a directory with autoindex turned on and without a valid `index.shs` file
- templates - section templates go here
- secret - users, passwords and other Seecret data should be stored here
- storage - random data storage for your webapp
- websocket impl isn't properly finished
- fails with an empty response, instead of throwing 400/500
## Variables that we think are cool!
![](https://f.sakamoto.pl/d6584c01-1c48-42b9-935b-d9a89af4e071file_101.jpg)
(this data may be slightly outdated. Full docs TODO.)
- get_data - holds data from GET parameters
- /?test=asdf -> `${get_data[test]}` == `"asdf"`
- params - holds parsed data from URL router

View file

@ -1 +0,0 @@
cfg[title]='Laura is cute :3'

View file

@ -0,0 +1,49 @@
# File / Directory structure
(alphabetical order; state for 2024-08-05)
- `config` contains per-vhost configuration settings. `config/master.sh` gets loaded by default,
`config/<hostname>[:port]` gets loaded based on the `Host` header.
- `docs` is what you're reading now. Hi!!
- `secret` is where user data gets stored. think: user accounts and sessions.
- `src` contains the majority of HTTP.sh's code.
- `response/*` are files executed based on computed return code. `response/200.sh` is a bit
special, because it handles the general "success" path. Refactor pending.
- `account.sh` is the middleware for user account management
- `dependencies.*` store the list of required and optional deps, newline delimited
- `mail.sh` has some crude SMTP code, for sending out mails
- `mime.sh` contains a glue function for handling special cases where `file` command doesn't
return the proper mimetype
- `misc.sh` consists of functions that didn't really fit anywhere else. Of note, `html_encode`,
`url_encode`, `url_decode`, `header` and various cookie functions all live there for now.
- `notORM.sh` is, as I said, not an [ORM](https://en.wikipedia.org/wiki/Object%E2%80%93relational_mapping)
- `route.sh` defines a small function for handling adding the routes
- `server.sh` is where most of the demons live
- `template.sh` is where the rest of the demons live
- `worker.sh` is the literal embodiment of "we have cron at home"; workers are just background
jobs that run every n minutes; you can also start and stop them on will! *fancy*
- `ws.sh` is an incomplete WebSocket implementation
- `storage` is like `secret`, but you can generally use it for whatever
- `templates` will be moved/removed soon (`head.sh` has *nothing* to do with the current templating
system; it has some handlers for remaking things you put into `meta[]` array into HTML `<head>`
fields. Should not be used, at least not in its current form.)
- `tests` is where all the tests live!
The actually important files are:
- `http.sh` - run this and see what happens
- `tst.sh` - [the test suite](tests.md)
## suggested skeleton structure in `app/`
FYI: this is merely a suggestion. `./http.sh init` will create some of those directories for you,
but it's fine to move things around. A lot of it can be changed within `config/master.sh`, even the
directory name itself!
- `src` for various backend code
- `templates` for HTML in our special templating language
- `views` for individual pages / endpoints
- `webroot` for static files, or .shs scripts that don't use the router
- `config.sh` has some general, always-included stuff
- `routes.sh` configures the router; entries should point into `views/`
- `localcfg.sh` may be sourced from `config.sh` and contain only local config (useful for developing
stuff with others through git, for instance; `localcfg.sh` should then be added to `.gitignore`)

101
docs/quick-start.md Normal file
View file

@ -0,0 +1,101 @@
# HTTP.sh: quick start
Welcome to the exciting world of Bash witchery! I'll be your guide on this webdev adventure today.
## about HTTP.sh
HTTP.sh is a very extensive web framework. I use it for quick and dirty hacks, and I "designed" it
in a way where you don't need to write a lot of code to do some basic stuff. I'm also gradually
adding middleware that helps you do more advanced stuff. With some regards, there are already
multiple ways one could implement a web app in HTTP.sh; Thus, I feel like I need this to be heard:
**There are no bad ways to write code here.** You can still write *bad code*, but this is a safe
space where nobody shall tell you "Y is garbage, you should use X instead!";
This strongly applies to specific features of the framework: You can use the templating engine, or
you can just `echo` a bunch of stuff directly from your script. You can use the URL router, or you
could just name your scripts under the webroot in a fancy way. **As long as it works, it's good :3**
## Getting started
First, clone the repository. I'm sure you know how to do that; Afterwards, try running:
```
./http.sh init
./http.sh
```
`init` will lay out some directories, and running it w/o any params will just start the server.
If you're missing any dependencies, you should now see a list of them.
By default, http.sh starts on port 1337; Try going to http://localhost:1337/ - if you see a welcome
page, it's working!!
We have a "debug mode" under `./http.sh debug`. Check [running.md](running.md) for more options.
## Basic scripting
By default, your application lives in `app/`. See [directory-structure.md](directory-structure.md)
for more info on what goes where. For now, go into `app/webroot/` and remove `index.shs`. That
should bring you to an empty directory listing; Static files can be put as-is into `app/webroot/`
and they'll be visible within the directory!
To create a script, make a new file with `.shs` extension, and start writing a script like normal.
All of your `stdout` (aka: everything you `echo`) goes directly to the output. Everything sent to
`stderr` will be shown in the `./http.sh debug` output.
## Parameters
There are a few ways of receiving input; The most basic ones are `get_data` and `post_data`, which
are associative arrays that handle GET params and POST (body) params, respectively. Consider the
following example:
```
#!/bin/bash
echo '<html><head><meta charset="utf-8"></head><body>'
if [[ ! "${get_data[example]}" ]]; then
echo '<form>
<input type="text" name="example">
<input type="submit">
</form>'
else
echo "<p>you sent: $(html_encode "${get_data[example]}")</p>"
fi
echo '</body></html>'
```
When opened in a browser, this example looks like so:
![screenshot of a simple web page. there's a text box, and a button saying Submit Query](https://f.sakamoto.pl/IwIalnWw.png)
... and after submitting data, it looks like that:
![screenshot of another page. it says "you sent: meow!"](https://f.sakamoto.pl/IwIy0thg.png)
## Security
Remember to use sufficient quotes in your scripts, and escape untrusted data (read: ALL data you
didn't write/create yourself). This is especially important when parameter splitting may occur;
For instance, consider:
```
rm storage/${get_data[file]}
```
vs
```
rm -- "storage/$(basename "${get_data[file]}")"
```
The first one can fail due to:
- spaces (if `?file=a+b+c+d`, then it will remove `storage/a`, `b`, `c` and `d`). Hence, you get
arbitrary file deletion.
- unescaped filename (param containing `../` leads to path traversal)
- missing `--` end-of-options marker (`--` in `rm --` terminates switch parsing; after this point,
only file names can occur, so a parameter starting with `-` cannot be interpreted as a switch)
Furthermore, if you're displaying user-controlled data in your app, remember to use `html_encode`
to prevent cross-site scripting attacks.

36
docs/router.md Normal file
View file

@ -0,0 +1,36 @@
# HTTP.sh: URL router
After running `./http.sh init`, your `app` directory should include a file called `routes.sh` - this
is where you define custom routes. The syntax is as follows:
```
router "/uri/path" "${cfg[namespace]}/views/file.shs"
```
This can be used to remap files that are already in `webroot`, but to prevent confusion, it is
recommended to make a separate directory for routed files. In other HTTP.sh projects, it's usually
`views`.
The router also can be used to pass parameters:
```
router "/user/:username" "${cfg[namespace]}/views/profile.shs"
router "/user/:username/:postid" "${cfg[namespace]}/views/post.shs"
```
All router parameters are available at runtime through `${params[]}` associative array.
A sample `profile.shs` could look like this:
```
#!/bin/bash
echo "$(html_encode "${params[username]}")'s profile"
```
## Limitations
- The param name can only contain the following characters: `[A-Za-z0-9]`
- Currently, the param itself can only contain the following characters: `[A-Za-z0-9.,%:\\-_]`;
Otherwise, the route won't match, and you'll likely get a 404. Support for other special chars
will be added somewhere down the line.
- Router takes precedence over normal file matching; This could allow one to override a file.

11
docs/running.md Normal file
View file

@ -0,0 +1,11 @@
# Running http.sh
## cli args
The arg parsing is a bit rudimentary atm. Assume only one option supported per invocation.
- `init` creates an app skeleton and writes example config. Optional second parameter sets the
namespace (app directory) name.
- `debug` shows stderr (useful for debugging)
- `debuggier` shows stderr and calltrace
- `shell` drops you into an environment practically equivalent to the runtime

View file

@ -0,0 +1,9 @@
# 2024-12-15 Possible pattern injection in notORM
Prior to commit a00b1b00ee64215dfdd575cf3c51e2f7c387761f, notORM was vulnerable to a pattern
injection attack, which could potentially lead to privilege escalation through the account system.
The vulnerability arose due to an inconsistency with how certain versions of sed handle escaped
hex characters (`\xNN`). GNU sed expands the escaped characters and treats them as a raw part of
the pattern as long as Extended Regex (`-E`) mode is used. This behavior is not present within
busybox sed, which is why it hasn't been caught before.

169
docs/tests.md Normal file
View file

@ -0,0 +1,169 @@
# the test framework
We have a small test harness! It lives in `./tst.sh` in the root of the HTTP.sh repo. It's inspired
by some init systems, and a bit influenced by how APKBUILD/PKGBUILDs are structured. A very basic
test is attached below:
```
tst() {
return 0
}
```
A `tst()` function is all you need in a test. Running the test can be done like so:
```
$ ./tst.sh tests/example.sh
OK: tests/example.sh
Testing done!
OK: 1
FAIL: 0
```
If running multiple tests is desired, I recommend calling `./tst.sh tests/*`, and prepending the
filenames with numbers to make sure they run in the correct sequence.
You can also contain multiple tests in a file by grouping them into a function, and then adding the
function names to an array:
```
a() {
tst() {
return 0
}
}
b() {
tst() {
return 1
}
}
subtest_list=(
a
b
)
```
This will yield the following result *(output subject to change)*:
```
--- tests/example.sh ---
OK: a
FAIL: b
(res: )
Testing done!
OK: 1
FAIL: 1
```
Of note: `tst.sh` is designed in a way where *most* functions will fall through; If you'd like to
run the same test against a different set of checks (see below) then you *don't* need to redefine
the `tst()` function, just changing the checks is enough.
---
## return codes
The following return codes are defined:
- 0 as success
- 1 as error (test execution continues)
- 255 as fatal error (cleans up and exits immediately)
## determining success / failure
Besides very simple return-code based matching, `tst.sh` also supports stdout matching with the
following variables:
- `match` (matches the whole string)
- `match_sub` (matches a substring)
- `match_begin` (matches the beginning)
- `match_end` (matches the end)
- `match_not` (inverse substring match)
If any of those are defined, all except fatal return codes are ignored. If more than one of those
is defined, it checks the list above top-to-bottom and picks the first one that is set, ignoring
all others.
## special functions
The framework defines two special functions, plus a few callbacks that can be overridden:
### prepare
`prepare` runs **once** after definition, right before the test itself. As of now, it's the only
function that gets cleaned up after each run (by design; see section `statefulness` below)
By default (undefined state), `prepare` does nothing.
```
prepare() {
echo 'echo meow' > app/webroot/test.shs
}
tst() {
curl localhost:1337/test.shs
}
match="meow"
```
*(note: this test requires tst.sh to be used with http.sh, and for http.sh to be running)*
### cleanup
`cleanup` runs after every test. The name should be self-explanatory. Define as `cleanup() { :; }`
to disable behavior from previous tests.
By default (undefined state), `cleanup` does nothing.
```
prepare() {
echo 'echo meow' > app/webroot/test.shs
}
tst() {
curl localhost:1337/test.shs
}
cleanup() {
rm app/webroot/test.shs
}
match="meow"
```
*(note: same thing as above)*
### on_success, on_error, on_fatal
Called on every success, failure and fatal error. First two call `on_{success,error}_default`,
which increments the counter and outputs the OK/FAIL message. The third one just logs the FATAL,
cleans up and exits. Overloading `on_fatal` is not recommended; While overloading the other two,
make sure to add a call to the `_default` function, or handle the numbers gracefully by yourself.
## statefulness
This framework is designed in a way where a lot of the state is inherited from previous tests. This
is by-design, to make sure that there's less repetition in the tests themselves. It is up to the
author of the tests to remember about cleaning up variables and other state that could affect any
further tests in the chain.
Currently, state is cleaned up under the following circumstances:
- all `match` variables get cleaned up after every test
- `prepare()` function is reset after every test (so, each definition of `prepare` will run
exactly *once*)
- upon switching files, `tst()` and `cleanup()` get reset to initial values. Of note, those two
**do** get inherited between subtests in a single file!
- upon termination of the test harness, it tries to kill all child processes
The following state **is not** cleaned up:
- `tst()` and `cleanup()` between subtests in a single file
- `on_error()`, `on_success()` functions
- any global user-defined variables, also between files
- any started processes
- any modified files (we don't have a way to track those atm, although I may look into this)

211
http.sh
View file

@ -1,148 +1,101 @@
#!/usr/bin/env bash
trap ctrl_c INT
if [[ ! -f "config/master.sh" ]]; then
mkdir -p config
cat <<EOF > "config/master.sh"
declare -A cfg
cfg[ip]=0.0.0.0 # IP address to bind to - use 0.0.0.0 to bind to all
cfg[http]=true # enables/disables listening on HTTP
cfg[port]=1337 # HTTP port
cfg[socat_only]=false
cfg[namespace]='app'
cfg[root]='webroot/'
cfg[index]='index.shs'
cfg[autoindex]=true
cfg[auth_required]=false
cfg[auth_realm]="asdf"
cfg[ssl]=false # enables/disables listening on HTTPS
cfg[ssl_port]=8443
cfg[ssl_cert]=''
cfg[ssl_key]=''
cfg[extension]='shs'
cfg[extra_headers]='server: HTTP.sh/0.95 (devel)'
cfg[title]='HTTP.sh 0.95'
cfg[php_enabled]=false # enable PHP script evaluation (requires PHP)
cfg[python_enabled]=false # enable Python script evaluation (requires Python)
cfg[log]='log' # filename
cfg[proxy]=false # you probably want to configure this per-url
cfg[proxy_url]='' # regexp matching valid URLs to proxy
cfg[proxy_param]='url' # /proxy?url=...
# mail handler config
cfg[mail]=""
cfg[mail_server]=""
cfg[mail_password]=""
cfg[mail_ssl]=true
cfg[mail_ignore_bad_cert]=false
EOF
fi
source config/master.sh
function ctrl_c() {
ctrl_c() {
[[ $socket != '' ]] && rm $socket
pkill -P $$
echo -e "Cleaned up, exitting.\nHave an awesome day!!"
}
if [[ ! -f "$(pwd)/http.sh" ]]; then
echo -e "Please run HTTP.sh inside it's designated directory\nRunning the script from arbitrary locations isn't supported."
setup_config() {
[[ ! "$1" ]] && namespace=app || namespace="$1"
mkdir -p config
cp ".resources/primary_config.sh" "config/master.sh"
echo "cfg[namespace]=$namespace # default namespace" >> "config/master.sh"
echo "cfg[init_version]=$HTTPSH_VERSION" >> "config/master.sh"
}
if [[ ! -f "$PWD/http.sh" ]]; then
echo -e "Please run HTTP.sh inside its designated directory\nRunning the script from arbitrary locations isn't supported."
exit 1
fi
source src/version.sh
if [[ "$1" == "init" ]]; then # will get replaced with proper parameter parsing in 1.0
[[ ! "$2" ]] && namespace=app || namespace="$2"
if [[ ! -f "config/master.sh" ]]; then
setup_config
elif [[ -d "$namespace" ]]; then
echo -e "ERR: HTTP.sh has been initialized before.\nSpecify a new namespace directory, or perish (remove '$namespace'?)"
exit 1
else
echo "WARN: HTTP.sh has been initialized before. Continuing w/o recreating config."
fi
source config/master.sh
mkdir -p "${cfg[namespace]}/${cfg[root]}" "${cfg[namespace]}/workers/example" "${cfg[namespace]}/views" "${cfg[namespace]}/templates"
touch "${cfg[namespace]}/config.sh" "${cfg[namespace]}/workers/example/control"
cp ".resources/config.sh" "${cfg[namespace]}/config.sh"
cp ".resources/routes.sh" "${cfg[namespace]}/routes.sh"
cp .resources/example_worker/* "${cfg[namespace]}/workers/example/"
cp .resources/example_webroot/* "${cfg[namespace]}/${cfg[root]}/index.shs"
echo -e "Success..?\nTry running \`./http.sh\` now"
exit 0
elif [[ ! -f "config/master.sh" ]]; then
if [[ -d "app" ]]; then # if the de-facto default app dir already exists, copy the cfg
setup_config
else
echo "ERR: Initialize HTTP.sh first! run './http.sh init'"
exit 1
fi
fi
source config/master.sh
if [[ "$HTTPSH_VERSION" != "${cfg[init_version]}" ]]; then
echo "WARN: HTTP.sh was updated since this instance was initialized (config v${cfg[init_version]:-(none)}, runtime v$HTTPSH_VERSION). There may be breaking changes. Edit cfg[init_version] in config/master.sh to remove this warning."
fi
for i in $(cat src/dependencies.required); do
which $i > /dev/null 2>&1
if [[ $? != 0 ]]; then
while read i; do
if ! which $i > /dev/null 2>&1; then
echo "ERROR: can't find $i"
error=true
fi
done
for i in $(cat src/dependencies.optional); do
done < src/dependencies.required
while read i; do
which $i > /dev/null 2>&1
[[ $? != 0 ]] && echo "WARNING: can't find $i"
done
done < src/dependencies.optional
which ncat > /dev/null 2>&1
if [[ $? != 0 ]]; then
if ! which ncat > /dev/null 2>&1; then
if [[ ${cfg[socat_only]} != true ]]; then
echo "ERROR: can't find ncat, and cfg[socat_only] is not set to true"
echo "ERR: can't find ncat, and cfg[socat_only] is not set to true"
error=true
fi
fi
if [[ $error == true ]]; then
echo "Fix above dependencies, and I might just let you pass."
exit 0
exit 1
fi
if [[ $1 == "init" ]]; then # will get replaced with proper parameter parsing in 1.0
#set -e
mkdir -p "${cfg[namespace]}/${cfg[root]}" "${cfg[namespace]}/workers/example" "${cfg[namespace]}/views" "${cfg[namespace]}/templates"
touch "${cfg[namespace]}/config.sh" "${cfg[namespace]}/workers/example/control"
cat <<EOF > "${cfg[namespace]}/config.sh"
## app config
## your application-specific config goes here!
# worker_add example 5
cfg[enable_multipart]=false # by default, uploading files is disabled
EOF
cat <<EOF > "${cfg[namespace]}/workers/example/worker.sh"
#!/usr/bin/env bash
date
EOF
cat <<EOF > "${cfg[namespace]}/${cfg[root]}/index.shs"
#!/usr/bin/env bash
source templates/head.sh
echo "<h1>Hello from HTTP.sh!</h1><br>To get started with your app, check out $(pwd)/${cfg[namespace]}/
<ul><li>$(pwd)/${cfg[namespace]}/${cfg[root]} - your (public) files go here</li>
<li>$(pwd)/${cfg[namespace]}/workers/ - worker directory, with an example one ready to go</li>
<li>$(pwd)/${cfg[namespace]}/views/ - individual views can be stored there, to be later referenced by routes.sh</li>
<li>$(pwd)/${cfg[namespace]}/templates/ - template files (.t) live over there</li>
<li>$(pwd)/${cfg[namespace]}/config.sh - config for everything specific to your app AND workers</li>
<li>$(pwd)/${cfg[namespace]}/routes.sh - config for the HTTP.sh router</li></ul>
Fun things outside of the app directory:
<ul><li>$(pwd)/config/master.sh - master server config</li>
<li>$(pwd)/config/<hostname> - config loaded if a request is made to a specific hostname</li>
<li>$(pwd)/storage/ - directory for storing all and any data your app may produce</li>
<li>$(pwd)/secret/ - user accounts and other secret tokens live here</li>
<li>$(pwd)/src/ - HTTP.sh src, feel free to poke around ;P</li></ul>"
EOF
cat <<EOF > "${cfg[namespace]}/routes.sh"
## routes - application-specific routes
##
## HTTP.sh supports both serving files using a directory structure (webroot),
## and using routes. The latter may come in handy if you want to create nicer
## paths, e.g.
##
## (webroot) https://example.com/profile.shs?name=asdf
## ... may become ...
## (routes) https://example.com/profile/asdf
##
## To set up routes, define rules in this file (see below for examples)
# router "/test" "app/views/test.shs"
# router "/profile/:user" "app/views/user.shs"
EOF
chmod +x "${cfg[namespace]}/workers/example/worker.sh"
echo -e "Success..?\nTry running \`./http.sh\` now"
if [[ "$1" == 'shell' ]]; then
bash --rcfile <(echo '
shopt -s extglob
x() { declare -p data;} # for notORM
source config/master.sh
source src/account.sh
source src/mail.sh
source src/mime.sh
source src/misc.sh
source src/notORM.sh
source src/template.sh
source "${cfg[namespace]}/config.sh"
PS1="[HTTP.sh] \[\033[01;34m\]\w\[\033[00m\]\$ "')
exit 0
fi
@ -152,29 +105,37 @@ cat <<EOF >&2
| |__| | | | | | | |_| | |___ | |__| |
| |__| | | | | | | ___/\___ \ | |__| |
| | | | | | | | | | ___\ \| | | |
|_| |_| |_| |_| |_| □ /_____/|_| |_|
|_| |_| |_| |_| |_| □ /_____/|_| |_| v$HTTPSH_VERSION
EOF
if [[ "$1" == "debug" ]]; then
cfg[dbg]=true
echo "[DEBUG] Activated debug mode - stderr will be shown"
elif [[ "$1" == "debuggier" ]]; then
cfg[dbg]=true
cfg[debuggier]=true
export PS4=' ${BASH_SOURCE}:${LINENO}: ${FUNCNAME[0]:+${FUNCNAME[0]}(): }'
echo "[DEBUG] Activated debuggier mode - stderr and call trace will be shown"
set -x
fi
source src/worker.sh
if [[ -f "${cfg[namespace]}/config.sh" ]]; then
run_once=true
source "${cfg[namespace]}/config.sh"
unset run_once
fi
if [[ ${cfg[socat_only]} == true ]]; then
echo "[INFO] listening directly via socat, assuming no ncat available"
echo "[HTTP] listening on ${cfg[ip]}:${cfg[port]}"
if [[ ${cfg[dbg]} == true ]]; then
socat tcp-listen:${cfg[port]},bind=${cfg[ip]},fork "exec:bash -c src/server.sh"
socat tcp-listen:${cfg[port]},bind=${cfg[ip]},fork "exec:bash -c \'src/server.sh ${cfg[debuggier]}\'"
else
socat tcp-listen:${cfg[port]},bind=${cfg[ip]},fork "exec:bash -c src/server.sh" 2>> /dev/null
if [[ $? != 0 ]]; then
echo "[WARN] socat exitted with a non-zero status; Maybe the port is in use?"
echo "[WARN] socat quit with a non-zero status; Maybe the port is in use?"
fi
fi
else
@ -186,11 +147,11 @@ else
# to quit after the first time-outed connection, ignoring the
# "broker" (-k) mode. This is a workaround for this.
while true; do
ncat -i 600s -l -U "$socket" -c src/server.sh -k
ncat -i 600s -l -U "$socket" -c "src/server.sh ${cfg[debuggier]}" -k
done &
else
while true; do
ncat -i 600s -l -U "$socket" -c src/server.sh -k 2>> /dev/null &
ncat -i 600s -l -U "$socket" -c src/server.sh -k 2>> /dev/null
done &
fi
socat TCP-LISTEN:${cfg[port]},fork,bind=${cfg[ip]} UNIX-CLIENT:$socket &

View file

@ -1,87 +1,241 @@
#!/usr/bin/env bash
# account.sh - account and session mgmt
# TODO: add stricter argument checks for all the funcs
# register(username, password)
# registers a new user.
# first two params are strings; third is a reference to an array with
# optional extra data (email, OTP...)
#
# [extra=()] register(username, password)
function register() {
local username=$(echo -ne $(sed -E "s/ /_/g;s/\:/\-/g;s/\%/\\x/g" <<< "$1"))
if [[ ! "$1" || ! "$2" ]]; then
reason="User/password empty!"
return 1
fi
local username=$(url_decode "$1")
unset IFS
if [[ $(grep "$username:" secret/users.dat) != '' ]]; then
data_get secret/users.dat "$username"
if [[ $? != 2 && $? != 4 ]]; then # entry not found / file not found
reason="This user already exists!"
return 1
fi
local salt=$(dd if=/dev/urandom bs=256 count=1 | sha1sum | cut -c 1-16)
local hash=$(echo -n $2$salt | sha256sum | cut -c 1-64)
local token=$(dd if=/dev/urandom bs=32 count=1 | sha1sum | cut -c 1-40)
set_cookie_permanent "sh_session" $token
set_cookie_permanent "username" $username
echo "$username:$hash:$salt:$token" >> secret/users.dat
local salt=$(dd if=/dev/urandom bs=16 count=1 status=none | xxd -p)
_password_hash "$2" "$salt"
local out=("$username" "$hash" "$salt" "" "${extra[@]}")
data_add secret/users.dat out
[[ "${cfg[register_should_login]}" == true ]] && _new_session "$username"
set_cookie_permanent "sh_session" "${session[2]}"
set_cookie_permanent "username" "$username"
unset hash
}
# login(username, password)
# login(username, password, [forever]) -> [res]
function login() {
local username=$(echo -ne $(sed -E 's/%/\\x/g' <<< "$1"))
IFS=':'
local user=($(grep -P "$username:" secret/users.dat))
if [[ ! "$1" || ! "$2" ]]; then
reason="User/password empty!"
return 1
fi
local username=$(url_decode "$1")
[[ "$3" ]] && local forever=true
unset IFS
if [[ $(echo -n $2${user[2]} | sha256sum | cut -c 1-64 ) == "${user[1]}" ]]; then
set_cookie_permanent "sh_session" "${user[3]}"
set_cookie_permanent "username" "$username"
if ! data_get secret/users.dat "$username" 0 user; then
reason="Bad credentials"
return 1
fi
_password_hash "$2" "${user[2]}"
if [[ "$hash" == "${user[1]}" ]]; then
_new_session "$username" "$forever"
if [[ "$forever" == true ]]; then
set_cookie_permanent "sh_session" "${session[2]}"
set_cookie_permanent "username" "$username"
else
set_cookie "sh_session" "${session[2]}"
set_cookie "username" "$username"
fi
declare -ga res=("${user[@]:4}")
unset hash
return 0
else
remove_cookie "sh_session"
remove_cookie "username"
reason="Invalid credentials!!11"
reason="Bad credentials"
unset hash
return 1
fi
}
# login_simple(base64)
function login_simple() {
local data=$(base64 -d <<< "$3")
local password=$(sed -E 's/^(.*)\://' <<< "$data")
local login=$(sed -E 's/\:(.*)$//' <<< "$data")
IFS=':'
local user=($(grep "$login:" secret/users.dat))
unset IFS
if [[ $(echo -n $password${user[2]} | sha256sum | cut -c 1-64 ) == ${user[1]} ]]; then
if [[ ! "$password" || ! "$login" ]]; then
return 1
fi
data_get secret/users.dat "$login" 0 user
_password_hash "$password" "${user[2]}"
if [[ "$hash" == "${user[1]}" ]]; then
r[authorized]=true
else
r[authorized]=false
fi
unset hash
}
# logout()
function logout() {
if [[ "${cookies[sh_session]}" ]]; then
data_yeet secret/sessions.dat "${cookies[sh_session]}" 2
fi
remove_cookie "sh_session"
remove_cookie "username"
}
# session_verify(session)
# session_verify(session) -> [res]
function session_verify() {
if [[ $(grep ":$1" secret/users.dat) != '' && $1 != '' ]]; then
return 0
else
return 1
[[ ! "$1" ]] && return 1
unset IFS
local session
local user
if data_get secret/sessions.dat "$1" 2 session; then
if data_get secret/users.dat "${session[0]}" 0 user; then # double-check if tables agree
declare -ga res=("${user[@]:4}")
return 0
fi
fi
return 1
}
# session_get_username(session)
function session_get_username() {
[[ "$1" == "" ]] && return
IFS=':'
local data=($(grep ":$1$" secret/users.dat))
[[ ! "$1" ]] && return 1
unset IFS
echo ${data[0]}
local session
if data_get secret/sessions.dat "$1" 2 session; then
if data_get secret/users.dat "${session[0]}" 0 user; then # double-check if tables agree
echo "${user[0]}"
return 0
fi
fi
return 1
}
# THIS FUNCTION IS DANGEROUS
# delete_account(username)
function delete_account() {
[[ "$1" == "" ]] && return
sed -i "s/^$1:.*//;/^$/d" secret/users.dat
[[ ! "$1" ]] && return 1
data_yeet secret/users.dat "$1"
}
# user_reset_password(username, token, new_password) -> $?, ${user[@]}
user_reset_password() {
[[ ! "$1" ]] && return 1 # sensitive function, so we're checking all three
[[ ! "$2" ]] && return 1 # there's probably a better way,
[[ ! "$3" ]] && return 1 # but i don't care.
if data_get secret/users.dat "$1" 0 user; then
if [[ "$2" == "${user[3]}" ]]; then
_password_hash "$3" "${user[2]}"
user[1]="$hash"
user[3]=''
data_replace secret/users.dat "$1" user
session_purge "$1"
unset hash token
return 0
fi
fi
return 1
}
# user_change_password(username, old_password, new_password) -> $?, ${user[@]}
user_change_password() {
[[ ! "$1" ]] && return 1
[[ ! "$2" ]] && return 1
[[ ! "$3" ]] && return 1
if data_get secret/users.dat "$1" 0 user; then
_password_hash "$2" "${user[2]}"
if [[ "$hash" == "${user[1]}" ]]; then
_password_hash "$3" "${user[2]}"
[[ ! "$hash" ]] && return
user[1]="$hash"
user[3]=''
data_replace secret/users.dat "$1" user
session_purge "$1"
unset hash token
return 0
fi
fi
unset hash
return 1
}
# user_gen_reset_token(username) -> $?, $token, ${user[@]}
user_gen_reset_token() {
[[ ! "$1" ]] && return 1
if data_get secret/users.dat "$1" 0 user; then
user[3]="$(dd if=/dev/urandom bs=20 count=1 status=none | xxd -p)"
data_replace secret/users.dat "$1" user
token="${user[3]}"
else
return 1
fi
}
# logs out ALL sessions for user
#
# session_purge(username)
session_purge() {
data_yeet secret/sessions.dat "$1"
}
# _new_session(username, forever) -> $session
_new_session() {
[[ ! "$1" ]] && return 1
[[ "$2" == true ]] && local forever=true || local forever=false
session=("$1" "$(date '+%s')" "$(dd if=/dev/urandom bs=24 count=1 status=none | xxd -p)" "$forever")
data_add secret/sessions.dat session
}
_password_hash() {
[[ ! "$1" ]] && return 1
[[ ! "$2" ]] && return 1
if [[ "${cfg[hash]}" == "argon2id" ]]; then
hash="$(echo -n "$1" | argon2 "$2" -id -e)"
else
hash=$(echo -n $1$2 | sha256sum | cut -c 1-64)
fi
}

View file

@ -2,3 +2,4 @@ sha1sum
sha256sum
curl
iconv
argon2

View file

@ -6,3 +6,4 @@ mktemp
date
dd
file
xxd

View file

@ -28,5 +28,7 @@ function mailsend() {
--upload-file "$tmp" \
--user "${cfg[mail]}:${cfg[mail_password]}"
res=$?
rm "$tmp"
return $res
}

View file

@ -14,16 +14,29 @@
function get_mime() {
local file="$@"
local mime="$(file --mime-type -b "$file")"
if [[ $file == *".htm" || $file == *".html" ]]; then
mimetype="text/html"
elif [[ $file == *".shs" || $file == *".py" || $file == *".php" ]]; then
mimetype=""
elif [[ $file == *".css" ]]; then
mimetype="text/css"
elif [[ $mime == "text/"* && $mime != "text/xml" ]]; then
mimetype="text/plain"
if [[ -f "$file" ]]; then
local mime="$(file --mime-type -b "$file")"
if [[ $file == *".htm" || $file == *".html" || $mime == "text/html" ]]; then
mimetype="text/html"
elif [[ $file == *".shs" || $file == *".py" || $file == *".php" ]]; then
mimetype=""
elif [[ $file == *".css" ]]; then
mimetype="text/css"
elif [[ $mime == "text/"* && $mime != "text/xml" ]]; then
mimetype="text/plain"
# Technically image/x-icon isn't correct for all images (image/ico also exists) but
# it's what browser (firefox (sample size: 1)) seem to have the least problems with.
# image/vnd.microsoft.icon was standardized by the IANA but no microsoft software
# understands it, they use image/ico instead. What a mess.
elif [[ $file == *"favicon.ico" ]]; then
mimetype="image/x-icon"
elif [[ $file == *".ico" || $mime == "image/vnd.microsoft.icon" ]]; then
mimetype="image/ico"
else
mimetype="$mime"
fi
else
mimetype="$mime"
mimetype=""
fi
}

View file

@ -3,17 +3,20 @@
# set_cookie(cookie_name, cookie_content)
function set_cookie() {
r[headers]+="Set-Cookie: $1=$2\r\n"
r[headers]+="Set-Cookie: $1=$2; Path=${cfg[cookie_path]}\r\n"
cookies["$1"]="$2"
}
# set_cookie_permanent(cookie_name, cookie_content)
function set_cookie_permanent() {
r[headers]+="Set-Cookie: $1=$2; Expires=Mon, 26 Jul 2100 22:45:00 GMT\r\n"
r[headers]+="Set-Cookie: $1=$2; Expires=Mon, 26 Jul 2100 22:45:00 GMT; Path=${cfg[cookie_path]}\r\n"
cookies["$1"]="$2"
}
# remove_cookie(cookie_name)
function remove_cookie() {
r[headers]+="Set-Cookie: $1=; Expires=Sat, 02 Apr 2005 20:37:00 GMT\r\n"
unset cookies["$1"]
}
# header(header, header...)
@ -48,12 +51,19 @@ function html_encode() {
# url_encode(string)
function url_encode() {
xxd -ps -u <<< "$1" | tr -d '\n' | sed -E 's/.{2}/%&/g'
echo -n "$1" | xxd -p | tr -d '\n' | sed -E 's/.{2}/%&/g'
}
# url_decode(string)
function url_decode() {
echo -ne "$(sed -E 's/%[0-1][0-9a-f]//g;s/%/\\x/g' <<< "$1")"
# we should probably fail on invalid data here,
# but this function is kinda sorta infallible right now
local t=$'\01'
local a="${1//$t}" # strip all of our control chrs for safety
a="${a//+/ }" # handle whitespace
a="${a//%[A-Fa-f0-9][A-Fa-f0-9]/$t&}" # match '%xx', prepend with token
echo -ne "${a//$t%/\\x}" # replace the above with '\\x' and evaluate
}
# bogus function!

337
src/notORM.sh Executable file
View file

@ -0,0 +1,337 @@
#!/bin/bash
## notORM.sh - clearly, not an ORM.
# basic interface for saving semi-arbitrary data organized in "tables".
## limitations:
# - only for strings (we trim some bytes; see `reserved values` below)
# - currently only supports saving to CSV-with-extra-steps
## function return values:
#
# 0 - success
# 1 - general failure
# 2 - entry not found
# 3 - locked, try again later
# 4 - file not found
## data reserved values:
#
# \x00 - bash yeets it out of existence
# \x01 - delimiter
# \x02 - newline
# \x03 - control chr for sed
delim=$'\01'   # field delimiter within a record
newline=$'\02' # stored stand-in for a literal newline
ctrl=$'\03'    # separator character for generated sed expressions

# TODO: proper locking
# TODO: matching more than one column

# repeat(count, string)
# Prints `string` `count` times, concatenated, with no trailing newline;
# prints nothing for count <= 0.
# Pure bash: the previous printf/seq version forked `seq` and used the
# repeated string as the printf *format*, so strings containing '%' or
# backslashes were misinterpreted.
repeat() {
	local n=$1 out='' i
	for (( i=0; i<n; i++ )); do
		out+="$2"
	done
	printf '%s' "$out"
}

shopt -s expand_aliases
# internal. parses the `{ }` syntax, starting with 2nd arg.
# alias, not a function, because we want to modify the argv of the parent
# (every `shift` here must act on the *caller's* positional parameters;
# inside a function it would only shift the function's own argv).
#
# consumes leading `{ search [column] }` groups from argv and fills two
# local arrays: ${search[@]} and ${column[@]} (column defaults to 0 when
# omitted; an empty search matches any record). parsing stops at the
# first argument that is not `{`, leaving the rest of argv in place for
# the caller (e.g. the callback or result-variable name).
#
# _data_parse_pairs(_, { search, column }, [{ search2, column2 }], ...) -> ${search[@]}, ${column[@]}
alias _data_parse_pairs='
local search=()
local column=()
while shift; do # "shebang reference?" ~ mei
[[ "$1" != "{" ]] && break # yes, we need to match this twice
if [[ "$2" != "}" || "$3" == "}" || "$4" == "}" ]]; then # make sure we dont want to match the bracket
search+=("$2")
else # empty search - just match ANY record
search+=("")
column+=(0)
shift 2
break
fi
if [[ "$3" != "}" ]]; then
column+=("$3")
[[ "$4" != "}" ]] && return 1 # we accept only values in pairs
shift 3
else
column+=(0)
shift 2
if [[ "$2" != "{" ]]; then
shift
break
fi
fi
done
'
# internal function. take search and column, generate a sed matching expr from them
# expects ${search[@]} and ${column[@]} (as filled by _data_parse_pairs)
# to be in scope; appends the generated pattern to the caller's $expr.
# leaks $sorted into the caller's scope.
# data_gen_expr() -> $expr
_data_gen_expr() {
# we need the pairs sorted due to how the sed expr generation works
local IFS=$'\01\n'
local i
# build "column\x01search" records and sort numerically by column, so
# the expression can be emitted walking the columns left to right.
# IFS contains \x01 too, so sorted[] ends up alternating:
# column index at even slots, search value at odd slots.
sorted=($(for (( i=0; i<${#search[@]}; i++ )); do
echo "${column[i]}"$'\01'"${search[i]}"
done | sort -n -t$'\01'))
local last=0
for (( i=0; i<${#sorted[@]}; i=i+2 )); do
if [[ $((sorted[i] - last)) -le 1 ]]; then
# adjacent (or first) column — no ".*" filler needed
expr+="$(_sed_sanitize "${sorted[i+1]}")${delim}"
else
# skip the columns in between with ".*<delim>" wildcards.
# NOTE(review): the wildcard count (column - last) looks like it may
# be off by one once a previous column has been matched — confirm
# against the store layout (cf. the "off-by-one hotfix" commit).
expr+="$(repeat $((sorted[i] - last)) ".*$delim")$(_sed_sanitize "${sorted[i+1]}")${delim}"
fi
last="${sorted[i]}"
done
}
# adds a flat `array` to the `store`.
# a store can be any file, as long as we have r/w access to it and the
# adjacent directory.
#
# 3rd argument is optional, and will specify whether to insert an auto-increment
# ID column. False by default; Setting to true will cause an internal data_iter
# call. The inserted ID column is always the zeroth one.
#
# this function will create some helper files if they don't exist. those
# shouldn't be removed, as other functions may use them for data mangling.
#
# data_add(store, array, [numbered])
data_add() {
[[ ! -v "$2" ]] && return 1 # 2nd arg must name an existing variable
local -n ref="$2"
local res=
local IFS=$'\n'
if [[ ! -f "$1" ]]; then
# first insert: record the column count in the <store>.cols helper
# file (used by data_replace* / data_yeet to build sed expressions)
if [[ "$3" == true ]]; then
res+="0$delim" # first auto-increment ID is 0
echo "$((${#ref[@]}+1))" > "${1}.cols" # +1 accounts for the ID column
else
echo "${#ref[@]}" > "${1}.cols"
fi
elif [[ "$3" == true ]]; then
local data
# iterate with a no-op callback; $data is left holding the last row,
# whose 0th column is the highest ID so far
data_iter "$1" { } : # get last element
local id=$(( ${data[0]}+1 )) # returns 1 on non-int values
res+="$id$delim"
fi
local i
# sanitize each value (strip reserved control bytes, encode newlines)
# and join with the \x01 field delimiter
for i in "${ref[@]}"; do
_trim_control "$i"
res+="$tr$delim"
done
echo "$res" >> "$1" # TODO: some locking
}
# get one entry from store, filtering by search. exit after first result.
# by default uses the 0th column. override with optional `column`.
# returns the data to $res. override with optional `res`
#
# 2nd and 3rd arguments can be repeated, given you enclose each pair
# in curly braces. (e.g. `{ search } { search2 column2 }`)
#
# also can be used as `data_get store { } meow` to match all records
# (an empty search string matches any value in that column)
#
# data_get(store, { search, [column] }, ... [res]]) -> $res / ${!-1}
# data_get(store, search, [column], [res]) -> $res / ${!4}
data_get() {
[[ ! "$2" ]] && return 1
[[ ! -f "$1" ]] && return 4
local IFS=$'\n'
local store="$1"
if [[ "$2" == '{' ]]; then
# the alias shifts argv past the { ... } groups — afterwards $1 is
# the (optional) result variable name
_data_parse_pairs
local -n ref="${1:-res}"
else # compat
local search=("$2")
local column=("${3:-0}")
local -n ref=${4:-res}
fi
local line
while read -r line; do
IFS=$delim
# LOAD-BEARING!!
# without an intermediate variable, bash trims out empty
# objects. expansions be damned
local x="${line//$newline/$'\n'}" # decode stored \x02 back to newlines
ref=($x) # split the row into columns on \x01
local i
for (( i=0; i<${#search[@]}; i++ )); do
# empty search string matches anything in that column
if [[ "${ref[column[i]]}" != "${search[i]}" && "${search[i]}" ]]; then
continue 2
fi
done
return 0 # only reached if an entry matched all constraints
done < "$store"
# NOTE(review): `unset ref` on a nameref unsets the *target* variable,
# clearing any stale result from a previous call — presumably
# intentional; confirm callers rely on it
unset ref
return 2
}
# run `callback` on all entries from `store` that match `search`.
# by default uses the 0th column. override with optional `column`
# the matching row is exposed to the callback via the global ${data[@]}.
# (an empty search string matches any value in that column.)
#
# immediately exits with 255 if the callback function returned 255
# if there were no matches, returns 2
# if the store wasn't found, returns 4
#
# data_iter(store, { search, [column] }, ... callback) -> $data
# data_iter(store, search, callback, [column]) -> $data
data_iter() {
	[[ ! "$3" ]] && return 1
	[[ ! -f "$1" ]] && return 4
	local store="$1"
	local IFS=$'\n'
	# NOTE(review): `local r` shadows the global r[] request array for the
	# duration of the callback — confirm no callback relies on r[] here
	local r=2 # "no matches" until proven otherwise
	if [[ "$2" == '{' ]]; then
		# the alias shifts argv past the { ... } groups; afterwards $1 is
		# the callback name
		_data_parse_pairs
		local callback="$1"
	else # compat
		local callback="$3"
		local search=("$2")
		local column=("${4:-0}")
	fi
	local line # keep the loop variable from leaking into the caller (cf. data_get)
	while read -r line; do
		IFS=$delim
		# LOAD BEARING; see data_get
		local x="${line//$newline/$'\n'}"
		data=($x)
		IFS=
		local i
		for (( i=0; i<${#search[@]}; i++ )); do
			if [[ "${data[column[i]]}" != "${search[i]}" && "${search[i]}" ]]; then
				continue 2
			fi
		done
		"$callback" # only reached if an entry matched all constraints
		[[ $? == 255 ]] && return 255
		r=0
	done < "$store"
	return $r
}
# replace a value in `store` with `value`, filtering by `search`.
# by default uses the 0th column. override with optional `column`
#
# `value` is any string, which will directly replace `search`
#
# data_replace_value(store, search, value, [column])
data_replace_value() {
	[[ ! "$3" ]] && return 1
	[[ ! -f "$1" ]] && return 4
	local column=${4:-0}
	local IFS=' '
	# NOTE: sed in normal (not extended -E mode) requires `\(asdf\)` to make a match!
	# \x03 serves as the s### separator, so sanitized data can't break out.
	if [[ $column == 0 ]]; then
		# match the value at the start of the line, keep the rest (\1)
		local expr="s$ctrl^$(_sed_sanitize "$2")\(${delim}.*\)$ctrl$(_sed_sanitize "$3")\1$ctrl"
	else
		# skip `column` fields, match the value, keep the remaining fields;
		# $(< file) reads the stored column count without forking `cat`
		local expr="s$ctrl^\($(repeat $column ".*$delim")\)$(_sed_sanitize "$2")\($delim$(repeat $(( $(< "${1}.cols") - column - 1 )) ".*$delim")\)"'$'"$ctrl\1$(_sed_sanitize "$3")\2$ctrl"
	fi
	sed -i "$expr" "$1"
}
# replace an entire entry in `store` with `array`, filtering by `search`.
# by default uses the 0th column. override with optional `column`
#
# pass `array` without expanding (`arr`, not `$arr`).
#
# data_replace(store, search, array, [column])
data_replace() {
[[ ! "$3" ]] && return 1
[[ ! -f "$1" ]] && return 4
local store="$1"
local output=
local tr # scratch variable written by _trim_control
## currently broken
# if [[ "$2" == '{' ]]; then
# _data_parse_pairs
#
# local -n ref="$1"
#
# local expr
# _data_gen_expr
# expr="s$ctrl^${expr}.*$ctrl"
# else
local column=${4:-0}
local -n ref="$3"
local IFS=' '
# build the "match the old row" half of the sed s-expression,
# using \x03 as the separator (sanitized data can't contain it)
if [[ $column == 0 ]]; then
local expr="s$ctrl^$(_sed_sanitize "$2")${delim}.*$ctrl"
else
local expr="s$ctrl^$(repeat $column ".*$delim")$(_sed_sanitize "$2")$delim$(repeat $(( $(cat "${store}.cols") - column - 1 )) ".*$delim")"'$'"$ctrl"
fi
# fi
local i
# sanitize + join the replacement row, same encoding as data_add
for i in "${ref[@]}"; do
_trim_control "$i"
output+="$tr$delim"
done
expr+="$(_sed_sanitize_array "$output")$ctrl"
sed -i "$expr" "$store"
}
# deletes entries from the `store` using `search`.
# by default uses the 0th column. override with optional `column`
#
# data_yeet(store, search, [column])
# data_yeet(store, { search, [column] }, ...)
data_yeet() {
[[ ! "$2" ]] && return 1
[[ ! -f "$1" ]] && return 4
local store="$1"
if [[ "$2" == '{' ]]; then
# multi-pair form: build a column-aware match from ${search[@]}/${column[@]}
_data_parse_pairs
local expr
_data_gen_expr
expr="/^${expr}.*/d"
else # compat
# NOTE(review): both branches below use "$2" directly; $search is
# assigned but never read — harmless (same value), but inconsistent
local search="$2"
local column="${3:-0}"
local IFS=' '
if [[ $column == 0 ]]; then
local expr="/^$(_sed_sanitize "$2")${delim}.*/d"
else
# skip `column` fields, then require the match in that exact field
local expr="/^$(repeat $column ".*$delim")$(_sed_sanitize "$2")$delim$(repeat $(( $(cat "${store}.cols") - column - 1 )) ".*$delim")"'$'"/d"
fi
fi
sed -i "$expr" "$store"
}
# hex-escape a value so it is fully literal inside a sed expression:
# the input is first run through _trim_control (reserved bytes removed,
# newlines encoded), then every byte becomes \xHH, so no character can
# act as a regex metacharacter or as our \x03 separator.
# _sed_sanitize(string) -> stdout
_sed_sanitize() {
	_trim_control "$1"
	# printf instead of `echo -n`: values starting with -e/-n/-E would
	# otherwise be swallowed as echo options
	printf '%s' "$tr" | xxd -p | tr -d '\n' | sed 's/../\\x&/g'
}
# hex-escape an already-encoded row (see _sed_sanitize); the input is
# trusted to contain \x01 delimiters, so it is NOT run through
# _trim_control again. Every byte becomes \xHH for sed.
# _sed_sanitize_array(string) -> stdout
_sed_sanitize_array() {
	# printf instead of `echo -n`: values starting with -e/-n/-E would
	# otherwise be swallowed as echo options
	printf '%s' "$1" | xxd -p | tr -d '\n' | sed 's/../\\x&/g'
}
# _trim_control(string) -> $tr
# Strips the three reserved control bytes (\x01..\x03) from a value and
# encodes literal newlines as \x02, so a record always stays on one line.
# Result is returned via the global $tr.
_trim_control() {
	local s="$1"
	s="${s//$delim}"          # drop 0x01 (field delimiter)
	s="${s//$newline}"        # drop 0x02 (newline stand-in)
	s="${s//$ctrl}"           # drop 0x03 (sed separator)
	tr="${s//$'\n'/$newline}" # encode real newlines as 0x02
}
shopt -u expand_aliases # back to the default

View file

@ -3,10 +3,10 @@ Connection: Upgrade
Upgrade: WebSocket
${cfg[extra_headers]}"
if [[ ${r[websocket_key]} != '' ]]; then
accept=$(echo -ne $(printf "${r[websocket_key]}""258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | sha1sum | sed 's/ //g;s/-//g;s/.\{2\}/\\x&/g') | base64)
accept=$(echo -ne $(echo "${r[websocket_key]}""258EAFA5-E914-47DA-95CA-C5AB0DC85B11" | sha1sum | sed 's/ //g;s/-//g;s/.\{2\}/\\x&/g') | base64)
echo "Sec-WebSocket-Accept: "$accept
fi
printf "\r\n\r\n"
echo -e "\r\n\r\n"
#echo "Laura is cute <3"
#WebSocket-Location: ws://localhost:1337/

View file

@ -1,21 +1,29 @@
# TODO: move parts of this into server.sh, or rename the file appropriately
# __headers(end)
# Emits the HTTP/1.0 status line, the collected r[headers] and
# cfg[extra_headers], and terminates the header block with a blank line
# unless `end` is set to false (caller then appends e.g. content-type).
function __headers() {
	if [[ "${cfg[unbuffered]}" != true ]]; then
		if [[ "${r[headers]}" == *'Location'* ]]; then # override for redirects
			echo -ne "HTTP/1.0 302 aaaaa\r\n"
		elif [[ "${r[status]}" == '200' || "${r[status]}" == '212' ]]; then # normal or router, should just return 200
			echo -ne "HTTP/1.0 200 OK\r\n"
		else # changed by the user in the meantime :)
			[[ ! "${r[status]}" ]] && r[status]=500 # ... if they left it blank
			echo -ne "HTTP/1.0 ${r[status]} meow\r\n"
		fi
		[[ "${r[headers]}" != '' ]] && echo -ne "${r[headers]}"
		echo -ne "${cfg[extra_headers]}\r\n"
	else
		echo "uh oh - we're running unbuffered" > /dev/stderr
	fi
	if [[ ${r[status]} == 200 ]]; then
		# static file responses get an auto-detected content-type
		get_mime "${r[uri]}"
		[[ "$mimetype" != '' ]] && echo -ne "content-type: $mimetype\r\n"
	fi
	[[ "$1" != false ]] && echo -ne "\r\n"
}
if [[ ${r[status]} == 212 ]]; then
@ -24,30 +32,20 @@ if [[ ${r[status]} == 212 ]]; then
else
temp=$(mktemp)
source "${r[view]}" > $temp
__headers
__headers false
get_mime "$temp"
# Defaults to text/plain for things it doesn't know, eg. CSS
[[ "$mimetype" != 'text/plain' ]] && echo -ne "content-type: $mimetype\r\n"
echo -ne "\r\n"
cat $temp
rm $temp
fi
elif [[ "${cfg[php_enabled]}" == true && "${r[uri]}" =~ ".php" ]]; then
temp=$(mktemp)
php "${r[uri]}" "$(get_dump)" "$(post_dump)" > $temp
__headers
cat $temp
rm $temp
elif [[ "${cfg[python_enabled]}" == true && "${r[uri]}" =~ ".py" ]]; then
temp=$(mktemp)
python "${r[uri]}" "$(get_dump)" "$(post_dump)" > $temp
__headers
cat $temp
rm $temp
elif [[ "${r[uri]}" =~ \.${cfg[extension]}$ ]]; then
temp=$(mktemp)
source "${r[uri]}" > $temp
__headers
if [[ "${cfg[encoding]}" != '' ]]; then
if [[ "${cfg[encoding]}" ]]; then
iconv $temp -f UTF-8 -t "${cfg[encoding]}"
else
cat $temp

View file

@ -1,3 +1,3 @@
printf "HTTP/1.0 401 Unauthorized
echo -ne "HTTP/1.0 401 Unauthorized
WWW-Authenticate: Basic realm=\"${cfg[auth_realm]}\"
${cfg[extra_headers]}\r\n"

View file

@ -1,4 +1,5 @@
printf "HTTP/1.0 403 Forbidden
echo -ne "HTTP/1.0 403 Forbidden
content-type: text/html
${cfg[extra_headers]}\r\n\r\n"
source templates/head.sh
echo "<h1>403: You've been naughty</h1>"

View file

@ -1,4 +1,5 @@
printf "HTTP/1.0 404 Not Found
echo -ne "HTTP/1.0 404 Not Found
content-type: text/html
${cfg[extra_headers]}\r\n\r\n"
source templates/head.sh
echo "<h1>404 Not Found</h1>"

View file

@ -1,15 +1,16 @@
printf "HTTP/1.0 200 OK
echo -ne "HTTP/1.0 200 OK
content-type: text/html
${cfg[extra_headers]}\r\n\r\n"
source templates/head.sh
printf "<h1>Index of $([[ ${r[url]} == '' ]] && echo '/' || echo $(html_encode ${r[url]}))</h1>"
echo "<h1>Index of $([[ ${r[url]} == '' ]] && echo '/' || echo $(html_encode ${r[url]}))</h1>"
if [[ ${cookies[username]} != '' ]]; then
echo "Logged in as $(html_encode ${cookies[username]})"
fi
printf "<table>
echo "<table>
<tr>
<th>File</th>
<th>Size</th>
@ -24,10 +25,10 @@ for i in $(ls ${r[uri]}); do
unset IFS
stats=($(ls -hld "${r[uri]}/$i")) # -hld stands for Half-Life Dedicated
if [[ -d "${r[uri]}"'/'"$i" ]]; then
printf "<tr><td><a href='$(html_encode "${r[url]}/$i/")'>$(html_encode "$i")</a></td><td>&lt;DIR&gt;</td><td>${stats[5]} ${stats[6]} ${stats[7]}</td></tr>"
echo "<tr><td><a href='$(html_encode "${r[url]}/$i/")'>$(html_encode "$i")</a></td><td>&lt;DIR&gt;</td><td>${stats[5]} ${stats[6]} ${stats[7]}</td></tr>"
else
printf "<tr><td><a href='$(html_encode "${r[url]}/$i")'>$(html_encode "$i")</a></td><td>${stats[4]}B</td><td>${stats[5]} ${stats[6]} ${stats[7]}</td></tr>"
echo "<tr><td><a href='$(html_encode "${r[url]}/$i")'>$(html_encode "$i")</a></td><td>${stats[4]}B</td><td>${stats[5]} ${stats[6]} ${stats[7]}</td></tr>"
fi
done
printf "</table><p><i>HTTP.sh server on $(html_encode ${r[host]})</i></p><p>laura is cute</p>"
echo "</table><p><i>HTTP.sh server on $(html_encode ${r[host]})</i></p><p>meow!</p>"

View file

@ -1,22 +0,0 @@
#!/usr/bin/env bash
url="$(url_decode "$(url_decode "$(sed -E 's/\?/<2F><>Lun4_iS_CuTe<54>/;s/^(.*)<29><>Lun4_iS_CuTe<54>//;s/'"${cfg[proxy_param]}"'=//g' <<< "${r[url]}")")")"
if [[ $(grep -Poh "${cfg[proxy_url]}" <<< "$url") == '' ]]; then
exit 1
fi
host="$(sed -E 's@http(s|)://@@;s@/.*@@' <<< "$url")"
proxy_url="$(sed -E 's/\?.*//g' <<< "${r[url]}")"
headers="$(tr '\r' '\n' <<< "${r[req_headers]}")"
headers+=$'\n'
#params=()
while read line; do
if [[ "$line" != "GET"* && "$line" != "Host:"* && "$line" != '' ]]; then
args+=('-H')
args+=("$line")
fi
done <<< "$headers"
curl --http1.1 "$url" "${args[@]}" -D /dev/stdout | grep -aiv "Transfer-Encoding: chunked" | sed -E '/Location/s/\?/%3f/g;/Location/s/\&/%26/g;/Location/s/\:/%3a/g;/Location/s@/@%2f@g;s@Location%3a @Location: '"$proxy_url"'?'"${cfg[proxy_param]}"'=@'

View file

@ -1,4 +1,12 @@
#!/usr/bin/env bash
# If $1 is set to true, enable the call trace
if [[ "$1" == true ]]; then
set -x
fi
shopt -s extglob
source src/version.sh
source config/master.sh
source src/mime.sh
source src/misc.sh
@ -6,6 +14,7 @@ source src/account.sh
source src/mail.sh
source src/route.sh
source src/template.sh
source src/notORM.sh # to be split off HTTP.sh at some point :^)
[[ -f "${cfg[namespace]}/config.sh" ]] && source "${cfg[namespace]}/config.sh"
declare -A r # current request / response
@ -17,100 +26,136 @@ declare -A params # parsed router data
r[status]=210 # Mommy always said that I was special
r[req_headers]=''
r[payload_type]=none # placeholder
post_length=0
while read -r param; do
r[req_headers]+="$param"
param_l="${param,,}" # lowercase
name=''
value=''
data=''
# start reading the stream here instead of the loop below;
# this way, we can detect if the connection is even valid HTTP.
# we're reading up to 8 characters and waiting for a space.
read -d' ' -r -n8 param
shopt -s nocasematch # only for initial parse; saves us *many* sed calls
if [[ "${param,,}" =~ ^(get|post|patch|put|delete|meow) ]]; then # TODO: OPTIONS, HEAD
r[method]="${param%% *}"
read -r param
[[ "${r[method],,}" != "get" ]] && r[post]=true
r[url]="$(sed -E 's/^ *//;s/HTTP\/[0-9]+\.[0-9]+//;s/ //g;s/\/*\r//g;s/\/\/*/\//g' <<< "$param")"
unset IFS
if [[ "$param_l" == $'\015' ]]; then
break
elif [[ "$param_l" == *"content-length:"* ]]; then
r[content_length]="$(sed 's/Content-Length: //i;s/\r//' <<< "$param")"
elif [[ "$param_l" == *"content-type:"* ]]; then
r[content_type]="$(sed 's/Content-Type: //i;s/\r//' <<< "$param")"
if [[ "${r[content_type]}" == *"multipart/form-data"* ]]; then
tmpdir=$(mktemp -d)
fi
if [[ "${r[content_type]}" == *"boundary="* ]]; then
r[content_boundary]="$(sed -E 's/(.*)boundary=//i;s/\r//;s/ //' <<< "${r[content_type]}")"
fi
elif [[ "$param_l" == *"host:"* ]]; then
r[host]="$(sed 's/Host: //i;s/\r//;s/\\//g' <<< "$param")"
r[host_portless]="$(sed -E 's/:(.*)$//' <<< "${r[host]}")"
if [[ -f "config/$(basename -- ${r[host]})" ]]; then
source "config/$(basename -- ${r[host]})"
elif [[ -f "config/$(basename -- ${r[host_portless]})" ]]; then
source "config/$(basename -- ${r[host_portless]})"
fi
elif [[ "$param_l" == *"user-agent:"* ]]; then
r[user_agent]="$(sed 's/User-Agent: //i;s/\r//;s/\\//g' <<< "$param")"
elif [[ "$param_l" == *"upgrade:"* && $(sed 's/Upgrade: //i;s/\r//' <<< "$param") == "websocket" ]]; then
r[status]=101
elif [[ "$param_l" == *"sec-websocket-key:"* ]]; then
r[websocket_key]="$(sed 's/Sec-WebSocket-Key: //i;s/\r//' <<< "$param")"
elif [[ "$param_l" == *"authorization: basic"* ]]; then
login_simple "$param"
elif [[ "$param_l" == *"authorization: bearer"* ]]; then
r[authorization]="$(sed 's/Authorization: Bearer //i;s/\r//' <<< "$param")"
elif [[ "$param_l" == *"cookie: "* ]]; then
IFS=';'
for i in $(IFS=' '; echo "$param" | sed -E 's/Cookie: //i;;s/%/\\x/g'); do
name="$((grep -Poh "[^ ].*?(?==)" | head -1) <<< $i)"
value="$(sed "s/$name=//;s/^ //;s/ $//" <<< $i)"
cookies[$name]="$(echo -e $value)"
done
elif [[ "$param_l" == *"range: bytes="* ]]; then
r[range]="$(sed 's/Range: bytes=//;s/\r//' <<< "$param")"
elif [[ "$param" == *"GET "* ]]; then
r[url]="$(echo -ne "$(url_decode "$(sed -E 's/GET //;s/HTTP\/[0-9]+\.[0-9]+//;s/ //g;s/\/*\r//g;s/\/\/*/\//g' <<< "$param")")")"
data="$(sed -E 's/\?/<2F><>Lun4_iS_CuTe<54>/;s/^(.*)<29><>Lun4_iS_CuTe<54>//;s/\&/ /g' <<< "${r[url]}")"
if [[ "$data" != "${r[url]}" ]]; then
data="$(sed -E 's/\?/<2F><>Lun4_iS_CuTe<54>/;s/^(.*)<29><>Lun4_iS_CuTe<54>//' <<< "${r[url]}")"
IFS='&'
for i in $data; do
name="$(sed -E 's/\=(.*)$//' <<< "$i")"
value="$(sed "s/$name\=//" <<< "$i")"
get_data[$name]="$value"
done
fi
elif [[ "$param" == *"POST "* ]]; then
r[url]="$(echo -ne "$(url_decode "$(sed -E 's/POST //;s/HTTP\/[0-9]+\.[0-9]+//;s/ //g;s/\/*\r//g;s/\/\/*/\//g' <<< "$param")")")"
r[post]=true
# below shamelessly copied from GET, should be moved to a function
data="$(sed -E 's/\?/<2F><>Lun4_iS_CuTe<54>/;s/^(.*)<29><>Lun4_iS_CuTe<54>//;s/\&/ /g' <<< "${r[url]}")"
if [[ "$data" != "${r[url]}" ]]; then
data="$(sed -E 's/\?/<2F><>Lun4_iS_CuTe<54>/;s/^(.*)<29><>Lun4_iS_CuTe<54>//' <<< "${r[url]}")"
IFS='&'
for i in $data; do
name="$(sed -E 's/\=(.*)$//' <<< "$i")"
value="$(sed "s/$name\=//" <<< "$i")"
get_data[$name]="$value"
done
fi
if [[ "${r[url]}" == *'?'* ]]; then
while read -d'&' i; do
name="${i%%=*}"
if [[ "$name" ]]; then
value="${i#*=}"
get_data[$name]="$(url_decode "$value")"
fi
done <<< "${r[url]#*\?}&"
fi
else
exit 1 # TODO: throw 400 here
fi
declare -A headers
IFS=$'\n'
# continue with reading the headers
while read -r param; do
[[ "$param" == $'\r' ]] && break
[[ "$param" != *":"* ]] && exit 1 # TODO: throw 400
IFS=':'
read -ra header_pair <<< "$param"
header_key="${header_pair[0],,}" # To lowercase...
header_key="${header_key##*( )}" # ...trim leading whitespace...
header_key="${header_key%%*( )}" # ...and trailing whitespaces
header_value="${header_pair[@]:1}"
header_value="${header_value##*( )}" # Trim leading whitespace...
headers["${header_key}"]="${header_value%%*( )*($'\r')}" # ...and trailing whitespace and \r
done
unset IFS
r[uri]="$(realpath "${cfg[namespace]}/${cfg[root]}$(sed -E 's/\?(.*)$//' <<< "${r[url]}")")"
[[ -d "${r[uri]}/" ]] && pwd="${r[uri]}" || pwd=$(dirname "${r[uri]}")
# TODO: remove deprecated fields below
if [[ $NCAT_LOCAL_PORT == '' ]]; then
r[content_length]="${headers["content-length"]}"
r[user_agent]="${headers["user-agent"]}"
r[websocket_key]="${headers["sec-websocket-key"]}"
r[req_headers]="$headers"
r[url]="$(url_decode "${r[url]}")" # doing this here for.. reasons
r[uri]="$(realpath "${cfg[namespace]}/${cfg[root]}/$(sed -E 's/\?(.*)$//' <<< "${r[url]}")")"
r[url_clean]="${r[url]%\?*}"
[[ -d "${r[uri]}/" ]] && pwd="${r[uri]}" || pwd=$(dirname "${r[uri]}") # dead code
if [[ -n "${headers["content-type"]}" ]]; then
IFS=';'
read -ra content_type <<< "${headers["content-type"]}"
r[content_type]="${content_type[0]}"
if [[ "${r[content_type]}" == "application/x-www-form-urlencoded" ]]; then
r[payload_type]="urlencoded" # TODO: do we want to have a better indicator for this?
elif [[ "${r[content_type]}" == "multipart/form-data" ]]; then
r[payload_type]="multipart"
tmpdir=$(mktemp -d)
if [[ "${r[content_type]}" == "boundary="* ]]; then
boundary="${content_type[@]:1}"
r[content_boundary]="${boundary##*boundary=}"
fi
fi
unset IFS
fi
if [[ -n "${headers["host"]}" ]]; then
r[host]="${headers["host"]}"
r[host_portless]="${headers["host"]%%:*}"
if [[ -f "config/$(basename -- ${r[host]})" ]]; then
source "config/$(basename -- ${r[host]})"
elif [[ -f "config/$(basename -- ${r[host_portless]})" ]]; then
source "config/$(basename -- ${r[host_portless]})"
fi
fi
if [[ "${headers["connection"]}" == "upgrade" && "${headers["upgrade"]}" == "websocket" ]]; then
r[status]=101
fi
shopt -u nocasematch
if [[ -n "${headers["authorization"]}" ]]; then
if [[ "${headers["authorization"],,}" == "basic"* ]]; then
base64="${headers["authorization"]#[Bb]asic*( )}"
login_simple "${base64##*( )}"
elif [[ "${headers["authorization"],,}" == "bearer"* ]]; then
bearer="${headers["authorization"]#[Bb]earer*( )}"
r[authorization]="${bearer##*( )}"
fi
fi
if [[ -n "${headers["cookie"]}" ]]; then
while read -r -d';' cookie_pair; do
cookie_pair="$(url_decode "$cookie_pair")"
name="${cookie_pair%%=*}"
if [[ -n "$name" ]]; then
# get value, strip potential whitespace
value="${cookie_pair##*=}"
value="${value##*( )}"
value="${value%%*( )}"
cookies["$name"]="$value"
fi
done <<< "${headers["cookie"]};" # This hack is beyond me, just trust the process
fi
if [[ "${headers["range"]}" == "bytes"* ]]; then
r[range]="${headers["range"]#*=}"
fi
if [[ ${headers["x-forwarded-for"]} ]]; then
r[proto]='http'
r[ip]="${headers["x-forwarded-for"]%%[, ]*}"
elif [[ -z "$NCAT_LOCAL_PORT" ]]; then
r[proto]='http'
r[ip]="NCAT_IS_BORK"
else
@ -123,18 +168,18 @@ echo "$(date) - IP: ${r[ip]}, PROTO: ${r[proto]}, URL: ${r[url]}, GET_data: ${ge
[[ -f "${cfg[namespace]}/routes.sh" ]] && source "${cfg[namespace]}/routes.sh"
if [[ ${r[status]} != 101 ]]; then
clean_url="$(sed -E 's/\?.*//' <<< "${r[url]}")"
for (( i=0; i<${#route[@]}; i=i+3 )); do
if [[ "$(grep -Poh "^${route[$((i+1))]}$" <<< "$clean_url")" != "" ]] || [[ "$(grep -Poh "^${route[$((i+1))]}$" <<< "$clean_url/")" != "" ]]; then
if [[ "$(grep -Poh "^${route[$((i+1))]}$" <<< "${r[url_clean]}")" != "" ]] || [[ "$(grep -Poh "^${route[$((i+1))]}$" <<< "${r[url_clean]}/")" != "" ]]; then
r[status]=212
r[view]="${route[$((i+2))]}"
IFS='/'
url=(${route[$i]})
url_=(${r[url]})
url_=(${r[url_clean]})
unset IFS
for (( j=0; j<${#url[@]}; j++ )); do
if [[ ${url_[$j]} != '' ]]; then
params[$(sed 's/://' <<< "${url[$j]}")]="${url_[$j]}"
# TODO: think about the significance of this if really hard when i'm less tired
if [[ ${url_[$j]} != '' && ${url[$j]} == ":"* ]]; then
params[${url[$j]/:/}]="${url_[$j]}"
fi
done
break
@ -144,7 +189,7 @@ if [[ ${r[status]} != 101 ]]; then
if [[ ${r[status]} != 212 ]]; then
if [[ -a "${r[uri]}" && ! -r "${r[uri]}" ]]; then
r[status]=403
elif [[ "$(echo -n "${r[uri]}")" != "$(realpath "${cfg[namespace]}/${cfg[root]}")"* ]]; then
elif [[ "${r[uri]}" != "$(realpath "${cfg[namespace]}/${cfg[root]}")"* ]]; then
r[status]=403
elif [[ -f "${r[uri]}" ]]; then
r[status]=200
@ -171,10 +216,6 @@ if [[ "${cfg[auth_required]}" == true && "${r[authorized]}" != true ]]; then
r[status]=401
fi
if [[ "${cfg[proxy]}" == true ]]; then
r[status]=211
fi
if [[ "${r[post]}" == true ]] && [[ "${r[status]}" == 200 || "${r[status]}" == 212 ]]; then
# This whole ordeal is here to prevent passing binary data as a variable.
# I could have done it as an array, but this solution works, and it's
@ -184,14 +225,14 @@ if [[ "${r[post]}" == true ]] && [[ "${r[status]}" == 200 || "${r[status]}" ==
declare post_multipart
tmpfile=$(mktemp -p $tmpdir)
dd iflag=fullblock of=$tmpfile ibs=${r[content_length]} count=1 obs=1M
delimeter_len=$(echo -n "${r[content_boundary]}"$'\015' | wc -c)
boundaries_list=$(echo -ne $(grep $tmpfile -ao -e ${r[content_boundary]} --byte-offset | sed -E 's/:(.*)//g') | sed -E 's/ [0-9]+$//')
for i in $boundaries_list; do
tmpout=$(mktemp -p $tmpdir)
dd iflag=fullblock if=$tmpfile ibs=$(($i+$delimeter_len)) obs=1M skip=1 | while true; do
read line
read -r line
if [[ $line == $'\015' ]]; then
cat - > $tmpout
break
@ -205,22 +246,25 @@ if [[ "${r[post]}" == true ]] && [[ "${r[status]}" == 200 || "${r[status]}" ==
done
rm $tmpfile
else
read -N "${r[content_length]}" data
IFS='&'
for i in $(tr -d '\n' <<< "$data"); do
name="$(sed -E 's/\=(.*)$//' <<< "$i")"
param="$(sed "s/$name\=//" <<< "$i")"
post_data[$name]="$param"
done
unset IFS
read -r -N "${r[content_length]}" data
if [[ "${r[payload_type]}" == "urlencoded" ]]; then
unset IFS
while read -r -d'&' i; do
name="${i%%=*}"
value="${i#*=}"
post_data[$name]="$(url_decode "$value")"
echo post_data[$name]="$value" >/dev/stderr
done <<< "${data}&"
else
# this is fine?
post_data[0]="${data%\&}"
fi
fi
fi
if [[ ${r[status]} == 210 && ${cfg[autoindex]} == true ]]; then
source "src/response/listing.sh"
elif [[ ${r[status]} == 211 ]]; then
source "src/response/proxy.sh"
elif [[ ${r[status]} == 200 || ${r[status]} == 212 ]]; then
source "src/response/200.sh"
elif [[ ${r[status]} == 401 ]]; then

View file

@ -13,6 +13,7 @@ function render() {
local tmp=$(mktemp)
local key
IFS=$'\n'
for key in ${!ref[@]}; do
if [[ "$key" == "_"* ]]; then # iter mode
local subtemplate=$(mktemp)
@ -20,7 +21,7 @@ function render() {
echo 's'$'\02''\{\{start '"$key"'\}\}.*\{\{end '"$key"'\}\}'$'\02''\{\{'"$key"'\}\}'$'\02'';' >> "$tmp"
local -n asdf=${ref[$key]}
local -n asdf=${ref["$key"]}
local j
local value=''
for j in ${!asdf[@]}; do
@ -32,29 +33,60 @@ function render() {
echo 's'$'\02''\{\{'"$key"'\}\}'$'\02'''"$value"''$'\02'';' >> "$tmp"
rm "$subtemplate"
elif [[ "$key" == "@"* && "${ref[$key]}" != '' ]]; then
local value="$(sed -E 's/\&/<2F>UwU<77>/g' <<< "${ref[$key]}")"
echo 's'$'\02''\{\{\'"$key"'\}\}'$'\02'''"$value"''$'\02''g;' >> "$tmp"
elif [[ "$key" == "@"* && "${ref["$key"]}" != '' ]]; then
local value="$(sed -E 's/\&/<2F>UwU<77>/g' <<< "${ref["$key"]}")"
echo 's'$'\02''\{\{\'"$key"'\}\}'$'\02'''"$value"''$'\02''g;' >> "$tmp" #'
elif [[ "$key" == '?'* ]]; then
local _key="\\?${key/?/}"
local subtemplate=$(mktemp)
echo 's'$'\02''\{\{start '"$_key"'\}\}((.*)\{\{else '"$_key"'\}\}.*\{\{end '"$_key"'\}\}|(.*)\{\{end '"$_key"'\}\})'$'\02''\2\3'$'\02'';' >> "$subtemplate"
# TODO: check if this is needed?
# the code below makes sure to resolve the conditional blocks
# *before* anything else. I can't think of *why* this is needed
# right now, but I definitely had a reason in this. Question is, what reason.
cat <<< $(cat "$subtemplate" "$tmp") > "$tmp" # call that cat abuse
rm "$subtemplate"
elif [[ "${ref[$key]}" != "" ]]; then
echo "VALUE: ${ref[$key]}" > /dev/stderr
elif [[ "${ref["$key"]}" != "" ]]; then
echo "VALUE: ${ref["$key"]}" > /dev/stderr
if [[ "$3" != true ]]; then
local value="$(html_encode <<< "${ref[$key]}" | sed -E 's/\&/<2F>UwU<77>/g')"
local value="$(html_encode <<< "${ref["$key"]}" | sed -E 's/\&/<2F>UwU<77>/g')"
else
local value="$(sed -E 's/\\\\/<2F>OwO<77>/g;s/\\//g;s/<2F>OwO<77>/\\/g' <<< "${ref[$key]}" | html_encode | sed -E 's/\&/<2F>UwU<77>/g')"
local value="$(echo -n "${ref["$key"]}" | tr -d $'\01'$'\02' | tr $'\n' $'\01' | sed -E 's/\\\\/<2F>OwO<77>/g;s/\\//g;s/<2F>OwO<77>/\\/g' | html_encode | sed -E 's/\&/<2F>UwU<77>/g')"
fi
echo 's'$'\02''\{\{\.'"$key"'\}\}'$'\02'''"$value"''$'\02''g;' >> "$tmp"
else
echo 's'$'\02''\{\{\.'"$key"'\}\}'$'\02'$'\02''g;' >> "$tmp"
fi
done
unset IFS
# process file includes;
# achtung: even though this is *after* the main loop, it actually executes sed reaplces *before* it;
# recursion is currently unsupported here, i feel like it may break things?
if [[ "$template" == *'{{#'* && "$3" != true ]]; then
local subtemplate=$(mktemp)
while read key; do
# below check prevents the loop loading itself as a template.
# this is possibly not enough to prevent all recursions, but
# i see it as a last-ditch measure. so it'll do here.
if [[ "$file" == "$2" ]]; then
echo 's'$'\02''\{\{\#'"$key"'\}\}'$'\02''I cowardly refuse to endlessly recurse\!'$'\02''g;' >> "$subtemplate"
elif [[ -f "$key" ]]; then
echo 's'$'\02''\{\{\#'"$key"'\}\}'$'\02'"$(tr -d $'\01'$'\02' < "$key" | tr $'\n' $'\01' | sed 's/\&/<2F>UwU<77>/g')"$'\02''g;' >> "$subtemplate"
_template_find_special_uri "$(cat "$key")"
fi
done <<< "$(grep -Poh '{{#.*?}}' <<< "$template" | sed 's/{{#//;s/}}$//')"
cat <<< $(cat "$subtemplate" "$tmp") > "$tmp"
rm "$subtemplate"
fi
_template_find_special_uri "$template"
_template_gen_special_uri >> "$tmp"
if [[ "$3" != true ]]; then # are we recursing?
cat "$tmp" | tr '\n' $'\01' | sed -E 's/'$'\02'';'$'\01''/'$'\02'';/g;s/'$'\02''g;'$'\01''/'$'\02''g;/g' > "${tmp}_"
@ -69,6 +101,38 @@ function render() {
fi
}
_template_uri_list=()
# internal function that finds all occurrences of the special `{{-uri-N}}` tag.
# here to also make it run on subtemplates
#
# _template_find_special_uri(tpl_string)
_template_find_special_uri() {
local IFS=$'\n'
local line
if [[ "$1" == *'{{-uri'* ]]; then
while read line; do
_template_uri_list+=("${line//[^0-9]}")
done <<< "$(grep -Poh '{{-uri-[0-9]*}}' <<< "$1")"
fi
}
# internal function that takes the output from _template_find_special_uri and
# transforms it into sed exprs
#
# _template_gen_special_uri() -> stdout
_template_gen_special_uri() {
local IFS=$'\n'
local num
local uri
# {{-uri-<num>}}, where num is amount of slashed parts to include
sort <<< ${_template_uri_list[*]} | uniq | while read num; do
uri="$(grep -Poh '^(/.*?){'"$((num+1))"'}' <<< "${r[url_clean]}/")"
echo 's'$'\02''\{\{-uri-'"$num"'\}\}'$'\02'"$uri"$'\02''g;'
done
# for replacing plain {{-uri}} without a number
echo 's'$'\02''\{\{-uri\}\}'$'\02'"${r[url_clean]}"$'\02''g;'
}
# render_unsafe(array, template_file)
function render_unsafe() {
local template="$(cat "$2")"
@ -77,14 +141,14 @@ function render_unsafe() {
for key in ${!ref[@]}; do
if [[ "$key" == "_"* ]]; then # iter mode
# grep "start _test" -A99999 | grep "end _test" -B99999
local -n item_array=${ref[$key]}
local -n item_array=${ref["$key"]}
local value
for ((_i = 0; _i < ${#item_array[@]}; _i++)); do
value+="$(xxd -p <<< "${item_array[$_i]}" | tr -d '\n' | sed -E 's/../\\x&/g')"
done
echo 's/\{\{'"$key"'\}\}/'"$value"'/g' >> "$tmp"
else
local value="$(xxd -p <<< "${ref[$key]}" | tr -d '\n' | sed -E 's/../\\x&/g')"
local value="$(xxd -p <<< "${ref["$key"]}" | tr -d '\n' | sed -E 's/../\\x&/g')"
echo 's/\{\{\.'"$key"'\}\}/'"$value"'/g' >> "$tmp"
fi
done
@ -108,13 +172,12 @@ function nested_declare() {
# nested_add(ref, array)
function nested_add() {
local nested_id=$(_nested_random)
declare -n nested_ref=$2
declare -g -A _$nested_id
# poor man's array copy
for k in ${!nested_ref[@]}; do
declare -g -A _$nested_id[$k]="${nested_ref[$k]}"
done
local a
a="$(declare -p "$2")"
# pain
eval "${a/ $2=/ -g _$nested_id=}"
local -n ref=$1
ref+=("$nested_id")
@ -123,5 +186,5 @@ function nested_add() {
# nested_get(ref, i)
function nested_get() {
local -n ref=$1
declare -g -n res=_${ref[$2]}
declare -g -n res=_${ref["$2"]}
}

2
src/version.sh Normal file
View file

@ -0,0 +1,2 @@
#!/usr/bin/env bash
HTTPSH_VERSION=0.97

View file

@ -1,25 +1,35 @@
#!/usr/bin/env bash
# worker.sh - setup and control of workers
# worker_add(name, interval)
function worker_add() {
if [[ -x "${cfg[namespace]}/workers/$1/worker.sh" ]]; then
echo "[WRKR] adding worker $1"
while true; do
source "${cfg[namespace]}/workers/$1/worker.sh"
sleep $2
if [[ $(cat "${cfg[namespace]}/workers/$1/control") == "die" ]]; then
echo "" > ${cfg[namespace]}/workers/$1/control
while true; do
if [[ $(cat "${cfg[namespace]}/workers/$1/control") == "run" ]]; then
echo "" > "${cfg[namespace]}/workers/$1/control"
break
fi
sleep $2
done
fi
done &
{
shopt -s extglob
x() { declare -p data;} # for notORM
source config/master.sh
source src/account.sh
source src/mail.sh
source src/mime.sh
source src/misc.sh
source src/notORM.sh
source src/template.sh
while true; do
source "${cfg[namespace]}/workers/$1/worker.sh"
sleep $2
if [[ $(cat "${cfg[namespace]}/workers/$1/control") == "die" ]]; then
echo "" > ${cfg[namespace]}/workers/$1/control
while true; do
if [[ $(cat "${cfg[namespace]}/workers/$1/control") == "run" ]]; then
echo "" > "${cfg[namespace]}/workers/$1/control"
break
fi
sleep $2
done
fi
done
} &
else
echo "[WRKR] Broken config - workers/$1/worker.sh does not exist, or is not executable?"
fi

17
tests/00-prepare.sh Normal file
View file

@ -0,0 +1,17 @@
#!/bin/bash
prepare() {
	# bootstrap an app directory on first run, then start the server in background
	if [[ ! -d app ]]; then
		./http.sh init
	fi
	./http.sh >/dev/null &
}
tst() {
	# Poll for up to ~5s until something LISTENs on port 1337.
	# Fixes: grep -q instead of capturing output into [[ ]]; local loop var.
	local i
	for i in {1..10}; do
		if ss -tulnap | grep LISTEN | grep -q 1337; then
			return 0
		fi
		sleep 0.5
	done
	# 255 is the framework's fatal status (see on_fatal in tst.sh)
	return 255
}

174
tests/01-http-basic.sh Normal file
View file

@ -0,0 +1,174 @@
#!/bin/bash
server_output() {
	# a handler's stdout should become the response body verbatim
	match="meow"
	prepare() {
		printf '%s\n' '#!/bin/bash' 'echo meow' > app/webroot/meow.shs
	}
	tst() { curl -s localhost:1337/meow.shs; }
}
server_get_param() {
	# query-string parameters must land in get_data[]
	match="nyaa"
	prepare() {
		printf '%s\n' '#!/bin/bash' 'echo "${get_data[meow]}"' > app/webroot/meow.shs
	}
	tst() { curl -s "localhost:1337/meow.shs?meow=nyaa"; }
}
# NOTE(review): byte-for-byte duplicate of server_get_param, and it is NOT
# referenced in subtest_list below — despite its name it never exercises a
# random value. Presumably an unfinished test; confirm intent before wiring
# it up or deleting it.
server_get_random() {
prepare() {
cat <<"EOF" > app/webroot/meow.shs
#!/bin/bash
echo "${get_data[meow]}"
EOF
}
tst() {
curl -s "localhost:1337/meow.shs?meow=nyaa"
}
match="nyaa"
}
server_post_param() {
	# urlencoded POST bodies must land in post_data[]
	match="nyaa"
	prepare() {
		printf '%s\n' '#!/bin/bash' 'echo "${post_data[meow]}"' > app/webroot/meow.shs
	}
	tst() { curl -s "localhost:1337/meow.shs" -d 'meow=nyaa'; }
}
server_res_header() {
	# default response headers should identify the server
	tst() { curl -s -I localhost:1337; }
	match_sub="HTTP.sh"
}
server_res_header_custom() {
	# header() inside a handler should emit a custom response header
	match_sub="a custom header!"
	prepare() {
		printf '%s\n' '#!/bin/bash' 'header "meow: a custom header!"' > app/webroot/meow.shs
	}
	tst() { curl -s -v localhost:1337/meow.shs 2>&1; }
}
server_req_header() {
	# request headers must be exposed to handlers via headers[]
	match="nyaa"
	prepare() {
		printf '%s\n' '#!/bin/bash' 'echo "${headers[meow]}"' > app/webroot/meow.shs
	}
	tst() { curl -s "localhost:1337/meow.shs" -H 'meow: nyaa'; }
}
# The next three subtests reuse the meow.shs handler written by
# server_req_header's prepare() (prepare is reset between subtests by the
# framework, but the file stays on disk).
server_req_header_case() {
	# header-name lookup is case-insensitive
	tst() { curl -s "localhost:1337/meow.shs" -H 'Meow: nyaa'; }
	match="nyaa"
}
server_req_header_dup() {
	# with duplicate headers, the last value wins
	# TODO: maybe we should return 400 when we detect sth like this?
	tst() { curl -s "localhost:1337/meow.shs" -H 'Meow: nyaa' -H 'mEow: asdf'; }
	match="asdf"
}
server_req_header_invalid() {
	# a header smuggling a newline must not be parsed as a real header
	tst() {
		# we have to trick curl into sending an invalid header for us
		curl -s "localhost:1337/meow.shs" -H $'a:\nasdf asdf asdf asdf' -H "meow: asdf"
	}
	match_not="asdf"
}
server_req_header_special_value() {
	# 10 random bytes for the header value; NUL/CR/LF are stripped up front
	# since they cannot survive HTTP header transport (bash command
	# substitution drops NULs anyway).
	# Fixes: UUOC; the old `sed 's/[\r\0]//'` only stripped the FIRST match
	# (missing /g) and `head -n1` of urandom could yield an empty line.
	rand="$(head -c 256 /dev/urandom | tr -d '\0\r\n' | cut -c 1-10)"
	tst() {
		# this needs some more polish, we sometimes confuse curl xD
		curl -s "localhost:1337/meow.shs" -H "meow: $rand"
	}
	match="$rand"
}
server_req_header_special_name() {
	# Random header *name*; it is smuggled into the generated handler
	# hex-encoded so no quoting/escaping inside the heredoc can mangle it.
	# Fixes: UUOC; old sed stripped only the first CR (missing /g) and the
	# first urandom line could be empty.
	rand="$(head -c 256 /dev/urandom | tr -d '\0\r\n' | cut -c 1-10)"
	prepare() {
		cat <<EOF > app/webroot/meow.shs
#!/bin/bash
rand="\$(xxd -p -r <<< "$(echo "$rand" | xxd -p)")"
echo "\${headers["\${rand,,}"]}" # normalize to lowercase
EOF
	}
	tst() {
		curl -s "localhost:1337/meow.shs" -H "$rand: nyaa"
	}
	cleanup() {
		# *sigh* we need a better way to do this tbh
		rm app/webroot/meow.shs
	}
	match="nyaa"
}
# Run in order by tst.sh; the later header subtests rely on the meow.shs
# handler written by server_req_header's prepare().
# NOTE(review): server_get_random is defined above but absent from this
# list — confirm whether it should run.
subtest_list=(
server_output
server_get_param
server_post_param
server_res_header
server_res_header_custom
server_req_header
server_req_header_case
server_req_header_dup
server_req_header_invalid
server_req_header_special_value
server_req_header_special_name
)

33
tests/02-template.sh Normal file
View file

@ -0,0 +1,33 @@
#!/bin/bash
tpl_basic() {
	# a {{.key}} tag should be replaced by the corresponding array value
	value="A quick brown fox jumped over the lazy dog"
	match="value: $value"
	prepare() {
		source src/misc.sh
		source src/template.sh
	}
	tst() {
		declare -A meow
		meow[asdf]="$value"
		render meow <(echo "value: {{.asdf}}")
	}
}
# These two reuse tpl_basic's tst()/prepare() environment: tst() stays
# defined across subtests (only prepare is reset by the framework), so
# only the value/match fixtures change here. Rendered output is expected
# to be HTML-encoded, hence html_encode (from src/misc.sh) on the match.
tpl_basic_specialchars() {
value="&#$%^&*() <-- look at me go"
match="value: $(html_encode "$value")"
}
tpl_basic_newline() {
value=$'\n'a$'\n'
match="value: $(html_encode "$value")"
}
# run order matters: tpl_basic defines tst() for the two that follow
subtest_list=(
tpl_basic
tpl_basic_specialchars
tpl_basic_newline
)

101
tests/03-misc.sh Normal file
View file

@ -0,0 +1,101 @@
#!/bin/bash
misc_html_escape_basic() {
	# plain text must pass through html_encode unchanged
	value="meow"
	match="meow"
	prepare() {
		source src/misc.sh
	}
	tst() { html_encode "$value"; }
}
# the following reuse tst()/prepare() from misc_html_escape_basic;
# only the fixtures change
misc_html_escape_special() {
	value="<script>"; match_not="<"          # angle brackets must be escaped
}
misc_html_escape_apos() {
	value="<img src='asdf'>"; match_not="'"  # single quotes must be escaped
}
misc_html_escape_quot() {
	value='<img src="meow">'; match_not='"'  # double quotes must be escaped
}
# ---
misc_url_encode() {
tst() {
url_encode "$value"
}
value="nyaa"
# NOTE(review): match is empty, so the framework falls through to checking
# only the exit code (see _a in tst.sh). Presumably intentional, but an
# explicit expected encoding would be a stronger assertion — confirm.
match=""
}
# these reuse tst() from misc_url_encode; only the fixtures change
misc_url_encode_special01() {
	value="%%"; match="%25%25"               # percent must be encoded
}
misc_url_encode_special02() {
	value="&"; match_not="&"                 # ampersand must not survive
}
misc_url_encode_special03() {
	value="?asdf=meow&nyaa="; match_not="?"  # query separators must not survive
}
misc_url_encode_url() {
	value="https://example.org/?nyaa=meow"
	# i promise we'll get a better impl of this at some point xD
	match="%68%74%74%70%73%3a%2f%2f%65%78%61%6d%70%6c%65%2e%6f%72%67%2f%3f%6e%79%61%61%3d%6d%65%6f%77"
}
# ---
misc_url_decode_encode() {
	# decode(encode(x)) must round-trip exactly
	value="https://example.org/?nyaa=meow&as=df"
	match="$value"
	tst() { url_decode "$(url_encode "$value")"; }
}
# ---
misc_url_decode01() {
	value='%25'; match='%'
	tst() { url_decode "$value"; }
}
misc_url_decode02() {
	# decoding is single-pass: %2525 -> %25, not %
	value='%2525'; match='%25'
}
# executed in order; later subtests reuse tst()/prepare() from earlier ones
subtest_list=(
misc_html_escape_basic
misc_html_escape_special
misc_html_escape_apos
misc_html_escape_quot
misc_url_encode
misc_url_encode_special01
misc_url_encode_special02
misc_url_encode_special03
misc_url_encode_url
misc_url_decode_encode
misc_url_decode01
misc_url_decode02
)

100
tests/04-notORM.sh Normal file
View file

@ -0,0 +1,100 @@
#!/bin/bash
# shared fixture path for all notORM subtests
store="storage/notORM-test.dat"
notORM_add_get() {
	# seed the store with 16 rows, then read back column 0 of the first row
	prepare() {
		source src/notORM.sh
		rm -f -- "$store"  # -f: the store does not exist on a fresh run
		local i            # prepare runs in the runner's shell; don't leak i
		a=("$value" 1 "$value_")
		data_add "$store" a
		for i in {2..16}; do
			a[1]=$i
			data_add "$store" a
		done
	}
	tst() {
		data_get "$store" { } || return $?
		echo "${res[0]}"
	}
	value="A quick brown fox jumped over the lazy dog"
	value_=$'meow?\n:3c'
	match="$value"
}
notORM_get_multiline() {
# column 2 holds value_ with an embedded newline; it must survive a store
# round-trip intact (store was seeded by notORM_add_get)
tst() {
data_get "$store" { }
echo "${res[2]}"
}
match="$value_"
}
notORM_get_filter() {
# no match var set: the framework only checks the exit code here
tst() {
data_get "$store" { "2" 1 }
return $?
}
}
notORM_get_oldsyntax() {
# legacy arg form: data_get <store> 2 1 meow — presumably
# <value> <column> <result-array-name>; verify against src/notORM.sh
tst() {
data_get "$store" 2 1 meow || return $?
[[ "${meow[0]}" == "$value" ]] && return 0 || return 1
}
}
notORM_yeet_oldsyntax() {
# after deleting the row where column 1 == 1, a lookup for it must fail;
# these tests treat exit status 2 as notORM's "not found"
tst() {
data_yeet "$store" 1 1
data_get "$store" 1 1
if [[ $? == 2 ]]; then
return 0
fi
return 1
}
}
notORM_yeet() {
# same as above, using the newer { filter } syntax
tst() {
data_yeet "$store" { 2 1 }
data_get "$store" { 2 1 }
if [[ $? == 2 ]]; then
return 0
fi
return 1
}
}
notORM_yeet_multiple_filters() {
# deletion with two filter groups must still remove the matching row
tst() {
data_yeet "$store" { 3 1 } { "$value" }
data_get "$store" { 3 1 }
if [[ $? == 2 ]]; then
return 0
fi
return 1
}
}
notORM_replace_oldsyntax() {
# replace row (col 1 == 4) with a modified copy and read it back
tst() {
data_get "$store" { } out
out[2]='meow!'
data_replace "$store" 4 out 1 || return $?
data_get "$store" 4 || return $?
# NOTE(review): "${res[@]}" == "${out[@]}" compares the space-joined
# expansions of both arrays — good enough for this fixture
[[ "${res[@]}" == "${out[@]}" ]] && return 0 || return 1
}
}
# order matters: notORM_add_get seeds the store that later subtests read
subtest_list=(
notORM_add_get
notORM_get_multiline
notORM_get_filter
notORM_get_oldsyntax
notORM_yeet_oldsyntax
notORM_yeet
notORM_yeet_multiple_filters
notORM_replace_oldsyntax
)

154
tst.sh Executable file
View file

@ -0,0 +1,154 @@
#!/usr/bin/env bash
# _defaults() - reset the per-file test contract to a clean slate.
# Fix: match_not was the only match_* variable NOT cleared here, so a stray
# inherited/exported match_not could skew a file's first subtest.
_defaults() {
	match=""
	match_begin=""
	match_end=""
	match_sub=""
	match_not=""
	tst() {
		echo "dummy test! please set me up properly" > /dev/stderr
		exit 1
	}
	prepare() {
		:
	}
	cleanup () {
		:
	}
}
_defaults
# overridable hooks: test files may redefine these to customize reporting
on_error() { on_error_default; }
on_success() { on_success_default; }
# default reporters: print a verdict line and bump the global counters;
# always return 0 so callers aren't tripped by the arithmetic
on_success_default() {
	echo "OK: $test_name"
	ok_count=$(( ok_count + 1 ))
	return 0
}
on_error_default() {
	echo "FAIL: $test_name"
	echo "(res: $res)"
	fail_count=$(( fail_count + 1 ))
	return 0
}
on_fatal() {
# a tst() returning 255 means the whole run cannot continue (see _a);
# clean up children and abort
echo "FATAL: $test_name"
_final_cleanup
exit 1
}
# NOTE(review): the IFS=$'\n' guard looks unnecessary for iterating "$@" —
# quoted "$@" never word-splits; confirm before removing
IFS=$'\n'
# bail out with usage if any argument is not an existing test file
for i in "$@"; do
if [[ ! -f "$i" ]]; then
echo -e "$0 - basic test framework\n\nusage: $0 <test> [test] [...]"
exit 1
fi
done
unset IFS
# global pass/fail counters, bumped by on_success_default/on_error_default
ok_count=0
fail_count=0
# _a() - assess the last test run: pick the first configured match_* mode,
# compare it against $res, report via the (overridable) on_success/on_error
# hooks, then reset the per-subtest state.
# on_success/on_error are called from plain if/else on purpose: with
# `cond && on_success || on_error`, a non-zero return from an overridden
# on_success would erroneously trigger on_error as well.
_a() {
	[[ "$res_code" == 255 ]] && on_fatal
	local verdict=""
	if [[ "$match" ]]; then
		# exact match
		[[ "$res" == "$match" ]] && verdict=ok || verdict=bad
	elif [[ "$match_sub" ]]; then
		# substring anywhere
		[[ "$res" == *"$match_sub"* ]] && verdict=ok || verdict=bad
	elif [[ "$match_begin" ]]; then
		# prefix
		[[ "$res" == "$match_begin"* ]] && verdict=ok || verdict=bad
	elif [[ "$match_end" ]]; then
		# suffix
		[[ "$res" == *"$match_end" ]] && verdict=ok || verdict=bad
	elif [[ "$match_not" ]]; then
		# inverted substring
		[[ "$res" == *"$match_not"* ]] && verdict=bad || verdict=ok
	else
		# no matcher configured: fall back to the exit code
		[[ "$res_code" == 0 ]] && verdict=ok || verdict=bad
	fi
	if [[ "$verdict" == ok ]]; then
		on_success
	else
		on_error
	fi
	unset match match_sub match_begin match_end match_not
	prepare() { :; }
}
_final_cleanup() {
# handle spawned processes
# pkill's default signal is TERM: politely terminate the children of our
# background jobs, give them 2s, force-KILL stragglers, then sweep
# anything still parented to this shell
for i in $(jobs -p); do
pkill -P $i
done
sleep 2
for i in $(jobs -p); do
pkill -9 -P $i
done
pkill -P $$
}
# main loop: source each test file, then run it either as a single flat
# test or once per entry in its subtest_list
for j in "$@"; do
source "$j"
if [[ "${#subtest_list[@]}" == 0 ]]; then
# flat test file: prepare/tst/cleanup were defined at source time
test_name="$j"
prepare
res="$(tst)"
res_code=$?
cleanup
_a
else
echo "--- $j ---"
for i in "${subtest_list[@]}"; do
# each subtest function installs its own fixtures/hooks;
# NOTE(review): _a resets only prepare, so a cleanup() defined by one
# subtest keeps running for the rest of the file — confirm intended
test_name="$i"
"$i"
prepare
res="$(tst)"
res_code=$?
cleanup
_a
done
fi
_defaults
done
_final_cleanup
echo -e "\n\nTesting done!
OK: $ok_count
FAIL: $fail_count"