feat(users/Profpatsch): init httzip
A streaming webserver which serves directories as .zip recursively. Because everything sucks and this is the best way to get dirs delivered to people. Change-Id: I451885cfc5082db12ac32eb0a4bfb04bc983d3c2 Reviewed-on: https://cl.tvl.fyi/c/depot/+/8953 Reviewed-by: Profpatsch <mail@profpatsch.de> Tested-by: BuildkiteCI
This commit is contained in:
parent
b4cfddfc80
commit
c266f5133f
6 changed files with 184 additions and 0 deletions
|
@@ -7,3 +7,4 @@ packages:
|
|||
./cas-serve/cas-serve.cabal
|
||||
./jbovlaste-sqlite/jbovlaste-sqlite.cabal
|
||||
./whatcd-resolver/whatcd-resolver.cabal
|
||||
./httzip/httzip.cabal
|
||||
|
|
|
@@ -20,3 +20,5 @@ cradle:
|
|||
component: "jbovlaste-sqlite:exe:jbovlaste-sqlite"
|
||||
- path: "./whatcd-resolver/src"
|
||||
component: "lib:whatcd-resolver"
|
||||
- path: "./httzip/Httzip.hs"
|
||||
component: "httzip:exe:httzip"
|
||||
|
|
66
users/Profpatsch/httzip/Httzip.hs
Normal file
66
users/Profpatsch/httzip/Httzip.hs
Normal file
|
@@ -0,0 +1,66 @@
|
|||
{-# LANGUAGE QuasiQuotes #-}
|
||||
|
||||
module Main where
|
||||
|
||||
import Conduit ((.|))
|
||||
import Data.Binary.Builder qualified as Builder
|
||||
import Data.Conduit qualified as Cond
|
||||
import Data.Conduit.Combinators qualified as Cond
|
||||
import Data.Conduit.Process.Typed qualified as Cond
|
||||
import Data.Conduit.Process.Typed qualified as Proc
|
||||
import Data.List qualified as List
|
||||
import Data.Text qualified as Text
|
||||
import Network.HTTP.Types qualified as Http
|
||||
import Network.Wai qualified as Wai
|
||||
import Network.Wai.Conduit qualified as Wai.Conduit
|
||||
import Network.Wai.Handler.Warp qualified as Warp
|
||||
import PossehlAnalyticsPrelude
|
||||
import System.Directory qualified as Dir
|
||||
import System.FilePath ((</>))
|
||||
import System.FilePath qualified as File
|
||||
import System.Posix qualified as Unix
|
||||
|
||||
-- Webserver that returns folders under CWD as .zip archives (recursively)
main :: IO ()
main =
  -- Canonicalize up front so the path-prefix containment check in `run`
  -- always compares against a resolved, symlink-free root.
  Dir.getCurrentDirectory >>= Dir.canonicalizePath >>= run
|
||||
|
||||
-- | Serve @dir@ on port 7070: every subdirectory requested by path is
-- streamed back to the client as a (recursive) .zip archive produced by
-- the external @zip@ binary.
run :: FilePath -> IO ()
run dir = do
  root <- Dir.canonicalizePath dir
  putStderrLn $ [fmt|current {show root}|]
  Warp.run 7070 $ \req respond -> do
    let replyHtml status content =
          respond $ Wai.responseLBS status [("Content-Type", "text/html")] content
    case Wai.pathInfo req of
      [] ->
        respond $ Wai.responseLBS Http.status200 [("Content-Type", "text/html")] "any directory will be returned as .zip!"
      segments -> do
        absoluteTarget <-
          Dir.canonicalizePath (root </> File.joinPath (segments <&> textToString))
        -- I hope this prevents any shenanigans lol
        -- (canonicalization resolves `..`; if the result escaped the root,
        -- stripping the root prefix below yields Nothing)
        let insideRoot =
              List.stripPrefix (File.addTrailingPathSeparator root) absoluteTarget
        if any (Text.elem '/') segments
          then do
            -- Wai.pathInfo is already split on '/', so a '/' inside a
            -- segment can only come from a %2F-encoded request.
            putStderrLn "tried %2F encoding"
            replyHtml Http.status400 "no"
          else case insideRoot of
            Nothing -> do
              putStderrLn "tried parent dir with .."
              replyHtml Http.status400 "no^2"
            Just relPath -> do
              putStderrLn $ [fmt|wanted {show relPath}|]
              exists <- Unix.fileExist relPath
              if not exists
                then replyHtml Http.status404 "not found"
                else do
                  fileStatus <- Unix.getFileStatus relPath
                  if not (fileStatus & Unix.isDirectory)
                    -- plain files are not served, only directories-as-zip
                    then replyHtml Http.status404 "not found"
                    else do
                      zipProc <- zipDirectory relPath
                      Proc.withProcessWait zipProc $ \p -> do
                        let stream =
                              Proc.getStdout p
                                .| Cond.map (Cond.Chunk . Builder.fromByteString)
                        -- TODO: how to handle broken zip? Is it just gonna return a 500? But the stream is already starting, so hard!
                        respond $ Wai.Conduit.responseSource Http.ok200 [("Content-Type", "application/zip")] stream
  where
    -- Spawn `zip` writing the archive to stdout; stdout is exposed as a
    -- conduit source so the response body can stream.
    -- NOTE(review): the path handed to `zip` is relative, so this relies on
    -- the process CWD being the served root (true when called from `main`)
    -- — confirm before reusing `run` with any other directory.
    zipDirectory target = do
      putStderrLn [fmt|running $ zip {show ["--recurse-paths", "-", target]}|]
      pure $
        Proc.proc "zip" ["--recurse-paths", "-", target]
          & Proc.setStdout Cond.createSource
|
40
users/Profpatsch/httzip/default.nix
Normal file
40
users/Profpatsch/httzip/default.nix
Normal file
|
@@ -0,0 +1,40 @@
|
|||
{ depot, pkgs, lib, ... }:

let
  # The haskell executable, built straight from the two source files.
  httzip = pkgs.haskellPackages.mkDerivation {
    pname = "httzip";
    version = "0.1.0";

    src = depot.users.Profpatsch.exactSource ./. [
      ./httzip.cabal
      ./Httzip.hs
    ];

    libraryHaskellDepends = with pkgs.haskellPackages; [
      pa-prelude
      warp
      wai
      wai-conduit
      conduit-extra
      conduit
    ];

    isExecutable = true;
    isLibrary = false;
    license = lib.licenses.mit;
  };

  bins = depot.nix.getBins httzip [ "httzip" ];

in
# Wrap the binary so the external `zip` tool it shells out to is found.
# NOTE(review): `export PATH` here *replaces* PATH with zip's bin dir
# rather than prepending to it; the wrapped binary itself is invoked by
# absolute store path so this works, but confirm that was the intent.
depot.nix.writeExecline "httzip-wrapped" { } [
  "importas" "-ui" "PATH" "PATH"
  "export" "PATH" "${pkgs.zip}/bin"
  bins.httzip
]
|
73
users/Profpatsch/httzip/httzip.cabal
Normal file
73
users/Profpatsch/httzip/httzip.cabal
Normal file
|
@@ -0,0 +1,73 @@
|
|||
cabal-version:      3.0
name:               httzip
version:            0.1.0.0
author:             Profpatsch
maintainer:         mail@profpatsch.de

common common-options
  ghc-options:
      -Wall
      -Wno-type-defaults
      -Wunused-packages
      -Wredundant-constraints
      -fwarn-missing-deriving-strategies

  -- All extensions are described at
  -- https://downloads.haskell.org/ghc/latest/docs/users_guide/exts.html
  default-extensions:
    -- Infer Applicative instead of Monad where possible
    ApplicativeDo

    -- Allow literal strings to be Text
    OverloadedStrings

    -- Syntactic sugar improvements
    LambdaCase
    MultiWayIf

    -- Makes the (deprecated) usage of * instead of Data.Kind.Type an error
    NoStarIsType

    -- Convenient and crucial to deal with ambiguous field names, commonly
    -- known as RecordDotSyntax
    OverloadedRecordDot

    -- does not export record fields as functions, use OverloadedRecordDot to access instead
    NoFieldSelectors

    -- Record punning
    RecordWildCards

    -- Improved Deriving
    DerivingStrategies
    DerivingVia

    -- Type-level strings
    DataKinds

    -- to enable the `type` keyword in import lists (ormolu uses this automatically)
    ExplicitNamespaces

  default-language: GHC2021


executable httzip
  import: common-options

  main-is: Httzip.hs

  build-depends:
    base >=4.15 && <5,
    pa-prelude,
    bytestring,
    text,
    warp,
    wai,
    http-types,
    directory,
    filepath,
    unix,
    wai-conduit,
    conduit,
    conduit-extra,
    binary
|
|
@@ -38,6 +38,8 @@ pkgs.mkShell {
|
|||
h.sqlite-simple
|
||||
h.hedgehog
|
||||
h.http-conduit
|
||||
h.http-conduit
|
||||
h.wai-conduit
|
||||
h.nonempty-containers
|
||||
h.deriving-compat
|
||||
h.unix
|
||||
|
|
Loading…
Reference in a new issue