Mirror of https://github.com/commercialhaskell/stackage-server.git
Synced 2026-01-12 04:08:29 +01:00

Compare commits: dead/2015-...master (760 commits)
[Commit list: 760 commits between dead/2015-... and master. Only the SHA1 column survived the mirror (first entry 51592009a1, last entry ef1b8de969); the author, date, and message columns are empty. The changed files follow.]
@@ -1,5 +1,6 @@
((haskell-mode . ((haskell-indent-spaces . 4)
                  (haskell-process-type . cabal-repl)
                  ;;(hindent-style . "johan-tibell")
                  ;;(haskell-process-type . cabal-repl)
                  (haskell-process-use-ghci . t)))
 (hamlet-mode . ((hamlet/basic-offset . 4)
                 (haskell-process-use-ghci . t)))
1 .dockerignore Normal file
@@ -0,0 +1 @@
.stack-work
2 .ghci
@@ -1,6 +1,6 @@
:set -fobject-code
:set -i.:config:dist/build/autogen
:set -XCPP -XTemplateHaskell -XQuasiQuotes -XTypeFamilies -XFlexibleContexts -XGADTs -XOverloadedStrings -XMultiParamTypeClasses -XGeneralizedNewtypeDeriving -XEmptyDataDecls -XDeriveDataTypeable -XRankNTypes -XNoImplicitPrelude -XFunctionalDependencies -XFlexibleInstances -XTemplateHaskell -XQuasiQuotes -XOverloadedStrings -XNoImplicitPrelude -XCPP -XMultiParamTypeClasses -XTypeFamilies -XGADTs -XGeneralizedNewtypeDeriving -XFlexibleContexts -XEmptyDataDecls -XNoMonomorphismRestriction -XDeriveDataTypeable -XViewPatterns -XTypeSynonymInstances -XFlexibleInstances -XRankNTypes -XFunctionalDependencies -XPatternGuards -XStandaloneDeriving -XUndecidableInstances -XBangPatterns -XScopedTypeVariables
:set -XOverloadedStrings
:set -DDEVELOPMENT=1
:set -DINGHCI=1
:set -package foreign-store
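The `-package foreign-store` line above exists for the live-reload workflow: a value stashed in a foreign-store slot survives GHCi's `:reload`, so a dev server can keep state across recompiles. A minimal sketch of that pattern, using only foreign-store's documented Foreign.Store API (the slot index 0 and the name `keepAlive` are illustrative, not from this repo):

import Foreign.Store (lookupStore, newStore, readStore)

-- Reuse the Int stored in slot 0 if it survived a :reload;
-- otherwise create the slot on first run.
keepAlive :: IO Int
keepAlive = do
    mstore <- lookupStore 0
    case mstore of
        Just store -> readStore store
        Nothing -> do
            _ <- newStore (42 :: Int)
            return 42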
27 .github/workflows/build.yml vendored Normal file
@@ -0,0 +1,27 @@
name: build
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  build:
    runs-on: ubuntu-latest
    name: Haskell GHC
    steps:
      - uses: actions/checkout@v4
      - uses: haskell-actions/setup@v2
        with:
          enable-stack: true
          stack-no-global: true
      - uses: actions/cache@v4
        with:
          path: |
            ~/.stack
            .stack-work
          key: ${{ runner.os }}-${{ hashFiles('**/*.cabal','**/stack.yaml') }}
          restore-keys: |
            ${{ runner.os }}-
      - run: stack build
12 .gitignore vendored
@@ -14,8 +14,12 @@ cabal.sandbox.config
*.swp
/dev-blob-store/
TAGS
/config/postgresql.yml
/config/settings.yml
/tarballs/
stackage-server.keter
/stackage-content/
/docker/app/
.stack-work
/stackage-database/
*~
*#
/stackage-server.cabal
/hoogle/
/hoogle-gen/
3 .hindent.yaml Normal file
@@ -0,0 +1,3 @@
indent-size: 4
line-length: 100
force-trailing-newline: true
229 .stylish-haskell.yaml Normal file
@@ -0,0 +1,229 @@
# stylish-haskell configuration file
# ==================================

# The stylish-haskell tool is mainly configured by specifying steps. These steps
# are a list, so they have an order, and one specific step may appear more than
# once (if needed). Each file is processed by these steps in the given order.
steps:
  # Convert some ASCII sequences to their Unicode equivalents. This is disabled
  # by default.
  # - unicode_syntax:
  #     # In order to make this work, we also need to insert the UnicodeSyntax
  #     # language pragma. If this flag is set to true, we insert it when it's
  #     # not already present. You may want to disable it if you configure
  #     # language extensions using some other method than pragmas. Default:
  #     # true.
  #     add_language_pragma: true

  # Align the right hand side of some elements. This is quite conservative
  # and only applies to statements where each element occupies a single
  # line.
  - simple_align:
      cases: true
      top_level_patterns: true
      records: true

  # Import cleanup
  - imports:
      # There are different ways we can align names and lists.
      #
      # - global: Align the import names and import list throughout the entire
      #   file.
      #
      # - file: Like global, but don't add padding when there are no qualified
      #   imports in the file.
      #
      # - group: Only align the imports per group (a group is formed by adjacent
      #   import lines).
      #
      # - none: Do not perform any alignment.
      #
      # Default: global.
      align: none

      # The following options affect only import list alignment.
      #
      # List align has following options:
      #
      # - after_alias: Import list is aligned with end of import including
      #   'as' and 'hiding' keywords.
      #
      #   > import qualified Data.List as List (concat, foldl, foldr, head,
      #   > init, last, length)
      #
      # - with_alias: Import list is aligned with start of alias or hiding.
      #
      #   > import qualified Data.List as List (concat, foldl, foldr, head,
      #   > init, last, length)
      #
      # - new_line: Import list starts always on new line.
      #
      #   > import qualified Data.List as List
      #   > (concat, foldl, foldr, head, init, last, length)
      #
      # Default: after_alias
      list_align: after_alias

      # Right-pad the module names to align imports in a group:
      #
      # - true: a little more readable
      #
      #   > import qualified Data.List as List (concat, foldl, foldr,
      #   > init, last, length)
      #   > import qualified Data.List.Extra as List (concat, foldl, foldr,
      #   > init, last, length)
      #
      # - false: diff-safe
      #
      #   > import qualified Data.List as List (concat, foldl, foldr, init,
      #   > last, length)
      #   > import qualified Data.List.Extra as List (concat, foldl, foldr,
      #   > init, last, length)
      #
      # Default: true
      pad_module_names: true

      # Long list align style takes effect when import is too long. This is
      # determined by 'columns' setting.
      #
      # - inline: This option will put as much specs on same line as possible.
      #
      # - new_line: Import list will start on new line.
      #
      # - new_line_multiline: Import list will start on new line when it's
      #   short enough to fit to single line. Otherwise it'll be multiline.
      #
      # - multiline: One line per import list entry.
      #   Type with constructor list acts like single import.
      #
      #   > import qualified Data.Map as M
      #   > ( empty
      #   > , singleton
      #   > , ...
      #   > , delete
      #   > )
      #
      # Default: inline
      long_list_align: inline

      # Align empty list (importing instances)
      #
      # Empty list align has following options
      #
      # - inherit: inherit list_align setting
      #
      # - right_after: () is right after the module name:
      #
      #   > import Vector.Instances ()
      #
      # Default: inherit
      empty_list_align: right_after

      # List padding determines indentation of import list on lines after import.
      # This option affects 'long_list_align'.
      #
      # - <integer>: constant value
      #
      # - module_name: align under start of module name.
      #   Useful for 'file' and 'group' align settings.
      list_padding: 4

      # Separate lists option affects formatting of import list for type
      # or class. The only difference is single space between type and list
      # of constructors, selectors and class functions.
      #
      # - true: There is single space between Foldable type and list of it's
      #   functions.
      #
      #   > import Data.Foldable (Foldable (fold, foldl, foldMap))
      #
      # - false: There is no space between Foldable type and list of it's
      #   functions.
      #
      #   > import Data.Foldable (Foldable(fold, foldl, foldMap))
      #
      # Default: true
      separate_lists: false

      # Space surround option affects formatting of import lists on a single
      # line. The only difference is single space after the initial
      # parenthesis and a single space before the terminal parenthesis.
      #
      # - true: There is single space associated with the enclosing
      #   parenthesis.
      #
      #   > import Data.Foo ( foo )
      #
      # - false: There is no space associated with the enclosing parenthesis
      #
      #   > import Data.Foo (foo)
      #
      # Default: false
      space_surround: false

  # Language pragmas
  - language_pragmas:
      # We can generate different styles of language pragma lists.
      #
      # - vertical: Vertical-spaced language pragmas, one per line.
      #
      # - compact: A more compact style.
      #
      # - compact_line: Similar to compact, but wrap each line with
      #   `{-#LANGUAGE #-}'.
      #
      # Default: vertical.
      style: vertical

      # Align affects alignment of closing pragma brackets.
      #
      # - true: Brackets are aligned in same column.
      #
      # - false: Brackets are not aligned together. There is only one space
      #   between actual import and closing bracket.
      #
      # Default: true
      align: false

      # stylish-haskell can detect redundancy of some language pragmas. If this
      # is set to true, it will remove those redundant pragmas. Default: true.
      remove_redundant: false

  # Replace tabs by spaces. This is disabled by default.
  # - tabs:
  #     # Number of spaces to use for each tab. Default: 8, as specified by the
  #     # Haskell report.
  #     spaces: 8

  # Remove trailing whitespace
  - trailing_whitespace: {}

  # Squash multiple spaces between the left and right hand sides of some
  # elements into single spaces. Basically, this undoes the effect of
  # simple_align but is a bit less conservative.
  # - squash: {}

# A common setting is the number of columns (parts of) code will be wrapped
# to. Different steps take this into account. Default: 80.
columns: 80

# By default, line endings are converted according to the OS. You can override
# preferred format here.
#
# - native: Native newline format. CRLF on Windows, LF on other OSes.
#
# - lf: Convert to LF ("\n").
#
# - crlf: Convert to CRLF ("\r\n").
#
# Default: native.
newline: native

# Sometimes, language extensions are specified in a cabal file or from the
# command line instead of using language pragmas in the file. stylish-haskell
# needs to be aware of these, so it can parse the file correctly.
#
# No language extensions are enabled by default.
# language_extensions:
#   - TemplateHaskell
#   - QuasiQuotes
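As a concrete illustration of the choices above (not part of the diff): with align: none, empty_list_align: right_after, separate_lists: false, and vertical pragmas with unaligned closing brackets, stylish-haskell would render a hypothetical module roughly like this:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}

module Example (main) where

import Data.Foldable (Foldable(fold))  -- separate_lists: false, so no space before the list
import Data.Ord ()                     -- empty_list_align: right_after
import qualified Data.List as List (sort)

main :: IO ()
main = print (List.sort [3, 1, 2 :: Int])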
410 Application.hs
@@ -1,410 +0,0 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
    ( makeApplication
    , getApplicationDev
    , makeFoundation
    , cabalLoaderMain
    ) where

import qualified Aws
import Control.Concurrent (forkIO, threadDelay)
import Control.Exception (catch)
import Control.Monad.Logger (runLoggingT, LoggingT, defaultLogStr)
import Data.BlobStore (fileStore, storeWrite, cachedS3Store)
import Data.Hackage
import Data.Unpacking (newDocUnpacker, createHoogleDatabases)
import Data.WebsiteContent
import Data.Slug (SnapSlug (..), safeMakeSlug, HasGenIO)
import Data.Time (diffUTCTime)
import qualified Database.Esqueleto as E
import qualified Database.Persist
import Filesystem (getModified, removeTree)
import Import hiding (catch)
import Language.Haskell.TH.Syntax (Loc(..))
import Network.Wai (Middleware, responseLBS)
import Network.Wai.Logger (clockDateCacher)
import Network.Wai.Middleware.RequestLogger
    ( mkRequestLogger, outputFormat, OutputFormat (..), IPAddrSource (..), destination
    )
import qualified Network.Wai.Middleware.RequestLogger as RequestLogger
import Settings
import System.Log.FastLogger (newStdoutLoggerSet, newFileLoggerSet, defaultBufSize, flushLogStr, fromLogStr)
import qualified System.Random.MWC as MWC
import Yesod.Core.Types (loggerSet, Logger (Logger))
import Yesod.Default.Config
import Yesod.Default.Handlers
import Yesod.Default.Main
import Yesod.GitRepo
import System.Environment (getEnvironment)
import Data.BlobStore (HasBlobStore (..), BlobStore)
import System.IO (hSetBuffering, BufferMode (LineBuffering))
import qualified Data.ByteString as S
import qualified Data.Text as T
import System.Process (rawSystem)

import qualified Echo

-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.Snapshots
import Handler.Profile
import Handler.Email
import Handler.ResetToken
import Handler.UploadStackage
import Handler.StackageHome
import Handler.StackageIndex
import Handler.StackageSdist
import Handler.Aliases
import Handler.Alias
import Handler.Progress
import Handler.System
import Handler.Haddock
import Handler.Package
import Handler.PackageList
import Handler.CompressorStatus
import Handler.Tag
import Handler.BannedTags
import Handler.RefreshDeprecated
import Handler.Hoogle
import Handler.BuildVersion
import Handler.PackageCounts

-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp

-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: Bool -- ^ Use Echo.
                -> AppConfig DefaultEnv Extra -> IO (Application, LogFunc)
makeApplication echo@True conf = do
    foundation <- makeFoundation echo conf
    app <- toWaiAppPlain foundation
    logWare <- mkRequestLogger def
        { destination = RequestLogger.Callback (const (return ()))
        }
    Echo.clear
    return (logWare (defaultMiddlewaresNoLogging app),logFunc)
  where logFunc (Loc filename' _pkg _mod (line,_) _) source level str =
            Echo.write (filename',line) (show source ++ ": " ++ show level ++ ": " ++ toStr str)
        toStr = unpack . decodeUtf8 . fromLogStr
makeApplication echo@False conf = do
    foundation <- makeFoundation echo conf
    -- Initialize the logging middleware
    logWare <- mkRequestLogger def
        { outputFormat =
            if development
                then Detailed True
                else Apache FromFallback
        , destination = RequestLogger.Logger $ loggerSet $ appLogger foundation
        }
    -- Create the WAI application and apply middlewares
    app <- toWaiAppPlain foundation
    let logFunc = messageLoggerSource foundation (appLogger foundation)
        middleware = nicerExceptions . logWare . defaultMiddlewaresNoLogging
    return (middleware app, logFunc)

nicerExceptions :: Middleware
nicerExceptions app req send = catch (app req send) $ \e -> do
    let text = "Exception thrown to Warp: " ++ tshow (e :: SomeException)
    putStrLn text
    send $ responseLBS status500 [("Content-Type", "text/plain")] $
        fromStrict $ encodeUtf8 text

getDbConf :: AppConfig DefaultEnv Extra -> IO Settings.PersistConf
getDbConf conf =
    withYamlEnvironment "config/postgresql.yml" (appEnv conf)
        Database.Persist.loadConfig >>=
    Database.Persist.applyEnv

loadBlobStore :: Manager -> AppConfig DefaultEnv Extra -> IO (BlobStore StoreKey)
loadBlobStore manager conf =
    case storeConfig $ appExtra conf of
        BSCFile root -> return $ fileStore root
        BSCAWS root access secret bucket prefix -> do
            creds <- Aws.Credentials
                <$> pure (encodeUtf8 access)
                <*> pure (encodeUtf8 secret)
                <*> newIORef []
                <*> pure Nothing
            return $ cachedS3Store root creds bucket prefix manager

-- | Loads up any necessary settings, creates your foundation datatype, and
-- performs some initialization.
makeFoundation :: Bool -> AppConfig DefaultEnv Extra -> IO App
makeFoundation useEcho conf = do
    manager <- newManager
    s <- staticSite
    dbconf <- getDbConf conf
    p <- Database.Persist.createPoolConfig dbconf

    loggerSet' <- if useEcho
                      then newFileLoggerSet defaultBufSize "/dev/null"
                      else newStdoutLoggerSet defaultBufSize
    (getter, updater) <- clockDateCacher

    -- If the Yesod logger (as opposed to the request logger middleware) is
    -- used less than once a second on average, you may prefer to omit this
    -- thread and use "(updater >> getter)" in place of "getter" below. That
    -- would update the cache every time it is used, instead of every second.
    let updateLoop = do
            threadDelay 1000000
            updater
            flushLogStr loggerSet'
            updateLoop
    _ <- forkIO updateLoop

    gen <- MWC.createSystemRandom

    blobStore' <- loadBlobStore manager conf

    let haddockRootDir' = "/tmp/stackage-server-haddocks2"
    widgetCache' <- newIORef mempty

    websiteContent' <- if development
        then do
            void $ rawSystem "git"
                [ "clone"
                , "https://github.com/fpco/stackage-content.git"
                ]
            gitRepoDev "stackage-content" loadWebsiteContent
        else gitRepo
            "https://github.com/fpco/stackage-content.git"
            "master"
            loadWebsiteContent

    env <- getEnvironment

    let runDB' :: (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a
        runDB' = flip (Database.Persist.runPool dbconf) p
    docUnpacker <- newDocUnpacker haddockRootDir' blobStore' runDB'

    let logger = Yesod.Core.Types.Logger loggerSet' getter
        foundation = App
            { settings = conf
            , getStatic = s
            , connPool = p
            , httpManager = manager
            , persistConfig = dbconf
            , appLogger = logger
            , genIO = gen
            , blobStore = blobStore'
            , haddockRootDir = haddockRootDir'
            , appDocUnpacker = docUnpacker
            , widgetCache = widgetCache'
            , websiteContent = websiteContent'
            }

    let urlRender' = yesodRender foundation (appRoot conf)

    -- Perform database migration using our application's logging settings.
    when (lookup "STACKAGE_SKIP_MIGRATION" env /= Just "1") $
        runResourceT $
        flip runReaderT gen $
        flip runLoggingT (messageLoggerSource foundation logger) $
        flip (Database.Persist.runPool dbconf) p $ do
            runMigration migrateAll
            checkMigration 1 fixSnapSlugs
            checkMigration 2 setCorePackages

    let updateDB = lookup "STACKAGE_CABAL_LOADER" env /= Just "0"
        hoogleGen = lookup "STACKAGE_HOOGLE_GEN" env /= Just "0"
        forceUpdate = lookup "STACKAGE_FORCE_UPDATE" env == Just "1"
        loadCabalFiles' = appLoadCabalFiles updateDB forceUpdate foundation dbconf p

    -- Start the cabal file loader
    ifRunCabalLoader $ forkIO $ forever $ flip runLoggingT (messageLoggerSource foundation logger) $ do
        $logInfoS "CLEANUP" "Cleaning up /tmp"
        now <- liftIO getCurrentTime
        runResourceT $ sourceDirectory "/tmp" $$ mapM_C (cleanupTemp now)
        $logInfoS "CLEANUP" "Cleaning up complete"

        loadCabalFiles'

        when hoogleGen $ liftIO $ createHoogleDatabases blobStore' runDB' putStrLn urlRender'

        liftIO $ threadDelay $ 30 * 60 * 1000000
    return foundation
  where ifRunCabalLoader m =
            if cabalFileLoader
                then void m
                else return ()

data CabalLoaderEnv = CabalLoaderEnv
    { cleSettings :: !(AppConfig DefaultEnv Extra)
    , cleBlobStore :: !(BlobStore StoreKey)
    , cleManager :: !Manager
    }

instance HasHackageRoot CabalLoaderEnv where
    getHackageRoot = hackageRoot . appExtra . cleSettings
instance HasBlobStore CabalLoaderEnv StoreKey where
    getBlobStore = cleBlobStore
instance HasHttpManager CabalLoaderEnv where
    getHttpManager = cleManager

cabalLoaderMain :: IO ()
cabalLoaderMain = do
    conf <- fromArgs parseExtra
    dbconf <- getDbConf conf
    pool <- Database.Persist.createPoolConfig dbconf
    manager <- newManager
    bs <- loadBlobStore manager conf
    hSetBuffering stdout LineBuffering
    env <- getEnvironment
    let forceUpdate = lookup "STACKAGE_FORCE_UPDATE" env == Just "1"
    flip runLoggingT logFunc $ appLoadCabalFiles
        True -- update database?
        forceUpdate
        CabalLoaderEnv
            { cleSettings = conf
            , cleBlobStore = bs
            , cleManager = manager
            }
        dbconf
        pool

    let foundation = App
            { settings = conf
            , getStatic = error "getStatic"
            , connPool = pool
            , httpManager = manager
            , persistConfig = dbconf
            , appLogger = error "appLogger"
            , genIO = error "genIO"
            , blobStore = bs
            , haddockRootDir = error "haddockRootDir"
            , appDocUnpacker = error "appDocUnpacker"
            , widgetCache = error "widgetCache"
            , websiteContent = error "websiteContent"
            }
    createHoogleDatabases
        bs
        (flip (Database.Persist.runPool dbconf) pool)
        putStrLn
        (yesodRender foundation (appRoot conf))
  where
    logFunc loc src level str
        | level > LevelDebug = S.hPutStr stdout $ fromLogStr $ defaultLogStr loc src level str
        | otherwise = return ()

appLoadCabalFiles :: ( PersistConfig c
                     , PersistConfigBackend c ~ SqlPersistT
                     , HasHackageRoot env
                     , HasBlobStore env StoreKey
                     , HasHttpManager env
                     )
                  => Bool -- ^ update database?
                  -> Bool -- ^ force update?
                  -> env
                  -> c
                  -> PersistConfigPool c
                  -> LoggingT IO ()
appLoadCabalFiles updateDB forceUpdate env dbconf p = do
    eres <- tryAny $ flip runReaderT env $ do
        let runDB' :: SqlPersistT (ResourceT (ReaderT env (LoggingT IO))) a
                   -> ReaderT env (LoggingT IO) a
            runDB' = runResourceT . flip (Database.Persist.runPool dbconf) p
        uploadHistory0 <- runDB' $ selectSource [] [] $$ sinkUploadHistory
        let toMDPair (E.Value name, E.Value version, E.Value hash') =
                (name, (version, hash'))
        metadata0 <- fmap (mapFromList . map toMDPair)
            $ runDB' $ E.select $ E.from $ \m -> return
                ( m E.^. MetadataName
                , m E.^. MetadataVersion
                , m E.^. MetadataHash
                )
        UploadState uploadHistory newUploads _ newMD <- loadCabalFiles updateDB forceUpdate uploadHistory0 metadata0
        $logInfo "Inserting to new uploads"
        runDB' $ insertMany_ newUploads
        $logInfo $ "Updating metadatas: " ++ tshow (length newMD)
        runDB' $ do
            let newMD' = toList newMD
            deleteWhere [MetadataName <-. map metadataName newMD']
            insertMany_ newMD'
            forM_ newMD' $ \md -> do
                deleteWhere [DependencyUser ==. metadataName md]
                insertMany_ $ flip map (metadataDeps md) $ \dep ->
                    Dependency (PackageName dep) (metadataName md)
    case eres of
        Left e -> $logError $ tshow e
        Right () -> return ()

cleanupTemp :: UTCTime -> FilePath -> ResourceT (LoggingT IO) ()
cleanupTemp now fp
    | any (`isPrefixOf` name) prefixes = handleAny ($logError . tshow) $ do
        modified <- liftIO $ getModified fp
        if (diffUTCTime now modified > 60 * 60)
            then do
                $logInfoS "CLEANUP" $ "Removing temp directory: " ++ fpToText fp
                liftIO $ removeTree fp
                $logInfoS "CLEANUP" $ "Temp directory deleted: " ++ fpToText fp
            else $logInfoS "CLEANUP" $ "Ignoring recent entry: " ++ fpToText fp
    | otherwise = $logInfoS "CLEANUP" $ "Ignoring unmatched path: " ++ fpToText fp
  where
    name = fpToText $ filename fp
    prefixes = asVector $ pack
        [ "hackage-index"
        , "createview"
        , "build00index."
        , "newindex"
        ]

-- for yesod devel
getApplicationDev :: Bool -> IO (Int, Application)
getApplicationDev useEcho =
    defaultDevelApp loader (fmap fst . makeApplication useEcho)
  where
    loader = Yesod.Default.Config.loadConfig (configSettings Development)
        { csParseExtra = parseExtra
        }

checkMigration :: MonadIO m
               => Int
               -> ReaderT SqlBackend m ()
               -> ReaderT SqlBackend m ()
checkMigration num f = do
    eres <- insertBy $ Migration num
    case eres of
        Left _ -> return ()
        Right _ -> f

fixSnapSlugs :: (MonadResource m, HasGenIO env, MonadReader env m)
             => ReaderT SqlBackend m ()
fixSnapSlugs =
    selectSource [] [Asc StackageUploaded] $$ mapM_C go
  where
    go (Entity sid Stackage {..}) =
        loop (1 :: Int)
      where
        base = T.replace "haskell platform" "hp"
             $ T.replace "stackage build for " ""
             $ toLower stackageTitle
        loop 50 = error "fixSnapSlugs can't find a good slug"
        loop i = do
            slug' <- lift $ safeMakeSlug base $ if i == 1 then False else True
            let slug = SnapSlug slug'
            ms <- getBy $ UniqueSnapshot slug
            case ms of
                Nothing -> update sid [StackageSlug =. slug]
                Just _ -> loop (i + 1)

setCorePackages :: MonadIO m => ReaderT SqlBackend m ()
setCorePackages =
    updateWhere
        [ PackageName' <-. defaultCorePackages
        , PackageCore ==. Nothing
        ]
        [PackageCore =. Just True]
  where
    defaultCorePackages = map PackageName $ words =<<
        [ "ghc hoopl bytestring unix haskeline Cabal base time xhtml"
        , "haskell98 hpc filepath process array integer-gmp bin-package-db"
        , "containers haskell2010 binary ghc-prim old-time old-locale rts"
        , "terminfo transformers deepseq pretty template-haskell directory"
        ]
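A side note on the checkMigration helper in the deleted module above (illustration, not part of the diff): it turns persistent's insertBy into a run-once gate, with the Migration row as the marker, so each numbered fixup executes at most once per database. A hypothetical next fixup would be wired in the same way; backfillSomething is an invented name, not from this repo:

-- Sketch only: gate a third one-shot fixup exactly like
-- checkMigration 1 and 2 above. insertBy returns Left when the
-- Migration 3 row already exists, so the action is skipped.
runOneShotFixups = do
    checkMigration 1 fixSnapSlugs
    checkMigration 2 setCorePackages
    checkMigration 3 backfillSomething -- invented for illustration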
178 Data/BlobStore.hs
@@ -1,178 +0,0 @@
module Data.BlobStore
    ( BlobStore (..)
    , ToPath (..)
    , fileStore
    , HasBlobStore (..)
    , storeWrite
    , storeRead
    , storeExists
    , BackupToS3 (..)
    , cachedS3Store
    ) where

import ClassyPrelude.Yesod
import qualified Filesystem as F
import Control.Monad.Trans.Resource (release)
import qualified Aws
import Aws.S3 as Aws
import qualified System.IO as IO
import System.Directory (getTemporaryDirectory)

-- FIXME add a sendfile optimization
data BlobStore key = BlobStore
    { storeWrite' :: !(forall m. MonadIO m => key -> Acquire (Sink ByteString m ()))
    , storeRead' :: !(forall m. MonadIO m => key -> Acquire (Maybe (Source m ByteString)))
    , storeExists' :: !(forall m. MonadIO m => key -> m Bool)
    }

class HasBlobStore a key | a -> key where
    getBlobStore :: a -> BlobStore key
instance HasBlobStore (BlobStore key) key where
    getBlobStore = id

storeWrite :: (MonadResource m, MonadReader env m, HasBlobStore env key)
           => key
           -> Consumer ByteString m ()
storeWrite key = do
    store <- liftM getBlobStore ask
    (releaseKey, sink) <- allocateAcquire $ storeWrite' store key
    toConsumer sink
    release releaseKey

storeRead :: (MonadResource m, MonadReader env m, HasBlobStore env key)
          => key
          -> m (Maybe (Source m ByteString))
storeRead key = do
    store <- liftM getBlobStore ask
    (releaseKey, msrc) <- allocateAcquire $ storeRead' store key
    case msrc of
        Nothing -> do
            release releaseKey
            return Nothing
        Just src -> return $ Just $ src >> release releaseKey

storeExists :: (MonadIO m, MonadReader env m, HasBlobStore env key)
            => key
            -> m Bool
storeExists key = do
    store <- liftM getBlobStore ask
    storeExists' store key

class ToPath a where
    toPath :: a -> [Text]

fileStore :: ToPath key
          => FilePath -- ^ root
          -> BlobStore key
fileStore root = BlobStore
    { storeWrite' = \key -> (sinkHandle . snd) <$> mkAcquireType
        (do
            let fp = toFP root key
            F.createTree $ directory fp
            IO.openBinaryTempFile
                (fpToString $ directory fp)
                (fpToString $ filename fp))
        (\(fp, h) rt ->
            case rt of
                ReleaseException -> do
                    hClose h `finally` F.removeFile (fpFromString fp)
                _ -> do
                    hClose h
                    F.rename (fpFromString fp) (toFP root key))
    , storeRead' = \key -> (fmap sourceHandle) <$> mkAcquire
        ((Just <$> F.openFile (toFP root key) F.ReadMode)
            `catch` \e ->
                if isDoesNotExistError e
                    then return Nothing
                    else throwIO e)
        (maybe (return ()) hClose)
    , storeExists' = liftIO . F.isFile . toFP root
    }

toFP :: ToPath a => FilePath -> a -> FilePath
toFP root key = foldl' (\x y -> x </> fpFromText y) root (toPath key)

-- | Note: Only use with data which will never be modified!
cachedS3Store :: (BackupToS3 key, ToPath key)
              => FilePath -- ^ cache directory
              -> Aws.Credentials
              -> Text -- bucket FIXME Aws.Bucket
              -> Text -- ^ prefix within bucket
              -> Manager
              -> BlobStore key
cachedS3Store cache creds bucket prefix manager =
    self
  where
    self = BlobStore
        { storeWrite' = \key ->
            if shouldBackup key
                then do
                    tempDir <- liftIO getTemporaryDirectory
                    (fp, h) <- mkAcquire
                        (IO.openBinaryTempFile tempDir "store-write-cache")
                        (\(fp, h) -> hClose h >> F.removeFile (fpFromString fp))
                    return $ do
                        len <- getZipSink $ ZipSink (sinkHandle h) *> ZipSink lengthCE
                        liftIO $ hClose h
                        liftIO $ IO.withFile fp IO.ReadMode $ \inH -> runResourceT $ do
                            -- FIXME the need for this separate manager
                            -- indicates a serious bug in either aws or (more
                            -- likely) http-client, must investigate!
                            manager' <- newManager
                            res <- Aws.aws
                                (Aws.Configuration Aws.Timestamp creds
                                    $ Aws.defaultLog Aws.Error)
                                Aws.defServiceConfig
                                manager'
                                (Aws.putObject bucket (toS3Path key)
                                    $ requestBodySource len
                                    $ sourceHandle inH)
                            void $ Aws.readResponseIO res
                        liftIO $ IO.withFile fp IO.ReadMode $ \inH -> withAcquire
                            (storeWrite' (fileStore cache) key)
                            (sourceHandle inH $$)
                else storeWrite' (fileStore cache) key
        , storeRead' = \key ->
            if shouldBackup key
                then do
                    msrc <- storeRead' (fileStore cache) key
                    case msrc of
                        Just src -> return $ Just src
                        Nothing -> do
                            join $ liftIO $ handle (\S3Error{} -> return $ return Nothing) $ runResourceT $ do
                                res <- Aws.aws
                                    (Aws.Configuration Aws.Timestamp creds
                                        $ Aws.defaultLog Aws.Error)
                                    Aws.defServiceConfig
                                    manager
                                    (Aws.getObject bucket (toS3Path key))
                                gor <- Aws.readResponseIO res
                                let fp = toFP cache key
                                liftIO $ F.createTree $ directory fp
                                bracketOnError
                                    (liftIO $ IO.openBinaryTempFile
                                        (fpToString $ directory fp)
                                        (fpToString $ filename fp))
                                    (\(fpTmp, h) -> liftIO $ do
                                        hClose h
                                        F.removeFile (fpFromString fpTmp))
                                    $ \(fpTmp, h) -> do
                                        responseBody (Aws.gorResponse gor) $$+- sinkHandle h
                                        liftIO $ do
                                            hClose h
                                            F.rename (fpFromString fpTmp) fp
                                        return $ storeRead' (fileStore cache) key -- FIXME optimize?
                else storeRead' (fileStore cache) key
        , storeExists' = \key ->
            if shouldBackup key
                then liftIO $ withAcquire (storeRead' self key)
                    $ \msrc -> return
                        $ maybe False (const True)
                            (msrc :: Maybe (Source IO ByteString))
                else storeExists' (fileStore cache) key
        }

    toS3Path key = intercalate "/" $ filter (not . null) $ prefix : toPath key

class BackupToS3 key where
    shouldBackup :: key -> Bool
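For orientation (illustration, not part of the diff): the whole storage interface of this deleted module is the BlobStore record plus the ToPath class, so a caller only defines a key type and picks a backend. A minimal sketch against the module's own conduit-based API, assuming its ClassyPrelude.Yesod imports; DemoKey and the /tmp path are invented (the real key is StoreKey from Types):

data DemoKey = DemoKey Text

instance ToPath DemoKey where
    toPath (DemoKey name) = ["demo", name]

-- Write one blob through a file-backed store, then read it back.
demo :: IO ()
demo = runResourceT $ flip runReaderT (fileStore "/tmp/demo-store") $ do
    yield ("hello" :: ByteString) $$ storeWrite (DemoKey "greeting")
    msrc <- storeRead (DemoKey "greeting")
    case msrc of
        Nothing -> putStrLn "missing"
        Just src -> do
            bs <- src $$ foldC   -- concatenate all chunks
            print (bs :: ByteString)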
444
Data/Hackage.hs
444
Data/Hackage.hs
@ -1,444 +0,0 @@
|
||||
module Data.Hackage
|
||||
( loadCabalFiles
|
||||
, sourceHackageSdist
|
||||
, sinkUploadHistory
|
||||
, UploadState (..)
|
||||
, UploadHistory
|
||||
, sourceHistory
|
||||
) where
|
||||
|
||||
import ClassyPrelude.Yesod hiding (get)
|
||||
import Types
|
||||
import Data.BlobStore
|
||||
import Data.Conduit.Lazy (MonadActive (..), lazyConsume)
|
||||
import qualified Codec.Archive.Tar as Tar
|
||||
import Control.Monad.Logger (runNoLoggingT)
|
||||
import qualified Data.Text as T
|
||||
import Data.Conduit.Zlib (ungzip)
|
||||
import System.IO.Temp (withSystemTempFile)
|
||||
import System.IO (IOMode (ReadMode), openBinaryFile)
|
||||
import Model (Uploaded (Uploaded), Metadata (..))
|
||||
import Distribution.PackageDescription.Parse (parsePackageDescription, ParseResult (ParseOk))
|
||||
import qualified Distribution.PackageDescription as PD
|
||||
import qualified Distribution.Package as PD
|
||||
import Control.Monad.State.Strict (put, get, execStateT, MonadState)
|
||||
import Crypto.Hash.Conduit (sinkHash)
|
||||
import Crypto.Hash (Digest, SHA256)
|
||||
import Data.Byteable (toBytes)
|
||||
import Distribution.Text (display)
|
||||
import Text.Markdown (Markdown (Markdown))
|
||||
import qualified Data.Traversable as T
|
||||
import qualified Data.Version
|
||||
import Text.ParserCombinators.ReadP (readP_to_S)
|
||||
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
|
||||
import Text.Blaze.Html (unsafeByteString)
|
||||
import qualified Text.Blaze.Html5 as H
|
||||
import qualified Text.Blaze.Html5.Attributes as A
|
||||
import qualified Documentation.Haddock.Parser as Haddock
|
||||
import Documentation.Haddock.Types (DocH (..), Hyperlink (..), Picture (..), Header (..), Example (..))
|
||||
import qualified Data.HashMap.Lazy as HM
|
||||
|
||||
sinkUploadHistory :: Monad m => Consumer (Entity Uploaded) m UploadHistory
|
||||
sinkUploadHistory =
|
||||
foldlC go mempty
|
||||
where
|
||||
go history (Entity _ (Uploaded name version time)) =
|
||||
case lookup name history of
|
||||
Nothing -> insertMap name (singletonMap version time) history
|
||||
Just vhistory -> insertMap name (insertMap version time vhistory) history
|
||||
|
||||
loadCabalFiles :: ( MonadActive m
|
||||
, MonadBaseControl IO m
|
||||
, MonadThrow m
|
||||
, MonadIO m
|
||||
, MonadReader env m
|
||||
, HasHttpManager env
|
||||
, HasBlobStore env StoreKey
|
||||
, HasHackageRoot env
|
||||
, MonadLogger m
|
||||
, MonadMask m
|
||||
)
|
||||
=> Bool -- ^ do the database updating
|
||||
-> Bool -- ^ force updates regardless of hash value?
|
||||
-> UploadHistory -- ^ initial
|
||||
-> HashMap PackageName (Version, ByteString)
|
||||
-> m (UploadState Metadata)
|
||||
loadCabalFiles dbUpdates forceUpdate uploadHistory0 metadata0 = (>>= runUploadState) $ flip execStateT (UploadState uploadHistory0 [] metadata1 mempty) $ do
|
||||
HackageRoot root <- liftM getHackageRoot ask
|
||||
$logDebug $ "Entering loadCabalFiles, root == " ++ root
|
||||
req <- parseUrl $ unpack $ root ++ "/00-index.tar.gz"
|
||||
withSystemTempFile "hackage-index" $ \tempIndex handleOut -> do
|
||||
$logDebug $ "Requesting: " ++ tshow req
|
||||
withResponse req $ \res -> responseBody res $$ sinkHandle handleOut
|
||||
liftIO $ hClose handleOut
|
||||
withBinaryFile tempIndex ReadMode $ \handleIn -> do
|
||||
bss <- lazyConsume $ sourceHandle handleIn $= ungzip
|
||||
tarSource (Tar.read $ fromChunks bss)
|
||||
$$ parMapMC 32 go
|
||||
=$ scanlC (\x _ -> x + 1) (0 :: Int)
|
||||
=$ filterC ((== 0) . (`mod` 1000))
|
||||
=$ mapM_C (\i -> $logInfo $ "Processing cabal file #" ++ tshow i)
|
||||
$logInfo "Finished processing cabal files"
|
||||
where
|
||||
metadata1 = flip fmap metadata0 $ \(v, h) -> MetaSig
|
||||
v
|
||||
(fromMaybe (pack [0, 0, 0]) $ readVersion v)
|
||||
h
|
||||
withBinaryFile fp mode = bracket (liftIO $ openBinaryFile fp mode) (liftIO . hClose)
|
||||
|
||||
go entry = do
|
||||
case Tar.entryContent entry of
|
||||
Tar.NormalFile lbs _
|
||||
| Just (name, version) <- parseFilePath (Tar.entryPath entry) -> do
|
||||
let key = HackageCabal name version
|
||||
-- It's not longer sufficient to simply check if the cabal
|
||||
-- file exists, since Hackage now allows updating in place.
|
||||
-- Instead, we have to check if it matches what we have
|
||||
-- and, if not, update it.
|
||||
store <- liftM getBlobStore ask
|
||||
newDigest :: Digest SHA256 <- sourceLazy lbs $$ sinkHash
|
||||
toStore <- withAcquire (storeRead' store key) $ \mcurr ->
|
||||
case mcurr of
|
||||
Nothing -> return True
|
||||
Just curr -> do
|
||||
-- Check if it matches. This is cheaper than
|
||||
-- always writing, since it can take advantage
|
||||
-- of the local filesystem cache and not go to
|
||||
-- S3 each time.
|
||||
currDigest <- curr $$ sinkHash
|
||||
return $! currDigest /= newDigest
|
||||
when toStore $ withAcquire (storeWrite' store key) $ \sink ->
|
||||
sourceLazy lbs $$ sink
|
||||
when dbUpdates $ do
|
||||
setUploadDate name version
|
||||
|
||||
case readVersion version of
|
||||
Nothing -> return ()
|
||||
Just dataVersion -> setMetadata
|
||||
forceUpdate
|
||||
name
|
||||
version
|
||||
dataVersion
|
||||
(toBytes newDigest)
|
||||
(parsePackageDescription $ unpack $ decodeUtf8 lbs)
|
||||
_ -> return ()
|
||||
|
||||
readVersion :: Version -> Maybe (UVector Int)
|
||||
readVersion v =
|
||||
case filter (null . snd) $ readP_to_S Data.Version.parseVersion . unpack . unVersion $ v of
|
||||
(dv, _):_ -> Just $ pack $ Data.Version.versionBranch dv
|
||||
[] -> Nothing
|
||||
|
||||
runUploadState :: MonadIO m => UploadState (IO a) -> m (UploadState a)
|
||||
runUploadState (UploadState w x y z) = liftIO $ UploadState w x y <$> T.sequence z
|
||||
|
||||
tarSource :: (Exception e, MonadThrow m)
|
||||
=> Tar.Entries e
|
||||
-> Producer m Tar.Entry
|
||||
tarSource Tar.Done = return ()
|
||||
tarSource (Tar.Fail e) = throwM e
|
||||
tarSource (Tar.Next e es) = yield e >> tarSource es

type UploadHistory = HashMap PackageName (HashMap Version UTCTime)

data UploadState md = UploadState
    { usHistory :: !UploadHistory
    , usChanges :: ![Uploaded]
    , usMetadata :: !(HashMap PackageName MetaSig)
    , usMetaChanges :: HashMap PackageName md
    }

data MetaSig = MetaSig
    {-# UNPACK #-} !Version
    {-# UNPACK #-} !(UVector Int) -- versionBranch
    {-# UNPACK #-} !ByteString -- hash

setUploadDate :: ( MonadBaseControl IO m
                 , MonadThrow m
                 , MonadIO m
                 , MonadReader env m
                 , MonadState (UploadState (IO Metadata)) m
                 , HasHttpManager env
                 , MonadLogger m
                 )
              => PackageName
              -> Version
              -> m ()
setUploadDate name version = do
    UploadState history changes us3 us4 <- get
    case lookup name history >>= lookup version of
        Just _ -> return ()
        Nothing -> do
            req <- parseUrl url
            $logDebug $ "Requesting: " ++ tshow req
            lbs <- withResponse req $ \res -> responseBody res $$ sinkLazy
            let uploadDateT = decodeUtf8 $ toStrict lbs
            case parseTime defaultTimeLocale "%c" $ unpack uploadDateT of
                Nothing -> return ()
                Just time -> do
                    let vhistory = insertMap version time $ fromMaybe mempty $ lookup name history
                        history' = insertMap name vhistory history
                        changes' = Uploaded name version time : changes
                    put $ UploadState history' changes' us3 us4
  where
    url = unpack $ concat
        [ "http://hackage.haskell.org/package/"
        , toPathPiece name
        , "-"
        , toPathPiece version
        , "/upload-time"
        ]

setMetadata :: ( MonadBaseControl IO m
               , MonadThrow m
               , MonadIO m
               , MonadReader env m
               , MonadState (UploadState (IO Metadata)) m
               , HasHttpManager env
               , MonadLogger m
               , MonadActive m
               , HasBlobStore env StoreKey
               , HasHackageRoot env
               )
            => Bool -- ^ force update?
            -> PackageName
            -> Version
            -> UVector Int -- ^ versionBranch
            -> ByteString
            -> ParseResult PD.GenericPackageDescription
            -> m ()
setMetadata forceUpdate name version dataVersion hash' gpdRes = do
    UploadState us1 us2 mdMap mdChanges <- get
    let toUpdate =
            case lookup name mdMap of
                Just (MetaSig _currVersion currDataVersion currHash) ->
                    case compare currDataVersion dataVersion of
                        LT -> True
                        GT -> False
                        EQ -> currHash /= hash' || forceUpdate
                Nothing -> True
    if toUpdate
        then case gpdRes of
            ParseOk _ gpd -> do
                !md <- getMetadata name version hash' gpd
                put $! UploadState us1 us2
                    (insertMap name (MetaSig version dataVersion hash') mdMap)
                    (HM.insert name md mdChanges)
            _ -> return ()
        else return ()

getMetadata :: ( MonadActive m
               , MonadIO m
               , MonadBaseControl IO m
               , MonadThrow m
               , MonadReader env m
               , HasBlobStore env StoreKey
               , HasHackageRoot env
               , HasHttpManager env
               , MonadLogger m
               )
            => PackageName
            -> Version
            -> ByteString
            -> PD.GenericPackageDescription
            -> m (IO Metadata)
getMetadata name version hash' gpd = do
    let pd = PD.packageDescription gpd
    env <- ask
    return $ liftIO $ runNoLoggingT $ flip runReaderT env $ do
        (mreadme, mchangelog, mlicenseContent) <-
            grabExtraFiles name version
#if MIN_VERSION_Cabal(1, 20, 0)
                $ PD.licenseFiles pd
#else
                [PD.licenseFile pd]
#endif
        let collapseHtml = unsafeByteString . toStrict . renderHtml
        return Metadata
            { metadataName = name
            , metadataVersion = version
            , metadataHash = hash'
            , metadataDeps = setToList
                $ asSet
                $ concat
                    [ foldMap goTree $ PD.condLibrary gpd
                    , foldMap (goTree . snd) $ PD.condExecutables gpd
                    ]
            , metadataAuthor = pack $ PD.author pd
            , metadataMaintainer = pack $ PD.maintainer pd
            , metadataLicenseName = pack $ display $ PD.license pd
            , metadataHomepage = pack $ PD.homepage pd
            , metadataBugReports = pack $ PD.bugReports pd
            , metadataSynopsis = pack $ PD.synopsis pd
            , metadataSourceRepo = mapMaybe showSourceRepo $ PD.sourceRepos pd
            , metadataCategory = pack $ PD.category pd
            , metadataLibrary = isJust $ PD.library pd
            , metadataExes = length $ PD.executables pd
            , metadataTestSuites = length $ PD.testSuites pd
            , metadataBenchmarks = length $ PD.benchmarks pd
            , metadataReadme = collapseHtml $ fromMaybe
                (hToHtml . Haddock.toRegular . Haddock.parseParas $ PD.description pd)
                mreadme
            , metadataChangelog = collapseHtml <$> mchangelog
            , metadataLicenseContent = collapseHtml <$> mlicenseContent
            }
  where
    goTree (PD.CondNode _ deps comps) = concatMap goDep deps ++ concatMap goComp comps
    goDep (PD.Dependency (PD.PackageName n) _) = singletonSet $ pack n
    goComp (_, tree, mtree) = goTree tree ++ maybe mempty goTree mtree

-- | Convert a Haddock doc to HTML.
hToHtml :: DocH String String -> Html
hToHtml =
    go
  where
    go :: DocH String String -> Html
    go DocEmpty = mempty
    go (DocAppend x y) = go x ++ go y
    go (DocString x) = toHtml x
    go (DocParagraph x) = H.p $ go x
    go (DocIdentifier s) = H.code $ toHtml s
    go (DocIdentifierUnchecked s) = H.code $ toHtml s
    go (DocModule s) = H.code $ toHtml s
    go (DocWarning x) = H.span H.! A.class_ "warning" $ go x
    go (DocEmphasis x) = H.em $ go x
    go (DocMonospaced x) = H.code $ go x
    go (DocBold x) = H.strong $ go x
    go (DocUnorderedList xs) = H.ul $ foldMap (H.li . go) xs
    go (DocOrderedList xs) = H.ol $ foldMap (H.li . go) xs
    go (DocDefList xs) = H.dl $ flip foldMap xs $ \(x, y) ->
        H.dt (go x) ++ H.dd (go y)
    go (DocCodeBlock x) = H.pre $ H.code $ go x
    go (DocHyperlink (Hyperlink url mlabel)) =
        H.a H.! A.href (H.toValue url) $ toHtml label
      where
        label = fromMaybe url mlabel
    go (DocPic (Picture url mtitle)) =
        H.img H.! A.src (H.toValue url) H.! A.title (H.toValue $ fromMaybe mempty mtitle)
    go (DocAName s) = H.div H.! A.id (H.toValue s) $ mempty
    go (DocProperty s) = H.pre $ H.code $ toHtml s
    go (DocExamples es) = flip foldMap es $ \(Example exp' ress) ->
        H.div H.! A.class_ "example" $ do
            H.pre H.! A.class_ "expression" $ H.code $ toHtml exp'
            flip foldMap ress $ \res ->
                H.pre H.! A.class_ "result" $ H.code $ toHtml res
    go (DocHeader (Header level content)) =
        wrapper level $ go content
      where
        wrapper 1 = H.h1
        wrapper 2 = H.h2
        wrapper 3 = H.h3
        wrapper 4 = H.h4
        wrapper 5 = H.h5
        wrapper _ = H.h6
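
-- A usage sketch (not part of the original module): render a package
-- description the same way 'getMetadata' does, going from Haddock markup to
-- rendered HTML via blaze's 'renderHtml' (assumed here to be the UTF-8
-- renderer producing a lazy ByteString).
--
-- > descriptionHtml :: String -> LByteString
-- > descriptionHtml = renderHtml . hToHtml . Haddock.toRegular . Haddock.parseParas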

showSourceRepo :: PD.SourceRepo -> Maybe Text
showSourceRepo = fmap pack . PD.repoLocation

grabExtraFiles :: ( MonadActive m
                  , MonadIO m
                  , MonadBaseControl IO m
                  , MonadThrow m
                  , MonadReader env m
                  , HasBlobStore env StoreKey
                  , HasHackageRoot env
                  , HasHttpManager env
                  , MonadLogger m
                  )
               => PackageName
               -> Version
               -> [String] -- ^ license files
               -> m (Maybe Html, Maybe Html, Maybe Html) -- ^ README, changelog, license
grabExtraFiles name version lfiles = runResourceT $ do
    msrc <- sourceHackageSdist name version
    handle (\(_ :: Tar.FormatError) -> return (Nothing, Nothing, Nothing)) $
        case msrc of
            Nothing -> return mempty
            Just src -> do
                bss <- lazyConsume $ src $= ungzip
                tarSource (Tar.read $ fromChunks bss) $$ foldlC go mempty
  where
    go trip@(mreadme, mchangelog, mlicense) entry =
        case Tar.entryContent entry of
            Tar.NormalFile lbs _ ->
                let name' = drop 1 $ dropWhile (/= '/') $ Tar.entryPath entry in
                case toLower name' of
                    "readme.md"     -> (md lbs, mchangelog, mlicense)
                    "readme"        -> (txt lbs, mchangelog, mlicense)
                    "readme.txt"    -> (txt lbs, mchangelog, mlicense)
                    "changelog.md"  -> (mreadme, md lbs, mlicense)
                    "changelog"     -> (mreadme, txt lbs, mlicense)
                    "changelog.txt" -> (mreadme, txt lbs, mlicense)
                    "changes.md"    -> (mreadme, md lbs, mlicense)
                    "changes"       -> (mreadme, txt lbs, mlicense)
                    "changes.txt"   -> (mreadme, txt lbs, mlicense)
                    _ | name' `elem` lfiles -> (mreadme, mchangelog, txt lbs)
                    _ -> trip
            _ -> trip

    md = wrapClass "markdown" . Markdown . decodeUtf8
    txt = wrapClass "plain-text" . Textarea . toStrict . decodeUtf8

    wrapClass clazz inner = Just $ H.div H.! A.class_ clazz $ toHtml inner

parseFilePath :: String -> Maybe (PackageName, Version)
parseFilePath s =
    case filter (not . null) $ T.split (== '/') $ pack s of
        (name:version:_) -> Just (PackageName name, Version version)
        _ -> Nothing
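
-- A usage sketch (not part of the original module): index paths have the
-- shape "<name>/<version>/...", so (assuming the usual Eq instances on the
-- newtypes):
--
-- > parseFilePath "yesod/1.4.0/yesod.cabal"
-- >     == Just (PackageName "yesod", Version "1.4.0")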

sourceHackageSdist :: ( MonadIO m
                      , MonadThrow m
                      , MonadBaseControl IO m
                      , MonadResource m
                      , MonadReader env m
                      , HasHttpManager env
                      , HasHackageRoot env
                      , HasBlobStore env StoreKey
                      , MonadLogger m
                      )
                   => PackageName
                   -> Version
                   -> m (Maybe (Source m ByteString))
sourceHackageSdist name version = do
    let key = HackageSdist name version
    msrc1 <- storeRead key
    case msrc1 of
        Just src -> return $ Just src
        Nothing -> do
            HackageRoot root <- liftM getHackageRoot ask
            let url = concat
                    [ root
                    , "/package/"
                    , toPathPiece name
                    , "-"
                    , toPathPiece version
                    , ".tar.gz"
                    ]
            req' <- parseUrl $ unpack url
            let req = req' { checkStatus = \_ _ _ -> Nothing }
            $logDebug $ "Requesting: " ++ tshow req
            exists <- withResponse req $ \res ->
                if responseStatus res == status200
                    then do
                        responseBody res $$ storeWrite key
                        return True
                    else return False
            if exists
                then storeRead key
                else return Nothing

sourceHistory :: Monad m => UploadHistory -> Producer m Uploaded
sourceHistory =
    mapM_ go . mapToList
  where
    go (name, vhistory) =
        mapM_ go' $ mapToList vhistory
      where
        go' (version, time) = yield $ Uploaded name version time

-- FIXME put in conduit-combinators
parMapMC :: (MonadIO m, MonadBaseControl IO m)
         => Int
         -> (i -> m o)
         -> Conduit i m o
parMapMC _ = mapMC
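
-- Note: this placeholder ignores its parallelism argument and runs
-- sequentially. A hedged sketch of what callers expect (hypothetical,
-- chunked rather than streaming; assumes lifted-async's 'mapConcurrently'
-- plus conduit-combinators' 'conduitVector' and 'concatC'):
--
-- > parMapMC n f = conduitVector n =$= mapMC (mapConcurrently f) =$= concatC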

@@ -1,49 +0,0 @@
-- | Transforms http://hackage.haskell.org/packages/deprecated.json
-- into model data to be stored in the database.
module Data.Hackage.DeprecationInfo
    ( HackageDeprecationInfo(..)
    ) where

import Prelude
import Data.Aeson
import Model
import Types

data HackageDeprecationInfo = HackageDeprecationInfo
    { deprecations :: [Deprecated]
    , suggestions :: [Suggested]
    }

instance FromJSON HackageDeprecationInfo where
    parseJSON j = do
        deprecationRecords <- parseJSON j
        return HackageDeprecationInfo
            { deprecations = map toDeprecated deprecationRecords
            , suggestions = concatMap toSuggestions deprecationRecords
            }

data DeprecationRecord = DeprecationRecord
    { _deprecatedPackage :: PackageName
    , _deprecatedInFavourOf :: [PackageName]
    }

instance FromJSON DeprecationRecord where
    parseJSON j = do
        obj <- parseJSON j
        package <- (obj .: "deprecated-package") >>= parsePackageName
        inFavourOf <- (obj .: "in-favour-of") >>= mapM parsePackageName
        return $ DeprecationRecord package inFavourOf
      where
        parsePackageName name = return (PackageName name)
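
-- For reference, a representative deprecated.json record as parsed above
-- (hypothetical package names):
--
-- > [ { "deprecated-package": "foo", "in-favour-of": ["bar", "baz"] } ]
--
-- Each such record yields one 'Deprecated' row and one 'Suggested' row per
-- replacement package.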

toDeprecated :: DeprecationRecord -> Deprecated
toDeprecated (DeprecationRecord deprecated _) = Deprecated deprecated

toSuggestions :: DeprecationRecord -> [Suggested]
toSuggestions (DeprecationRecord deprecated inFavourOf) =
    map toSuggestion inFavourOf
  where
    toSuggestion favoured = Suggested
        { suggestedPackage = favoured
        , suggestedInsteadOf = deprecated
        }

Data/Slug.hs
@@ -1,106 +0,0 @@
module Data.Slug
    ( Slug
    , mkSlug
    , mkSlugLen
    , safeMakeSlug
    , unSlug
    , InvalidSlugException (..)
    , HasGenIO (..)
    , randomSlug
    , slugField
    , SnapSlug (..)
    ) where

import ClassyPrelude.Yesod
import Database.Persist.Sql (PersistFieldSql (sqlType))
import qualified System.Random.MWC as MWC
import GHC.Prim (RealWorld)
import Text.Blaze (ToMarkup)

newtype Slug = Slug { unSlug :: Text }
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, Ord, Hashable)
instance PersistFieldSql Slug where
    sqlType = sqlType . liftM unSlug

mkSlug :: MonadThrow m => Text -> m Slug
mkSlug t
    | length t < minLen = throwM $ InvalidSlugException t "Too short"
    | length t > maxLen = throwM $ InvalidSlugException t "Too long"
    | any (not . validChar) t = throwM $ InvalidSlugException t "Contains invalid characters"
    | "-" `isPrefixOf` t = throwM $ InvalidSlugException t "Must not start with a hyphen"
    | otherwise = return $ Slug t

mkSlugLen :: MonadThrow m => Int -> Int -> Text -> m Slug
mkSlugLen minLen' maxLen' t
    | length t < minLen' = throwM $ InvalidSlugException t "Too short"
    | length t > maxLen' = throwM $ InvalidSlugException t "Too long"
    | any (not . validChar) t = throwM $ InvalidSlugException t "Contains invalid characters"
    | "-" `isPrefixOf` t = throwM $ InvalidSlugException t "Must not start with a hyphen"
    | otherwise = return $ Slug t

minLen, maxLen :: Int
minLen = 3
maxLen = 30

validChar :: Char -> Bool
validChar c =
    ('A' <= c && c <= 'Z') ||
    ('a' <= c && c <= 'z') ||
    ('0' <= c && c <= '9') ||
    c == '.' ||
    c == '-' ||
    c == '_'

data InvalidSlugException = InvalidSlugException !Text !Text
    deriving (Show, Typeable)
instance Exception InvalidSlugException

instance PathPiece Slug where
    toPathPiece = unSlug
    fromPathPiece = mkSlug

class HasGenIO a where
    getGenIO :: a -> MWC.GenIO
instance s ~ RealWorld => HasGenIO (MWC.Gen s) where
    getGenIO = id

safeMakeSlug :: (MonadIO m, MonadReader env m, HasGenIO env)
             => Text
             -> Bool -- ^ force some randomness?
             -> m Slug
safeMakeSlug orig forceRandom
    | needsRandom || forceRandom = do
        gen <- liftM getGenIO ask
        cs <- liftIO $ replicateM 3 $ MWC.uniformR (0, 61) gen
        return $ Slug $ cleaned ++ pack ('_':map toChar cs)
    | otherwise = return $ Slug cleaned
  where
    cleaned = take (maxLen - minLen - 1) $ dropWhile (== '-') $ filter validChar orig
    needsRandom = length cleaned < minLen

toChar :: Int -> Char
toChar i
    | i < 26 = toEnum $ fromEnum 'A' + i
    | i < 52 = toEnum $ fromEnum 'a' + i - 26
    | otherwise = toEnum $ fromEnum '0' + i - 52
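
-- The mapping is a 62-character alphabet: 0-25 map to 'A'..'Z', 26-51 to
-- 'a'..'z', and 52-61 to '0'..'9'. For example:
--
-- > map toChar [0, 25, 26, 51, 52, 61] == "AZaz09"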

randomSlug :: (MonadIO m, MonadReader env m, HasGenIO env)
           => Int -- ^ length
           -> m Slug
randomSlug (min maxLen . max minLen -> len) = do
    gen <- liftM getGenIO ask
    cs <- liftIO $ replicateM len $ MWC.uniformR (0, 61) gen
    return $ Slug $ pack $ map toChar cs

slugField :: (Monad m, RenderMessage (HandlerSite m) FormMessage) => Field m Slug
slugField =
    checkMMap go unSlug textField
  where
    go = return . either (Left . tshow) Right . mkSlug

-- | Unique identifier for a snapshot.
newtype SnapSlug = SnapSlug { unSnapSlug :: Slug }
    deriving (Show, Read, Eq, Typeable, PersistField, ToMarkup, PathPiece, Ord, Hashable)
instance PersistFieldSql SnapSlug where
    sqlType = sqlType . liftM unSnapSlug

Data/Tag.hs
@@ -1,11 +0,0 @@
-- | A wrapper around the 'Slug' interface.

module Data.Tag where

import Control.Monad.Catch
import Data.Slug
import Data.Text

-- | Make a tag.
mkTag :: MonadThrow m => Text -> m Slug
mkTag = mkSlugLen 1 20

@@ -1,494 +0,0 @@
-- | Code for unpacking documentation bundles, building the Hoogle databases,
-- and compressing/deduping contents.
module Data.Unpacking
    ( newDocUnpacker
    , getHoogleDB
    , makeHoogle
    , createHoogleDatabases
    ) where

import Import hiding (runDB)
import Data.BlobStore
import Handler.Haddock
import Filesystem (createTree, isFile, removeTree, isDirectory, listDirectory, removeDirectory, removeFile, rename)
import System.Posix.Files (createLink)
import Crypto.Hash.Conduit (sinkHash)
import Control.Concurrent (forkIO)
import Control.Monad.Trans.Resource (allocate, release)
import Data.Char (isAlpha)
import qualified Hoogle
import qualified Data.Text as T
import qualified Data.Yaml as Y
import System.IO (IOMode (ReadMode), withBinaryFile, openBinaryFile)
import System.IO.Temp (withSystemTempFile, withTempFile, withSystemTempDirectory)
import System.Exit (ExitCode (ExitSuccess))
import System.Process (createProcess, proc, cwd, waitForProcess)
import qualified Filesystem.Path.CurrentOS as F
import Data.Conduit.Zlib (gzip, ungzip)
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Crypto.Hash (Digest, SHA1)

newDocUnpacker
    :: FilePath -- ^ haddock root
    -> BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> IO DocUnpacker
newDocUnpacker root store runDB = do
    createDirs dirs

    statusMapVar <- newTVarIO $ asMap mempty
    messageVar <- newTVarIO "Inactive"
    workChan <- atomically newTChan

    let requestDocs forceUnpack ent = atomically $ do
            var <- newTVar USBusy
            modifyTVar statusMapVar
                $ insertMap (stackageSlug $ entityVal ent) var
            writeTChan workChan (forceUnpack, ent, var)

    forkForever $ unpackWorker dirs runDB store messageVar workChan

    return DocUnpacker
        { duRequestDocs = \ent -> do
            m <- readTVarIO statusMapVar
            case lookup (stackageSlug $ entityVal ent) m of
                Nothing -> do
                    b <- isUnpacked dirs (entityVal ent)
                    if b
                        then return USReady
                        else do
                            requestDocs False ent
                            return USBusy
                Just us -> readTVarIO us
        , duGetStatus = readTVarIO messageVar
        , duForceReload = \ent -> do
            atomically $ modifyTVar statusMapVar
                $ deleteMap (stackageSlug $ entityVal ent)
            requestDocs True ent
        }
  where
    dirs = mkDirs root

createDirs :: Dirs -> IO ()
createDirs dirs = do
    createTree $ dirCacheRoot dirs
    createTree $ dirRawRoot dirs
    createTree $ dirGzRoot dirs
    createTree $ dirHoogleRoot dirs

-- | Check for the presence of file system artifacts indicating that the docs
-- have been unpacked.
isUnpacked :: Dirs -> Stackage -> IO Bool
isUnpacked dirs stackage = isFile $ completeUnpackFile dirs stackage

defaultHooDest :: Dirs -> Stackage -> FilePath
defaultHooDest dirs stackage = dirHoogleFp dirs (stackageIdent stackage)
    ["default-" ++ VERSION_hoogle ++ ".hoo"]

forkForever :: IO () -> IO ()
forkForever inner = mask $ \restore ->
    void $ forkIO $ forever $ handleAny print $ restore $ forever inner

unpackWorker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> TVar Text
    -> TChan (Bool, Entity Stackage, TVar UnpackStatus)
    -> IO ()
unpackWorker dirs runDB store messageVar workChan = do
    let say' = atomically . writeTVar messageVar
    say' "Running the compressor"
    let shouldStop = fmap not $ atomically $ isEmptyTChan workChan
    handleAny print $ runCompressor shouldStop say' dirs

    say' "Waiting for new work item"
    (forceUnpack, ent, resVar) <- atomically $ readTChan workChan
    shouldUnpack <-
        if forceUnpack
            then return True
            else not <$> isUnpacked dirs (entityVal ent)

    let say msg = atomically $ writeTVar messageVar $ concat
            [ toPathPiece (stackageSlug $ entityVal ent)
            , ": "
            , msg
            ]

    when shouldUnpack $ do
        say "Beginning of processing"

        -- As soon as the raw unpack is complete, start serving docs
        let onRawComplete = atomically $ writeTVar resVar USReady

        eres <- tryAny $ unpacker dirs runDB store say onRawComplete ent
        atomically $ writeTVar resVar $ case eres of
            Left e -> USFailed $ tshow e
            Right () -> USReady

removeTreeIfExists :: FilePath -> IO ()
removeTreeIfExists fp = whenM (isDirectory fp) (removeTree fp)

unpackRawDocsTo
    :: BlobStore StoreKey
    -> PackageSetIdent
    -> (Text -> IO ())
    -> FilePath
    -> IO ()
unpackRawDocsTo store ident say destdir =
    withSystemTempFile "haddock-bundle.tar.xz" $ \tempfp temph -> do
        say "Downloading raw doc tarball"
        withAcquire (storeRead' store (HaddockBundle ident)) $ \msrc ->
            case msrc of
                Nothing -> error "No haddocks exist for that snapshot"
                Just src -> src $$ sinkHandle temph
        hClose temph

        createTree destdir
        say "Unpacking tarball"
        (Nothing, Nothing, Nothing, ph) <- createProcess
            (proc "tar" ["xf", tempfp])
                { cwd = Just $ fpToString destdir
                }
        ec <- waitForProcess ph
        if ec == ExitSuccess then return () else throwM ec

unpacker
    :: Dirs
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> BlobStore StoreKey
    -> (Text -> IO ())
    -> IO () -- ^ onRawComplete
    -> Entity Stackage
    -> IO ()
unpacker dirs runDB store say onRawComplete (Entity sid stackage@Stackage {..}) = do
    say "Removing old directories, if they exist"
    removeTreeIfExists $ dirRawIdent dirs stackageIdent
    removeTreeIfExists $ dirGzIdent dirs stackageIdent
    removeTreeIfExists $ dirHoogleIdent dirs stackageIdent

    let destdir = dirRawIdent dirs stackageIdent
    unpackRawDocsTo store stackageIdent say destdir
    onRawComplete

    createTree $ dirHoogleIdent dirs stackageIdent

    -- Determine which packages have documentation and update the
    -- database appropriately.
    say "Updating database for available documentation"
    runResourceT $ runDB $ do
        updateWhere
            [PackageStackage ==. sid]
            [PackageHasHaddocks =. False]
        sourceDirectory destdir $$ mapM_C (\fp -> do
            let mnv = nameAndVersionFromPath fp
            forM_ mnv $ \(name, version) -> updateWhere
                [ PackageStackage ==. sid
                , PackageName' ==. PackageName name
                , PackageVersion ==. Version version
                ]
                [PackageHasHaddocks =. True]
            )

    say "Unpack complete"
    let completeFP = completeUnpackFile dirs stackage
    liftIO $ do
        createTree $ F.parent completeFP
        writeFile completeFP ("Complete" :: ByteString)

completeUnpackFile :: Dirs -> Stackage -> FilePath
completeUnpackFile dirs stackage =
    dirGzIdent dirs (stackageIdent stackage) </> "unpack-complete"

-- | Get the path to the Hoogle database, downloading from persistent storage
-- if necessary. This function will /not/ generate a new database, and
-- therefore is safe to run on a live web server.
getHoogleDB :: Dirs
            -> Stackage
            -> Handler (Maybe FilePath)
getHoogleDB dirs stackage = do
    exists <- liftIO $ isFile fp
    if exists
        then return $ Just fp
        else do
            msrc <- storeRead key
            case msrc of
                Nothing -> return Nothing
                Just src -> do
                    liftIO $ createTree $ F.parent fp
                    let tmpfp = fp <.> "tmp" -- FIXME add something random
                    src $$ ungzip =$ sinkFile tmpfp
                    liftIO $ rename tmpfp fp
                    return $ Just fp
  where
    fp = defaultHooDest dirs stackage
    key = HoogleDB (stackageIdent stackage) $ HoogleVersion VERSION_hoogle

-- | Make sure that the last 5 LTS and last 5 Nightly releases all have Hoogle
-- databases available.
createHoogleDatabases
    :: BlobStore StoreKey
    -> (forall a m. (MonadIO m, MonadBaseControl IO m) => SqlPersistT m a -> m a)
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> IO ()
createHoogleDatabases store runDB say urlRender = do
    stackages <- runDB $ do
        sids <- (++)
            <$> fmap (map $ ltsStackage . entityVal)
                    (selectList [] [Desc LtsMajor, Desc LtsMinor, LimitTo 5])
            <*> fmap (map $ nightlyStackage . entityVal)
                    (selectList [] [Desc NightlyDay, LimitTo 5])
        catMaybes <$> mapM get sids
    forM_ stackages $ \stackage -> do
        let say' x = say $ concat
                [ toPathPiece $ stackageSlug stackage
                , ": "
                , x
                ]
        handleAny (say' . tshow) $ makeHoogle store say' urlRender stackage

-- | Either download the Hoogle database from persistent storage, or create it.
makeHoogle
    :: BlobStore StoreKey
    -> (Text -> IO ())
    -> (Route App -> [(Text, Text)] -> Text)
    -> Stackage
    -> IO ()
makeHoogle store say urlRender stackage = do
    say "Making hoogle database"
    exists <- storeExists' store hoogleKey
    if exists
        then say "Hoogle database already exists, skipping"
        else do
            say "Generating Hoogle database"
            generate
  where
    ident = stackageIdent stackage
    hoogleKey = HoogleDB ident $ HoogleVersion VERSION_hoogle

    generate = withSystemTempDirectory "hoogle-database-gen" $ \hoogletemp' -> do
        let hoogletemp = fpFromString hoogletemp'
            rawdocs = hoogletemp </> "rawdocs"

        unpackRawDocsTo store ident say rawdocs

        say "Copying Hoogle text files to temp directory"
        runResourceT $ copyHoogleTextFiles say rawdocs hoogletemp
        say "Creating Hoogle database"
        withSystemTempFile "default.hoo" $ \dstFP' dstH -> do
            let dstFP = fpFromString dstFP'
            hClose dstH
            createHoogleDb say dstFP stackage hoogletemp urlRender
            say "Uploading database to persistent storage"
            withAcquire (storeWrite' store hoogleKey) $ \sink ->
                runResourceT $ sourceFile dstFP $$ gzip =$ sink

runCompressor :: IO Bool -- ^ should stop early?
              -> (Text -> IO ()) -> Dirs -> IO ()
runCompressor shouldStop say dirs =
    handle (\EarlyStop -> return ()) $ runResourceT $ goDir $ dirRawRoot dirs
  where
    goDir dir = do
        liftIO $ whenM shouldStop $ do
            say "Stopping compressor early"
            throwIO EarlyStop
        liftIO $ say $ "Compressing directory: " ++ fpToText dir
        sourceDirectory dir $$ mapM_C goFP
        liftIO $ void $ tryIO $ removeDirectory dir

    goFP fp = do
        e <- liftIO $ isFile fp
        if e
            then liftIO $ do
                say $ "Compressing file: " ++ fpToText fp
                handle (print . asSomeException)
                    $ gzipHash dirs suffix
            else goDir fp
      where
        Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp

data EarlyStop = EarlyStop
    deriving (Show, Typeable)
instance Exception EarlyStop

-- Procedure is to:
--
-- * Gzip the src file to a temp file, and get a hash of the gzipped contents
-- * If that hash doesn't exist in the cache, move the new file to the cache
-- * Create a hard link from dst to the file in the cache
-- * Delete src
gzipHash :: Dirs
         -> FilePath -- ^ suffix
         -> IO ()
gzipHash dirs suffix =
    withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
        digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
            sourceHandle inh
                $= gzip
                $$ (getZipSink $
                        ZipSink (sinkHandle temph) *>
                        ZipSink sinkHash)
        hClose temph
        let fpcache = dirCacheFp dirs digest
        unlessM (isFile fpcache) $ do
            createTree $ F.parent fpcache
            rename (fpFromString tempfp) fpcache
        createTree $ F.parent dst
        createLink (fpToString fpcache) (fpToString dst)
        removeFile src
  where
    src = dirRawRoot dirs </> suffix
    dst = dirGzRoot dirs </> suffix

dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
dirCacheFp dirs digest =
    dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
  where
    name = decodeUtf8 $ B16.encode $ toBytes digest
    (x, y) = splitAt 2 name
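
-- The two-character prefix fans the cache out across up to 256 directories
-- so no single directory grows too large. For a (hypothetical) digest whose
-- hex encoding starts "d3b0...", the resulting path is:
--
-- > <dirCacheRoot>/d3/<remaining 38 hex chars>.gz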

copyHoogleTextFiles :: (Text -> IO ()) -- ^ log
                    -> FilePath -- ^ raw unpacked Haddock files
                    -> FilePath -- ^ temporary work directory
                    -> ResourceT IO ()
copyHoogleTextFiles say raw tmp = do
    let tmptext = tmp </> "text"
    liftIO $ createTree tmptext
    sourceDirectory raw $$ mapM_C (\fp ->
        forM_ (nameAndVersionFromPath fp) $ \(name, version) -> do
            let src = fp </> fpFromText name <.> "txt"
                dst = tmptext </> fpFromText (name ++ "-" ++ version)
            exists <- liftIO $ isFile src
            if exists
                then sourceFile src $$ (sinkFile dst :: Sink ByteString (ResourceT IO) ())
                else liftIO $ appendHoogleErrors say $ HoogleErrors
                    { packageName = name
                    , packageVersion = version
                    , errors = ["No textual Hoogle DB (use \"cabal haddock --hoogle\")"]
                    }
        )

createHoogleDb :: (Text -> IO ())
               -> FilePath -- ^ default.hoo output location
               -> Stackage
               -> FilePath -- ^ temp directory
               -> (Route App -> [(Text, Text)] -> Text)
               -> IO ()
createHoogleDb say dstDefaultHoo stackage tmpdir urlRender = do
    let tmpbin = tmpdir </> "binary"
    createTree tmpbin
    eres <- tryAny $ runResourceT $ do
        -- Create hoogle binary databases for each package.
        sourceDirectory (tmpdir </> "text") $$ mapM_C
            ( \fp -> do
                (releaseKey, srcH) <- allocate (openBinaryFile (fpToString fp) ReadMode) hClose
                forM_ (nameAndVersionFromPath fp) $ \(name, version) -> liftIO $ do
                    say $ concat
                        [ "Creating Hoogle database for: "
                        , name
                        , "-"
                        , version
                        ]
                    src <- unpack . decodeUtf8 . asLByteString <$> hGetContents srcH
                    let -- Preprocess the haddock-generated manifest file.
                        src' = unlines $ haddockHacks (Just (unpack docsUrl)) $ lines src
                        docsUrl = urlRender (HaddockR (stackageSlug stackage) urlPieces) []
                        urlPieces = [name <> "-" <> version, "index.html"]
                        -- Compute the filepath of the resulting hoogle
                        -- database.
                        out = fpToString $ tmpbin </> fpFromText base
                        base = name <> "-" <> version <> ".hoo"
                    errs <- Hoogle.createDatabase "" Hoogle.Haskell [] src' out
                    when (not $ null errs) $ do
                        -- TODO: remove this printing once errors are yielded
                        -- to the user.
                        putStrLn $ concat
                            [ base
                            , " Hoogle errors: "
                            , tshow errs
                            ]
                        appendHoogleErrors say $ HoogleErrors
                            { packageName = name
                            , packageVersion = version
                            , errors = map show errs
                            }
                release releaseKey
            )
        -- Merge the individual binary databases into one big database.
        liftIO $ do
            say "Merging all Hoogle databases"
            dbs <- listDirectory tmpbin
            Hoogle.mergeDatabase
                (map fpToString dbs)
                (fpToString dstDefaultHoo)
    case eres of
        Right () -> return ()
        Left err -> liftIO $ appendHoogleErrors say $ HoogleErrors
            { packageName = "Exception thrown while building hoogle DB"
            , packageVersion = ""
            , errors = [show err]
            }

data HoogleErrors = HoogleErrors
    { packageName :: Text
    , packageVersion :: Text
    , errors :: [String]
    } deriving (Generic)

instance ToJSON HoogleErrors where
instance FromJSON HoogleErrors where

-- | Append Hoogle errors to the log. Each record is encoded as a singleton
-- YAML list, so a file built up from repeated appends can still be decoded
-- as a single [HoogleErrors] list.
appendHoogleErrors :: (Text -> IO ()) -> HoogleErrors -> IO ()
appendHoogleErrors say errs = say $ decodeUtf8 $ Y.encode [errs]

nameAndVersionFromPath :: FilePath -> Maybe (Text, Text)
nameAndVersionFromPath fp =
    (\name -> (name, version)) <$> stripSuffix "-" name'
  where
    (name', version) = T.breakOnEnd "-" $ fpToText $ filename fp
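
-- A usage sketch (not part of the original module): doc directories are
-- named "<name>-<version>", and 'T.breakOnEnd' splits on the last hyphen,
-- so hyphenated package names survive:
--
-- > nameAndVersionFromPath "unpacked/conduit-extra-1.1.0"
-- >     == Just ("conduit-extra", "1.1.0")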

---------------------------------------------------------------------
-- HADDOCK HACKS
-- (Copied from hoogle-4.2.36/src/Recipe/Haddock.hs)
-- Modifications:
-- 1) Some name qualification
-- 2) Explicit type sig due to polymorphic elem
-- 3) Fixed an unused binding warning

-- Eliminate @version
-- Change :*: to (:*:), Haddock bug
-- Change !!Int to !Int, Haddock bug
-- Change instance [overlap ok] to instance, Haddock bug
-- Change instance [incoherent] to instance, Haddock bug
-- Change instance [safe] to instance, Haddock bug
-- Change !Int to Int, HSE bug
-- Drop {-# UNPACK #-}, Haddock bug
-- Drop everything after where, Haddock bug

haddockHacks :: Maybe Hoogle.URL -> [String] -> [String]
haddockHacks loc src = maybe id haddockPackageUrl loc (translate src)
  where
    translate :: [String] -> [String]
    translate = map (unwords . g . map f . words) . filter (not . isPrefixOf "@version ")

    f "::" = "::"
    f (':':xs) = "(:" ++ xs ++ ")"
    f ('!':'!':x:xs) | isAlpha x = xs
    f ('!':x:xs) | isAlpha x || x `elem` ("[(" :: String) = x:xs
    f x | x `elem` ["[overlap","ok]","[incoherent]","[safe]"] = ""
    f x | x `elem` ["{-#","UNPACK","#-}"] = ""
    f x = x

    g ("where":_) = []
    g (x:xs) = x : g xs
    g [] = []

haddockPackageUrl :: Hoogle.URL -> [String] -> [String]
haddockPackageUrl x = concatMap f
  where
    f y | "@package " `isPrefixOf` y = ["@url " ++ x, y]
        | otherwise = [y]
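
-- An illustrative run (hypothetical input lines): the "@version" line is
-- dropped entirely and the "[safe]" marker is erased, leaving a doubled
-- space:
--
-- > haddockHacks Nothing ["@version 1.0", "instance [safe] Show Foo"]
-- >     == ["instance  Show Foo"]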

@@ -1,31 +0,0 @@
module Data.WebsiteContent
    ( WebsiteContent (..)
    , loadWebsiteContent
    ) where

import ClassyPrelude.Yesod
import Text.Markdown (markdown, msXssProtect, msAddHeadingId)

data WebsiteContent = WebsiteContent
    { wcHomepage :: !Html
    , wcAuthors :: !Html
    , wcInstall :: !Html
    , wcOlderReleases :: !Html
    }

loadWebsiteContent :: FilePath -> IO WebsiteContent
loadWebsiteContent dir = do
    wcHomepage <- readHtml "homepage.html"
    wcAuthors <- readHtml "authors.html"
    wcInstall <- readMarkdown "install.md"
    wcOlderReleases <- readHtml "older-releases.html" `catchIO`
        \_ -> readMarkdown "older-releases.md"
    return WebsiteContent {..}
  where
    readHtml fp = fmap (preEscapedToMarkup . decodeUtf8 :: ByteString -> Html)
        $ readFile $ dir </> fp
    readMarkdown fp = fmap (markdown def
            { msXssProtect = False
            , msAddHeadingId = True
            })
        $ readFile $ dir </> fp

DevelMain.hs
@@ -1,52 +0,0 @@
{-# LANGUAGE ImplicitPrelude #-}

-- | Devel web server.
--
-- > :l DevelMain
-- > DevelMain.update
--
-- To start/restart the server.

module DevelMain where

import Application (getApplicationDev)

import Control.Concurrent
import Data.IORef
import Foreign.Store
import Network.Wai.Handler.Warp
import Yesod
import Yesod.Static

-- | Start the web server.
main :: IO (Store (IORef Application))
main =
    do s <- static "static"
       c <- newChan
       (port,app) <- getApplicationDev True
       ref <- newIORef app
       tid <- forkIO
                  (runSettings
                       (setPort port defaultSettings)
                       (\req cont ->
                            do handler <- readIORef ref
                               handler req cont))
       _ <- newStore tid
       ref' <- newStore ref
       _ <- newStore c
       return ref'

-- | Update the server, start it if not running.
update :: IO (Store (IORef Application))
update =
    do m <- lookupStore 1
       case m of
         Nothing -> main
         Just store ->
           do ref <- readStore store
              c <- readStore (Store 2)
              writeChan c ()
              s <- static "static"
              (port,app) <- getApplicationDev True
              writeIORef ref app
              return store

Echo.hs
@@ -1,47 +0,0 @@
-- | A quick and dirty way to echo a printf-style debugging message to
-- a file from anywhere.
--
-- To use from Emacs, run `tail -f /tmp/echo` with M-x grep. You can
-- rename the buffer to *echo* or something. The grep-mode buffer has
-- handy up/down keybindings that will open the file location for you
-- and it supports results coming in live. So it's a perfect way to
-- browse printf-style debugging logs.

module Echo where

import Control.Concurrent.MVar
import Control.Monad.Trans (MonadIO(..))
import System.Locale
import Data.Time
import Language.Haskell.TH
import Language.Haskell.TH.Lift
import Prelude
import System.IO.Unsafe

-- | God forgive me for my sins.
echoV :: MVar ()
echoV = unsafePerformIO (newMVar ())
{-# NOINLINE echoV #-}

-- | Echo something.
echo :: Q Exp
echo = [|write $(location >>= liftLoc)|]

-- | Grab the filename and line/col.
liftLoc :: Loc -> Q Exp
liftLoc (Loc filename _pkg _mod (line, _) _) =
    [|($(lift filename)
      ,$(lift line))|]

-- | Thread-safely (probably) write to the log.
write :: (MonadIO m) => (FilePath,Int) -> String -> m ()
write (file,line) it =
    liftIO (withMVar echoV (const loggit))
  where
    loggit =
        do now <- getCurrentTime
           appendFile "/tmp/echo" (loc ++ ": " ++ fmt now ++ " " ++ it ++ "\n")
    loc = file ++ ":" ++ show line
    fmt = formatTime defaultTimeLocale "%T%Q"

clear :: IO ()
clear = writeFile "/tmp/echo" ""

Foundation.hs
@@ -1,284 +0,0 @@
module Foundation where

import ClassyPrelude.Yesod
import Data.BlobStore
import Data.Slug (safeMakeSlug, HasGenIO (getGenIO), randomSlug, Slug, SnapSlug)
import Data.WebsiteContent
import qualified Database.Persist
import Database.Persist.Sql (PersistentSqlException (Couldn'tGetSQLConnection))
import Model
import qualified Settings
import Settings (widgetFile, Extra (..), GoogleAuth (..))
import Settings.Development (development)
import Settings.StaticFiles
import qualified System.Random.MWC as MWC
import Text.Blaze
import Text.Hamlet (hamletFile)
import Text.Jasmine (minifym)
import Types
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail2
import Yesod.Core.Types (Logger, GWData)
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.GitRepo

-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings :: AppConfig DefaultEnv Extra
    , getStatic :: Static -- ^ Settings for static file serving.
    , connPool :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
    , httpManager :: Manager
    , persistConfig :: Settings.PersistConf
    , appLogger :: Logger
    , genIO :: MWC.GenIO
    , blobStore :: BlobStore StoreKey
    , haddockRootDir :: FilePath
    , appDocUnpacker :: DocUnpacker
    -- ^ We have a dedicated thread so that (1) we don't try to unpack too many
    -- things at once, (2) we never unpack the same thing twice at the same
    -- time, and (3) even if the client connection dies, we still finish the
    -- unpack job.
    , widgetCache :: IORef (HashMap Text (UTCTime, GWData (Route App)))
    , websiteContent :: GitRepo WebsiteContent
    }

data DocUnpacker = DocUnpacker
    { duRequestDocs :: Entity Stackage -> IO UnpackStatus
    , duGetStatus :: IO Text
    , duForceReload :: Entity Stackage -> IO ()
    }

data Progress = ProgressWorking !Text
              | ProgressDone !Text !(Route App)

instance HasBlobStore App StoreKey where
    getBlobStore = blobStore

instance HasGenIO App where
    getGenIO = genIO

instance HasHttpManager App where
    getHttpManager = httpManager

instance HasHackageRoot App where
    getHackageRoot = hackageRoot . appExtra . settings

-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")

deriving instance Show Progress

type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)

-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    approot = ApprootMaster $ appRoot . settings

    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
        (120 * 60) -- 120 minutes
        "config/client_session_key.aes"

    defaultLayout widget = do
        mmsg <- getMessage
        muser <- catch maybeAuth $ \e -> case e of
            Couldn'tGetSQLConnection -> return Nothing
            _ -> throwM e

        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.

        cur <- getCurrentRoute
        pc <- widgetToPageContent $ do
            $(combineStylesheets 'StaticR
                [ css_normalize_css
                , css_bootstrap_css
                , css_bootstrap_responsive_css
                ])
            $(combineScripts 'StaticR
                [ js_jquery_js
                , js_bootstrap_js
                ])
            $(widgetFile "default-layout")

        mcurr <- getCurrentRoute
        let notHome = mcurr /= Just HomeR

        withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")

    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing

    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR

    {- Temporarily disabled to allow for horizontal scaling
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent =
        addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise = base64md5 lbs
    -}

    -- Place Javascript at the bottom of the body tag so the rest of the page loads first.
    jsLoader _ = BottomOfBody

    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLog _ "CLEANUP" _ = False
    shouldLog _ source level =
        development || level == LevelWarn || level == LevelError || source == "CLEANUP"

    makeLogger = return . appLogger

    maximumContentLength _ (Just UploadStackageR) = Just 50000000
    maximumContentLength _ (Just UploadHaddockR{}) = Just 100000000
    maximumContentLength _ _ = Just 2000000

instance ToMarkup (Route App) where
    toMarkup c =
        case c of
            AllSnapshotsR{} -> "Snapshots"
            UploadStackageR{} -> "Upload"
            AuthR (LoginR{}) -> "Login"
            _ -> ""

-- How to run database actions.
instance YesodPersist App where
    type YesodPersistBackend App = SqlBackend
    runDB = defaultRunDB persistConfig connPool
instance YesodPersistRunner App where
    getDBRunner = defaultGetDBRunner connPool

instance YesodAuth App where
    type AuthId App = UserId

    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR

    redirectToReferer _ = True

    getAuthId creds = do
        muid <- maybeAuthId
        join $ runDB $ case muid of
            Nothing -> do
                x <- getBy $ UniqueEmail $ credsIdent creds
                case x of
                    Just (Entity _ email) -> return $ return $ Just $ emailUser email
                    Nothing -> do
                        handle' <- getHandle (0 :: Int)
                        token <- getToken
                        userid <- insert User
                            { userHandle = handle'
                            , userDisplay = credsIdent creds
                            , userToken = token
                            }
                        void $ insert Email
                            { emailEmail = credsIdent creds
                            , emailUser = userid
                            }
                        return $ return $ Just userid
            Just uid -> do
                memail <- getBy $ UniqueEmail $ credsIdent creds
                case memail of
                    Nothing -> do
                        void $ insert Email
                            { emailEmail = credsIdent creds
                            , emailUser = uid
                            }
                        return $ do
                            setMessage $ toHtml $ concat
                                [ "Email address "
                                , credsIdent creds
                                , " added to your account."
                                ]
                            redirect ProfileR
                    Just (Entity _ email)
                        | emailUser email == uid -> return $ do
                            setMessage $ toHtml $ concat
                                [ "The email address "
                                , credsIdent creds
                                , " is already part of your account"
                                ]
                            redirect ProfileR
                        | otherwise -> invalidArgs $ return $ concat
                            [ "The email address "
                            , credsIdent creds
                            , " is already associated with a different account."
                            ]
      where
        handleBase = takeWhile (/= '@') (credsIdent creds)
        getHandle cnt | cnt > 50 = error "Could not get a unique slug"
        getHandle cnt = do
            slug <- lift $ safeMakeSlug handleBase (cnt > 0)
            muser <- getBy $ UniqueHandle slug
            case muser of
                Nothing -> return slug
                Just _ -> getHandle (cnt + 1)

    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins app =
        authBrowserId def : google
      where
        google =
            case googleAuth $ appExtra $ settings app of
                Nothing -> []
                Just GoogleAuth {..} -> [authGoogleEmail gaClientId gaClientSecret]

    authHttpManager = httpManager
instance YesodAuthPersist App

getToken :: YesodDB App Slug
getToken =
    go (0 :: Int)
  where
    go cnt | cnt > 50 = error "Could not get a unique token"
    go cnt = do
        slug <- lift $ randomSlug 25
        muser <- getBy $ UniqueToken slug
        case muser of
            Nothing -> return slug
            Just _ -> go (cnt + 1)

-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage

-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = fmap (appExtra . settings) getYesod

-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email

@@ -1,82 +0,0 @@
module Handler.Alias
    ( handleAliasR
    , getLtsR
    , getNightlyR
    ) where

import Import
import Data.Slug (Slug)
import Handler.StackageHome (getStackageHomeR, getStackageMetadataR, getStackageCabalConfigR, getSnapshotPackagesR, getDocsR)
import Handler.StackageIndex (getStackageIndexR, getStackageBundleR)
import Handler.StackageSdist (getStackageSdistR)
import Handler.Hoogle (getHoogleR)

handleAliasR :: Slug -> Slug -> [Text] -> Handler ()
handleAliasR user name pieces = do
    $logDebug $ tshow (user, name, pieces)
    Entity _ (Alias _ _ setid) <- runDB $ do
        Entity uid _ <- getBy404 $ UniqueHandle user
        getBy404 $ UniqueAlias uid name
    $logDebug $ "setid: " ++ tshow (setid, pieces)
    case parseRoute ("stackage" : toPathPiece setid : pieces, []) of
        Nothing -> notFound
        Just route -> redirect (route :: Route App)

getLtsR :: [Text] -> Handler ()
getLtsR pieces0 =
    case pieces0 of
        [] -> go []
        piece:pieces'
            | Just (x, y) <- parseLtsPair piece -> goXY x y pieces'
            | Just x <- fromPathPiece piece -> goX x pieces'
            | otherwise -> go pieces0
  where
    go pieces = do
        mlts <- runDB $ selectFirst [] [Desc LtsMajor, Desc LtsMinor]
        case mlts of
            Nothing -> notFound
            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces

    goX x pieces = do
        mlts <- runDB $ selectFirst [LtsMajor ==. x] [Desc LtsMinor]
        case mlts of
            Nothing -> notFound
            Just (Entity _ (Lts _ _ sid)) -> goSid sid pieces

    goXY x y pieces = do
        Entity _ (Lts _ _ sid) <- runDB $ getBy404 $ UniqueLts x y
        goSid sid pieces

getNightlyR :: [Text] -> Handler ()
getNightlyR pieces0 =
    case pieces0 of
        [] -> go []
        piece:pieces'
            | Just day <- fromPathPiece piece -> goDay day pieces'
            | otherwise -> go pieces0
  where
    go pieces = do
        mn <- runDB $ selectFirst [] [Desc NightlyDay]
        case mn of
            Nothing -> notFound
            Just (Entity _ (Nightly _ _ sid)) -> goSid sid pieces
    goDay day pieces = do
        Entity _ (Nightly _ _ sid) <- runDB $ getBy404 $ UniqueNightly day
        goSid sid pieces

goSid :: StackageId -> [Text] -> Handler ()
goSid sid pieces = do
    s <- runDB $ get404 sid
    case parseRoute ("snapshot" : toPathPiece (stackageSlug s) : pieces, []) of
        Just (SnapshotR slug sr) ->
            case sr of
                StackageHomeR -> getStackageHomeR slug >>= sendResponse
                StackageMetadataR -> getStackageMetadataR slug >>= sendResponse
                StackageCabalConfigR -> getStackageCabalConfigR slug >>= sendResponse
                StackageIndexR -> getStackageIndexR slug >>= sendResponse
                StackageBundleR -> getStackageBundleR slug >>= sendResponse
                StackageSdistR pnv -> getStackageSdistR slug pnv >>= sendResponse
                SnapshotPackagesR -> getSnapshotPackagesR slug >>= sendResponse
                DocsR -> getDocsR slug >>= sendResponse
                HoogleR -> getHoogleR slug >>= sendResponse
        _ -> notFound

@@ -1,23 +0,0 @@
module Handler.Aliases where

import Import
import Data.Text (strip)

putAliasesR :: Handler ()
putAliasesR = do
    uid <- requireAuthId
    aliasesText <- runInputPost $ ireq textField "aliases"
    aliases <- mapM (parseAlias uid) $ lines aliasesText
    runDB $ do
        deleteWhere [AliasUser ==. uid]
        mapM_ insert_ aliases
    setMessage "Aliases updated"
    redirect ProfileR

parseAlias :: UserId -> Text -> Handler Alias
parseAlias uid t = maybe (invalidArgs ["Invalid alias: " ++ t]) return $ do
    name <- fromPathPiece x
    setid <- fromPathPiece y
    return $ Alias uid name setid
  where
    (strip -> x, (strip . drop 1) -> y) = break (== ':') t
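
-- A usage sketch (not part of the original module): each submitted line has
-- the form "<alias>:<snapshot id>". For a hypothetical input
--
-- > parseAlias uid "my-lts: abcdef123456"
--
-- the alias name parses as "my-lts" and the package set identifier as
-- "abcdef123456"; both sides are stripped of surrounding whitespace before
-- 'fromPathPiece' validates them.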

@@ -1,39 +0,0 @@
module Handler.BannedTags where

import Data.Slug (unSlug, Slug)
import Data.Tag
import Import

checkSlugs :: Monad m => Textarea -> m (Either Text [Slug])
checkSlugs (Textarea t) =
    return $ first tshow $ mapM mkTag $ filter (not . null) $ lines $ filter (/= '\r') t

fromSlugs :: [Slug] -> Textarea
fromSlugs = Textarea . unlines . map unSlug

getBannedTagsR :: Handler Html
getBannedTagsR = do
    Entity _ user <- requireAuth
    extra <- getExtra
    when (unSlug (userHandle user) `notMember` adminUsers extra)
        $ permissionDenied "You are not an administrator"
    curr <- fmap (map (bannedTagTag . entityVal))
          $ runDB $ selectList [] [Asc BannedTagTag]
    ((res, widget), enctype) <- runFormPost $ renderDivs
        $ fmap (fromMaybe [])
        $ aopt
            (checkMMap checkSlugs fromSlugs textareaField)
            "Banned tags (one per line)" $ Just (Just curr)
    case res of
        FormSuccess tags -> do
            runDB $ do
                deleteWhere ([] :: [Filter BannedTag])
                insertMany_ $ map BannedTag tags
            setMessage "Tags updated"
            redirect BannedTagsR
        _ -> defaultLayout $ do
            setTitle "Banned Tags"
            $(widgetFile "banned-tags")

putBannedTagsR :: Handler Html
putBannedTagsR = getBannedTagsR

@@ -1,29 +0,0 @@
module Handler.BuildVersion where

import Import hiding (lift)
import Language.Haskell.TH.Syntax
import System.Process (rawSystem)
import System.Exit

getBuildVersionR :: Handler Text
getBuildVersionR = return $ pack $(do
    let headFile = ".git/HEAD"
    qAddDependentFile headFile
    ehead <- qRunIO $ tryIO $ readFile $ fpFromString headFile
    case decodeUtf8 <$> ehead of
        Left e -> lift $ ".git/HEAD not read: " ++ show e
        Right raw ->
            case takeWhile (/= '\n') <$> stripPrefix "ref: " raw of
                Nothing -> lift $ ".git/HEAD not in expected format: " ++ show raw
                Just fp' -> do
                    let fp = ".git" </> fpFromText fp'
                    qAddDependentFile $ fpToString fp
                    bs <- qRunIO $ readFile fp
                    isDirty <- qRunIO
                        $ (/= ExitSuccess)
                        <$> rawSystem "git" ["diff-files", "--quiet"]
                    lift $ unpack $ unlines
                        [ "Most recent commit: " ++ asText (decodeUtf8 bs)
                        , "Working tree is " ++ (if isDirty then "dirty" else "clean")
                        ]
    )

@@ -1,14 +0,0 @@
module Handler.CompressorStatus where

import Import

getCompressorStatusR :: Handler Html
getCompressorStatusR = do
    status <- getYesod >>= liftIO . duGetStatus . appDocUnpacker
    defaultLayout $ do
        setTitle "Compressor thread status"
        [whamlet|
            <div .container>
                <h1>Compressor thread status
                <p>#{status}
        |]

@@ -1,14 +0,0 @@
module Handler.Email where

import Import
import Database.Persist.Sql (deleteWhereCount)

deleteEmailR :: EmailId -> Handler ()
deleteEmailR eid = do
    Entity uid _ <- requireAuth
    cnt <- runDB $ deleteWhereCount [EmailUser ==. uid, EmailId ==. eid]
    setMessage $
        if cnt > 0
            then "Email address deleted"
            else "No matching email address found"
    redirect ProfileR

@@ -1,294 +0,0 @@
module Handler.Haddock
    ( getUploadHaddockR
    , putUploadHaddockR
    , getHaddockR
    , getUploadDocMapR
    , putUploadDocMapR
      -- Exported for use in Handler.Hoogle
    , Dirs (..), getDirs, dirHoogleFp, mkDirs
    , dirRawIdent
    , dirGzIdent
    , dirHoogleIdent
    , createCompressor
    ) where

import Control.Concurrent (forkIO)
import Crypto.Hash (Digest, SHA1)
import Crypto.Hash.Conduit (sinkHash)
import Data.Aeson (withObject)
import Data.BlobStore
import qualified Data.ByteString.Base16 as B16
import Data.Byteable (toBytes)
import Data.Conduit.Zlib (gzip)
import Data.Slug (SnapSlug, unSlug)
import qualified Data.Text as T
import qualified Data.Yaml as Y
import Filesystem (isDirectory, createTree, isFile, rename, removeFile, removeDirectory)
import qualified Filesystem.Path.CurrentOS as F
import Import
import Network.Mime (defaultMimeLookup)
import System.IO (IOMode (ReadMode), withBinaryFile)
import System.IO.Temp (withTempFile)
import System.Posix.Files (createLink)

form :: Form FileInfo
form = renderDivs $ areq fileField "tarball containing docs"
    { fsName = Just "tarball"
    } Nothing

getUploadHaddockR, putUploadHaddockR :: Text -> Handler Html
getUploadHaddockR slug0 = do
    uid <- requireAuthIdOrToken
    stackageEnt@(Entity sid Stackage {..}) <- runDB $ do
        -- Provide fallback for old URLs
        ment <- getBy $ UniqueStackage $ PackageSetIdent slug0
        case ment of
            Just ent -> return ent
            Nothing -> do
                slug <- maybe notFound return $ fromPathPiece slug0
                getBy404 $ UniqueSnapshot slug
    let ident = stackageIdent
        slug = stackageSlug
    unless (uid == stackageUser) $ permissionDenied "You do not control this snapshot"
    ((res, widget), enctype) <- runFormPostNoToken form
    case res of
        FormSuccess fileInfo -> do
            fileSource fileInfo $$ storeWrite (HaddockBundle ident)
            runDB $ update sid [StackageHasHaddocks =. True]
            master <- getYesod
            liftIO $ duForceReload (appDocUnpacker master) stackageEnt
            setMessage "Haddocks uploaded"
            redirect $ SnapshotR slug StackageHomeR
        _ -> defaultLayout $ do
            setTitle "Upload Haddocks"
            $(widgetFile "upload-haddock")

putUploadHaddockR = getUploadHaddockR

getHaddockR :: SnapSlug -> [Text] -> Handler ()
getHaddockR slug rest = do
    stackageEnt <- runDB $ do
        ment <- getBy $ UniqueSnapshot slug
        case ment of
            Just ent -> do
                case rest of
                    [pkgver] -> tryContentsRedirect ent pkgver
                    [pkgver, "index.html"] -> tryContentsRedirect ent pkgver
                    _ -> return ()
                return ent
            Nothing -> do
                Entity _ stackage <- getBy404
                    $ UniqueStackage
                    $ PackageSetIdent
                    $ toPathPiece slug
                redirectWith status301 $ HaddockR (stackageSlug stackage) rest
    mapM_ sanitize rest
    dirs <- getDirs
    requireDocs stackageEnt

    let ident = stackageIdent (entityVal stackageEnt)
        rawfp = dirRawFp dirs ident rest
        gzfp = dirGzFp dirs ident rest
        mime = defaultMimeLookup $ fpToText $ filename rawfp

    whenM (liftIO $ isDirectory rawfp)
        $ redirect $ HaddockR slug $ rest ++ ["index.html"]
    whenM (liftIO $ isDirectory gzfp)
        $ redirect $ HaddockR slug $ rest ++ ["index.html"]

    whenM (liftIO $ isFile gzfp) $ do
        addHeader "Content-Encoding" "gzip"
        sendFile mime $ fpToString gzfp

    -- Note: There's a small race window here, where the compressor thread
    -- could pull the rug out from under us. We can work around this by
    -- opening the file and, if that fails, trying the compressed version
    -- again.
    whenM (liftIO $ isFile rawfp) $ sendFile mime $ fpToString rawfp

    notFound
  where
    sanitize p
        | ("/" `isInfixOf` p) || p `member` (asHashSet $ setFromList ["", ".", ".."]) =
            permissionDenied "Invalid request"
        | otherwise = return ()

-- | Try to redirect to the snapshot's package page instead of the
-- Haddock-generated HTML.
tryContentsRedirect :: Entity Stackage -> Text -> YesodDB App ()
tryContentsRedirect (Entity sid Stackage {..}) pkgver = do
|
||||
mdocs <- selectFirst
|
||||
[ DocsName ==. name
|
||||
, DocsVersion ==. version
|
||||
, DocsSnapshot ==. Just sid
|
||||
]
|
||||
[]
|
||||
forM_ mdocs $ const
|
||||
$ redirect
|
||||
$ SnapshotR stackageSlug
|
||||
$ StackageSdistR
|
||||
$ PNVNameVersion name version
|
||||
where
|
||||
(PackageName . dropDash -> name, Version -> version) = T.breakOnEnd "-" pkgver
|
||||
|
||||
dropDash :: Text -> Text
|
||||
dropDash t = fromMaybe t $ stripSuffix "-" t
|
||||
|
||||
createCompressor
|
||||
:: Dirs
|
||||
-> IO (IORef Text, IO ()) -- ^ action to kick off compressor again
|
||||
createCompressor dirs = do
|
||||
baton <- newMVar ()
|
||||
status <- newIORef "Compressor is idle"
|
||||
mask_ $ void $ forkIO $ (finallyE $ \e -> writeIORef status $ "Compressor thread exited: " ++ tshow e) $ forever $ do
|
||||
writeIORef status "Waiting for signal to start compressing"
|
||||
takeMVar baton
|
||||
writeIORef status "Received signal, traversing directories"
|
||||
let rawRoot = dirRawRoot dirs
|
||||
whenM (isDirectory rawRoot) $ runResourceT $ goDir status rawRoot
|
||||
return (status, void $ tryPutMVar baton ())
|
||||
where
|
||||
finallyE f g = mask $ \restore -> do
|
||||
restore g `catch` \e -> do
|
||||
() <- f $ Just (e :: SomeException)
|
||||
() <- throwIO e
|
||||
return ()
|
||||
f Nothing
|
||||
goDir status dir = do
|
||||
writeIORef status $ "Compressing directory: " ++ fpToText dir
|
||||
sourceDirectory dir $$ mapM_C (goFP status)
|
||||
liftIO $ void $ tryIO $ removeDirectory dir
|
||||
|
||||
goFP status fp = do
|
||||
e <- liftIO $ isFile fp
|
||||
if e
|
||||
then liftIO $ do
|
||||
writeIORef status $ "Compressing file: " ++ fpToText fp
|
||||
handle (print . asSomeException)
|
||||
$ gzipHash dirs suffix
|
||||
else goDir status fp
|
||||
where
|
||||
Just suffix = F.stripPrefix (dirRawRoot dirs </> "") fp
|
||||
|
||||
-- Procedure is to:
|
||||
--
|
||||
-- * Gzip the src file to a temp file, and get a hash of the gzipped contents
|
||||
-- * If that hash doesn't exist in the cache, move the new file to the cache
|
||||
-- * Create a hard link from dst to the file in the cache
|
||||
-- * Delete src
|
||||
gzipHash :: Dirs
|
||||
-> FilePath -- ^ suffix
|
||||
-> IO ()
|
||||
gzipHash dirs suffix = do
|
||||
withTempFile (fpToString $ dirCacheRoot dirs) "haddock-file.gz" $ \tempfp temph -> do
|
||||
digest <- withBinaryFile (fpToString src) ReadMode $ \inh ->
|
||||
sourceHandle inh
|
||||
$= gzip
|
||||
$$ (getZipSink $
|
||||
ZipSink (sinkHandle temph) *>
|
||||
ZipSink sinkHash)
|
||||
hClose temph
|
||||
let fpcache = dirCacheFp dirs digest
|
||||
unlessM (isFile fpcache) $ do
|
||||
createTree $ F.parent fpcache
|
||||
rename (fpFromString tempfp) fpcache
|
||||
createTree $ F.parent dst
|
||||
createLink (fpToString fpcache) (fpToString dst)
|
||||
removeFile src
|
||||
where
|
||||
src = dirRawRoot dirs </> suffix
|
||||
dst = dirGzRoot dirs </> suffix
|
||||
|
||||
data Dirs = Dirs
|
||||
{ dirRawRoot :: !FilePath
|
||||
, dirGzRoot :: !FilePath
|
||||
, dirCacheRoot :: !FilePath
|
||||
, dirHoogleRoot :: !FilePath
|
||||
}
|
||||
|
||||
getDirs :: Handler Dirs
|
||||
getDirs = mkDirs . haddockRootDir <$> getYesod
|
||||
|
||||
mkDirs :: FilePath -> Dirs
|
||||
mkDirs dir = Dirs
|
||||
{ dirRawRoot = dir </> "idents-raw"
|
||||
, dirGzRoot = dir </> "idents-gz"
|
||||
, dirCacheRoot = dir </> "cachedir"
|
||||
, dirHoogleRoot = dir </> "hoogle"
|
||||
}
|
||||
|
||||
dirGzIdent, dirRawIdent, dirHoogleIdent :: Dirs -> PackageSetIdent -> FilePath
|
||||
dirGzIdent dirs ident = dirGzRoot dirs </> fpFromText (toPathPiece ident)
|
||||
dirRawIdent dirs ident = dirRawRoot dirs </> fpFromText (toPathPiece ident)
|
||||
dirHoogleIdent dirs ident = dirHoogleRoot dirs </> fpFromText (toPathPiece ident)
|
||||
|
||||
dirGzFp, dirRawFp, dirHoogleFp :: Dirs -> PackageSetIdent -> [Text] -> FilePath
|
||||
dirGzFp dirs ident rest = dirGzIdent dirs ident </> mconcat (map fpFromText rest)
|
||||
dirRawFp dirs ident rest = dirRawIdent dirs ident </> mconcat (map fpFromText rest)
|
||||
dirHoogleFp dirs ident rest = dirHoogleIdent dirs ident </> mconcat (map fpFromText rest)
|
||||
|
||||
dirCacheFp :: Dirs -> Digest SHA1 -> FilePath
|
||||
dirCacheFp dirs digest =
|
||||
dirCacheRoot dirs </> fpFromText x </> fpFromText y <.> "gz"
|
||||
where
|
||||
name = decodeUtf8 $ B16.encode $ toBytes digest
|
||||
(x, y) = splitAt 2 name
|
||||
|
||||
data DocInfo = DocInfo Version (Map Text [Text])
|
||||
instance FromJSON DocInfo where
|
||||
parseJSON = withObject "DocInfo" $ \o -> DocInfo
|
||||
<$> (Version <$> o .: "version")
|
||||
<*> o .: "modules"
|
||||
|
||||
getUploadDocMapR :: Handler Html
|
||||
getUploadDocMapR = do
|
||||
uid <- requireAuthIdOrToken
|
||||
user <- runDB $ get404 uid
|
||||
extra <- getExtra
|
||||
when (unSlug (userHandle user) `notMember` adminUsers extra)
|
||||
$ permissionDenied "Must be an administrator"
|
||||
|
||||
((res, widget), enctype) <- runFormPostNoToken $ renderDivs $ (,)
|
||||
<$> areq
|
||||
fileField
|
||||
"YAML file with map" { fsName = Just "docmap" }
|
||||
Nothing
|
||||
<*> areq textField "Stackage ID" { fsName = Just "snapshot" } Nothing
|
||||
case res of
|
||||
FormSuccess (fi, snapshot) -> do
|
||||
Entity sid stackage <- runDB $ do
|
||||
ment <- getBy $ UniqueStackage $ PackageSetIdent snapshot
|
||||
case ment of
|
||||
Just ent -> return ent
|
||||
Nothing -> do
|
||||
slug <- maybe notFound return $ fromPathPiece snapshot
|
||||
getBy404 $ UniqueSnapshot slug
|
||||
unless (stackageHasHaddocks stackage) $ invalidArgs $ return
|
||||
"Cannot use a snapshot without docs for a docmap"
|
||||
bs <- fileSource fi $$ foldC
|
||||
case Y.decodeEither bs of
|
||||
Left e -> invalidArgs [pack e]
|
||||
Right m0 -> do
|
||||
now <- liftIO getCurrentTime
|
||||
render <- getUrlRender
|
||||
runDB $ forM_ (mapToList $ asMap m0) $ \(package, DocInfo version ms) -> do
|
||||
did <- insert Docs
|
||||
{ docsName = PackageName package
|
||||
, docsVersion = version
|
||||
, docsUploaded = now
|
||||
, docsSnapshot = Just sid
|
||||
}
|
||||
forM_ (mapToList ms) $ \(name, pieces) -> do
|
||||
let url = render $ HaddockR (stackageSlug stackage) pieces
|
||||
insert_ $ Module did name url
|
||||
setMessage "Doc map complete"
|
||||
redirect UploadDocMapR
|
||||
_ -> defaultLayout $ do
|
||||
setTitle "Upload doc map"
|
||||
[whamlet|
|
||||
<form method=post action=?_method=PUT enctype=#{enctype}>
|
||||
^{widget}
|
||||
<input type=submit .btn value="Set document map">
|
||||
|]
|
||||
|
||||
putUploadDocMapR :: Handler Html
|
||||
putUploadDocMapR = getUploadDocMapR
|
||||
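dirCacheFp above shards the compressed-file cache by the first two hex characters of the SHA1 digest, so any one cache directory stays small and identical gzipped files are stored once and hard-linked from every snapshot. A minimal standalone sketch of that layout (cachePath is a hypothetical name, not part of the handler):

    import Data.ByteString (ByteString)
    import qualified Data.ByteString.Base16 as B16
    import qualified Data.ByteString.Char8 as S8

    -- Map raw digest bytes to a sharded cache path, e.g. "de/adbeef....gz".
    cachePath :: ByteString -> FilePath
    cachePath digest =
        let (x, y) = splitAt 2 (S8.unpack (B16.encode digest))
        in x ++ "/" ++ y ++ ".gz"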
@@ -1,81 +0,0 @@
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Home where

import Data.Slug
import Database.Esqueleto as E hiding (isNothing)
import Import hiding ((=.),on,(||.),(==.))
import Yesod.GitRepo (grContent)

-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getHomeR :: Handler Html
getHomeR = contentHelper "Stackage Server" wcHomepage

getAuthorsR :: Handler Html
getAuthorsR = contentHelper "Library Authors" wcAuthors

getInstallR :: Handler Html
getInstallR = contentHelper "Haskell Installation Instructions" wcInstall

getOlderReleasesR :: Handler Html
getOlderReleasesR = contentHelper "Older Releases" wcOlderReleases

contentHelper :: Html -> (WebsiteContent -> Html) -> Handler Html
contentHelper title accessor = do
    homepage <- getYesod >>= fmap accessor . liftIO . grContent . websiteContent
    defaultLayout $ do
        setTitle title
        toWidget homepage

-- FIXME remove this and switch to the above getHomeR when the new homepage is ready
getHomeR' :: Handler Html
getHomeR' = do
    windowsLatest <- linkFor "unstable-ghc78hp-inclusive"
    restLatest <- linkFor "unstable-ghc78-inclusive"
    defaultLayout $ do
        setTitle "Stackage Server"
        $(combineStylesheets 'StaticR
            [ css_bootstrap_modified_css
            , css_bootstrap_responsive_modified_css
            ])
        $(widgetFile "homepage")
  where
    linkFor name =
        do slug <- mkSlug name
           fpcomplete <- mkSlug "fpcomplete"
           selecting (\(alias, user, stackage) ->
                        do where_ $
                               alias ^. AliasName ==. val slug &&.
                               alias ^. AliasUser ==. user ^. UserId &&.
                               user ^. UserHandle ==. val fpcomplete &&.
                               alias ^. AliasTarget ==. stackage ^. StackageIdent
                           return (stackage ^. StackageSlug))
      where selecting =
                fmap (fmap unValue . listToMaybe) .
                runDB .
                select .
                from

    addSnapshot title short = do
        mex <- handlerToWidget $ linkFor $ name "exclusive"
        min' <- handlerToWidget $ linkFor $ name "inclusive"
        when (isJust mex || isJust min')
            [whamlet|
                <tr>
                    <td>
                        #{asHtml title}
                    <td>
                        $maybe ex <- mex
                            <a href=@{SnapshotR ex StackageHomeR}>exclusive
                        $if isJust mex && isJust min'
                            \ |
                    <td>
                        $maybe in <- min'
                            <a href=@{SnapshotR in StackageHomeR}>inclusive
            |]
      where
        name suffix = concat ["unstable-", short, "-", suffix]
@@ -1,157 +0,0 @@
module Handler.Hoogle where

import Control.DeepSeq (NFData(..))
import Control.DeepSeq.Generics (genericRnf)
import Control.Spoon (spoon)
import Data.Data (Data (..))
import Data.Slug (SnapSlug)
import Data.Text.Read (decimal)
import Data.Unpacking (getHoogleDB)
import Handler.Haddock (getDirs)
import qualified Hoogle
import Import
import Text.Blaze.Html (preEscapedToHtml)

getHoogleR :: SnapSlug -> Handler Html
getHoogleR slug = do
    dirs <- getDirs
    mquery <- lookupGetParam "q"
    mpage <- lookupGetParam "page"
    exact <- maybe False (const True) <$> lookupGetParam "exact"
    mresults' <- lookupGetParam "results"
    let count' =
            case decimal <$> mresults' of
                Just (Right (i, "")) -> min perPage i
                _ -> perPage
        page =
            case decimal <$> mpage of
                Just (Right (i, "")) -> i
                _ -> 1
        offset = (page - 1) * perPage
    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    mdatabasePath <- getHoogleDB dirs stackage
    heDatabase <- case mdatabasePath of
        Just x -> return $ liftIO $ Hoogle.loadDatabase $ fpToString x
        Nothing -> (>>= sendResponse) $ defaultLayout $ do
            setTitle "Hoogle database not available"
            [whamlet|
                <div .container>
                    <p>The given Hoogle database is not available.
                    <p>
                        <a href=@{SnapshotR slug StackageHomeR}>Return to snapshot homepage
            |]

    mresults <- case mquery of
        Just query -> runHoogleQuery heDatabase HoogleQueryInput
            { hqiQueryInput = query
            , hqiExactSearch = if exact then Just query else Nothing
            , hqiLimitTo = count'
            , hqiOffsetBy = offset
            }
        Nothing -> return $ HoogleQueryOutput "" [] Nothing
    let queryText = fromMaybe "" mquery
        pageLink p = (SnapshotR slug HoogleR
                     , (if exact then (("exact", "true"):) else id)
                     $ (maybe id (\q' -> (("q", q'):)) mquery)
                       [("page", tshow p)])
        snapshotLink = SnapshotR slug StackageHomeR
        hoogleForm = $(widgetFile "hoogle-form")
    defaultLayout $ do
        setTitle "Hoogle Search"
        $(widgetFile "hoogle")

getPageCount :: Int -> Int
getPageCount totalCount = 1 + div totalCount perPage

perPage :: Int
perPage = 10

data HoogleQueryInput = HoogleQueryInput
    { hqiQueryInput :: Text
    , hqiExactSearch :: Maybe Text
    , hqiLimitTo :: Int
    , hqiOffsetBy :: Int
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data HoogleQueryOutput = HoogleQueryBad Text
                       | HoogleQueryOutput Text [HoogleResult] (Maybe Int) -- ^ Text == HTML version of query, Int == total count
    deriving (Read, Typeable, Data, Show, Eq)

data HoogleResult = HoogleResult
    { hrURL :: String
    , hrSources :: [(PackageLink, [ModuleLink])]
    , hrTitle :: String -- ^ HTML
    , hrBody :: String -- ^ plain text
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data PackageLink = PackageLink
    { plName :: String
    , plURL :: String
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

data ModuleLink = ModuleLink
    { mlName :: String
    , mlURL :: String
    }
    deriving (Eq, Read, Show, Data, Typeable, Ord, Generic)

instance NFData HoogleResult where rnf = genericRnf
instance NFData PackageLink where rnf = genericRnf
instance NFData ModuleLink where rnf = genericRnf

runHoogleQuery :: Monad m
               => m Hoogle.Database
               -> HoogleQueryInput
               -> m HoogleQueryOutput
runHoogleQuery heDatabase HoogleQueryInput {..} =
    runQuery $ Hoogle.parseQuery Hoogle.Haskell query
  where
    query = unpack hqiQueryInput

    runQuery (Left err) = return $ HoogleQueryBad (tshow err)
    runQuery (Right query') = do
        hoogledb <- heDatabase
        let query'' = Hoogle.queryExact classifier query'
            rawRes = concatMap fixResult
                   $ Hoogle.search hoogledb query''
            mres = spoon
                 $ take (min 100 hqiLimitTo)
                 $ drop hqiOffsetBy rawRes
            mcount = spoon $ limitedLength 0 rawRes
            limitedLength x [] = Just x
            limitedLength x (_:rest)
                | x >= 100 = Nothing
                | otherwise = limitedLength (x + 1) rest
            rendered = pack $ Hoogle.showTagHTML $ Hoogle.renderQuery query''
        return $ case (,) <$> mres <*> mcount of
            Nothing ->
                HoogleQueryOutput rendered [] (Just 0)
            Just (results, mcount') ->
                HoogleQueryOutput rendered (take hqiLimitTo results) mcount'

    classifier = maybe Nothing
        (const (Just Hoogle.UnclassifiedItem))
        hqiExactSearch

    fixResult (_, Hoogle.Result locs self docs) = do
        (loc, _) <- take 1 locs
        let sources' = unionsWith (++) $
                mapMaybe (getPkgModPair . snd) locs
        return HoogleResult
            { hrURL = loc
            , hrSources = mapToList sources'
            , hrTitle = Hoogle.showTagHTML self
            , hrBody = fromMaybe "Problem loading documentation" $
                spoon $ Hoogle.showTagText docs
            }

    getPkgModPair :: [(String, String)]
                  -> Maybe (Map PackageLink [ModuleLink])
    getPkgModPair [(pkg, pkgname), (modu, moduname)] = do
        let pkg' = PackageLink pkgname pkg
            modu' = ModuleLink moduname modu
        return $ asMap $ singletonMap pkg' [modu']
    getPkgModPair _ = Nothing
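runHoogleQuery above wraps Hoogle's lazily produced results in spoon (from the spoon package), which forces a pure value to normal form and yields Nothing instead of propagating any exception hiding inside it. A minimal illustration, separate from the handler:

    import Control.Spoon (spoon)

    main :: IO ()
    main = do
        print (spoon (1 `div` 0) :: Maybe Int) -- Nothing: forcing the value throws
        print (spoon 42          :: Maybe Int) -- Just 42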
@@ -1,337 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}

-- | The package page, similar to Hackage's.

module Handler.Package where

import Data.Char
import Data.Slug
import Data.Tag
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT

import Database.Esqueleto ((^.), (&&.), Value (Value))
import qualified Database.Esqueleto as E
import qualified Database.Persist as P
import Formatting
import Import
import qualified Text.Blaze.Html.Renderer.Text as LT
import Text.Email.Validate

-- | Page for package metadata.
getPackageR :: PackageName -> Handler Html
getPackageR pn =
    packagePage pn Nothing (selectFirst [DocsName ==. pn] [Desc DocsUploaded])

packagePage :: PackageName
            -> Maybe Version
            -> YesodDB App (Maybe (Entity Docs))
            -> Handler Html
packagePage pn mversion getDocs = do
    let haddocksLink ident version =
            HaddockR ident [concat [toPathPiece pn, "-", toPathPiece version]]
    muid <- maybeAuthId
    (mnightly, mlts, nLikes, liked,
     Entity _ metadata, revdeps', mdocs, deprecated, inFavourOf) <- runDB $ do
        mnightly <- getNightly pn
        mlts <- getLts pn
        nLikes <- count [LikePackage ==. pn]
        let getLiked uid = (>0) <$> count [LikePackage ==. pn, LikeVoter ==. uid]
        liked <- maybe (return False) getLiked muid

        metadata <- getBy404 (UniqueMetadata pn)
        revdeps' <- reverseDeps pn
        mdocsent <- getDocs
        mdocs <- forM mdocsent $ \(Entity docsid (Docs _ version _ _)) -> (,)
            <$> pure version
            <*> (map entityVal <$>
                 selectList [ModuleDocs ==. docsid] [Asc ModuleName])
        deprecated <- getDeprecated pn
        inFavourOf <- getInFavourOf pn
        return ( mnightly
               , mlts
               , nLikes
               , liked
               , metadata
               , revdeps'
               , mdocs
               , deprecated
               , inFavourOf
               )

    let ixInFavourOf = zip [0::Int ..] inFavourOf
        displayedVersion = fromMaybe (metadataVersion metadata) mversion

    myTags <- maybe (return []) (runDB . user'sTagsOf pn) muid
    tags <- fmap (map (\(v,count') -> (v,count',any (==v) myTags)))
                 (runDB (packageTags pn))

    let likeTitle = if liked
                        then "You liked this!"
                        else "I like this!" :: Text

    let homepage = case T.strip (metadataHomepage metadata) of
                       x | null x -> Nothing
                         | otherwise -> Just x
        synopsis = metadataSynopsis metadata
        deps = enumerate (metadataDeps metadata)
        revdeps = enumerate revdeps'
        authors = enumerate (parseIdentitiesLiberally (metadataAuthor metadata))
        maintainers = let ms = enumerate (parseIdentitiesLiberally (metadataMaintainer metadata))
                      in if ms == authors
                            then []
                            else ms
    defaultLayout $ do
        setTitle $ toHtml pn
        $(combineStylesheets 'StaticR
            [ css_font_awesome_min_css
            ])
        $(widgetFile "package")
  where enumerate = zip [0::Int ..]

-- | Get tags of the given package.
packageTags :: PackageName -> YesodDB App [(Slug,Int)]
packageTags pn =
    fmap (map boilerplate)
         (E.select
             (E.from (\(t `E.LeftOuterJoin` bt) -> do
                 E.on $ t E.^. TagTag E.==. bt E.^. BannedTagTag
                 E.where_
                     $ (t ^. TagPackage E.==. E.val pn) E.&&.
                       (E.isNothing $ E.just $ bt E.^. BannedTagTag)
                 E.groupBy (t ^. TagTag)
                 E.orderBy [E.asc (t ^. TagTag)]
                 return (t ^. TagTag,E.count (t ^. TagTag)))))
  where boilerplate (E.Value a,E.Value b) = (a,b)

-- | Get the tags applied to the package by the given user.
user'sTagsOf :: PackageName -> UserId -> YesodDB App [Slug]
user'sTagsOf pn uid =
    fmap (map (\(E.Value v) -> v))
         (E.select
             (E.from (\t ->
                 do E.where_ (t ^. TagPackage E.==. E.val pn E.&&.
                              t ^. TagVoter E.==. E.val uid)
                    E.orderBy [E.asc (t ^. TagTag)]
                    return (t ^. TagTag))))

-- | Get reverse dependencies of a package.
reverseDeps :: PackageName -> YesodDB App [PackageName]
reverseDeps pn = fmap (map boilerplate) $ E.select $ E.from $ \dep -> do
    E.where_ $ dep ^. DependencyDep E.==. E.val pn
    E.orderBy [E.asc $ dep ^. DependencyUser]
    return $ dep ^. DependencyUser
  where boilerplate (E.Value e) = e

-- | Get the latest nightly snapshot for the given package.
getNightly :: PackageName -> YesodDB App (Maybe (Day, Text, Version, SnapSlug))
getNightly pn =
    fmap (fmap boilerplate . listToMaybe)
         (E.select (E.from query))
  where boilerplate (E.Value a,E.Value b,E.Value c,E.Value d) =
            (a,b,c,d)
        query (p,n,s) =
            do E.where_ ((p ^. PackageName' E.==. E.val pn) E.&&.
                         (p ^. PackageStackage E.==. n ^. NightlyStackage) E.&&.
                         (s ^. StackageId E.==. n ^. NightlyStackage))
               E.orderBy [E.desc (n ^. NightlyDay)]
               return (n ^. NightlyDay
                      ,n ^. NightlyGhcVersion
                      ,p ^. PackageVersion
                      ,s ^. StackageSlug)

-- | Get the latest LTS snapshot for the given package.
getLts :: PackageName -> YesodDB App (Maybe (Int,Int,Version,SnapSlug))
getLts pn =
    fmap (fmap boilerplate . listToMaybe)
         (E.select (E.from query))
  where boilerplate (E.Value a,Value b,Value c,Value d) =
            (a,b,c,d)
        query (p,n,s) =
            do E.where_ ((p ^. PackageName' E.==. E.val pn) E.&&.
                         (p ^. PackageStackage E.==. n ^. LtsStackage) E.&&.
                         (s ^. StackageId E.==. n ^. LtsStackage))
               E.orderBy [E.desc (n ^. LtsMajor),E.desc (n ^. LtsMinor)]
               return (n ^. LtsMajor
                      ,n ^. LtsMinor
                      ,p ^. PackageVersion
                      ,s ^. StackageSlug)

getDeprecated :: PackageName -> YesodDB App Bool
getDeprecated pn = fmap ((>0) . length) $ E.select $ E.from $ \d -> do
    E.where_ $ d ^. DeprecatedPackage E.==. E.val pn
    return ()

getInFavourOf :: PackageName -> YesodDB App [PackageName]
getInFavourOf pn = fmap unBoilerplate $ E.select $ E.from $ \s -> do
    E.where_ $ s ^. SuggestedInsteadOf E.==. E.val pn
    return (s ^. SuggestedPackage)
  where
    unBoilerplate = map (\(E.Value p) -> p)

-- | An identifier specified in a package. Because this field has
-- quite liberal requirements, we often encounter various forms. A
-- name, a name and email, just an email, or maybe nothing at all.
data Identifier
    = EmailOnly !EmailAddress -- ^ An email only e.g. jones@example.com
    | Contact !Text
              !EmailAddress -- ^ A contact syntax, e.g. Dave Jones <jones@example.com>
    | PlainText !Text -- ^ Couldn't parse anything sensible, leaving as-is.
    deriving (Show,Eq)

-- | An author/maintainer field may contain a comma-separated list of
-- identifiers. It may be the case that a person's name is written as
-- "Einstein, Albert", but we only parse commas when there's an
-- accompanying email, so that would be:
--
-- Einstein, Albert <emc2@gmail.com>, Isaac Newton <falling@apple.com>
--
-- Whereas
--
-- Einstein, Albert, Isaac Newton
--
-- will just be left alone. It's an imprecise parsing because the
-- input is wide open, but it's better than nothing:
--
-- λ> parseIdentitiesLiberally "Chris Done, Dave Jones <chrisdone@gmail.com>, Einstein, Albert, Isaac Newton, Michael Snoyman <michael@snoyman.com>"
-- [PlainText "Chris Done"
-- ,Contact "Dave Jones" "chrisdone@gmail.com"
-- ,PlainText "Einstein, Albert, Isaac Newton"
-- ,Contact "Michael Snoyman" "michael@snoyman.com"]
--
-- I think that is quite a predictable and reasonable result.
--
parseIdentitiesLiberally :: Text -> [Identifier]
parseIdentitiesLiberally =
    filter (not . empty) .
    map strip .
    concatPlains .
    map parseChunk .
    T.split (== ',')
  where empty (PlainText e) = T.null e
        empty _ = False
        strip (PlainText t) = PlainText (T.strip t)
        strip x = x
        concatPlains = go
          where go (PlainText x:PlainText y:xs) =
                    go (PlainText (x <> "," <> y) :
                        xs)
                go (x:xs) = x : go xs
                go [] = []

-- | Try to parse a chunk into an identifier.
--
-- 1. First tries to parse an \"email@domain.com\".
-- 2. Then tries to parse a \"Foo <email@domain.com>\".
-- 3. Finally gives up and returns a plain text.
--
-- λ> parseChunk "foo@example.com"
-- EmailOnly "foo@example.com"
-- λ> parseChunk "Dave Jones <dave@jones.com>"
-- Contact "Dave Jones" "dave@jones.com"
-- λ> parseChunk "<x>"
-- PlainText "<x>"
-- λ> parseChunk "Hello!"
-- PlainText "Hello!"
--
parseChunk :: Text -> Identifier
parseChunk chunk =
    case emailAddress (T.encodeUtf8 (T.strip chunk)) of
        Just email -> EmailOnly email
        Nothing ->
            case T.stripPrefix
                     ">"
                     (T.dropWhile isSpace
                                  (T.reverse chunk)) of
                Just rest ->
                    case T.span (/= '<') rest of
                        (T.reverse -> emailStr,this) ->
                            case T.stripPrefix "< " this of
                                Just (T.reverse -> name) ->
                                    case emailAddress (T.encodeUtf8 (T.strip emailStr)) of
                                        Just email ->
                                            Contact (T.strip name) email
                                        _ -> plain
                                _ -> plain
                _ -> plain
  where plain = PlainText chunk

-- | Render email to text.
renderEmail :: EmailAddress -> Text
renderEmail = T.decodeUtf8 . toByteString

-- | Format a number with commas nicely.
formatNum :: Int -> Text
formatNum = sformat commas

postPackageLikeR :: PackageName -> Handler ()
postPackageLikeR packageName = maybeAuthId >>= \muid -> case muid of
    Nothing -> return ()
    Just uid -> runDB $ P.insert_ $ Like packageName uid

postPackageUnlikeR :: PackageName -> Handler ()
postPackageUnlikeR name = maybeAuthId >>= \muid -> case muid of
    Nothing -> return ()
    Just uid -> runDB $ P.deleteWhere [LikePackage ==. name, LikeVoter ==. uid]

postPackageTagR :: PackageName -> Handler ()
postPackageTagR packageName =
    maybeAuthId >>=
    \muid ->
        case muid of
            Nothing -> return ()
            Just uid ->
                do mtag <- lookupPostParam "slug"
                   case mtag of
                       Just tag ->
                           do slug <- mkTag tag
                              void (runDB (P.insert (Tag packageName slug uid)))
                       Nothing -> error "Need a slug"

postPackageUntagR :: PackageName -> Handler ()
postPackageUntagR packageName =
    maybeAuthId >>=
    \muid ->
        case muid of
            Nothing -> return ()
            Just uid ->
                do mtag <- lookupPostParam "slug"
                   case mtag of
                       Just tag ->
                           do slug <- mkTag tag
                              void (runDB (P.deleteWhere
                                               [TagPackage ==. packageName
                                               ,TagTag ==. slug
                                               ,TagVoter ==. uid]))
                       Nothing -> error "Need a slug"

getPackageSnapshotsR :: PackageName -> Handler Html
getPackageSnapshotsR pn =
    do let haddocksLink ident version =
               HaddockR ident [concat [toPathPiece pn, "-", toPathPiece version]]
       snapshots <- (runDB .
                     fmap (map reformat) .
                     E.select . E.from)
                    (\(p,s) ->
                       do E.where_ $
                              (p ^. PackageStackage E.==. s ^. StackageId) &&.
                              (p ^. PackageName' E.==. E.val pn)
                          E.orderBy [E.desc $ s ^. StackageUploaded]
                          return
                              (p ^. PackageVersion
                              ,s ^. StackageTitle
                              ,s ^. StackageSlug
                              ,s ^. StackageHasHaddocks))
       defaultLayout
           (do setTitle ("Packages for " >> toHtml pn)
               $(combineStylesheets 'StaticR
                   [css_font_awesome_min_css])
               $(widgetFile "package-snapshots"))
  where reformat (Value version,Value title,Value ident,Value hasHaddocks) =
            (version
            ,fromMaybe title (stripPrefix "Stackage build for " title)
            ,ident
            ,hasHaddocks)
@@ -1,39 +0,0 @@
module Handler.PackageCounts
    ( getPackageCountsR
    ) where

import Import hiding (Value (..), groupBy, (==.))
import Data.Slug (mkSlug)
import Database.Esqueleto

data Count = Count
    { name :: Text
    , date :: Day
    , packages :: Int
    }

toCount :: (Value Text, Value UTCTime, Value Int) -> Count
toCount (Value x, Value y, Value z) =
    Count x (utctDay y) z

getPackageCountsR :: Handler Html
getPackageCountsR = do
    admins <- adminUsers <$> getExtra
    counts <- runDB $ do
        let slugs = mapMaybe mkSlug $ setToList admins
        adminUids <- selectKeysList [UserHandle <-. slugs] []
        fmap (map toCount) $ select $ from $ \(s, p) -> do
            where_ $
                (not_ $ s ^. StackageTitle `like` val "%inclusive") &&.
                (s ^. StackageId ==. p ^. PackageStackage) &&.
                (s ^. StackageUser `in_` valList adminUids)
            groupBy (s ^. StackageTitle, s ^. StackageUploaded)
            orderBy [desc $ s ^. StackageUploaded]
            return
                ( s ^. StackageTitle
                , s ^. StackageUploaded
                , countRows
                )
    defaultLayout $ do
        setTitle "Package counts"
        $(widgetFile "package-counts")
@@ -1,50 +0,0 @@
module Handler.PackageList where

import qualified Data.HashMap.Strict as M
import Data.Time (NominalDiffTime)
import qualified Database.Esqueleto as E
import Import

-- FIXME maybe just redirect to the LTS or nightly package list
getPackageListR :: Handler Html
getPackageListR = defaultLayout $ do
    setTitle "Package list"
    cachedWidget (20 * 60) "package-list" $ do
        let clean (x, y) =
                ( E.unValue x
                , strip $ E.unValue y
                )
            addDocs (x, y) = (x, Nothing, y, Nothing)
        packages <- fmap (map addDocs . uniqueByKey . map clean) $ handlerToWidget $ runDB $
            E.selectDistinct $ E.from $ \(u,m) -> do
                E.where_ (m E.^. MetadataName E.==. u E.^. UploadedName)
                E.orderBy [E.asc $ u E.^. UploadedName]
                return $ (u E.^. UploadedName
                         ,m E.^. MetadataSynopsis)
        $(widgetFile "package-list")
  where strip x = fromMaybe x (stripSuffix "." x)
        uniqueByKey = sortBy (comparing fst) . M.toList . M.fromList
        mback = Nothing

-- FIXME move somewhere else, maybe even yesod-core
cachedWidget :: NominalDiffTime -> Text -> Widget -> Widget
cachedWidget _diff _key widget = do
    -- Temporarily disabled, seems to be eating up too much memory
    widget
    {-
    ref <- widgetCache <$> getYesod
    now <- liftIO getCurrentTime
    mpair <- lookup key <$> readIORef ref
    case mpair of
        Just (expires, gw) | expires > now -> do
            $logDebug "Using cached widget"
            WidgetT $ \_ -> return ((), gw)
        _ -> do
            $logDebug "Not using cached widget"
            WidgetT $ \hd -> do
                ((), gw) <- unWidgetT widget hd
                -- FIXME render the builders in gw for more efficiency
                atomicModifyIORef' ref $ \m -> (insertMap key (addUTCTime diff now, gw) m, ())
                return ((), gw)
    -}
@@ -1,39 +0,0 @@
module Handler.Profile where

import Import
import Data.Slug (slugField)

userForm :: User -> Form User
userForm user = renderBootstrap2 $ User
    <$> areq slugField "User handle"
            { fsTooltip = Just "Used for URLs"
            } (Just $ userHandle user)
    <*> areq textField "Display name" (Just $ userDisplay user)
    <*> pure (userToken user)

getProfileR :: Handler Html
getProfileR = do
    Entity uid user <- requireAuth
    ((result, userWidget), enctype) <- runFormPost $ userForm user
    case result of
        FormSuccess user' -> do
            runDB $ replace uid user'
            setMessage "Profile updated"
            redirect ProfileR
        _ -> return ()
    (emails, aliases) <- runDB $ (,)
        <$> selectList [EmailUser ==. uid] [Asc EmailEmail]
        <*> selectList [AliasUser ==. uid] [Asc AliasName]
    defaultLayout $ do
        setTitle "Your Profile"
        $(widgetFile "profile")

aliasToText :: Entity Alias -> Text
aliasToText (Entity _ (Alias _ name target)) = concat
    [ toPathPiece name
    , ": "
    , toPathPiece target
    ]

putProfileR :: Handler Html
putProfileR = getProfileR
@@ -1,15 +0,0 @@
module Handler.Progress where

import Import

getProgressR :: UploadProgressId -> Handler Html
getProgressR key = do
    UploadProgress text mdest <- runDB $ get404 key
    case mdest of
        Nothing -> defaultLayout $ do
            addHeader "Refresh" "1"
            setTitle "Working..."
            [whamlet|<p>#{text}|]
        Just url -> do
            setMessage $ toHtml text
            redirect url
@@ -1,20 +0,0 @@
module Handler.RefreshDeprecated where

import Import
import qualified Data.Aeson as Aeson
import Network.HTTP.Conduit (simpleHttp)
import Data.Hackage.DeprecationInfo

getRefreshDeprecatedR :: Handler Html
getRefreshDeprecatedR = do
    bs <- simpleHttp "http://hackage.haskell.org/packages/deprecated.json"
    case Aeson.decode bs of
        Nothing -> return "Failed to parse"
        Just info -> do
            runDB $ do
                deleteWhere ([] :: [Filter Deprecated])
                insertMany_ (deprecations info)
            runDB $ do
                deleteWhere ([] :: [Filter Suggested])
                insertMany_ (suggestions info)
            return "Done"
@@ -1,12 +0,0 @@
module Handler.ResetToken where

import Import

postResetTokenR :: Handler ()
postResetTokenR = do
    Entity uid _ <- requireAuth
    runDB $ do
        token <- getToken
        update uid [UserToken =. token]
    setMessage "Token updated"
    redirect ProfileR
@@ -1,46 +0,0 @@
{-# LANGUAGE TupleSections, OverloadedStrings #-}

module Handler.Snapshots where

import Data.Time.Clock
import qualified Database.Esqueleto as E
import Formatting
import Formatting.Time
import Import

snapshotsPerPage :: Integral a => a
snapshotsPerPage = 50

-- This is a handler function for the GET request method on the AllSnapshotsR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getAllSnapshotsR :: Handler Html
getAllSnapshotsR = do
    now' <- liftIO getCurrentTime
    currentPageMay <- lookupGetParam "page"
    let currentPage :: Int64
        currentPage = fromMaybe 1 (currentPageMay >>= readMay)
    groups <- fmap (groupBy (on (==) (\(_,_,uploaded,_,_) -> uploaded)) . map (uncrapify now')) $
        runDB $ E.select $ E.from $ \(stackage `E.InnerJoin` user) -> do
            E.on (stackage E.^. StackageUser E.==. user E.^. UserId)
            E.orderBy [E.desc $ stackage E.^. StackageUploaded]
            E.limit snapshotsPerPage
            E.offset ((currentPage - 1) * snapshotsPerPage)
            return
                ( stackage E.^. StackageSlug
                , stackage E.^. StackageTitle
                , stackage E.^. StackageUploaded
                , user E.^. UserDisplay
                , user E.^. UserHandle
                )
    defaultLayout $ do
        setTitle "Stackage Server"
        let snapshotsNav = $(widgetFile "snapshots-nav")
        $(widgetFile "all-snapshots")
  where uncrapify now' c =
            let (E.Value ident, E.Value title, E.Value uploaded, E.Value display, E.Value handle') = c
            in (ident,title,format (diff True) (diffUTCTime uploaded now'),display,handle')
@@ -1,253 +0,0 @@
module Handler.StackageHome where

import Data.BlobStore (storeExists)
import Import
import Data.Time (FormatTime)
import Data.Slug (SnapSlug)
import qualified Database.Esqueleto as E
import Handler.PackageList (cachedWidget)

getStackageHomeR :: SnapSlug -> Handler Html
getStackageHomeR slug = do
    stackage <- runDB $ do
        Entity _ stackage <- getBy404 $ UniqueSnapshot slug
        return stackage

    hasBundle <- storeExists $ SnapshotBundle $ stackageIdent stackage
    let minclusive =
            if "inclusive" `isSuffixOf` stackageTitle stackage
                then Just True
                else if "exclusive" `isSuffixOf` stackageTitle stackage
                    then Just False
                    else Nothing
        base = maybe 0 (const 1) minclusive :: Int
        hoogleForm =
            let queryText = "" :: Text
                exact = False
            in $(widgetFile "hoogle-form")
    Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    defaultLayout $ do
        setTitle $ toHtml $ stackageTitle stackage
        cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do
            let maxPackages = 5000
            (packageListClipped, packages') <- handlerToWidget $ runDB $ do
                packages' <- E.select $ E.from $ \(u,m,p) -> do
                    E.where_ $
                        (m E.^. MetadataName E.==. u E.^. UploadedName) E.&&.
                        (m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
                        (p E.^. PackageStackage E.==. E.val sid)
                    E.orderBy [E.asc $ u E.^. UploadedName]
                    E.groupBy ( u E.^. UploadedName
                              , m E.^. MetadataSynopsis
                              )
                    E.limit maxPackages
                    return
                        ( u E.^. UploadedName
                        , m E.^. MetadataSynopsis
                        , E.max_ (p E.^. PackageVersion)
                        , E.max_ $ E.case_
                            [ ( p E.^. PackageHasHaddocks
                              , p E.^. PackageVersion
                              )
                            ]
                            (E.val (Version ""))
                        )
                packageCount <- count [PackageStackage ==. sid]
                let packageListClipped = packageCount > maxPackages
                return (packageListClipped, packages')
            let packages = flip map packages' $ \(name, syn, latestVersion, forceNotNull -> mversion) ->
                    ( E.unValue name
                    , fmap unVersion $ E.unValue latestVersion
                    , strip $ E.unValue syn
                    , (<$> mversion) $ \version -> HaddockR slug $ return $ concat
                        [ toPathPiece $ E.unValue name
                        , "-"
                        , version
                        ]
                    )
                forceNotNull (E.Value Nothing) = Nothing
                forceNotNull (E.Value (Just (Version v)))
                    | null v = Nothing
                    | otherwise = Just v
            $(widgetFile "stackage-home")
  where strip x = fromMaybe x (stripSuffix "." x)

getStackageMetadataR :: SnapSlug -> Handler TypedContent
getStackageMetadataR slug = do
    Entity sid _ <- runDB $ getBy404 $ UniqueSnapshot slug
    respondSourceDB typePlain $ do
        sendChunkBS "Override packages\n"
        sendChunkBS "=================\n"
        stream sid True
        sendChunkBS "\nPackages from Hackage\n"
        sendChunkBS "=====================\n"
        stream sid False
  where
    stream sid isOverwrite =
        selectSource
            [ PackageStackage ==. sid
            , PackageOverwrite ==. isOverwrite
            ]
            [ Asc PackageName'
            , Asc PackageVersion
            ] $= mapC (Chunk . toBuilder . showPackage)

    showPackage (Entity _ p) = concat
        [ toPathPiece $ packageName' p
        , "-"
        , toPathPiece $ packageVersion p
        , "\n"
        ]

getStackageCabalConfigR :: SnapSlug -> Handler TypedContent
getStackageCabalConfigR slug = do
    Entity sid _ <- runDB $ getBy404 $ UniqueSnapshot slug
    render <- getUrlRender

    mdownload <- lookupGetParam "download"
    when (mdownload == Just "true") $
        addHeader "Content-Disposition" "attachment; filename=cabal.config"

    mglobal <- lookupGetParam "global"
    let isGlobal = mglobal == Just "true"

    respondSourceDB typePlain $ stream isGlobal render sid
  where
    stream isGlobal render sid =
        selectSource
            [ PackageStackage ==. sid
            ]
            [ Asc PackageName'
            , Asc PackageVersion
            ] $= (if isGlobal then conduitGlobal else conduitLocal) render

    conduitGlobal render = do
        headerGlobal render
        mapC (Chunk . showPackageGlobal)

    conduitLocal render = do
        headerLocal render
        goFirst
        mapC (Chunk . showPackageLocal)
        yield $ Chunk $ toBuilder '\n'

    headerGlobal render = yield $ Chunk $
        toBuilder (asText "-- Stackage snapshot from: ") ++
        toBuilder (render $ SnapshotR slug StackageHomeR) ++
        toBuilder (asText "\n-- Please place these contents in your global cabal config file.\n-- To only use tested packages, uncomment the following line\n-- and comment out other remote-repo lines:\n-- remote-repo: stackage-") ++
        toBuilder (toPathPiece slug) ++
        toBuilder ':' ++
        toBuilder (render $ SnapshotR slug StackageHomeR) ++
        toBuilder '\n'

    headerLocal render = yield $ Chunk $
        toBuilder (asText "-- Stackage snapshot from: ") ++
        toBuilder (render $ SnapshotR slug StackageHomeR) ++
        toBuilder (asText "\n-- Please place this file next to your .cabal file as cabal.config\n-- To only use tested packages, uncomment the following line:\n-- remote-repo: stackage-") ++
        toBuilder (toPathPiece slug) ++
        toBuilder ':' ++
        toBuilder (render $ SnapshotR slug StackageHomeR) ++
        toBuilder '\n'

    constraint p
        | Just True <- packageCore p = toBuilder $ asText " installed"
        | otherwise = toBuilder (asText " ==") ++
                      toBuilder (toPathPiece $ packageVersion p)

    showPackageGlobal (Entity _ p) =
        toBuilder (asText "constraint: ") ++
        toBuilder (toPathPiece $ packageName' p) ++
        constraint p ++
        toBuilder '\n'

    goFirst = do
        mx <- await
        forM_ mx $ \(Entity _ p) -> yield $ Chunk $
            toBuilder (asText "constraints: ") ++
            toBuilder (toPathPiece $ packageName' p) ++
            constraint p

    showPackageLocal (Entity _ p) =
        toBuilder (asText ",\n ") ++
        toBuilder (toPathPiece $ packageName' p) ++
        constraint p

yearMonthDay :: FormatTime t => t -> String
yearMonthDay = formatTime defaultTimeLocale "%Y-%m-%d"

getOldStackageR :: PackageSetIdent -> [Text] -> Handler ()
getOldStackageR ident pieces = do
    Entity _ stackage <- runDB $ getBy404 $ UniqueStackage ident
    case parseRoute ("snapshot" : toPathPiece (stackageSlug stackage) : pieces, []) of
        Nothing -> notFound
        Just route -> redirect (route :: Route App)

getSnapshotPackagesR :: SnapSlug -> Handler Html
getSnapshotPackagesR slug = do
    Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    defaultLayout $ do
        setTitle $ toHtml $ "Package list for " ++ toPathPiece slug
        cachedWidget (20 * 60) ("package-list-" ++ toPathPiece slug) $ do
            packages' <- handlerToWidget $ runDB $ E.select $ E.from $ \(u,m,p) -> do
                E.where_ $
                    (m E.^. MetadataName E.==. u E.^. UploadedName) E.&&.
                    (m E.^. MetadataName E.==. p E.^. PackageName') E.&&.
                    (p E.^. PackageStackage E.==. E.val sid)
                E.orderBy [E.asc $ u E.^. UploadedName]
                E.groupBy ( u E.^. UploadedName
                          , m E.^. MetadataSynopsis
                          )
                return
                    ( u E.^. UploadedName
                    , m E.^. MetadataSynopsis
                    , E.max_ $ E.case_
                        [ ( p E.^. PackageHasHaddocks
                          , p E.^. PackageVersion
                          )
                        ]
                        (E.val (Version ""))
                    )
            let packages = flip map packages' $ \(name, syn, forceNotNull -> mversion) ->
                    ( E.unValue name
                    , mversion
                    , strip $ E.unValue syn
                    , (<$> mversion) $ \version -> HaddockR slug $ return $ concat
                        [ toPathPiece $ E.unValue name
                        , "-"
                        , version
                        ]
                    )
                forceNotNull (E.Value Nothing) = Nothing
                forceNotNull (E.Value (Just (Version v)))
                    | null v = Nothing
                    | otherwise = Just v
            $(widgetFile "package-list")
  where strip x = fromMaybe x (stripSuffix "." x)
        mback = Just (SnapshotR slug StackageHomeR, "Return to snapshot")

getDocsR :: SnapSlug -> Handler Html
getDocsR slug = do
    Entity sid _stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    defaultLayout $ do
        setTitle $ toHtml $ "Module list for " ++ toPathPiece slug
        cachedWidget (20 * 60) ("module-list-" ++ toPathPiece slug) $ do
            modules' <- handlerToWidget $ runDB $ E.select $ E.from $ \(d,m) -> do
                E.where_ $
                    (d E.^. DocsSnapshot E.==. E.val (Just sid)) E.&&.
                    (d E.^. DocsId E.==. m E.^. ModuleDocs)
                E.orderBy [ E.asc $ m E.^. ModuleName
                          , E.asc $ d E.^. DocsName
                          ]
                return
                    ( m E.^. ModuleName
                    , m E.^. ModuleUrl
                    , d E.^. DocsName
                    , d E.^. DocsVersion
                    )
            let modules = flip map modules' $ \(name, url, package, version) ->
                    ( E.unValue name
                    , E.unValue url
                    , E.unValue package
                    , E.unValue version
                    )
            $(widgetFile "doc-list")
@@ -1,34 +0,0 @@
module Handler.StackageIndex where

import Import
import Data.BlobStore
import Data.Slug (SnapSlug)

getStackageIndexR :: SnapSlug -> Handler TypedContent
getStackageIndexR slug = do
    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    let ident = stackageIdent stackage
    msrc <- storeRead $ CabalIndex ident
    case msrc of
        Nothing -> notFound
        Just src -> do
            setEtag $ toPathPiece ident
            addHeader "content-disposition" "attachment; filename=\"00-index.tar.gz\""
            neverExpires
            respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src

getStackageBundleR :: SnapSlug -> Handler TypedContent
getStackageBundleR slug = do
    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    let ident = stackageIdent stackage
        slug' = stackageSlug stackage
    msrc <- storeRead $ SnapshotBundle ident
    case msrc of
        Nothing -> notFound
        Just src -> do
            addHeader "content-disposition" $ mconcat
                [ "attachment; filename=\"bundle-"
                , toPathPiece slug'
                , ".tar.gz\""
                ]
            respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src
@@ -1,65 +0,0 @@
module Handler.StackageSdist where

import Import
import Data.BlobStore
import Data.Hackage
import Data.Slug (SnapSlug)
import Handler.Package (packagePage)

getStackageSdistR :: SnapSlug -> PackageNameVersion -> Handler TypedContent
getStackageSdistR slug (PNVTarball name version) = do
    Entity _ stackage <- runDB $ getBy404 $ UniqueSnapshot slug
    let ident = stackageIdent stackage
    addDownload (Just ident) name version
    msrc1 <- storeRead (CustomSdist ident name version)
    msrc <-
        case msrc1 of
            Just src -> return $ Just src
            Nothing -> sourceHackageSdist name version
    case msrc of
        Nothing -> notFound
        Just src -> do
            addHeader "content-disposition" $ concat
                [ "attachment; filename=\""
                , toPathPiece name
                , "-"
                , toPathPiece version
                , ".tar.gz"
                ]
            respondSource "application/x-gzip" $ mapOutput (Chunk . toBuilder) src
getStackageSdistR slug (PNVName name) = runDB $ do
    Entity sid _ <- getBy404 $ UniqueSnapshot slug
    mp <- selectFirst
        [PackageStackage ==. sid, PackageName' ==. name]
        [Desc PackageVersion]
    case mp of
        Nothing -> notFound
        Just (Entity _ Package {..}) ->
            redirect $ SnapshotR slug
                     $ StackageSdistR
                     $ PNVNameVersion name packageVersion
getStackageSdistR slug (PNVNameVersion name version) = packagePage
    name (Just version)
    (do
        Entity sid _ <- getBy404 $ UniqueSnapshot slug
        let loop [] = return Nothing
            loop (x:xs) = do
                mdocs <- selectFirst x []
                case mdocs of
                    Nothing -> loop xs
                    Just _ -> return mdocs
        loop
            [ [DocsName ==. name, DocsVersion ==. version, DocsSnapshot ==. Just sid]
            , [DocsName ==. name, DocsVersion ==. version]
            , [DocsName ==. name]
            ]
    ) >>= sendResponse

addDownload :: Maybe PackageSetIdent
            -> PackageName
            -> Version
            -> Handler ()
addDownload downloadIdent downloadPackage downloadVersion = do
    downloadUserAgent <- fmap decodeUtf8 <$> lookupHeader "user-agent"
    downloadTimestamp <- liftIO getCurrentTime
    runDB $ insert_ Download {..}
@@ -1,35 +0,0 @@
module Handler.Tag where

import qualified Database.Esqueleto as E
import Data.Slug (Slug, unSlug)
import Import

getTagListR :: Handler Html
getTagListR = do
    tags <- fmap (zip [0::Int ..] . (map (\(E.Value v,E.Value i) -> (v,i::Int)))) $ runDB $
        E.select $ E.from $ \(tag `E.LeftOuterJoin` bt) -> do
            E.groupBy (tag E.^. TagTag)
            E.orderBy [E.desc (E.count (tag E.^. TagTag) :: E.SqlExpr (E.Value Int))]
            E.on $ tag E.^. TagTag E.==. bt E.^. BannedTagTag
            E.where_ $ E.isNothing $ E.just $ bt E.^. BannedTagTag
            return (tag E.^. TagTag, E.count (tag E.^. TagTag))
    defaultLayout $ do
        setTitle "Stackage tags"
        $(widgetFile "tag-list")

getTagR :: Slug -> Handler Html
getTagR tagSlug = do
    -- FIXME arguably: check if this tag is banned. Leaving it as displayed for
    -- now, since someone needs to go out of their way to find it.
    packages <- fmap (map (\(E.Value t,E.Value s) -> (t,strip s))) $ runDB $
        E.selectDistinct $ E.from $ \(tag,meta) -> do
            E.where_ (tag E.^. TagTag E.==. E.val tagSlug E.&&.
                      meta E.^. MetadataName E.==. tag E.^. TagPackage)
            E.orderBy [E.asc (tag E.^. TagPackage)]
            return (tag E.^. TagPackage,meta E.^. MetadataSynopsis)
    let tag = unSlug tagSlug
    defaultLayout $ do
        setTitle $ "Stackage tag"
        $(widgetFile "tag")
  where strip x = fromMaybe x (stripSuffix "." x)
@ -1,350 +0,0 @@
module Handler.UploadStackage where

import Import hiding (catch, get, update)
import qualified Import
import System.IO.Temp (withSystemTempFile, withSystemTempDirectory, openBinaryTempFile)
import Crypto.Hash.Conduit (sinkHash)
import Crypto.Hash (Digest, SHA1)
import Data.Byteable (toBytes)
import qualified Data.ByteString.Base16 as B16
import Data.Conduit.Zlib (gzip, ungzip)
import qualified Codec.Archive.Tar as Tar
import qualified Data.Text as T
import Filesystem.Path (splitExtension)
import Data.BlobStore
import Filesystem (createTree)
import Control.Monad.State.Strict (execStateT, get, put, modify)
import qualified Codec.Compression.GZip as GZip
import Control.Monad.Trans.Resource (allocate)
import System.Directory (removeFile, getTemporaryDirectory)
import System.Process (runProcess, waitForProcess)
import System.Exit (ExitCode (ExitSuccess))
import Data.Slug (mkSlug, SnapSlug (..), safeMakeSlug, unSlug)
import Control.Debounce

fileKey :: Text
fileKey = "stackage"

slugKey :: Text
slugKey = "slug"

getUploadStackageR :: Handler Html
getUploadStackageR = do
    _ <- requireAuth
    defaultLayout $ do
        setTitle "Upload"
        $(widgetFile "upload-stackage")

putUploadStackageR :: Handler TypedContent
putUploadStackageR = do
    uid <- requireAuthIdOrToken

    -- Only admin users can use slugs starting with "lts" and "nightly";
    -- enforce that here
    muser <- runDB $ Import.get uid
    extra <- getExtra
    let isAdmin =
            case muser of
                Nothing -> False
                Just user -> unSlug (userHandle user) `member` adminUsers extra
        allowedSlug Nothing = Nothing
        allowedSlug (Just t)
            | isAdmin = Just t
            | "lts" `isPrefixOf` t = Nothing
            | "nightly" `isPrefixOf` t = Nothing
            | otherwise = Just t

    mfile <- lookupFile fileKey
    mslug0 <- allowedSlug <$> lookupPostParam slugKey
    case mfile of
        Nothing -> invalidArgs ["Upload missing"]
        Just file -> do
            malias <- lookupPostParam "alias"
            mlts <- lookupPostParam "lts"
            mnightly <- lookupPostParam "nightly"

            tempDir <- liftIO getTemporaryDirectory
            (_releaseKey, (fp, handleOut)) <- allocate
                (openBinaryTempFile tempDir "upload-stackage.")
                (\(fp, h) -> hClose h `finally` removeFile fp)
            digest <- fileSource file
                $$ getZipSink (ZipSink sinkHash <* ZipSink (ungzip =$ sinkHandle handleOut))
            liftIO $ hClose handleOut

            let bs = toBytes (digest :: Digest SHA1)
                ident = PackageSetIdent $ decodeUtf8 $ B16.encode bs

            -- Check for duplicates
            mstackage <- runDB $ getBy $ UniqueStackage ident
            when (isJust mstackage) $ invalidArgs ["Stackage already exists"]

            app <- getYesod
            let initProgress = UploadProgress "Upload starting" Nothing
            key <- runDB $ insert initProgress

            -- We don't want to be writing progress updates to the database too
            -- frequently, so let's just do it once per second at most.
            -- Debounce to the rescue!
            statusRef <- newIORef initProgress
            writeToDB <- liftIO $ mkDebounce defaultDebounceSettings
                { debounceAction = do
                    up <- readIORef statusRef
                    runPool (persistConfig app) (replace key up) (connPool app)
                }

            let updateHelper :: MonadBase IO m => UploadProgress -> m ()
                updateHelper p = do
                    writeIORef statusRef p
                    liftBase writeToDB
                update :: MonadBase IO m => Text -> m ()
                update msg = updateHelper (UploadProgress msg Nothing)
                done msg route = do
                    render <- getUrlRender
                    updateHelper (UploadProgress msg $ Just $ render route)
                onExc e = done ("Exception occurred: " ++ tshow e) ProfileR
                setAlias = do
                    forM_ (malias >>= mkSlug) $ \alias -> do
                        deleteWhere [AliasUser ==. uid, AliasName ==. alias]
                        insert_ Alias
                            { aliasUser = uid
                            , aliasName = alias
                            , aliasTarget = ident
                            }
                whenAdmin = when isAdmin
                setLts sid = forM_ mlts
                    $ \lts -> whenAdmin
                    $ forM_ (parseLtsPair lts) $ \(major, minor) -> do
                        mx <- getBy $ UniqueLts major minor
                        when (isNothing mx) $ insert_ $ Lts major minor sid
                setNightly sid = forM_ mnightly $ \nightly -> whenAdmin $ do
                    now <- liftIO getCurrentTime
                    let day = utctDay now
                    mx <- getBy $ UniqueNightly day
                    when (isNothing mx) $ insert_ Nightly
                        { nightlyDay = day
                        , nightlyGhcVersion = nightly
                        , nightlyStackage = sid
                        }

            update "Starting"

            forkHandler onExc $ do
                now <- liftIO getCurrentTime
                baseSlug <- fmap SnapSlug $ mkSlug $ fromMaybe (tshow $ utctDay now) mslug0
                let initial = Stackage
                        { stackageUser = uid
                        , stackageIdent = ident
                        , stackageUploaded = now
                        , stackageTitle = "Untitled Stackage"
                        , stackageDesc = "No description provided"
                        , stackageHasHaddocks = False
                        , stackageSlug = baseSlug
                        }

                -- Evil lazy I/O thanks to tar package
                lbs <- readFile $ fpFromString fp
                withSystemTempDirectory "build00index." $ \dir -> do
                    LoopState _ stackage files _ contents cores <- execStateT (loop isAdmin update (Tar.read lbs)) LoopState
                        { lsRoot = fpFromString dir
                        , lsStackage = initial
                        , lsFiles = mempty
                        , lsIdent = ident
                        , lsContents = []
                        , lsCores = mempty
                        }
                    withSystemTempFile "newindex" $ \fp' h -> do
                        ec <- liftIO $ do
                            hClose h
                            let args = "cfz"
                                     : fp'
                                     : map fpToString (setToList files)
                            ph <- runProcess "tar" args (Just dir) Nothing Nothing Nothing Nothing
                            waitForProcess ph
                        if ec == ExitSuccess
                            then do
                                sourceFile (fpFromString fp') $$ storeWrite (CabalIndex ident)
                                sourceFile (fpFromString fp) $$ gzip =$ storeWrite (SnapshotBundle ident)
                                slug <- runDB $ do
                                    slug <- getUniqueSlug $ stackageSlug stackage
                                    sid <- insert stackage { stackageSlug = slug }
                                    forM_ contents $ \(name, version, overwrite) -> insert_ Package
                                        { packageStackage = sid
                                        , packageName' = name
                                        , packageVersion = version
                                        , packageOverwrite = overwrite
                                        , packageHasHaddocks = False
                                        , packageCore = Just $ name `member` cores
                                        }

                                    setAlias
                                    setLts sid
                                    setNightly sid

                                    return slug

                                done "Stackage created" $ SnapshotR slug StackageHomeR
                            else done "Error creating index file" ProfileR

            addHeader "X-Stackage-Ident" $ toPathPiece ident
            redirect $ ProgressR key
  where
    loop _ update Tar.Done = update "Finished processing files"
    loop _ _ (Tar.Fail e) = throwM e
    loop isAdmin update (Tar.Next entry entries) = do
        addEntry isAdmin update entry
        loop isAdmin update entries

    addEntry isAdmin update entry = do
        _ <- update $ "Processing file: " ++ pack (Tar.entryPath entry)
        case Tar.entryContent entry of
            Tar.NormalFile lbs _ ->
                case filename $ fpFromString $ Tar.entryPath entry of
                    "desc" -> do
                        $logDebug $ "desc: " ++ tshow lbs
                        let (title, drop 1 -> desc) = break (== '\n')
                                                    $ decodeUtf8
                                                    $ toStrict lbs
                        ls <- get
                        put ls
                            { lsStackage = (lsStackage ls)
                                { stackageTitle = title
                                , stackageDesc = desc
                                }
                            }
                    "slug" -> do
                        let t = decodeUtf8 $ toStrict lbs
                        when (isAdmin || not ("lts" `isPrefixOf` t || "nightly" `isPrefixOf` t)) $ do
                            slug <- safeMakeSlug t False
                            ls <- get
                            put ls { lsStackage = (lsStackage ls) { stackageSlug = SnapSlug slug } }
                    "hackage" -> forM_ (lines $ decodeUtf8 $ toStrict lbs) $ \line ->
                        case parseName line of
                            Just (name, version) -> do
                                $logDebug $ "hackage: " ++ tshow (name, version)
                                _ <- update $ concat
                                    [ "Adding Hackage package: "
                                    , toPathPiece name
                                    , "-"
                                    , toPathPiece version
                                    ]
                                msrc <- storeRead (HackageCabal name version)
                                case msrc of
                                    Nothing | name == "base" -> return () -- workaround in case base isn't uploaded to Hackage
                                    Nothing -> invalidArgs ["Unknown Hackage name/version: " ++ tshow (name, version)]
                                    Just src -> addFile False name version src

                            Nothing -> return ()

                    "core" -> forM_ (lines $ decodeUtf8 $ toStrict lbs) $ \name ->
                        modify $ \ls -> ls
                            { lsCores = insertSet (PackageName name)
                                      $ lsCores ls
                            }

                    fp | (base1, Just "gz") <- splitExtension fp
                       , (fpToText -> base, Just "tar") <- splitExtension base1 -> do
                        ident <- lsIdent <$> get
                        _ <- update $ concat
                            [ "Extracting cabal file for custom tarball: "
                            , base
                            ]
                        (name, version, cabalLBS) <- extractCabal lbs base
                        sourceLazy lbs $$ storeWrite (CustomSdist ident name version)
                        addFile True name version $ sourceLazy cabalLBS
                    _ -> return ()
            _ -> return ()
      where
        addFile isOverride name version src = do
            ls <- get
            when (isOverride || fp `notMember` lsFiles ls) $ do
                let fp' = lsRoot ls </> fp
                liftIO $ createTree $ directory fp'
                src $$ sinkFile fp'
                put ls
                    { lsFiles = insertSet fp $ lsFiles ls
                    , lsContents
                        = (name, version, isOverride)
                        : lsContents ls
                    }
          where
            fp = mkFP name version

        mkFP name version
            = fpFromText (toPathPiece name)
          </> fpFromText (toPathPiece version)
          </> fpFromText (concat
                [ toPathPiece name
                , "-"
                , toPathPiece version
                , ".cabal"
                ])

        parseName t =
            case T.breakOnEnd "-" t of
                ("", _) -> Nothing
                (_, "") -> Nothing
                (T.init -> name, version) -> Just (PackageName name, Version version)

data LoopState = LoopState
    { lsRoot :: !FilePath
    , lsStackage :: !Stackage
    , lsFiles :: !(Set FilePath)
    , lsIdent :: !PackageSetIdent

    , lsContents :: ![(PackageName, Version, IsOverride)] -- FIXME use SnocVector when ready
    , lsCores :: !(Set PackageName) -- ^ core packages
    }

type IsOverride = Bool

extractCabal :: (MonadLogger m, MonadThrow m)
             => LByteString
             -> Text -- ^ basename
             -> m (PackageName, Version, LByteString)
extractCabal lbs basename' =
    loop $ Tar.read $ GZip.decompress lbs
  where
    loop Tar.Done = error $ "extractCabal: cabal file missing for " ++ unpack basename'
    loop (Tar.Fail e) = throwM e
    loop (Tar.Next e es) = do
        $logDebug $ pack $ Tar.entryPath e
        case Tar.entryContent e of
            Tar.NormalFile lbs' _
                | Just (name, version) <- parseNameVersion (pack $ Tar.entryPath e)
                -> return (name, version, lbs')
            _ -> loop es

    parseNameVersion t = do
        [dir, filename'] <- Just $ T.splitOn "/" t
        let (name', version) = T.breakOnEnd "-" dir
        name <- stripSuffix "-" name'
        guard $ name ++ ".cabal" == filename'
        return (PackageName name, Version version)

-- | Get a unique version of the given slug by appending numeric suffixes to
-- the end.
getUniqueSlug :: MonadIO m => SnapSlug -> ReaderT SqlBackend m SnapSlug
getUniqueSlug base =
    loop Nothing
  where
    loop msuffix = do
        slug <- checkSlug $ addSuffix msuffix
        ment <- getBy $ UniqueSnapshot slug
        case ment of
            Nothing -> return slug
            Just _ ->
                case msuffix of
                    Nothing -> loop $ Just (1 :: Int)
                    Just i
                        | i > 50 -> error "No unique slug found"
                        | otherwise -> loop $ Just $ i + 1

    txt = toPathPiece base

    addSuffix Nothing = txt
    addSuffix (Just i) = txt ++ pack ('-' : show i)

    checkSlug slug =
        case fromPathPiece slug of
            Nothing -> error $ "Invalid snapshot slug: " ++ unpack slug
            Just s -> return s
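The once-per-second progress writing in putUploadStackageR above uses Control.Debounce from the auto-update package. A minimal standalone sketch of the same pattern, with an IORef standing in for the UploadProgress record (names here are illustrative, not from the repo):

```haskell
import Control.Concurrent (threadDelay)
import Control.Debounce (debounceAction, defaultDebounceSettings, mkDebounce)
import Data.IORef

main :: IO ()
main = do
    ref <- newIORef (0 :: Int)
    -- 'flush' fires the action at most once per debounce period
    -- (one second with defaultDebounceSettings), however often it is called.
    flush <- mkDebounce defaultDebounceSettings
        { debounceAction = readIORef ref >>= print }
    mapM_ (\i -> writeIORef ref i >> flush) [1 .. 1000 :: Int]
    threadDelay 2000000 -- let the trailing debounced flush run before exiting
```

The point of the design is that callers can update the IORef (or here, the in-memory UploadProgress) as often as they like; only the debounced action touches the database.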
56 Import.hs
@ -1,56 +0,0 @@
module Import
    ( module Import
    ) where

import ClassyPrelude.Yesod as Import
import Foundation as Import
import Model as Import
import Settings as Import
import Settings.Development as Import
import Settings.StaticFiles as Import
import Types as Import
import Yesod.Auth as Import
import Data.Slug (mkSlug)
import Data.WebsiteContent as Import (WebsiteContent (..))
import Data.Text.Read (decimal)

requireAuthIdOrToken :: Handler UserId
requireAuthIdOrToken = do
    mtoken <- lookupHeader "authorization"
    case decodeUtf8 <$> mtoken of
        Nothing -> requireAuthId
        Just token -> do
            case mkSlug token of
                Nothing -> invalidArgs ["Invalid token: " ++ token]
                Just token' -> do
                    muser <- runDB $ getBy $ UniqueToken token'
                    case muser of
                        Nothing -> invalidArgs ["Unknown token: " ++ token]
                        Just (Entity uid _) -> return uid

parseLtsPair :: Text -> Maybe (Int, Int)
parseLtsPair t1 = do
    (x, t2) <- either (const Nothing) Just $ decimal t1
    t3 <- stripPrefix "." t2
    (y, "") <- either (const Nothing) Just $ decimal t3
    Just (x, y)

requireDocs :: Entity Stackage -> Handler ()
requireDocs stackageEnt = do
    master <- getYesod
    status <- liftIO $ duRequestDocs (appDocUnpacker master) stackageEnt
    case status of
        USReady -> return ()
        USBusy -> (>>= sendResponse) $ defaultLayout $ do
            setTitle "Docs unpacking, please wait"
            addHeader "Refresh" "1"
            msg <- liftIO $ duGetStatus $ appDocUnpacker master
            [whamlet|
                <div .container>
                    <p>Docs are currently being unpacked, please wait.
                    <p>This page will automatically reload every second.
                    <p>Current status: #{msg}
            |]
        USFailed e -> invalidArgs
            [ "Docs not available: " ++ e
            ]
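For reference, parseLtsPair above accepts exactly a decimal major version, a dot, and a decimal minor version with nothing left over. A standalone sketch (the definition is copied from the module above, using plain Data.Text imports in place of the classy prelude so it runs on its own):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text, stripPrefix)
import Data.Text.Read (decimal)

parseLtsPair :: Text -> Maybe (Int, Int)
parseLtsPair t1 = do
    (x, t2) <- either (const Nothing) Just $ decimal t1
    t3 <- stripPrefix "." t2
    (y, "") <- either (const Nothing) Just $ decimal t3
    Just (x, y)

main :: IO ()
main = do
    print $ parseLtsPair "1.0"    -- Just (1,0)
    print $ parseLtsPair "2.18"   -- Just (2,18)
    print $ parseLtsPair "lts-1"  -- Nothing: no leading integer
    print $ parseLtsPair "1.0.1"  -- Nothing: trailing ".1" left over
```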
5 LICENSE
@ -1,6 +1,7 @@
The MIT License (MIT)

Copyright (c) 2014 FP Complete
Copyright (c) 2014-2017 FP Complete
Copyright (c) 2024 Haskell Foundation

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@ -18,4 +19,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
SOFTWARE.
13 Model.hs
@ -1,13 +0,0 @@
module Model where

import ClassyPrelude.Yesod
import Database.Persist.Quasi
import Data.Slug (Slug, SnapSlug)
import Types

-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
    $(persistFileWith lowerCaseSettings "config/models")
41 README.md
@ -1,13 +1,38 @@
stackage-server
===============
# stackage-server

Server for stable, curated Haskell package sets

Code builds with the Stackage snapshot:
This repo is part of the [Stackage project](https://github.com/commercialhaskell/stackage),
and the live server can be viewed at https://www.stackage.org.

remote-repo: stackage-35ecbe20461b5fe50bad1e5653f6660132861fe9:http://www.stackage.org/stackage/35ecbe20461b5fe50bad1e5653f6660132861fe9
## Building locally

Inside the config directory, there are two files ending in `-sample`. They
should be copied to remove the `-sample` suffix for the site to work. We do it
this way to avoid accidentally committing real database credentials into the
Git repository.
Build locally by passing the `dev` flag:

``` shellsession
$ stack build . --flag stackage-server:dev
```

## Simple testing with sqlite
To test the UI without real data, just run:
```
$ yesod devel
```
(install the `yesod` executable from yesod-bin).

## Testing with postgresql
Initially, you need to run the cron job to create and populate the database:

``` shellsession
$ export PGSTRING=postgresql://postgres:password@localhost:5432/stackage
$ stack exec stackage-server-cron
```

Note that you need to modify the PGSTRING environment variable according to your actual database configuration. You also need to create an empty database (e.g. with PostgreSQL's `createdb stackage`) before running the cron job, and the initial load takes quite some time.

After this, run the stackage server:

``` shellsession
$ export PGSTRING=postgresql://postgres:password@localhost:5432/stackage
$ stack exec stackage-server
```
109 Settings.hs
@ -1,109 +0,0 @@
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where

import ClassyPrelude.Yesod
import Text.Shakespeare.Text (st)
import Language.Haskell.TH.Syntax
import Database.Persist.Postgresql (PostgresConf)
import Yesod.Default.Config
import Yesod.Default.Util
import Data.Yaml
import Settings.Development
import Text.Hamlet
import Data.Aeson (withText, withObject)
import Types

-- | Which Persistent backend this site is using.
type PersistConf = PostgresConf

-- Static settings below. Changing these requires a recompile.

-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: String
staticDir = "static"

-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in Foundation.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in Foundation.hs
staticRoot :: AppConfig DefaultEnv x -> Text
staticRoot conf = [st|#{appRoot conf}/static|]

-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
    { wfsHamletSettings = defaultHamletSettings
        { hamletNewlines = AlwaysNewlines
        }
    }

-- The rest of this file contains settings which rarely need changing by a
-- user.

widgetFile :: String -> Q Exp
widgetFile = (if development then widgetFileReload
                             else widgetFileNoReload)
                 widgetFileSettings

data Extra = Extra
    { storeConfig :: !BlobStoreConfig
    , hackageRoot :: !HackageRoot
    , adminUsers :: !(HashSet Text)
    , googleAuth :: !(Maybe GoogleAuth)
    }
    deriving Show

parseExtra :: DefaultEnv -> Object -> Parser Extra
parseExtra _ o = Extra
    <$> o .: "blob-store"
    <*> (HackageRoot <$> o .: "hackage-root")
    <*> o .:? "admin-users" .!= mempty
    <*> o .:? "google-auth"

data BlobStoreConfig = BSCFile !FilePath
                     | BSCAWS !FilePath !Text !Text !Text !Text
    deriving Show

instance FromJSON BlobStoreConfig where
    parseJSON v = file v <|> aws v
      where
        file = withText "BlobStoreConfig" $ \t ->
            case () of
                ()
                    | Just root <- stripPrefix "file:" t -> return $ BSCFile $ fpFromText root
                    | otherwise -> fail $ "Invalid BlobStoreConfig: " ++ show t
        aws = withObject "BlobStoreConfig" $ \o -> BSCAWS
            <$> (fpFromText <$> (o .: "local"))
            <*> o .: "access"
            <*> o .: "secret"
            <*> o .: "bucket"
            <*> o .:? "prefix" .!= ""

data GoogleAuth = GoogleAuth
    { gaClientId :: !Text
    , gaClientSecret :: !Text
    }
    deriving Show
instance FromJSON GoogleAuth where
    parseJSON = withObject "GoogleAuth" $ \o -> GoogleAuth
        <$> o .: "client-id"
        <*> o .: "client-secret"
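The BlobStoreConfig instance above accepts either a "file:<path>" string or an AWS object, trying one parser and falling back to the other with `<|>`. A standalone sketch of that two-shape decoding pattern; the Store type and its fields here are simplified stand-ins, not the repo's:

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<|>))
import Data.Aeson
import Data.Text (Text)
import qualified Data.Text as T

-- Simplified: a local file store, or an AWS store with a path and bucket.
data Store = File Text | AWS Text Text
    deriving Show

instance FromJSON Store where
    parseJSON v = file v <|> aws v
      where
        file = withText "Store" $ \t ->
            case T.stripPrefix "file:" t of
                Just root -> return (File root)
                Nothing -> fail ("Invalid Store: " ++ show t)
        aws = withObject "Store" $ \o -> AWS <$> o .: "local" <*> o .: "bucket"

main :: IO ()
main = do
    print (decode "\"file:/tmp/store\"" :: Maybe Store)
    print (decode "{\"local\":\"/tmp\",\"bucket\":\"b\"}" :: Maybe Store)
```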
@ -1,22 +0,0 @@
module Settings.Development where

import Prelude

development :: Bool
development =
#if DEVELOPMENT
  True
#else
  False
#endif

cabalFileLoader :: Bool
cabalFileLoader =
#if INGHCI
  False
#else
  True
#endif

production :: Bool
production = not development
@ -1,35 +0,0 @@
module Settings.StaticFiles where

import Prelude (IO)
import Yesod.Static
import qualified Yesod.Static as Static
import Settings (staticDir)
import Settings.Development
import Language.Haskell.TH (Q, Exp, Name)
import Data.Default (def)

-- | use this to create your static file serving site
staticSite :: IO Static.Static
staticSite = if development then Static.staticDevel staticDir
                            else Static.static staticDir

-- | This generates easy references to files in the static directory at compile time,
-- giving you compile-time verification that referenced files exist.
-- Warning: any files added to your static directory during run-time can't be
-- accessed this way. You'll have to use their FilePath or URL to access them.
$(staticFiles Settings.staticDir)

combineSettings :: CombineSettings
combineSettings = def

-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])

combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets' development combineSettings

combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts' development combineSettings
104 Types.hs
@ -1,104 +0,0 @@
module Types where

import ClassyPrelude.Yesod
import Data.BlobStore (ToPath (..), BackupToS3 (..))
import Text.Blaze (ToMarkup)
import Database.Persist.Sql (PersistFieldSql (sqlType))
import qualified Data.Text as T

newtype PackageName = PackageName { unPackageName :: Text }
    deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup, PersistField, IsString)
instance PersistFieldSql PackageName where
    sqlType = sqlType . liftM unPackageName
newtype Version = Version { unVersion :: Text }
    deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup, PersistField)
instance PersistFieldSql Version where
    sqlType = sqlType . liftM unVersion
newtype PackageSetIdent = PackageSetIdent { unPackageSetIdent :: Text }
    deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup, PersistField)
instance PersistFieldSql PackageSetIdent where
    sqlType = sqlType . liftM unPackageSetIdent

data PackageNameVersion = PNVTarball !PackageName !Version
                        | PNVNameVersion !PackageName !Version
                        | PNVName !PackageName
    deriving (Show, Read, Typeable, Eq, Ord)

instance PathPiece PackageNameVersion where
    toPathPiece (PNVTarball x y) = concat [toPathPiece x, "-", toPathPiece y, ".tar.gz"]
    toPathPiece (PNVNameVersion x y) = concat [toPathPiece x, "-", toPathPiece y]
    toPathPiece (PNVName x) = toPathPiece x
    fromPathPiece t' | Just t <- stripSuffix ".tar.gz" t' =
        case T.breakOnEnd "-" t of
            ("", _) -> Nothing
            (_, "") -> Nothing
            (T.init -> name, version) -> Just $ PNVTarball (PackageName name) (Version version)
    fromPathPiece t = Just $
        case T.breakOnEnd "-" t of
            ("", _) -> PNVName (PackageName t)
            (T.init -> name, version) | validVersion version ->
                PNVNameVersion (PackageName name) (Version version)
            _ -> PNVName (PackageName t)
      where
        validVersion =
            all f
          where
            f c = (c == '.') || ('0' <= c && c <= '9')

data StoreKey = HackageCabal !PackageName !Version
              | HackageSdist !PackageName !Version
              | CabalIndex !PackageSetIdent
              | CustomSdist !PackageSetIdent !PackageName !Version
              | SnapshotBundle !PackageSetIdent
              | HaddockBundle !PackageSetIdent
              | HoogleDB !PackageSetIdent !HoogleVersion
    deriving (Show, Eq, Ord, Typeable)

newtype HoogleVersion = HoogleVersion Text
    deriving (Show, Eq, Ord, Typeable, PathPiece)
currentHoogleVersion :: HoogleVersion
currentHoogleVersion = HoogleVersion VERSION_hoogle

instance ToPath StoreKey where
    toPath (HackageCabal name version) = ["hackage", toPathPiece name, toPathPiece version ++ ".cabal"]
    toPath (HackageSdist name version) = ["hackage", toPathPiece name, toPathPiece version ++ ".tar.gz"]
    toPath (CabalIndex ident) = ["cabal-index", toPathPiece ident ++ ".tar.gz"]
    toPath (CustomSdist ident name version) =
        [ "custom-tarball"
        , toPathPiece ident
        , toPathPiece name
        , toPathPiece version ++ ".tar.gz"
        ]
    toPath (SnapshotBundle ident) =
        [ "bundle"
        , toPathPiece ident ++ ".tar.gz"
        ]
    toPath (HaddockBundle ident) =
        [ "haddock"
        , toPathPiece ident ++ ".tar.xz"
        ]
    toPath (HoogleDB ident ver) =
        [ "hoogle"
        , toPathPiece ver
        , toPathPiece ident ++ ".hoo.gz"
        ]
instance BackupToS3 StoreKey where
    shouldBackup HackageCabal{} = False
    shouldBackup HackageSdist{} = False
    shouldBackup CabalIndex{} = True
    shouldBackup CustomSdist{} = True
    shouldBackup SnapshotBundle{} = True
    shouldBackup HaddockBundle{} = True
    shouldBackup HoogleDB{} = True

newtype HackageRoot = HackageRoot { unHackageRoot :: Text }
    deriving (Show, Read, Typeable, Eq, Ord, Hashable, PathPiece, ToMarkup)

class HasHackageRoot a where
    getHackageRoot :: a -> HackageRoot
instance HasHackageRoot HackageRoot where
    getHackageRoot = id

data UnpackStatus = USReady
                  | USBusy
                  | USFailed !Text
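The fromPathPiece case for tarballs above strips a ".tar.gz" suffix and then splits on the last dash. A standalone sketch of just that rule, with types simplified to plain Text pairs (this is not the repo's module, only an illustration of the parsing logic):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import qualified Data.Text as T

-- Mirrors the PNVTarball branch: strip ".tar.gz", then split name-version
-- on the final dash, rejecting inputs with no dash or an empty version.
parseTarball :: Text -> Maybe (Text, Text)
parseTarball t' = do
    t <- T.stripSuffix ".tar.gz" t'
    case T.breakOnEnd "-" t of
        ("", _) -> Nothing
        (_, "") -> Nothing
        (name', version) -> Just (T.init name', version)

main :: IO ()
main = do
    print $ parseTarball "yesod-1.6.0.tar.gz"  -- Just ("yesod","1.6.0")
    print $ parseTarball "yesod.tar.gz"        -- Nothing (no dash before version)
    print $ parseTarball "yesod-1.6.0"         -- Nothing (wrong suffix)
```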
56 app/DevelMain.hs Normal file
@ -0,0 +1,56 @@
{-# LANGUAGE ImplicitPrelude #-}

-- | Devel web server.
--
-- > :l DevelMain
-- > DevelMain.update
--
-- To start/restart the server.

module DevelMain where

import Application (App, withFoundationDev, makeApplication)

import Control.Concurrent
import Foreign.Store
import Network.Wai.Handler.Warp
import Yesod
import Data.IORef

data Command = Run (IO ())
             | Stop

newtype Devel = Devel (Store (IORef (App -> IO Application)))

-- | Start the web server.
main :: IO Devel
main = do
    c <- newChan
    ref <- newIORef makeApplication
    tid <-
        forkIO $
        withFoundationDev $ \settings foundation ->
            runSettings
                settings
                (\req cont -> do
                     mkApp <- readIORef ref
                     application <- mkApp foundation
                     application req cont)
    _ <- newStore tid
    ref' <- newStore ref
    _ <- newStore c
    return $ Devel ref'

-- | Update the server, start it if not running.
update :: IO Devel
update =
    do m <- lookupStore 1
       case m of
         Nothing -> main
         Just store ->
           do ref <- readStore store
              c <- readStore (Store 2)
              writeChan c ()
              writeIORef ref makeApplication
              return $ Devel store
@ -1,4 +0,0 @@
import Application

main :: IO ()
main = cabalLoaderMain
6 app/devel.hs Normal file
@ -0,0 +1,6 @@
{-# LANGUAGE PackageImports #-}
import "stackage-server" Application (develMain)
import Prelude (IO)

main :: IO ()
main = develMain
10 app/main.hs
@ -1,9 +1,5 @@
import Application (makeApplication)
import Prelude (IO)
import Prelude (Bool(..))
import Settings (parseExtra)
import Yesod.Default.Config (fromArgs)
import Yesod.Default.Main (defaultMainLog)
import Prelude (IO)
import Application (appMain)

main :: IO ()
main = defaultMainLog (fromArgs parseExtra) (makeApplication False)
main = appMain
98 app/stackage-server-cron.hs Normal file
@ -0,0 +1,98 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
import Options.Applicative
import RIO
import RIO.List as L
import RIO.Text as T
import Stackage.Database.Cron
import Stackage.Database.Github

readText :: ReadM T.Text
readText = T.pack <$> str

readLogLevel :: ReadM LogLevel
readLogLevel =
    maybeReader $ \case
        "debug" -> Just LevelDebug
        "info" -> Just LevelInfo
        "warn" -> Just LevelWarn
        "error" -> Just LevelError
        _ -> Nothing

readGithubRepo :: ReadM GithubRepo
readGithubRepo =
    maybeReader $ \str' ->
        case L.span (/= '/') str' of
            (grAccount, '/':grName)
                | not (L.null grName) -> Just GithubRepo {..}
            _ -> Nothing

optsParser :: Parser StackageCronOptions
optsParser =
    StackageCronOptions <$>
    switch
        (long "force-update" <> short 'f' <>
         help
             "Initiate a force update, where all snapshots will be updated regardless of \
             \whether their yaml files in the stackage-snapshots repo have been updated or not.") <*>
    option
        readText
        (long "download-bucket" <> value defHaddockBucketName <> metavar "DOWNLOAD_BUCKET" <>
         help
             ("S3 bucket name where things like haddocks and current hoogle files should \
              \be downloaded from. Used in S3 API read operations. Default is: " <>
              T.unpack defHaddockBucketName)) <*>
    option
        readText
        (long "download-bucket-url" <> value defHaddockBucketUrl <> metavar "DOWNLOAD_BUCKET_URL" <>
         help
             ("Publicly accessible URL where the download bucket can be accessed. Used for \
              \serving the Haddocks on the website. Default is: " <>
              T.unpack defHaddockBucketUrl)) <*>
    option
        readText
        (long "upload-bucket" <> value defHaddockBucketName <> metavar "UPLOAD_BUCKET" <>
         help
             ("S3 bucket where the hoogle db and snapshots.json file will be uploaded to. Default is: " <>
              T.unpack defHaddockBucketName)) <*>
    switch
        (long "do-not-upload" <>
         help "Disable upload of Hoogle database and snapshots.json") <*>
    option
        readLogLevel
        (long "log-level" <> metavar "LOG_LEVEL" <> short 'l' <> value LevelInfo <>
         help "Verbosity level (debug|info|warn|error). Default level is 'info'.") <*>
    option
        readGithubRepo
        (long "snapshots-repo" <> metavar "SNAPSHOTS_REPO" <>
         value (GithubRepo repoAccount repoName) <>
         help
             ("Github repository with snapshot files. Default is '" ++
              repoAccount ++ "/" ++ repoName ++ "'.")) <*>
    switch (long "report-progress" <> help "Report how many packages have been loaded.") <*>
    switch
        (long "cache-cabal-files" <>
         help
             ("Improve performance by caching parsed cabal files" ++
              " at the expense of higher memory consumption"))
  where
    repoAccount = "commercialhaskell"
    repoName = "stackage-snapshots"

main :: IO ()
main = do
    hSetBuffering stdout LineBuffering
    hSetBuffering stderr LineBuffering
    opts <-
        execParser $
        info
            (optsParser <*
             abortOption (ShowHelpText Nothing) (long "help" <> short 'h' <> help "Display this message."))
            (header "stackage-cron - Keep stackage.org up to date" <>
             progDesc
                 "Uses the github.com/commercialhaskell/stackage-snapshots repository as a source \
                 \for keeping stackage.org up to date. Among other things: updates the hoogle db, \
                 \uploads it to an S3 bucket, and uses stackage-content for global-hints." <>
             fullDesc)
    stackageServerCron opts
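readGithubRepo above is a plain optparse-applicative maybeReader that splits "account/name" on the first slash. A standalone sketch with a simplified tuple type standing in for GithubRepo (names illustrative, not from the repo):

```haskell
import Options.Applicative

-- Accept "account/name"; reject anything without a slash or with an empty name.
readRepo :: ReadM (String, String)
readRepo = maybeReader $ \s ->
    case span (/= '/') s of
        (account, '/':name) | not (null name) -> Just (account, name)
        _ -> Nothing

main :: IO ()
main = do
    repo <- execParser $ info
        (option readRepo
            (long "snapshots-repo" <>
             value ("commercialhaskell", "stackage-snapshots")))
        fullDesc
    print repo
```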
110 bench/main.hs Normal file
@ -0,0 +1,110 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}

import Control.Monad.Logger (runNoLoggingT, runStdoutLoggingT)
import Data.Pool (destroyAllResources)
import Database.Persist.Postgresql (PostgresConf(..), createPostgresqlPool)
import Database.Persist.Sql (ConnectionPool, SqlBackend, runSqlPool)
import Gauge
import Pantry.Internal.Stackage (PackageNameP(..))
import RIO
import Settings (getAppSettings, AppSettings(..), DatabaseSettings(..), configSettingsYmlValue)
import Stackage.Database.Query
import Stackage.Database.Schema (withStackageDatabase, runDatabase)
import Stackage.Database.Types (LatestInfo, SnapName(..), SnapshotPackageInfo(..))
import Yesod.Default.Config2

main :: IO ()
main = do
    appSettings <- getAppSettings
    let snapName = SNLts 16 4
    mSnapInfo <-
        runSimpleApp $
        withStackageDatabase
            True
            (appDatabase appSettings)
            (\db -> runDatabase db $ getSnapshotPackageInfoQuery snapName (PackageNameP "yesod"))
    let snapInfo = fromMaybe (error "snapInfo not retrieved") mSnapInfo
    defaultMain [benchs snapInfo]

runBenchApp :: ConnectionPool -> ReaderT SqlBackend (RIO SimpleApp) a -> IO a
runBenchApp pool m = runSimpleApp $ runSqlPool m pool

createBenchPool :: IO ConnectionPool
createBenchPool = do
    loadYamlSettingsArgs [configSettingsYmlValue] useEnv >>= \case
        AppSettings{appDatabase = DSPostgres pgString _} ->
            runNoLoggingT $ createPostgresqlPool (encodeUtf8 pgString) 1
        _ -> throwString "Benchmarks are crafted for PostgreSQL"

releasePool :: ConnectionPool -> IO ()
releasePool = destroyAllResources

-- TODO: Upstream fix? Or add a new function to gauge (although it
-- seems it might be a breaking change there)?
instance NFData ConnectionPool where
    rnf _ = ()

getLatestsBench :: Benchmark
getLatestsBench =
    bench "getLatests" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool -> runBenchApp pool (void $ getLatests $ PackageNameP "yesod"))

getDeprecatedBench :: Benchmark
getDeprecatedBench =
    bench "getDeprecated" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool -> runBenchApp pool (void $ getDeprecatedQuery $ PackageNameP "yesod"))

getSnapshotPackageLatestVersionBench :: Benchmark
getSnapshotPackageLatestVersionBench =
    bench "getSnapshotPackageLatestVersion" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool ->
             runBenchApp pool (void $ getSnapshotPackageLatestVersionQuery $ PackageNameP "yesod"))

getSnapshotPackagePageInfoBench :: SnapshotPackageInfo -> Benchmark
getSnapshotPackagePageInfoBench snapshotInfo =
    bench "getSnapshotPackagePageInfo" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool -> runBenchApp pool (void $ getSnapshotPackagePageInfoQuery snapshotInfo 40))

getPackageInfoBench :: SnapshotPackageInfo -> Benchmark
getPackageInfoBench snapInfo =
    bench "getPackageInfo" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool -> runBenchApp pool (void $ getPackageInfoQuery (Right snapInfo)))

getHackageLatestVersionBench :: Benchmark
getHackageLatestVersionBench =
    bench "getHackageLatestVersion" $
    perBatchEnvWithCleanup
        (\runs -> createBenchPool)
        (\_ pool -> releasePool pool)
        (\pool -> runBenchApp pool (void $ getHackageLatestVersion $ PackageNameP "yesod"))

benchs :: SnapshotPackageInfo -> Benchmark
benchs snap =
    bgroup
        "SQL Query Benchmark"
        [ getLatestsBench
        , getDeprecatedBench
        , getHackageLatestVersionBench
        , getPackageInfoBench snap
        , getSnapshotPackagePageInfoBench snap
        , getSnapshotPackageLatestVersionBench
        ]
863 cabal.config
@ -1,863 +0,0 @@
-- Stackage snapshot from: http://www.stackage.org/snapshot/lts-1.0
-- Please place this file next to your .cabal file as cabal.config
-- To only use tested packages, uncomment the following line:
-- remote-repo: stackage-lts-1.0:http://www.stackage.org/snapshot/lts-1.0
constraints: abstract-deque ==0.3, abstract-par ==0.3.3, accelerate ==0.15.0.0, ace ==0.6,
             action-permutations ==0.0.0.1, active ==0.1.0.17, AC-Vector ==2.3.2, ad ==4.2.1.1, adjunctions ==4.2,
             aeson ==0.8.0.2, aeson-pretty ==0.7.2, aeson-qq ==0.7.4, aeson-utils ==0.2.2.1, alarmclock ==0.2.0.5,
             alex ==3.1.3, amqp ==0.10.1, ansi-terminal ==0.6.2.1, ansi-wl-pprint ==0.6.7.1, appar ==0.1.4,
             approximate ==0.2.1.1, arbtt ==0.8.1.4, arithmoi ==0.4.1.1, array installed, arrow-list ==0.6.1.5,
             asn1-data ==0.7.1, asn1-encoding ==0.9.0, asn1-parse ==0.9.0, asn1-types ==0.3.0, async ==2.0.2,
             atto-lisp ==0.2.2, attoparsec ==0.12.1.2, attoparsec-conduit ==1.1.0, attoparsec-enumerator ==0.3.3, attoparsec-expr ==0.1.1.1,
             authenticate ==1.3.2.11, auto-update ==0.1.2.1, aws ==0.11, bake ==0.2, bank-holidays-england ==0.1.0.2,
             barecheck ==0.2.0.6, base installed, base16-bytestring ==0.1.1.6, base64-bytestring ==1.0.0.1, base-compat ==0.5.0,
             base-prelude ==0.1.11, base-unicode-symbols ==0.2.2.4, basic-prelude ==0.3.10, bifunctors ==4.2, binary installed,
             binary-conduit ==1.2.3, binary-list ==1.0.1.0, bindings-DSL ==1.0.21, bioace ==0.0.1, bioalign ==0.0.5,
             biocore ==0.3.1, biofasta ==0.0.3, biofastq ==0.1, biophd ==0.0.5, biopsl ==0.4,
             biosff ==0.3.7.1, bits ==0.4, BlastHTTP ==1.0.1, blastxml ==0.3.2, blaze-builder ==0.3.3.4,
             blaze-builder-enumerator ==0.2.0.6, blaze-html ==0.7.0.3, blaze-markup ==0.6.2.0, blaze-svg ==0.3.4, blaze-textual ==0.2.0.9,
             BlogLiterately ==0.7.1.7, BlogLiterately-diagrams ==0.1.4.3, bloodhound ==0.5.0.1, bmp ==1.2.5.2, Boolean ==0.2.3,
             bool-extras ==0.4.0, bound ==1.0.4, BoundedChan ==1.0.3.0, broadcast-chan ==0.1.0, bson ==0.3.1,
             bumper ==0.6.0.2, byteable ==0.1.1, bytedump ==1.0, byteorder ==1.0.4, bytes ==0.14.1.2,
             bytestring installed, bytestring-builder ==0.10.4.0.1, bytestring-lexing ==0.4.3.2, bytestring-mmap ==0.2.2, bytestring-progress ==1.0.3,
             bytestring-show ==0.3.5.6, bytestring-trie ==0.2.4, bzlib ==0.5.0.4, bzlib-conduit ==0.2.1.3, c2hs ==0.20.1,
             Cabal installed, cabal-install ==1.18.0.7, cabal-src ==0.2.5, cairo ==0.13.0.6, case-insensitive ==1.2.0.3,
             cases ==0.1.2, cassava ==0.4.2.1, cautious-file ==1.0.2, cereal ==0.4.1.0, cereal-conduit ==0.7.2.3,
             certificate ==1.3.9, charset ==0.3.7, Chart ==1.3.2, Chart-diagrams ==1.3.2, ChasingBottoms ==1.3.0.9,
             check-email ==1.0, checkers ==0.4.1, chell ==0.4, chell-quickcheck ==0.2.4, chunked-data ==0.1.0.1,
             cipher-aes ==0.2.9, cipher-blowfish ==0.0.3, cipher-camellia ==0.0.2, cipher-des ==0.0.6, cipher-rc4 ==0.1.4,
             circle-packing ==0.1.0.4, classy-prelude ==0.10.2, classy-prelude-conduit ==0.10.2, classy-prelude-yesod ==0.10.2, clientsession ==0.9.1.1,
             clock ==0.4.1.3, cmdargs ==0.10.12, code-builder ==0.1.3, colour ==2.3.3, comonad ==4.2.2,
             comonads-fd ==4.0, comonad-transformers ==4.0, compdata ==0.9, compensated ==0.6.1, composition ==1.0.1.0,
             compressed ==3.10, concatenative ==1.0.1, concurrent-extra ==0.7.0.9, concurrent-supply ==0.1.7, cond ==0.4.1.1,
             conduit ==1.2.3.1, conduit-combinators ==0.3.0.5, conduit-extra ==1.1.6, configurator ==0.3.0.0, connection ==0.2.3,
             constraints ==0.4.1.2, containers installed, containers-unicode-symbols ==0.3.1.1, contravariant ==1.2.0.1, control-monad-free ==0.5.3,
             control-monad-loop ==0.1, convertible ==1.1.0.0, cookie ==0.4.1.4, courier ==0.1.0.15, cpphs ==1.18.6,
             cprng-aes ==0.6.1, cpu ==0.1.2, criterion ==1.0.2.0, crypto-api ==0.13.2, cryptocipher ==0.6.2,
             crypto-cipher-tests ==0.0.11, crypto-cipher-types ==0.0.9, cryptohash ==0.11.6, cryptohash-conduit ==0.1.1, cryptohash-cryptoapi ==0.1.3,
             crypto-numbers ==0.2.7, crypto-pubkey ==0.2.7, crypto-pubkey-types ==0.4.2.3, crypto-random ==0.0.8, crypto-random-api ==0.2.0,
             css-text ==0.1.2.1, csv ==0.1.2, csv-conduit ==0.6.3, curl ==1.3.8, data-accessor ==0.2.2.6,
             data-accessor-mtl ==0.2.0.4, data-binary-ieee754 ==0.4.4, data-default ==0.5.3, data-default-class ==0.0.1, data-default-instances-base ==0.0.1,
             data-default-instances-containers ==0.0.1, data-default-instances-dlist ==0.0.1, data-default-instances-old-locale ==0.0.1, data-inttrie ==0.1.0, data-lens-light ==0.1.2.1,
             data-memocombinators ==0.5.1, data-reify ==0.6, DAV ==1.0.3, Decimal ==0.4.2, deepseq installed,
             deepseq-generics ==0.1.1.2, derive ==2.5.18, diagrams ==1.2, diagrams-builder ==0.6.0.2, diagrams-cairo ==1.2.0.5,
             diagrams-contrib ==1.1.2.4, diagrams-core ==1.2.0.4, diagrams-haddock ==0.2.2.12, diagrams-lib ==1.2.0.7, diagrams-postscript ==1.1.0.3,
             diagrams-svg ==1.1.0.3, Diff ==0.3.0, digest ==0.0.1.2, digestive-functors ==0.7.1.3, dimensional ==0.13.0.1,
             directory installed, directory-tree ==0.12.0, direct-sqlite ==2.3.14, distributed-process ==0.5.3, distributed-process-async ==0.2.1,
             distributed-process-client-server ==0.1.2, distributed-process-execution ==0.1.1, distributed-process-extras ==0.2.0, distributed-process-simplelocalnet ==0.2.2.0, distributed-process-supervisor ==0.1.2,
             distributed-process-task ==0.1.1, distributed-static ==0.3.1.0, distributive ==0.4.4, djinn-ghc ==0.0.2.2, djinn-lib ==0.0.1.2,
             dlist ==0.7.1, dlist-instances ==0.1, doctest ==0.9.11.1, double-conversion ==2.0.1.0, dual-tree ==0.2.0.5,
             easy-file ==0.2.0, either ==4.3.2.1, elm-build-lib ==0.14.0.0, elm-compiler ==0.14, elm-core-sources ==1.0.0,
             elm-package ==0.2.2, email-validate ==2.0.1, enclosed-exceptions ==1.0.1, entropy ==0.3.4.1, enumerator ==0.4.20,
             eq ==4.0.3, erf ==2.0.0.0, errorcall-eq-instance ==0.1.0, errors ==1.4.7, ersatz ==0.2.6.1,
             esqueleto ==2.1.2.1, exceptions ==0.6.1, exception-transformers ==0.3.0.4, executable-path ==0.0.3, extensible-exceptions ==0.1.1.4,
             extra ==1.0, failure ==0.2.0.3, fast-logger ==2.2.3, fay ==0.21.2.1, fay-base ==0.19.4.1,
             fay-builder ==0.2.0.1, fay-dom ==0.5, fay-jquery ==0.6.0.2, fay-text ==0.3.2, fay-uri ==0.2.0.0,
             fb ==1.0.7, fb-persistent ==0.3.4, fclabels ==2.0.2, FenwickTree ==0.1.2, fgl ==5.5.0.1,
             file-embed ==0.0.7, file-location ==0.4.5.3, filemanip ==0.3.6.2, filepath installed, fingertree ==0.1.0.0,
             fixed ==0.2.1, fixed-list ==0.1.5, flexible-defaults ==0.0.1.1, focus ==0.1.3, foldl ==1.0.7,
             FontyFruity ==0.4, force-layout ==0.3.0.8, foreign-store ==0.1, formatting ==6.0.0, fpco-api ==1.2.0.4,
             free ==4.10.0.1, freenect ==1.2, frisby ==0.2, fsnotify ==0.1.0.3, fuzzcheck ==0.1.1,
             gd ==3000.7.3, generic-aeson ==0.2.0.2, generic-deriving ==1.6.3, GenericPretty ==1.2.1, generics-sop ==0.1.0.4,
             ghc-heap-view ==0.5.3, ghcid ==0.3.4, ghc-mod ==5.2.1.2, ghc-mtl ==1.2.1.0, ghc-paths ==0.1.0.9,
             ghc-prim installed, ghc-syb-utils ==0.2.2, gio ==0.13.0.4, git-embed ==0.1.0, gl ==0.6.2,
             glib ==0.13.0.7, Glob ==0.7.5, GLURaw ==1.4.0.1, GLUT ==2.5.1.1, graph-core ==0.2.1.0,
             graphs ==0.5.0.1, gravatar ==0.6, groundhog ==0.7.0.1, groundhog-mysql ==0.7.0.1, groundhog-postgresql ==0.7.0.1,
             groundhog-sqlite ==0.7.0.1, groundhog-th ==0.7.0, groupoids ==4.0, groups ==0.4.0.0, gtk ==0.13.4,
             gtk2hs-buildtools ==0.13.0.3, haddock-api ==2.15.0.2, haddock-library ==1.1.1, half ==0.2.0.1, HandsomeSoup ==0.3.5,
             happstack-server ==7.3.9, happy ==1.19.4, hashable ==1.2.3.1, hashable-extras ==0.2.0.1, hashmap ==1.3.0.1,
             hashtables ==1.2.0.1, haskeline installed, haskell2010 installed, haskell98 installed, haskell-lexer ==1.0,
             haskell-names ==0.4.1, haskell-packages ==0.2.4.3, haskell-src ==1.0.1.6, haskell-src-exts ==1.16.0.1, haskell-src-meta ==0.6.0.8,
             hasql ==0.7.1, hasql-backend ==0.4.0, hasql-postgres ==0.10.1, hastache ==0.6.1, HaTeX ==3.16.0.0,
             HaXml ==1.25, haxr ==3000.10.3.1, HCodecs ==0.5, hdaemonize ==0.5.0.0, hdevtools ==0.1.0.6,
             heaps ==0.3.1, hebrew-time ==0.1.1, heist ==0.14.0.1, here ==1.2.6, heredoc ==0.2.0.0,
             hflags ==0.4, highlighting-kate ==0.5.11.1, hinotify ==0.3.7, hint ==0.4.2.1, histogram-fill ==0.8.3.0,
             hit ==0.6.2, hjsmin ==0.1.4.7, hledger ==0.24, hledger-lib ==0.24, hlibgit2 ==0.18.0.13,
             hlint ==1.9.14, hmatrix ==0.16.1.3, hmatrix-gsl ==0.16.0.2, hoauth2 ==0.4.3, holy-project ==0.1.1.1,
             hoogle ==4.2.36, hoopl installed, hOpenPGP ==1.11, hostname ==1.0, hostname-validate ==1.0.0,
             hourglass ==0.2.6, hpc installed, hPDB ==1.2.0.2, hPDB-examples ==1.2.0.1, hs-bibutils ==5.5,
             hscolour ==1.20.3, hse-cpp ==0.1, hslogger ==1.2.6, hslua ==0.3.13, hspec ==2.1.2,
             hspec2 ==0.6.1, hspec-core ==2.1.2, hspec-discover ==2.1.2, hspec-expectations ==0.6.1.1, hspec-meta ==2.0.0,
             hspec-wai ==0.6.2, hspec-wai-json ==0.6.0, HStringTemplate ==0.7.3, hsyslog ==2.0, HTF ==0.12.2.3,
             html ==1.0.1.2, html-conduit ==1.1.1.1, HTTP ==4000.2.19, http-client ==0.4.6.1, http-client-tls ==0.2.2,
             http-conduit ==2.1.5, http-date ==0.0.4, http-reverse-proxy ==0.4.1.2, http-types ==0.8.5, HUnit ==1.2.5.2,
             hweblib ==0.6.3, hxt ==9.3.1.10, hxt-charproperties ==9.2.0.0, hxt-http ==9.1.5, hxt-pickle-utils ==0.1.0.2,
             hxt-regex-xmlschema ==9.2.0, hxt-relaxng ==9.1.5.1, hxt-unicode ==9.0.2.2, hybrid-vectors ==0.1.2, hyphenation ==0.4,
             idna ==0.3.0, ieee754 ==0.7.4, IfElse ==0.85, imagesize-conduit ==1.0.0.4, immortal ==0.2,
             incremental-parser ==0.2.3.3, indents ==0.3.3, ini ==0.3.0, integer-gmp installed, integration ==0.2.0.1,
             interpolate ==0.1.0, interpolatedstring-perl6 ==0.9.0, intervals ==0.7.0.1, io-choice ==0.0.5, io-manager ==0.1.0.2,
             io-memoize ==1.1.1.0, iproute ==1.3.1, iterable ==3.0, ixset ==1.0.6, js-flot ==0.8.3,
             js-jquery ==1.11.2, json-autotype ==0.2.5.4, json-schema ==0.7.3.0, JuicyPixels ==3.2.1, JuicyPixels-repa ==0.7,
             kan-extensions ==4.2, kdt ==0.2.2, keter ==1.3.7.1, keys ==3.10.1, kure ==2.16.4,
             language-c ==0.4.7, language-ecmascript ==0.16.2, language-glsl ==0.1.1, language-haskell-extract ==0.2.4, language-java ==0.2.7,
             language-javascript ==0.5.13, lazy-csv ==0.5, lca ==0.2.4, lens ==4.6.0.1, lens-aeson ==1.0.0.3,
             lens-family-th ==0.4.0.0, lhs2tex ==1.18.1, libgit ==0.3.0, libnotify ==0.1.1.0, lifted-async ==0.2.0.2,
             lifted-base ==0.2.3.3, linear ==1.15.5, linear-accelerate ==0.2, list-t ==0.4.2, loch-th ==0.2.1,
             log-domain ==0.9.3, logfloat ==0.12.1, logict ==0.6.0.2, loop ==0.2.0, lucid ==2.5,
             lzma-conduit ==1.1.1, machines ==0.4.1, mandrill ==0.1.1.0, map-syntax ==0.2, markdown ==0.1.13,
             markdown-unlit ==0.2.0.1, math-functions ==0.1.5.2, matrix ==0.3.4.0, MaybeT ==0.1.2, MemoTrie ==0.6.2,
             mersenne-random-pure64 ==0.2.0.4, messagepack ==0.3.0, messagepack-rpc ==0.1.0.3, mime-mail ==0.4.6.2, mime-mail-ses ==0.3.2.1,
             mime-types ==0.1.0.5, missing-foreign ==0.1.1, MissingH ==1.3.0.1, mmap ==0.5.9, mmorph ==1.0.4,
             MonadCatchIO-transformers ==0.3.1.3, monad-control ==0.3.3.0, monad-coroutine ==0.8.0.1, monadcryptorandom ==0.6.1, monad-extras ==0.5.9,
             monadic-arrays ==0.2.1.3, monad-journal ==0.6.0.2, monad-logger ==0.3.11.1, monad-loops ==0.4.2.1, monad-par ==0.3.4.7,
             monad-parallel ==0.7.1.3, monad-par-extras ==0.3.3, monad-primitive ==0.1, monad-products ==4.0.0.1, MonadPrompt ==1.0.0.5,
             MonadRandom ==0.3.0.1, monad-st ==0.2.4, monads-tf ==0.1.0.2, mongoDB ==2.0.3, monoid-extras ==0.3.3.5,
             monoid-subclasses ==0.3.6.2, mono-traversable ==0.7.0, mtl ==2.1.3.1, mtlparse ==0.1.2, mtl-prelude ==1.0.2,
             multimap ==1.2.1, multipart ==0.1.2, MusicBrainz ==0.2.2, mwc-random ==0.13.2.2, mysql ==0.1.1.7,
             mysql-simple ==0.2.2.4, nanospec ==0.2.0, nats ==1, neat-interpolation ==0.2.2, nettle ==0.1.0,
             network ==2.6.0.2, network-conduit-tls ==1.1.0.2, network-info ==0.2.0.5, network-multicast ==0.0.11, network-simple ==0.4.0.2,
             network-transport ==0.4.1.0, network-transport-tcp ==0.4.1, network-transport-tests ==0.2.2.0, network-uri ==2.6.0.1, newtype ==0.2,
             nsis ==0.2.4, numbers ==3000.2.0.1, numeric-extras ==0.0.3, NumInstances ==1.4, numtype ==1.1,
             Octree ==0.5.4.2, old-locale installed, old-time installed, OneTuple ==0.2.1, opaleye ==0.3,
             OpenGL ==2.9.2.0, OpenGLRaw ==1.5.0.0, openpgp-asciiarmor ==0.1, operational ==0.2.3.2, options ==1.2.1,
             optparse-applicative ==0.11.0.1, osdkeys ==0.0, pandoc ==1.13.2, pandoc-citeproc ==0.6, pandoc-types ==1.12.4.1,
             pango ==0.13.0.5, parallel ==3.2.0.6, parallel-io ==0.3.3, parseargs ==0.1.5.2, parsec ==3.1.7,
             parsers ==0.12.1.1, partial-handler ==0.1.0, path-pieces ==0.1.5, patience ==0.1.1, pcre-light ==0.4.0.3,
             pdfinfo ==1.5.1, pem ==0.2.2, persistent ==2.1.1.3, persistent-mongoDB ==2.1.2, persistent-mysql ==2.1.2,
             persistent-postgresql ==2.1.2, persistent-sqlite ==2.1.1.2, persistent-template ==2.1.0.1, phantom-state ==0.2.0.2, pipes ==4.1.4,
             pipes-concurrency ==2.0.2, pipes-parse ==3.0.2, placeholders ==0.1, pointed ==4.2, polyparse ==1.10,
             pool-conduit ==0.1.2.3, postgresql-binary ==0.5.0, postgresql-libpq ==0.9.0.1, postgresql-simple ==0.4.9.0, pqueue ==1.2.1,
             prefix-units ==0.1.0.2, prelude-extras ==0.4, present ==2.2, pretty installed, prettyclass ==1.0.0.0,
             pretty-class ==1.0.1.1, pretty-show ==1.6.8, primes ==0.2.1.0, primitive ==0.5.4.0, process installed,
             process-conduit ==1.2.0.1, process-extras ==0.2.0, product-profunctors ==0.6, profunctor-extras ==4.0, profunctors ==4.3.2,
             project-template ==0.1.4.2, publicsuffixlist ==0.1, punycode ==2.0, pure-io ==0.2.1, pureMD5 ==2.1.2.1,
             pwstore-fast ==2.4.4, quandl-api ==0.2.0.0, QuasiText ==0.1.2.5, QuickCheck ==2.7.6, quickcheck-assertions ==0.1.1,
             quickcheck-instances ==0.3.10, quickcheck-io ==0.1.1, quickcheck-unicode ==1.0.0.0, quickpull ==0.4.0.0, rainbow ==0.20.0.4,
             rainbow-tests ==0.20.0.4, random ==1.0.1.1, random-fu ==0.2.6.1, random-shuffle ==0.0.4, random-source ==0.3.0.6,
             rank1dynamic ==0.2.0.1, Rasterific ==0.4, raw-strings-qq ==1.0.2, ReadArgs ==1.2.2, reducers ==3.10.3,
             reflection ==1.5.1, regex-applicative ==0.3.0.3, regex-base ==0.93.2, regex-compat ==0.95.1, regex-pcre-builtin ==0.94.4.8.8.35,
             regex-posix ==0.95.2, regexpr ==0.5.4, regex-tdfa ==1.2.0, regex-tdfa-rc ==1.1.8.3, regular ==0.3.4.4,
             regular-xmlpickler ==0.2, rematch ==0.2.0.0, repa ==3.3.1.2, repa-algorithms ==3.3.1.2, repa-devil ==0.3.2.2,
             repa-io ==3.3.1.2, reroute ==0.2.2.1, resource-pool ==0.2.3.2, resourcet ==1.1.3.3, rest-client ==0.4.0.2,
             rest-core ==0.33.1.2, rest-gen ==0.16.1.3, rest-happstack ==0.2.10.3, rest-snap ==0.1.17.14, rest-stringmap ==0.2.0.2,
             rest-types ==1.11.1.1, rest-wai ==0.1.0.4, rev-state ==0.1, rfc5051 ==0.1.0.3, runmemo ==1.0.0.1,
             rvar ==0.2.0.2, safe ==0.3.8, safecopy ==0.8.3, scientific ==0.3.3.3, scotty ==0.9.0,
             scrobble ==0.2.1.1, securemem ==0.1.4, semigroupoid-extras ==4.0, semigroupoids ==4.2, semigroups ==0.16.0.1,
             sendfile ==0.7.9, seqloc ==0.6, setenv ==0.1.1.1, SHA ==1.6.4.1, shake ==0.14.2,
             shake-language-c ==0.6.3, shakespeare ==2.0.2.1, shakespeare-i18n ==1.1.0, shakespeare-text ==1.1.0, shell-conduit ==4.5,
             shelly ==1.5.7, silently ==1.2.4.1, simple-reflect ==0.3.2, simple-sendfile ==0.2.18, singletons ==1.0,
             siphash ==1.0.3, skein ==1.0.9.2, slave-thread ==0.1.5, smallcheck ==1.1.1, smtLib ==1.0.7,
             snap ==0.13.3.2, snap-core ==0.9.6.4, snaplet-fay ==0.3.3.8, snap-server ==0.9.4.6, socks ==0.5.4,
             sodium ==0.11.0.3, sourcemap ==0.1.3.0, speculation ==1.5.0.1, sphinx ==0.6.0.1, split ==0.2.2,
             Spock ==0.7.6.0, Spock-digestive ==0.1.0.0, Spock-worker ==0.2.1.3, spoon ==0.3.1, sqlite-simple ==0.4.8.0,
             stackage ==0.3.1, stateref ==0.3, statestack ==0.2.0.3, statistics ==0.13.2.1, statistics-linreg ==0.3,
             stm ==2.4.4, stm-chans ==3.0.0.2, stm-conduit ==2.5.3, stm-containers ==0.2.7, stm-stats ==0.2.0.0,
             storable-complex ==0.2.1, storable-endian ==0.2.5, streaming-commons ==0.1.8, streams ==3.2, strict ==0.3.2,
             stringable ==0.1.3, stringbuilder ==0.5.0, stringprep ==1.0.0, stringsearch ==0.3.6.5, stylish-haskell ==0.5.11.0,
             SVGFonts ==1.4.0.3, syb ==0.4.3, syb-with-class ==0.6.1.5, system-canonicalpath ==0.2.0.0, system-fileio ==0.3.16,
             system-filepath ==0.4.13.1, system-posix-redirect ==1.1.0.1, tabular ==0.2.2.5, tagged ==0.7.3, tagshare ==0.0,
             tagsoup ==0.13.3, tagstream-conduit ==0.5.5.3, tar ==0.4.0.1, tardis ==0.3.0.0, tasty ==0.10.1,
             tasty-ant-xml ==1.0.1, tasty-golden ==2.2.2.4, tasty-hunit ==0.9.0.1, tasty-quickcheck ==0.8.3.2, tasty-smallcheck ==0.8.0.1,
             tasty-th ==0.1.3, template-haskell installed, temporary ==1.2.0.3, temporary-rc ==1.2.0.3, terminal-progress-bar ==0.0.1.4,
             terminal-size ==0.3.0, terminfo installed, test-framework ==0.8.1.0, test-framework-hunit ==0.3.0.1, test-framework-quickcheck2 ==0.3.0.3,
             test-framework-th ==0.2.4, testing-feat ==0.4.0.2, testpack ==2.1.3.0, texmath ==0.8.0.1, text ==1.2.0.3,
             text-binary ==0.1.0, text-format ==0.3.1.1, text-icu ==0.7.0.0, tf-random ==0.5, th-desugar ==1.4.2,
             th-expand-syns ==0.3.0.4, th-extras ==0.0.0.2, th-lift ==0.7, th-orphans ==0.8.3, threads ==0.5.1.2,
             th-reify-many ==0.1.2, thyme ==0.3.5.5, time installed, time-compat ==0.1.0.3, time-lens ==0.4.0.1,
             timezone-olson ==0.1.6, timezone-series ==0.1.4, tls ==1.2.13, tls-debug ==0.3.4, tostring ==0.2.1,
             transformers installed, transformers-base ==0.4.3, transformers-compat ==0.3.3.3, traverse-with-class ==0.2.0.3, tree-view ==0.4,
             tuple ==0.3.0.2, type-eq ==0.4.2, type-list ==0.0.0.0, udbus ==0.2.1, unbounded-delays ==0.1.0.9,
             union-find ==0.2, uniplate ==1.6.12, unix installed, unix-compat ==0.4.1.3, unix-time ==0.3.4,
             unordered-containers ==0.2.5.1, uri-encode ==1.5.0.3, url ==2.1.3, utf8-light ==0.4.2, utf8-string ==0.3.8,
             uuid ==1.3.8, vault ==0.3.0.4, vector ==0.10.12.2, vector-algorithms ==0.6.0.3, vector-binary-instances ==0.2.1.0,
             vector-instances ==3.3, vector-space ==0.8.7, vector-space-points ==0.2, vector-th-unbox ==0.2.1.0, vhd ==0.2.2,
             void ==0.7, wai ==3.0.2.1, wai-app-static ==3.0.0.5, wai-conduit ==3.0.0.2, wai-eventsource ==3.0.0,
             wai-extra ==3.0.3.2, wai-logger ==2.2.3, wai-middleware-static ==0.6.0.1, wai-websockets ==3.0.0.3, warp ==3.0.5,
             warp-tls ==3.0.1.1, webdriver ==0.6.0.3, web-fpco ==0.1.1.0, websockets ==0.9.2.2, wizards ==1.0.1,
             wl-pprint ==1.1, wl-pprint-extras ==3.5.0.3, wl-pprint-terminfo ==3.7.1.3, wl-pprint-text ==1.1.0.3, word8 ==0.1.1,
             wordpass ==1.0.0.2, X11 ==1.6.1.2, x509 ==1.5.0.1, x509-store ==1.5.0, x509-system ==1.5.0,
             x509-validation ==1.5.1, xenstore ==0.1.1, xhtml installed, xml ==1.3.13, xml-conduit ==1.2.3.1,
             xmlgen ==0.6.2.1, xml-hamlet ==0.4.0.9, xmlhtml ==0.2.3.4, xml-types ==0.3.4,
|
||||
xss-sanitize ==0.3.5.4,
|
||||
yackage ==0.7.0.6,
|
||||
yaml ==0.8.10.1,
|
||||
Yampa ==0.9.6,
|
||||
YampaSynth ==0.2,
|
||||
yesod ==1.4.1.3,
|
||||
yesod-auth ==1.4.1.2,
|
||||
yesod-auth-deskcom ==1.4.0,
|
||||
yesod-auth-fb ==1.6.6,
|
||||
yesod-auth-hashdb ==1.4.1.2,
|
||||
yesod-auth-oauth2 ==0.0.11,
|
||||
yesod-bin ==1.4.3.2,
|
||||
yesod-core ==1.4.7.1,
|
||||
yesod-eventsource ==1.4.0.1,
|
||||
yesod-fay ==0.7.0,
|
||||
yesod-fb ==0.3.4,
|
||||
yesod-form ==1.4.3.1,
|
||||
yesod-gitrepo ==0.1.1.0,
|
||||
yesod-newsfeed ==1.4.0.1,
|
||||
yesod-persistent ==1.4.0.2,
|
||||
yesod-sitemap ==1.4.0.1,
|
||||
yesod-static ==1.4.0.4,
|
||||
yesod-test ==1.4.2.2,
|
||||
yesod-text-markdown ==0.1.7,
|
||||
yesod-websockets ==0.2.1.1,
|
||||
zeromq4-haskell ==0.6.2,
|
||||
zip-archive ==0.2.3.5,
|
||||
zlib ==0.5.4.2,
|
||||
zlib-bindings ==0.1.1.5,
|
||||
zlib-enum ==0.2.3.1,
|
||||
zlib-lens ==0.1
|
||||
@@ -1 +0,0 @@
name: stackage-server

@@ -1,9 +0,0 @@
stanzas:
- type: webapp
  exec: ../dist/build/stackage-server/stackage-server
  args:
  - production
  env:
    STACKAGE_CABAL_LOADER: "0"
    STACKAGE_HOOGLE_GEN: "0"
  host: www.stackage.org

139 config/models
@@ -1,139 +0,0 @@
User
    handle Slug
    display Text
    token Slug
    UniqueHandle handle
    UniqueToken token
    deriving Typeable

Email
    email Text
    user UserId
    UniqueEmail email

Verkey
    email Text
    verkey Text

Stackage
    user UserId
    ident PackageSetIdent
    slug SnapSlug default="md5((random())::text)"
    uploaded UTCTime
    title Text
    desc Text
    hasHaddocks Bool default=false
    UniqueStackage ident
    UniqueSnapshot slug

Uploaded
    name PackageName
    version Version
    uploaded UTCTime
    UniqueUploaded name version

Alias
    user UserId
    name Slug
    target PackageSetIdent
    UniqueAlias user name

Package
    stackage StackageId
    name' PackageName sql=name
    version Version
    hasHaddocks Bool default=true
    overwrite Bool
    core Bool Maybe -- use Maybe to speed up migration

Tag
    package PackageName
    tag Slug
    voter UserId
    UniqueTagPackageVoter package tag voter

Like
    package PackageName
    voter UserId
    UniqueLikePackageVoter package voter

Download
    ident PackageSetIdent Maybe
    view Text Maybe MigrationOnly
    timestamp UTCTime
    package PackageName
    version Version
    userAgent Text Maybe

Metadata
    name PackageName
    version Version
    hash ByteString
    deps [Text]
    author Text
    maintainer Text
    licenseName Text
    homepage Text
    bugReports Text
    synopsis Text
    sourceRepo [Text]
    category Text
    library Bool
    exes Int
    testSuites Int
    benchmarks Int

    readme Html
    changelog Html Maybe
    licenseContent Html Maybe

    UniqueMetadata name

Docs
    name PackageName
    version Version
    uploaded UTCTime
    snapshot StackageId Maybe
Module
    docs DocsId
    name Text
    url Text
    UniqueModule docs name

Dependency
    dep PackageName
    user PackageName
    UniqueDependency dep user

BannedTag
    tag Slug
    UniqueBannedTag tag

Migration
    num Int
    UniqueMigration num

Nightly
    day Day
    ghcVersion Text
    stackage StackageId
    UniqueNightly day

Lts
    major Int
    minor Int
    stackage StackageId
    UniqueLts major minor

Deprecated
    package PackageName
    UniqueDeprecated package

Suggested
    package PackageName
    insteadOf PackageName
    UniqueSuggested package insteadOf

UploadProgress
    message Text
    dest Text Maybe
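
A brief, hedged sketch (not part of this diff) of how code elsewhere in the repo would query these persistent models; the runner and function name are illustrative:

-- Sketch only: assumes the types persistent generates from the models
-- above (Stackage, SnapSlug, UniqueSnapshot) and a persistent-postgresql
-- backend. lookupSnapshot is a hypothetical name, not repo code.
import Database.Persist (Entity, getBy)
import Database.Persist.Sql (SqlPersistT)

lookupSnapshot :: SnapSlug -> SqlPersistT IO (Maybe (Entity Stackage))
lookupSnapshot slug = getBy (UniqueSnapshot slug)

The UniqueSnapshot constructor comes straight from the `UniqueSnapshot slug` line in the Stackage entity, which is what makes a single-row lookup by slug possible.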
@@ -1,24 +0,0 @@
Default: &defaults
  user: stackage_server
  password: stackage-server
  host: localhost
  port: 5432
  database: stackage_server
  poolsize: 10

Development:
  <<: *defaults

Testing:
  database: stackage_server_test
  <<: *defaults

Staging:
  database: stackage_server_staging
  poolsize: 100
  <<: *defaults

Production:
  database: stackage_server_production
  poolsize: 100
  <<: *defaults
@@ -1,2 +1,4 @@
User-agent: *
Disallow: /haddock/
Disallow: /diff/
Sitemap: https://www.stackage.org/sitemap.xml
@@ -1,56 +1,68 @@
/static StaticR Static getStatic
/auth AuthR Auth getAuth
/reload WebsiteContentR GitRepo-WebsiteContent websiteContent
!/#SnapshotBranch/*Texts OldSnapshotBranchR GET

/static StaticR Static appStatic
/reload WebsiteContentR GitRepo-WebsiteContent appWebsiteContent

/favicon.ico FaviconR GET
/robots.txt RobotsR GET
/sitemap.xml SitemapR GET

/ HomeR GET
/healthz HealthzR GET
/snapshots AllSnapshotsR GET
/profile ProfileR GET PUT
/email/#EmailId EmailR DELETE
/reset-token ResetTokenR POST
/upload UploadStackageR GET PUT
/upload-haddock/#Text UploadHaddockR GET PUT
/upload-doc-map UploadDocMapR GET PUT

/stackage/#PackageSetIdent/*Texts OldStackageR GET
/snapshot/#Text/*Texts OldSnapshotR GET

/snapshot/#SnapSlug SnapshotR:
/api/v1/snapshots ApiV1SnapshotsR GET
/api/v1/snapshot/#ApiSnapshotName ApiV1SnapshotR GET

!/#SnapName SnapshotR:
    / StackageHomeR GET
    /metadata StackageMetadataR GET
    /cabal.config StackageCabalConfigR GET
    /00-index.tar.gz StackageIndexR GET
    /bundle StackageBundleR GET
    /package/#PackageNameVersion StackageSdistR GET
    /package/#PackageNameVersion/deps SnapshotPackageDepsR GET
    /package/#PackageNameVersion/revdeps SnapshotPackageRevDepsR GET
    /packages SnapshotPackagesR GET
    /docs DocsR GET
    /hoogle HoogleR GET
    /db.hoo HoogleDatabaseR GET
    /build-plan BuildPlanR GET
    /ghc-major-version GhcMajorVersionR GET

/diff/#SnapName/#SnapName StackageDiffR GET

/aliases AliasesR PUT
/alias/#Slug/#Slug/*Texts AliasR
/progress/#UploadProgressId ProgressR GET
/system SystemR GET
/haddock/#SnapSlug/*Texts HaddockR GET
/package/#PackageName PackageR GET
/package/#PackageName/snapshots PackageSnapshotsR GET
/haddock/#SnapName/*Texts HaddockR GET
!/haddock/*Texts HaddockBackupR GET
/package/#PackageNameP PackageR GET
/package/#PackageNameP/snapshots PackageSnapshotsR GET
/package/#PackageNameP/badge/#SnapshotBranch PackageBadgeR GET
/package PackageListR GET
/compressor-status CompressorStatusR GET
/package/#PackageName/like PackageLikeR POST
/package/#PackageName/unlike PackageUnlikeR POST
/package/#PackageName/tag PackageTagR POST
/package/#PackageName/untag PackageUntagR POST
/tags TagListR GET
/tag/#Slug TagR GET
/banned-tags BannedTagsR GET PUT

/lts/*Texts LtsR GET
/nightly/*Texts NightlyR GET
/package/#PackageNameP/deps PackageDepsR GET
/package/#PackageNameP/revdeps PackageRevDepsR GET

/authors AuthorsR GET
/install InstallR GET
/older-releases OlderReleasesR GET

/refresh-deprecated RefreshDeprecatedR GET
/build-version BuildVersionR GET
/package-counts PackageCountsR GET
/build-version BuildVersionR GitRev appGitRev

/download DownloadR GET
/download/snapshots.json DownloadSnapshotsJsonR GET
/download/lts-snapshots.json DownloadLtsSnapshotsJsonR GET
/download/#SupportedArch/#Text DownloadGhcLinksR GET

/feed FeedR GET
/feed/#SnapshotBranch BranchFeedR GET

/stack DownloadStackListR GET
/stack/#Text DownloadStackR GET

/status/mirror MirrorStatusR GET

/blog BlogHomeR GET
/blog/#Year/#Month/#Text BlogPostR GET
/blog/feed BlogFeedR GET

/stats StatsR GET
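
For orientation, a hedged sketch of the handler convention this routes table implies: Yesod's dispatch maps a `GET` on `AllSnapshotsR` to a function named getAllSnapshotsR, presumably in Handler.Snapshots (which Application.hs imports further down). The body here is a placeholder, not this repo's implementation:

-- Sketch only: Handler, defaultLayout, and setTitle come from the app's
-- Foundation and Yesod; the page body below is illustrative.
getAllSnapshotsR :: Handler Html
getAllSnapshotsR = defaultLayout $ do
    setTitle "Snapshots"
    [whamlet|<h1>All snapshots|]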
29 config/settings.yml Normal file
@@ -0,0 +1,29 @@
# Values formatted like "_env:ENV_VAR_NAME:default_value" can be overridden by the specified environment variable.
# See https://github.com/yesodweb/yesod/wiki/Configuration#overriding-configuration-values-with-environment-variables

static-dir: "_env:STATIC_DIR:static"
host: "_env:HOST:*4" # any IPv4 host
port: "_env:PORT:3000" # NB: The port `yesod devel` uses is distinct from this value. Set the `yesod devel` port from the command line.
ip-from-header: "_env:IP_FROM_HEADER:false"

# Default behavior: determine the application root from the request headers.
# Uncomment to set an explicit approot
approot: "_env:APPROOT:"

# Optional values with the following production defaults.
# In development, they default to the inverse.
#
# development: false
# detailed-logging: false
# should-log-all: false
# reload-templates: false
# mutable-static: false
# skip-combining: false
force-ssl: false
# dev-download: false

postgres-string: "_env:PGSTRING:host=localhost port=5432 user=stackage dbname=stackage password=stackage"
postgres-poolsize: "_env:PGPOOLSIZE:8"

# Publicly-accessible URL for the bucket holding Haddock contents.
download-bucket-url: "_env:DOWNLOAD_BUCKET_URL:https://s3.amazonaws.com/haddock.stackage.org"
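
A minimal sketch of how the `_env:` placeholders above get resolved at startup. loadYamlSettings and useEnv are from Yesod.Default.Config2 (appMain further down uses the same mechanism via loadYamlSettingsArgs); AppSettings stands in for the app's settings record:

-- Sketch only: AppSettings is a stand-in for the app's FromJSON settings
-- type. With useEnv, a value written "_env:PORT:3000" reads $PORT and
-- falls back to 3000 when that variable is unset.
import Yesod.Default.Config2 (loadYamlSettings, useEnv)

loadSettings :: IO AppSettings
loadSettings = loadYamlSettings ["config/settings.yml"] [] useEnv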
@@ -1,33 +0,0 @@
Default: &defaults
  host: "*4" # any IPv4 host
  port: 3000
  approot: "http://localhost:3000"
  hackage-root: http://hackage.fpcomplete.com
  admin-users:
    - fpcomplete
  # google-auth:
  #   client-id: foo
  #   client-secret: bar

Development:
  <<: *defaults
  blob-store: file:dev-blob-store

Testing:
  <<: *defaults

Staging:
  <<: *defaults

Production:
  #approot: "http://www.example.com"
  <<: *defaults
  blob-store: file:/tmp/stackage-server

# S3-backed storage
# blob-store:
#   type: aws
#   local: /tmp/stackage-server
#   access: someaccesskey
#   secret: somesecretkey
#   bucket: somebucket

1 config/test-settings.yml Normal file
@@ -0,0 +1 @@
{}
24 devel.hs
@@ -1,24 +0,0 @@
{-# LANGUAGE PackageImports #-}
import "stackage-server" Application (getApplicationDev)
import Network.Wai.Handler.Warp
    (runSettings, defaultSettings, setPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)

main :: IO ()
main = do
    putStrLn "Starting devel application"
    (port, app) <- getApplicationDev False
    forkIO $ runSettings (setPort port defaultSettings) app
    loop

loop :: IO ()
loop = do
    threadDelay 100000
    e <- doesFileExist "yesod-devel/devel-terminate"
    if e then terminateDevel else loop

terminateDevel :: IO ()
terminateDevel = exitSuccess
12 docker/Dockerfile.base-build Normal file
@@ -0,0 +1,12 @@
FROM fpco/pid1:20.04

ENV LANG C.UTF-8
RUN export DEBIAN_FRONTEND=noninteractive && \
    apt-get update && \
    apt-get install libpq-dev curl -y && \
    curl -sSL https://get.haskellstack.org/ | sh && \
    unset DEBIAN_FRONTEND
RUN stack update
COPY stack.yaml package.yaml /src/
RUN stack setup --stack-yaml /src/stack.yaml
RUN stack build --only-snapshot --stack-yaml /src/stack.yaml

7 docker/Dockerfile.base-run Normal file
@@ -0,0 +1,7 @@
FROM fpco/pid1:20.04

RUN export DEBIAN_FRONTEND=noninteractive && \
    apt-get update && \
    apt-get install libpq-dev curl git -y && \
    curl -sSL https://get.haskellstack.org/ | sh && \
    unset DEBIAN_FRONTEND

12 docker/Dockerfile.runtime Normal file
@@ -0,0 +1,12 @@
FROM ghcr.io/fpco/stackage-server/base-build:02cdb54683a9c8feec125bbdc9aa36f9700dad17 as build-app

RUN mkdir -p /artifacts/bin
COPY . /src
RUN stack install --stack-yaml /src/stack.yaml --local-bin-path /artifacts/bin

FROM ghcr.io/fpco/stackage-server/base-run:02cdb54683a9c8feec125bbdc9aa36f9700dad17

COPY --from=build-app /src/config/ /app/config/
COPY --from=build-app /src/static/ /app/static/
COPY --from=build-app /artifacts/bin/stackage-server /usr/local/bin/stackage-server
COPY --from=build-app /artifacts/bin/stackage-server-cron /usr/local/bin/stackage-server-cron
59 flake.lock Normal file
@@ -0,0 +1,59 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1738136902,
        "narHash": "sha256-pUvLijVGARw4u793APze3j6mU1Zwdtz7hGkGGkD87qw=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "9a5db3142ce450045840cc8d832b13b8a2018e0c",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
32 flake.nix Normal file
@@ -0,0 +1,32 @@
{
  description = "stackage-server";

  inputs.flake-utils.url = "github:numtide/flake-utils";

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem
      (system:
        let
          pkgs = nixpkgs.legacyPackages.${system};
          package = pkgs.callPackage ./package.nix {};
        in
        {
          packages.default = package.app;
          devShells.default = package.shell;

          checks = {
            # I used to put these into $out/lib, but justStaticExecutables
            # removes that directory. Now I feel like I'm just getting lucky. So
            # let's double check the files are there.
            file-check = pkgs.runCommand "check-runtime-files" {} ''
              if [ -e ${self.packages.${system}.default}/run/config/settings.yml ]; then
                touch $out
              else
                2>&1 echo "Runtime files are missing"
                exit 1
              fi
            '';
          };
        }
      );
}

4 font-awesome.min.css vendored
File diff suppressed because one or more lines are too long
@@ -1,8 +0,0 @@
docker:
  repo-suffix: "_ghc-7.8.4.20141229_stackage-lts-1.0"
  image-tag: "20150101"
  # For fpbuild <= 0.1.0
  registry-username: "dummy"
  registry-password: "no-auth-required"
packages:
- "."

3 indices Normal file
@@ -0,0 +1,3 @@
create index nightly_snap on nightly(snap);
create index snapshot_package_snapshot on snapshot_package(snapshot);
create index snapshot_created on snapshot (created desc);
33 nix/amazonka-core.nix Normal file
@@ -0,0 +1,33 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, aeson, attoparsec, base, bytestring
, case-insensitive, conduit, conduit-extra, containers, crypton
, data-ordlist, deepseq, fetchzip, hashable, http-client
, http-conduit, http-types, lens, lib, memory, QuickCheck
, quickcheck-unicode, regex-posix, resourcet, scientific, tasty
, tasty-hunit, tasty-quickcheck, template-haskell, text, time
, transformers, unordered-containers, xml-conduit, xml-types
}:
mkDerivation {
  pname = "amazonka-core";
  version = "2.0";
  src = fetchzip {
    url = "https://github.com/brendanhay/amazonka/archive/85e0289f8dc23c54b00f7f1a09845be7e032a1eb.zip";
    sha256 = "1mgdz9b7wwc05xksczvzp2hllp7nzl4nr6as5q2fafkgxqzwwx53";
  };
  postUnpack = "sourceRoot+=/lib/amazonka-core; echo source root reset to $sourceRoot";
  libraryHaskellDepends = [
    aeson attoparsec base bytestring case-insensitive conduit
    conduit-extra containers crypton deepseq hashable http-client
    http-conduit http-types lens memory regex-posix resourcet
    scientific text time transformers unordered-containers xml-conduit
    xml-types
  ];
  testHaskellDepends = [
    aeson base bytestring case-insensitive conduit data-ordlist
    http-conduit http-types QuickCheck quickcheck-unicode tasty
    tasty-hunit tasty-quickcheck template-haskell text time
  ];
  homepage = "https://github.com/brendanhay/amazonka";
  description = "Core data types and functionality for Amazonka libraries";
  license = lib.licenses.mpl20;
}
21 nix/crypton-connection.nix Normal file
@@ -0,0 +1,21 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, base, basement, bytestring, containers
, crypton-x509, crypton-x509-store, crypton-x509-system
, crypton-x509-validation, data-default-class, lib, network, socks
, tls
}:
mkDerivation {
  pname = "crypton-connection";
  version = "0.3.2";
  sha256 = "208be23bc910f8e5f9431995b9c011ed376bb947d79f74c8f51a5e4ecd9e991e";
  revision = "1";
  editedCabalFile = "1rkana1ghppras20pgpwp2bc8dnsf8lspq90r6124jqd4ckbvx2b";
  libraryHaskellDepends = [
    base basement bytestring containers crypton-x509 crypton-x509-store
    crypton-x509-system crypton-x509-validation data-default-class
    network socks tls
  ];
  homepage = "https://github.com/kazu-yamamoto/crypton-connection";
  description = "Simple and easy network connections API";
  license = lib.licenses.bsd3;
}
36 nix/gen-packages.sh Executable file
@@ -0,0 +1,36 @@
#!/usr/bin/env bash

set -Eeuo pipefail

filter=${1:-.}

gen () {
    f="$(mktemp)"
    # shellcheck disable=SC2064
    trap "rm -f $f" EXIT
    if grep -q "$filter" <<< "$1"; then
        echo "Generating $1..."
        echo "# Generated by $0" > "$f"
        cabal2nix "$2" >> "$f" "${@:3}"
        mv "$f" "${1}.nix"
    else
        echo "Skipping $1..."
    fi
}

cd "$(dirname "$0")"

gen stackage-server --hpack ../.

# Has my R2 patch, which is still unreleased on 2025-01-24
#echo "...please ignore useless error below..."
2>/dev/null gen amazonka-core https://github.com/brendanhay/amazonka/archive/85e0289f8dc23c54b00f7f1a09845be7e032a1eb.zip --subpath lib/amazonka-core

# Pinned to 5.0.18.4 to avoid accidentally regenerating hoogle files. See
# warning in stack.yaml!
gen hoogle cabal://hoogle-5.0.18.4

# FIXME: I don't remember why this had to be patched.
gen pantry https://github.com/commercialhaskell/pantry/archive/5df643cc1deb561d9c52a9cb6f593aba2bc4c08e.zip

echo "Success!"
35 nix/hoogle.nix Normal file
@@ -0,0 +1,35 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, aeson, base, binary, blaze-html, blaze-markup
, bytestring, cmdargs, conduit, conduit-extra, containers
, crypton-connection, deepseq, directory, extra, filepath
, foundation, hashable, haskell-src-exts, http-conduit, http-types
, js-flot, js-jquery, lib, mmap, old-locale, process-extras
, QuickCheck, resourcet, safe, storable-tuple, tar
, template-haskell, text, time, transformers, uniplate, utf8-string
, vector, wai, wai-logger, warp, warp-tls, zlib
}:
mkDerivation {
  pname = "hoogle";
  version = "5.0.18.4";
  sha256 = "9d0f2de39821d14e8a436d5fda3523e789258b8041f02dd655f0e37d5013e323";
  revision = "1";
  editedCabalFile = "1129flhhb1992rijw46dclvmpqlylmbrzl4swsnk2knylx6jgw5a";
  isLibrary = true;
  isExecutable = true;
  enableSeparateDataOutput = true;
  libraryHaskellDepends = [
    aeson base binary blaze-html blaze-markup bytestring cmdargs
    conduit conduit-extra containers crypton-connection deepseq
    directory extra filepath foundation hashable haskell-src-exts
    http-conduit http-types js-flot js-jquery mmap old-locale
    process-extras QuickCheck resourcet safe storable-tuple tar
    template-haskell text time transformers uniplate utf8-string vector
    wai wai-logger warp warp-tls zlib
  ];
  executableHaskellDepends = [ base ];
  testTarget = "--test-option=--no-net";
  homepage = "https://hoogle.haskell.org/";
  description = "Haskell API Search";
  license = lib.licenses.bsd3;
  mainProgram = "hoogle";
}
45 nix/pantry.nix Normal file
@@ -0,0 +1,45 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, aeson, ansi-terminal, base, bytestring, Cabal
, casa-client, casa-types, conduit, conduit-extra, containers
, cryptonite, cryptonite-conduit, digest, exceptions, fetchzip
, filelock, generic-deriving, hackage-security, hedgehog, hpack
, hspec, http-client, http-client-tls, http-conduit, http-download
, http-types, lib, memory, mtl, network-uri, path, path-io
, persistent, persistent-sqlite, persistent-template, primitive
, QuickCheck, raw-strings-qq, resourcet, rio, rio-orphans
, rio-prettyprint, tar-conduit, text, text-metrics, time
, transformers, unix-compat, unliftio, unordered-containers, vector
, yaml, zip-archive
}:
mkDerivation {
  pname = "pantry";
  version = "0.5.7";
  src = fetchzip {
    url = "https://github.com/commercialhaskell/pantry/archive/5df643cc1deb561d9c52a9cb6f593aba2bc4c08e.zip";
    sha256 = "15m9ggg5jf30c1lxi0wgn76savrarwr2khzcd1rpnprdq3jnppzs";
  };
  libraryHaskellDepends = [
    aeson ansi-terminal base bytestring Cabal casa-client casa-types
    conduit conduit-extra containers cryptonite cryptonite-conduit
    digest filelock generic-deriving hackage-security hpack http-client
    http-client-tls http-conduit http-download http-types memory mtl
    network-uri path path-io persistent persistent-sqlite
    persistent-template primitive resourcet rio rio-orphans
    rio-prettyprint tar-conduit text text-metrics time transformers
    unix-compat unliftio unordered-containers vector yaml zip-archive
  ];
  testHaskellDepends = [
    aeson ansi-terminal base bytestring Cabal casa-client casa-types
    conduit conduit-extra containers cryptonite cryptonite-conduit
    digest exceptions filelock generic-deriving hackage-security
    hedgehog hpack hspec http-client http-client-tls http-conduit
    http-download http-types memory mtl network-uri path path-io
    persistent persistent-sqlite persistent-template primitive
    QuickCheck raw-strings-qq resourcet rio rio-orphans rio-prettyprint
    tar-conduit text text-metrics time transformers unix-compat
    unliftio unordered-containers vector yaml zip-archive
  ];
  homepage = "https://github.com/commercialhaskell/pantry#readme";
  description = "Content addressable Haskell package management";
  license = lib.licenses.bsd3;
}
12 nix/safe.nix Normal file
@@ -0,0 +1,12 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, base, deepseq, lib, QuickCheck }:
mkDerivation {
  pname = "safe";
  version = "0.3.20";
  sha256 = "ba9983610f9004a2ab67f5ddf11c9dff34f753b9fe11259f1ff77c2f3166df24";
  libraryHaskellDepends = [ base ];
  testHaskellDepends = [ base deepseq QuickCheck ];
  homepage = "https://github.com/ndmitchell/safe#readme";
  description = "Library of safe (exception free) functions";
  license = lib.licenses.bsd3;
}
77 nix/stackage-server.nix Normal file
@@ -0,0 +1,77 @@
# Generated by ./gen-packages.sh
{ mkDerivation, aeson, amazonka, amazonka-core, amazonka-s3
, auto-update, barrier, base, blaze-html, blaze-markup, bytestring
, Cabal, casa-client, classy-prelude, classy-prelude-conduit
, classy-prelude-yesod, cmark-gfm, conduit, conduit-extra
, containers, deepseq, directory, email-validate, esqueleto
, exceptions, fast-logger, file-embed, filepath, formatting, gauge
, ghc-prim, haddock-library, hashable, hoogle, hpack, html-conduit
, http-client, http-conduit, http-types, lens, lib, monad-logger
, mono-traversable, mtl, optparse-applicative, pantry, path
, path-io, path-pieces, persistent, persistent-postgresql
, persistent-sqlite, persistent-template, process, resource-pool
, resourcet, retry, rio, shakespeare, streaming-commons
, tar-conduit, template-haskell, text, these, transformers
, unliftio, unordered-containers, wai, wai-extra, wai-logger, warp
, xml-conduit, xml-types, yaml, yesod, yesod-auth, yesod-core
, yesod-form, yesod-gitrepo, yesod-gitrev, yesod-newsfeed
, yesod-sitemap, yesod-static, zlib
}:
mkDerivation {
  pname = "stackage-server";
  version = "0.0.0";
  src = ../.;
  isLibrary = true;
  isExecutable = true;
  libraryHaskellDepends = [
    aeson amazonka amazonka-core amazonka-s3 auto-update barrier base
    blaze-html blaze-markup bytestring Cabal classy-prelude
    classy-prelude-conduit classy-prelude-yesod cmark-gfm conduit
    conduit-extra containers deepseq directory email-validate esqueleto
    exceptions fast-logger file-embed filepath formatting ghc-prim
    haddock-library hashable hoogle html-conduit http-client
    http-conduit http-types lens monad-logger mono-traversable mtl
    pantry path path-pieces persistent persistent-postgresql
    persistent-sqlite persistent-template process resource-pool
    resourcet retry rio shakespeare streaming-commons tar-conduit
    template-haskell text these transformers unliftio
    unordered-containers wai wai-extra wai-logger warp xml-conduit
    xml-types yaml yesod yesod-auth yesod-core yesod-form yesod-gitrepo
    yesod-gitrev yesod-newsfeed yesod-sitemap yesod-static zlib
  ];
  libraryToolDepends = [ hpack ];
  executableHaskellDepends = [
    aeson amazonka amazonka-core amazonka-s3 auto-update barrier base
    blaze-html blaze-markup bytestring Cabal classy-prelude
    classy-prelude-conduit classy-prelude-yesod cmark-gfm conduit
    conduit-extra containers deepseq directory email-validate esqueleto
    exceptions fast-logger file-embed filepath formatting ghc-prim
    haddock-library hashable hoogle html-conduit http-client
    http-conduit http-types lens monad-logger mono-traversable mtl
    optparse-applicative pantry path path-pieces persistent
    persistent-postgresql persistent-sqlite persistent-template process
    resource-pool resourcet retry rio shakespeare streaming-commons
    tar-conduit template-haskell text these transformers unliftio
    unordered-containers wai wai-extra wai-logger warp xml-conduit
    xml-types yaml yesod yesod-auth yesod-core yesod-form yesod-gitrepo
    yesod-gitrev yesod-newsfeed yesod-sitemap yesod-static zlib
  ];
  benchmarkHaskellDepends = [
    aeson amazonka amazonka-core amazonka-s3 auto-update barrier base
    blaze-html blaze-markup bytestring Cabal casa-client classy-prelude
    classy-prelude-conduit classy-prelude-yesod cmark-gfm conduit
    conduit-extra containers deepseq directory email-validate esqueleto
    exceptions fast-logger file-embed filepath formatting gauge
    ghc-prim haddock-library hashable hoogle html-conduit http-client
    http-conduit http-types lens monad-logger mono-traversable mtl
    pantry path path-io path-pieces persistent persistent-postgresql
    persistent-sqlite persistent-template process resource-pool
    resourcet retry rio shakespeare streaming-commons tar-conduit
    template-haskell text these transformers unliftio
    unordered-containers wai wai-extra wai-logger warp xml-conduit
    xml-types yaml yesod yesod-auth yesod-core yesod-form yesod-gitrepo
    yesod-gitrev yesod-newsfeed yesod-sitemap yesod-static zlib
  ];
  prePatch = "hpack";
  license = lib.licenses.mit;
}
30 nix/tls.nix Normal file
@@ -0,0 +1,30 @@
# Generated by ./gen-package-nix.sh
{ mkDerivation, asn1-encoding, asn1-types, async, base, bytestring
, cereal, crypton, crypton-x509, crypton-x509-store
, crypton-x509-validation, data-default-class, gauge, hourglass
, lib, memory, mtl, network, QuickCheck, tasty, tasty-quickcheck
, transformers, unix-time
}:
mkDerivation {
  pname = "tls";
  version = "1.8.0";
  sha256 = "4a8486df3f1bd865753e7ac5f89bb252401fb91c8350226285e1075a78919808";
  libraryHaskellDepends = [
    asn1-encoding asn1-types async base bytestring cereal crypton
    crypton-x509 crypton-x509-store crypton-x509-validation
    data-default-class memory mtl network transformers unix-time
  ];
  testHaskellDepends = [
    asn1-types async base bytestring crypton crypton-x509
    crypton-x509-validation data-default-class hourglass QuickCheck
    tasty tasty-quickcheck
  ];
  benchmarkHaskellDepends = [
    asn1-types async base bytestring crypton crypton-x509
    crypton-x509-validation data-default-class gauge hourglass
    QuickCheck tasty-quickcheck
  ];
  homepage = "https://github.com/haskell-tls/hs-tls";
  description = "TLS/SSL protocol native implementation (Server and Client)";
  license = lib.licenses.bsd3;
}
45 package.nix Normal file
@@ -0,0 +1,45 @@
{ pkgs }:

let
  hlib = pkgs.haskell.lib;
  hpkgs = pkgs.haskellPackages.override {
    overrides = self: super: {

      stackage-server = hlib.overrideCabal (self.callPackage nix/stackage-server.nix { }) (old: {
        preConfigure = ''
          ${pkgs.hpack}/bin/hpack .
        '';
        # During build, static files are generated into the source tree's
        # static/ dir. Plus, config/ is needed at runtime.
        postInstall = ''
          mkdir -p $out/run
          cp -a {static,config} $out/run
        '';
        src = pkgs.lib.cleanSource old.src;
      });

      # patched, see gen-package-nix.sh
      amazonka-core = self.callPackage nix/amazonka-core.nix { };

      # We have this old dependency for unexplored reasons.
      # Tests fail from attempted network access.
      pantry = pkgs.lib.pipe (self.callPackage nix/pantry.nix { }) [hlib.dontCheck hlib.doJailbreak];

      # Changing this has operational impacts.
      hoogle = self.callPackage nix/hoogle.nix { };

      # Outdated breakage? (TODO: upstream)
      barrier = hlib.markUnbroken super.barrier;

      # Tests fail from attempted network access (TODO: upstream)
      yesod-gitrev = hlib.markUnbroken (hlib.dontCheck super.yesod-gitrev);
    };
  };
in
{
  app = hlib.justStaticExecutables hpkgs.stackage-server;
  shell = hpkgs.shellFor {
    packages = p: [ p.stackage-server ];
    buildInputs = [ pkgs.cabal-install pkgs.haskell-language-server pkgs.ghcid pkgs.haskellPackages.yesod-bin pkgs.postgresql ];
  };
}
161 package.yaml Normal file
@@ -0,0 +1,161 @@
name: stackage-server

flags:
  library-only:
    description: Build for use with "yesod devel"
    manual: false
    default: false
  dev:
    description: Turn on development settings, like auto-reload templates.
    manual: false
    default: false

dependencies:
- base
- yesod
- aeson
- barrier
- blaze-markup
- bytestring
- classy-prelude
- classy-prelude-yesod
- conduit
- conduit-extra
- directory
- email-validate
- esqueleto
- exceptions
- fast-logger
- ghc-prim
- html-conduit
- http-conduit
- monad-logger
- mtl
#- prometheus-client
#- prometheus-metrics-ghc
- pantry
- path
- persistent
- persistent-template
- resourcet
- rio
- shakespeare
- tar-conduit
- template-haskell
- text
- transformers
- these
- unliftio
- wai
- wai-extra
- wai-logger
#- wai-middleware-prometheus
- warp
- xml-conduit
- xml-types
- yaml
- yesod-auth
- yesod-core
- yesod-form
- yesod-newsfeed
- yesod-static
- zlib
- unordered-containers
- hashable
- Cabal >= 3.2
- mono-traversable
- process
- cmark-gfm
- formatting
- blaze-html
- haddock-library
- yesod-gitrepo
- yesod-gitrev
- hoogle
- deepseq
- auto-update
- yesod-sitemap
- streaming-commons
- classy-prelude-conduit
- path-pieces
- persistent-postgresql
- persistent-sqlite
- filepath
- http-client
- http-types
- amazonka
- amazonka-core
- amazonka-s3
- lens
- file-embed
- resource-pool
- containers
- retry

default-extensions:
- OverloadedStrings

library:
  source-dirs: src
  when:
  - condition: (flag(dev)) || (flag(library-only))
    then:
      ghc-options:
      - -Wall
      - -O0
      cpp-options: -DDEVELOPMENT
    else:
      ghc-options:
      - -Wall
      - -O

executables:
  stackage-server:
    main: main.hs
    source-dirs: app
    ghc-options: -Wall -threaded -O -rtsopts -with-rtsopts=-N
    dependencies:
    - stackage-server
    when:
    - condition: flag(library-only)
      buildable: false
    - condition: flag(dev)
      then:
        other-modules: DevelMain
        dependencies:
        - foreign-store
      else:
        other-modules: []

  stackage-server-cron:
    main: stackage-server-cron.hs
    source-dirs: app
    other-modules: []
    ghc-options:
    - -Wall
    - -threaded
    - -O
    - -rtsopts
    - -with-rtsopts=-N
    dependencies:
    - optparse-applicative
    - rio
    - stackage-server
    when:
    - condition: flag(library-only)
      buildable: false
    - condition: flag(dev)
      cpp-options: -DDEVELOPMENT

benchmarks:
  stackage-bench:
    main: main.hs
    source-dirs: bench
    dependencies:
    - stackage-server
    - gauge
    - deepseq
    - path-io
    - casa-client
    ghc-options:
    - -O
7 shell.nix Normal file
@@ -0,0 +1,7 @@
(import (
  fetchGit {
    url = "https://github.com/edolstra/flake-compat";
  }
) {
  src = ./.;
}).shellNix
272 src/Application.hs Normal file
@@ -0,0 +1,272 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE BlockArguments #-}

module Application
    ( App
    , withApplicationDev
    , withFoundationDev
    , makeApplication
    , appMain
    , develMain
    , withFoundation
    , makeLogWare
    -- * for DevelMain
    , withApplicationRepl
    -- * for GHCI
    , handler
    ) where

import Control.AutoUpdate
import Control.Concurrent (threadDelay)
import Control.Monad.Logger (liftLoc)
import Data.WebsiteContent
import Import hiding (catch)
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai (Middleware, rawPathInfo, pathInfo, responseBuilder)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
                                 defaultShouldDisplayException, getPort,
                                 runSettings, setHost, setOnException, setPort)
import Network.Wai.Middleware.ForceSSL (forceSSL)
import Network.Wai.Middleware.RequestLogger (Destination(Logger),
                                             IPAddrSource(..), OutputFormat(..),
                                             destination, mkRequestLogger,
                                             outputFormat)
import RIO (LogFunc, LogOptions, logOptionsHandle, withLogFunc, runRIO, logError, displayShow)
import RIO.Prelude.Simple (runSimpleApp)
import Stackage.Database (withStackageDatabase)
import Stackage.Database.Cron (newHoogleLocker, singleRun)
import Stackage.Database.Github (getStackageContentDir)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet, toLogStr)
import Yesod.Core.Types (loggerSet)
import Yesod.Default.Config2
import Yesod.Default.Handlers
import Yesod.GitRepo
import Yesod.GitRev (GitRev(..))

-- Import all relevant handler modules here.
import Handler.Blog
import Handler.BuildPlan
import Handler.Download
import Handler.DownloadStack
import Handler.Feed
import Handler.Haddock
import Handler.Home
import Handler.Hoogle
import Handler.MirrorStatus
import Handler.OldLinks
import Handler.Package
import Handler.PackageDeps
import Handler.PackageList
import Handler.Sitemap
import Handler.Snapshots
import Handler.StackageHome
import Handler.StackageIndex
import Handler.StackageSdist
import Handler.Stats
import Handler.System

--import Network.Wai.Middleware.Prometheus (prometheus)
--import Prometheus (register)
--import Prometheus.Metric.GHC (ghcMetrics)

-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp

-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: App -> IO Application
makeApplication foundation = do
    logWare <- makeLogWare foundation
    -- Create the WAI application and apply middlewares
    appPlain <- toWaiAppPlain foundation

    let middleware = id -- prometheus def
            . healthz
#if !DEVELOPMENT
            . forceSSL' (appSettings foundation)
#endif
            . logWare
            . defaultMiddlewaresNoLogging

    -- FIXME prometheus void (register ghcMetrics)

    return (middleware appPlain)

-- | Bypass any overhead from Yesod
healthz :: Middleware
healthz app req send =
    case pathInfo req of
        ["healthz"] -> send $ responseBuilder status200 [("content-type", "text/plain; charset=utf-8")] "OK"
        _ -> app req send

forceSSL' :: AppSettings -> Middleware
forceSSL' settings app
    | appForceSsl settings = \req send ->
        -- Don't force SSL for tarballs, to provide 00-index.tar.gz and package
        -- tarball access for cabal-install
        if ".tar.gz" `isSuffixOf` rawPathInfo req
            then app req send
            else forceSSL app req send
    | otherwise = app

-- | Loads up any necessary settings, creates your foundation datatype, and
-- performs some initialization.
--
-- Some basic initializations: HTTP connection manager, logger, and static
-- subsite.
withFoundation :: LogFunc -> AppSettings -> (App -> IO a) -> IO a
withFoundation appLogFunc appSettings inner = do
    appHttpManager <- newManager
    appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
    appStatic <-
        (if appMutableStatic appSettings
             then staticDevel
             else static)
            (appStaticDir appSettings)
    appWebsiteContent <-
        if appDevDownload appSettings
            then do
                fp <- runSimpleApp $ getStackageContentDir "."
                gitRepoDev fp loadWebsiteContent
            else gitRepo "https://github.com/commercialhaskell/stackage-content.git" "master" loadWebsiteContent
    let runContentUpdates =
            Concurrently $
            forever $
            void $ do
                threadDelay $ 1000 * 1000 * 60 * 5
                handleAny (runRIO appLogFunc . RIO.logError . fromString . displayException) $
                    grRefresh appWebsiteContent
    withStackageDatabase (appShouldLogAll appSettings) (appDatabase appSettings) $ \appStackageDatabase -> do
        appLatestStackMatcher <-
            mkAutoUpdateWithModify
                defaultUpdateSettings
                    { updateFreq = 1000 * 1000 * 60 * 30 -- update every thirty minutes
                    , updateAction = getLatestMatcher appHttpManager
                    }
                \oldMatcher -> getLatestMatcher appHttpManager `catchAny` \e -> do
                    runRIO appLogFunc $ RIO.logError $ "Couldn't get Stack matcher: " <> displayShow e
                    pure oldMatcher
        appMirrorStatus <- mkUpdateMirrorStatus
        hoogleLocker <- newHoogleLocker appLogFunc appHttpManager (appDownloadBucketUrl appSettings)
        let appGetHoogleDB = singleRun hoogleLocker
        let appGitRev = GitRev
                { gitRevHash = "invalid"
                , gitRevBranch = "invalid"
                , gitRevDirty = False
                , gitRevCommitDate = "2024-12-31"
                , gitRevCommitCount = 0
                , gitRevCommitMessage = "This page has been deprecated. Comment on https://github.com/commercialhaskell/stackage-server/issues/339 if this broke your workflow!"
                }
        runConcurrently $ runContentUpdates *> Concurrently (inner App {..})

getLogOpts :: AppSettings -> IO LogOptions
getLogOpts settings = logOptionsHandle stdout (appShouldLogAll settings)


makeLogWare :: App -> IO Middleware
makeLogWare foundation =
    mkRequestLogger def
        { outputFormat =
            if appDetailedRequestLogging $ appSettings foundation
                then Detailed True
                else Apache
                         (if appIpFromHeader $ appSettings foundation
                              then FromFallback
                              else FromSocket)
        , destination = Logger $ loggerSet $ appLogger foundation
        }


-- | Warp settings for the given foundation value.
warpSettings :: App -> Settings
warpSettings foundation =
    setPort (appPort $ appSettings foundation)
    $ setHost (appHost $ appSettings foundation)
    $ setOnException (\_req e ->
        when (defaultShouldDisplayException e) $ messageLoggerSource
            foundation
            (appLogger foundation)
            $(qLocation >>= liftLoc)
            "yesod"
            LevelError
            (toLogStr $ "Exception from Warp: " ++ show e))
      defaultSettings

-- | For yesod devel, apply an action to Warp settings, RIO's LogFunc and Foundation.
withFoundationDev :: (Settings -> App -> IO a) -> IO a
withFoundationDev inner = do
    appSettings <- getAppSettings
    logOpts <- getLogOpts appSettings
    withLogFunc logOpts $ \logFunc ->
        withFoundation logFunc appSettings $ \foundation -> do
            settings <- getDevSettings $ warpSettings foundation
            inner settings foundation


withApplicationDev :: (Settings -> Application -> IO a) -> IO a
withApplicationDev inner =
    withFoundationDev $ \ settings foundation -> do
        application <- makeApplication foundation
        inner settings application

-- | main function for use by yesod devel
develMain :: IO ()
develMain = withApplicationDev $ \settings app -> develMainHelper (pure (settings, app))

-- | The @main@ function for an executable running this site.
appMain :: IO ()
appMain = do
    -- Get the settings from all relevant sources
    settings <- loadYamlSettingsArgs
        -- fall back to compile-time values, set to [] to require values at runtime
        [configSettingsYmlValue]

        -- allow environment variables to override
        useEnv
    logOpts <- getLogOpts settings
    withLogFunc logOpts $ \ logFunc -> do
        -- Generate the foundation from the settings
        withFoundation logFunc settings $ \ foundation -> do

            -- Generate a WAI Application from the foundation
            app <- makeApplication foundation

            -- Run the application with Warp
            runSettings (warpSettings foundation) app


--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
withApplicationRepl :: (Int -> App -> Application -> IO ()) -> IO ()
withApplicationRepl inner = do
    settings <- getAppSettings
    logOpts <- getLogOpts settings
    withLogFunc logOpts $ \ logFunc ->
        withFoundation logFunc settings $ \foundation -> do
            wsettings <- getDevSettings $ warpSettings foundation
            app1 <- makeApplication foundation
            inner (getPort wsettings) foundation app1


---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------

-- | Run a handler
handler :: Handler a -> IO a
handler h = do
    logOpts <- logOptionsHandle stdout True
    withLogFunc logOpts $ \ logFunc -> do
        settings <- getAppSettings
        withFoundation logFunc settings (`unsafeHandler` h)
109 src/Control/SingleRun.hs Normal file
@@ -0,0 +1,109 @@
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | Ensure that a function is only being run on a given input in one
-- thread at a time. All threads trying to make the call at once
-- return the same result.
module Control.SingleRun
    ( SingleRun
    , mkSingleRun
    , singleRun
    ) where

import RIO

-- | Captures all of the locking machinery and the function which is
-- run to generate results. Use 'mkSingleRun' to create this value.
data SingleRun k v = SingleRun
    { srVar :: MVar [(k, MVar (Res v))]
    -- ^ Keys and the variables containing their blocked
    -- computations. More ideal would be to use a Map, but we're
    -- avoiding dependencies outside of base in case this moves into
    -- auto-update.
    , srFunc :: forall m . MonadIO m => k -> m v
    }

-- | Create a 'SingleRun' value out of a function.
mkSingleRun :: MonadIO m => Eq k
            => (forall n . MonadIO n => k -> n v)
            -> m (SingleRun k v)
mkSingleRun f = do
    var <- newMVar []
    return SingleRun
        { srVar = var
        , srFunc = f
        }

data Res v = SyncException SomeException
           | AsyncException SomeException
           | Success v

toRes :: SomeException -> Res v
toRes se =
    case fromException se of
        Just (SomeAsyncException _) -> AsyncException se
        Nothing -> SyncException se

-- | Get the result for the given input. If any other thread is
-- currently running this same computation, our thread will block on
-- that thread's result and then return it.
--
-- In the case that the other thread dies from a synchronous
-- exception, we will rethrow that same synchronous exception. If,
-- however, that other thread dies from an asynchronous exception, we
-- will retry.
singleRun :: (MonadUnliftIO m, Eq k) => SingleRun k v -> k -> m v
singleRun sr@(SingleRun var f) k =
    -- Mask all exceptions so that we don't get killed between exiting
    -- the modifyMVar and entering the join, which could leave an
    -- empty MVar for a result that will never be filled.
    mask $ \restore ->
    join $ modifyMVar var $ \pairs ->
        case lookup k pairs of
            -- Another thread is already working on this, grab its result
            Just res -> do
                let action = restore $ do
                        res' <- readMVar res
                        case res' of
                            -- Other thread died by sync exception, rethrow
                            SyncException e -> throwIO e
                            -- Async exception, ignore and try again
                            AsyncException _ -> singleRun sr k
                            -- Success!
                            Success v -> return v
                -- Return unmodified pairs
                return (pairs, action)

            -- No other thread working
            Nothing -> do
                -- MVar we'll add to pairs to store the result and
                -- share with other threads
                resVar <- newEmptyMVar
                let action = do
                        -- Run the action and capture all exceptions
                        eres <- try $ restore $ f k

                        -- OK, we're done running, so let other
                        -- threads run this again.

                        -- NB: as soon as we've modified the MVar, the next
                        -- call to singleRun will think no thread is working and
                        -- start over. Anything waiting on us will get our
                        -- result, but nobody else will. That's ok: singleRun
                        -- just provides a little caching on top of a mutex.
                        modifyMVar_ var $ return . filter (\(k', _) -> k /= k')

                        case eres of
                            -- Exception occurred. We'll rethrow it,
                            -- and store the exceptional result in the
                            -- result variable.
                            Left e -> do
                                putMVar resVar $ toRes e
                                throwIO e
                            -- Success! Store in the result variable
                            -- and return it
                            Right v -> do
                                putMVar resVar $ Success v
                                return v

                -- Modify pairs to include this variable.
                return ((k, resVar) : pairs, action)
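
A hedged usage sketch of the module above: concurrent callers passing the same key share a single run of the wrapped action. All names other than mkSingleRun and singleRun are illustrative:

-- Sketch only: fetchRemote stands in for an expensive action such as
-- downloading a Hoogle database; it is not code from this repository.
import Control.Concurrent.Async (concurrently)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.SingleRun (mkSingleRun, singleRun)

demo :: IO ()
demo = do
    sr <- mkSingleRun fetchRemote
    -- Both calls use the same key, so only one fetch actually runs;
    -- both threads receive that one result.
    (a, b) <- concurrently (singleRun sr "db.hoo") (singleRun sr "db.hoo")
    print (a, b)
  where
    fetchRemote :: MonadIO n => String -> n Int
    fetchRemote key = liftIO $ do
        putStrLn ("fetching " ++ key)
        pure (length key)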
44
src/Data/GhcLinks.hs
Normal file
44
src/Data/GhcLinks.hs
Normal file
@ -0,0 +1,44 @@
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.GhcLinks
    ( GhcLinks(..)
    , readGhcLinks
    ) where

import Control.Monad.State.Strict (execStateT, modify)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Yaml as Yaml
import RIO
import RIO.FilePath
import RIO.Text (unpack)
import System.Directory
import Web.PathPieces

import Types


newtype GhcLinks = GhcLinks
    { ghcLinksMap :: HashMap (SupportedArch, GhcMajorVersion) Text }
    -- ^ a map from (arch, ver) to yaml

supportedArches :: [SupportedArch]
supportedArches = [minBound .. maxBound]

readGhcLinks :: FilePath -> IO GhcLinks
readGhcLinks dir = do
    let ghcMajorVersionsPath = dir </> "supported-ghc-major-versions.yaml"
    Yaml.decodeFileEither ghcMajorVersionsPath >>= \case
        Left _ -> return $ GhcLinks HashMap.empty
        Right (ghcMajorVersions :: [GhcMajorVersion]) -> do
            let opts = [(arch, ver) | arch <- supportedArches, ver <- ghcMajorVersions]
            hashMap <-
                flip execStateT HashMap.empty $
                    forM_ opts $ \(arch, ver) -> do
                        let verText = textDisplay ver
                            fileName = "ghc-" <> verText <> "-links.yaml"
                            path = dir </> unpack (toPathPiece arch) </> unpack fileName
                        whenM (liftIO $ doesFileExist path) $ do
                            text <- liftIO $ readFileUtf8 path
                            modify (HashMap.insert (arch, ver) text)
            return $ GhcLinks hashMap
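
-- Expected on-disk layout, inferred from the path construction above
-- (the per-arch directory names come from 'toPathPiece' on 'SupportedArch';
-- the names shown here are illustrative placeholders, not confirmed values):
--
--   <dir>/supported-ghc-major-versions.yaml    -- e.g. [8.4, 8.6]
--   <dir>/<arch>/ghc-8.4-links.yaml
--   <dir>/<arch>/ghc-8.6-links.yaml
--
-- Missing per-arch files are skipped silently; each file's raw YAML text
-- is stored under its (arch, version) key.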

src/Data/WebsiteContent.hs (new file, 106 lines)
@@ -0,0 +1,106 @@
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Data.WebsiteContent
    ( WebsiteContent (..)
    , StackRelease (..)
    , Post (..)
    , loadWebsiteContent
    ) where

import ClassyPrelude.Yesod
import CMarkGFM
import Data.GhcLinks
import Data.Yaml
import System.FilePath (takeFileName)
import Text.Blaze.Html (preEscapedToHtml)
import Types

data WebsiteContent = WebsiteContent
    { wcHomepage :: !Html
    , wcAuthors :: !Html
    , wcOlderReleases :: !Html
    , wcGhcLinks :: !GhcLinks
    , wcStackReleases :: ![StackRelease]
    , wcPosts :: !(Vector Post)
    , wcSpamPackages :: !(Set PackageNameP)
    -- ^ Packages considered spam which should not be displayed.
    }

data Post = Post
    { postTitle :: !Text
    , postSlug :: !Text
    , postAuthor :: !Text
    , postTime :: !UTCTime
    , postDescription :: !Text
    , postBody :: !Html
    }

loadWebsiteContent :: FilePath -> IO WebsiteContent
loadWebsiteContent dir = do
    wcHomepage <- readHtml "homepage.html"
    wcAuthors <- readHtml "authors.html"
    wcOlderReleases <- readHtml "older-releases.html" `catchIO`
                       \_ -> readMarkdown "older-releases.md"
    wcGhcLinks <- readGhcLinks $ dir </> "stackage-cli"
    wcStackReleases <- decodeFileEither (dir </> "stack" </> "releases.yaml")
                   >>= either throwIO return
    wcPosts <- loadPosts (dir </> "posts") `catchAny` \e -> do
        putStrLn $ "Error loading posts: " ++ tshow e
        return mempty
    wcSpamPackages <- decodeFileEither (dir </> "spam-packages.yaml")
                  >>= either throwIO (return . setFromList)
    return WebsiteContent {..}
  where
    readHtml fp = fmap preEscapedToMarkup $ readFileUtf8 $ dir </> fp
    readMarkdown fp = fmap (preEscapedToHtml . commonmarkToHtml
                                [optSmart]
                                [extTable, extAutolink])
                    $ readFileUtf8 $ dir </> fp

loadPosts :: FilePath -> IO (Vector Post)
loadPosts dir =
    fmap (sortBy (\x y -> postTime y `compare` postTime x))
      $ runConduitRes
      $ sourceDirectory dir
     .| concatMapC (stripSuffix ".md")
     .| mapMC loadPost
     .| sinkVector
  where
    loadPost :: FilePath -> ResourceT IO Post
    loadPost noExt = handleAny (\e -> throwString $ "Could not parse " ++ noExt ++ ".md: " ++ show e) $ do
        bs <- readFile $ noExt ++ ".md"
        let slug = pack $ takeFileName noExt
            text = filter (/= '\r') $ decodeUtf8 bs
        (frontmatter, body) <-
            case lines text of
                "---":rest ->
                    case break (== "---") rest of
                        (frontmatter, "---":body) -> return (unlines frontmatter, unlines body)
                        _ -> error "Missing closing --- on frontmatter"
                _ -> error "Does not start with --- frontmatter"
        case Data.Yaml.decodeEither' $ encodeUtf8 frontmatter of
            Left e -> throwIO e
            Right mkPost -> return $ mkPost slug $ preEscapedToHtml $ commonmarkToHtml
                [optSmart]
                [extTable, extAutolink]
                body

instance (slug ~ Text, body ~ Html) => FromJSON (slug -> body -> Post) where
    parseJSON = withObject "Post" $ \o -> do
        postTitle <- o .: "title"
        postAuthor <- o .: "author"
        postTime <- o .: "timestamp"
        postDescription <- o .: "description"
        return $ \postSlug postBody -> Post {..}
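
-- A sample post file that loadPost above accepts (contents hypothetical;
-- the field names are exactly those required by the FromJSON instance,
-- and the file must be named <slug>.md inside the posts directory):
--
--   ---
--   title: Example post
--   author: Jane Doe
--   timestamp: 2018-06-21T00:00:00Z
--   description: A one-line summary shown in the feed.
--   ---
--   The **markdown** body starts after the closing --- line.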
data StackRelease = StackRelease
    { srName :: !Text
    , srPattern :: !Text
    }
instance FromJSON StackRelease where
    parseJSON = withObject "StackRelease" $ \o -> StackRelease
        <$> o .: "name"
        <*> o .: "pattern"

src/Distribution/Package/ModuleForest.hs (new file, 43 lines)
@@ -0,0 +1,43 @@
-- Adopted from https://github.com/haskell/hackage-server/blob/master/Distribution/Server/Packages/ModuleForest.hs
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ViewPatterns #-}
module Distribution.Package.ModuleForest
    ( moduleName
    , moduleForest
    , ModuleTree(..)
    , ModuleForest
    , NameComponent
    ) where

import Distribution.ModuleName (ModuleName)
import qualified Distribution.ModuleName as ModuleName
import RIO
import RIO.Text (pack, unpack)

type NameComponent = Text

type ModuleForest = [ModuleTree]
data ModuleTree = Node { component :: NameComponent
                       , isModule :: Bool
                       , subModules :: ModuleForest
                       }
    deriving (Show, Eq)

moduleName :: Text -> ModuleName
moduleName = ModuleName.fromString . unpack

moduleForest :: [ModuleName] -> ModuleForest
moduleForest = foldr (addToForest . map pack . ModuleName.components) []

addToForest :: [NameComponent] -> ModuleForest -> ModuleForest
addToForest [] trees = trees
addToForest comps [] = mkSubTree comps
addToForest comps@(comp1:cs) (t@(component -> comp2):ts) =
    case compare comp1 comp2 of
        GT -> t : addToForest comps ts
        EQ -> Node comp2 (isModule t || null cs) (addToForest cs (subModules t)) : ts
        LT -> mkSubTree comps ++ t : ts

mkSubTree :: [Text] -> ModuleForest
mkSubTree [] = []
mkSubTree (c:cs) = [Node c (null cs) (mkSubTree cs)]
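
-- A worked example (illustration only): the forest is kept sorted by
-- component, so building from ["Data.List", "Data.Map", "Control.Monad"]
-- merges the two Data.* modules under one "Data" node:
--
--   moduleForest (map moduleName ["Data.List", "Data.Map", "Control.Monad"])
--
-- yields, writing Node component isModule subModules:
--
--   [ Node "Control" False [Node "Monad" True []]
--   , Node "Data"    False [Node "List" True [], Node "Map" True []]
--   ]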

src/Foundation.hs (new file, 166 lines)
@@ -0,0 +1,166 @@
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module Foundation where

import ClassyPrelude.Yesod
import Data.WebsiteContent
import Settings
import Settings.StaticFiles
import Stackage.Database
import Handler.StackageHome.Types (ApiSnapshotName(..))
import Text.Hamlet (hamletFile)
import Types
import Yesod.AtomFeed
import Yesod.Core.Types (Logger)
import qualified Yesod.Core.Unsafe as Unsafe
import Yesod.GitRepo
import Yesod.GitRev (GitRev)
import qualified RIO

-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { appSettings :: !AppSettings
    , appStatic :: !Static -- ^ Settings for static file serving.
    , appHttpManager :: !Manager
    , appLogger :: !Logger
    , appLogFunc :: !RIO.LogFunc
    , appWebsiteContent :: !(GitRepo WebsiteContent)
    , appStackageDatabase :: !StackageDatabase
    , appLatestStackMatcher :: !(IO (Text -> Maybe Text))
    -- ^ Give a pattern, get a URL
    , appMirrorStatus :: !(IO (Status, WidgetFor App ()))
    , appGetHoogleDB :: !(SnapName -> IO (Maybe FilePath))
    , appGitRev :: !GitRev
    }

instance HasHttpManager App where
    getHttpManager = appHttpManager

-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")

unsafeHandler :: App -> Handler a -> IO a
unsafeHandler = Unsafe.fakeHandlerGetLogger appLogger

defaultLayoutNoContainer :: Widget -> Handler Html
defaultLayoutNoContainer = defaultLayoutWithContainer False

defaultLayoutWithContainer :: Bool -> Widget -> Handler Html
defaultLayoutWithContainer insideContainer widget = do
    mmsg <- getMessage

    -- We break up the default layout into two components:
    -- default-layout is the contents of the body tag, and
    -- default-layout-wrapper is the entire page. Since the final
    -- value passed to hamletToRepHtml cannot be a widget, this allows
    -- you to use normal widget features in default-layout.

    pc <- widgetToPageContent $ do
        $(combineStylesheets 'StaticR
            [ css_normalize_css
            , css_bootstrap_css
            , css_bootstrap_responsive_css
            ])
        $((combineScripts 'StaticR
            [ js_jquery_js
            , js_bootstrap_js
            ]))
        atomLink FeedR "Recent Stackage snapshots"
        $(widgetFile "default-layout")

    mcurr <- getCurrentRoute
    let notHome = mcurr /= Just HomeR

    withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")

-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    approot = ApprootRequest $ \app req ->
        case appRoot $ appSettings app of
            Nothing -> getApprootText guessApproot app req
            Just root -> root

    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = return Nothing

    defaultLayout = defaultLayoutWithContainer True

    {- MSS 2018-06-21 Not worrying about broken cabal-install anymore

    -- Ideally we would just have an approot that always includes https, and
    -- redirect users from non-SSL to SSL connections. However, cabal-install
    -- is broken, and does not support TLS. Therefore, we *don't* force the
    -- redirect.
    --
    -- Nonetheless, we want to keep generated links as https:// links. The
    -- problem is that sometimes CORS kicks in and breaks a static resource
    -- when loading from a non-secure page. So we have this ugly hack: whenever
    -- the destination is a static file, don't include the scheme or hostname.
    urlRenderOverride y route@StaticR{} =
        Just $ uncurry (joinPath y "") $ renderRoute route
    urlRenderOverride _ _ = Nothing
    -}

    {- Temporarily disable to allow for horizontal scaling
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent =
        addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
      where
        -- Generate a unique filename based on the content itself
        genFileName lbs
            | development = "autogen-" ++ base64md5 lbs
            | otherwise = base64md5 lbs
    -}

    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody

    -- What messages should be logged. The following includes all messages when
    -- in development, and warnings and errors in production.
    shouldLogIO _ "CLEANUP" _ = pure False
    shouldLogIO app _source level = pure $
        appShouldLogAll (appSettings app)
            || level == LevelWarn
            || level == LevelError

    makeLogger = return . appLogger

    maximumContentLength _ _ = Just 2000000

-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    renderMessage _ _ = defaultFormMessage

-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email

instance GetStackageDatabase App Handler where
    getStackageDatabase = appStackageDatabase <$> getYesod
    getLogFunc = appLogFunc <$> getYesod

instance GetStackageDatabase App (WidgetFor App) where
    getStackageDatabase = appStackageDatabase <$> getYesod
    getLogFunc = appLogFunc <$> getYesod

src/Handler/Blog.hs (new file, 73 lines)
@@ -0,0 +1,73 @@
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Handler.Blog
    ( getBlogHomeR
    , getBlogPostR
    , getBlogFeedR
    ) where

import Data.WebsiteContent
import Import
import Yesod.AtomFeed (atomLink)
import RIO.Time (getCurrentTime)

getAddPreview :: Handler (Route App -> (Route App, [(Text, Text)]))
getAddPreview = do
    mpreview <- lookupGetParam "preview"
    case mpreview of
        Just "true" -> return $ \route -> (route, [("preview", "true")])
        _ -> return $ \route -> (route, [])

getBlogHomeR :: Handler ()
getBlogHomeR = do
    cacheSeconds 3600
    posts <- getPosts
    case headMay posts of
        Nothing -> notFound
        Just post -> do
            addPreview <- getAddPreview
            redirect $ addPreview $ BlogPostR (postYear post) (postMonth post) (postSlug post)

getBlogPostR :: Year -> Month -> Text -> Handler Html
getBlogPostR year month slug = do
    cacheSeconds 3600
    posts <- getPosts
    post <- maybe notFound return $ find matches posts
    now <- getCurrentTime
    addPreview <- getAddPreview
    defaultLayout $ do
        setTitle $ toHtml $ postTitle post
        atomLink BlogFeedR "Stackage Curator blog"
        $(widgetFile "blog-post")
        toWidgetHead [shamlet|<meta name=og:description value=#{postDescription post}>|]
  where
    matches p = postYear p == year && postMonth p == month && postSlug p == slug

getBlogFeedR :: Handler TypedContent
getBlogFeedR = do
    cacheSeconds 3600
    posts <- fmap (take 10) getPosts
    latest <- maybe notFound return $ headMay posts
    newsFeed
        Feed
            { feedTitle = "Stackage Curator blog"
            , feedLinkSelf = BlogFeedR
            , feedLinkHome = HomeR
            , feedAuthor = "The Stackage Curator team"
            , feedDescription = "Messages from the Stackage Curators about the Stackage project"
            , feedLanguage = "en"
            , feedUpdated = postTime latest
            , feedLogo = Nothing
            , feedEntries = map toEntry $ toList posts
            }
  where
    toEntry post =
        FeedEntry
            { feedEntryLink = BlogPostR (postYear post) (postMonth post) (postSlug post)
            , feedEntryUpdated = postTime post
            , feedEntryTitle = postTitle post
            , feedEntryContent = postBody post
            , feedEntryEnclosure = Nothing
            , feedEntryCategories = []
            }

src/Handler/BuildPlan.hs (new file, 24 lines)
@@ -0,0 +1,24 @@
module Handler.BuildPlan where

import Import
--import Stackage.Types
--import Stackage.Database

getBuildPlanR :: SnapName -> Handler TypedContent
getBuildPlanR _slug = track "Handler.BuildPlan.getBuildPlanR" $ do
    error "temporarily disabled, please open an issue at https://github.com/commercialhaskell/stackage-server/issues/ if you need it"
    {-
    fullDeps <- (== Just "true") <$> lookupGetParam "full-deps"
    spec <- parseSnapshotSpec $ toPathPiece slug
    let set = setShellCommands simpleCommands
            $ setSnapshot spec
            $ setFullDeps fullDeps
              defaultSettings
    packages <- lookupGetParams "package" >>= mapM simpleParse
    when (null packages) $ invalidArgs ["Must provide at least one package"]
    toInstall <- liftIO $ getBuildPlan set packages
    selectRep $ do
        provideRep $ return $ toSimpleText toInstall
        provideRep $ return $ toJSON toInstall
        provideRepType "application/x-sh" $ return $ toShellScript set toInstall
    -}

src/Handler/Download.hs (new file, 47 lines)
@@ -0,0 +1,47 @@
{-# LANGUAGE NoImplicitPrelude #-}
module Handler.Download
    ( getDownloadR
    , getDownloadSnapshotsJsonR
    , getDownloadLtsSnapshotsJsonR
    , getGhcMajorVersionR
    , getDownloadGhcLinksR
    ) where

import RIO (textDisplay)
import Import
import Data.GhcLinks
import Yesod.GitRepo (grContent)
import Stackage.Database

getDownloadR :: Handler Html
getDownloadR = track "Hoogle.Download.getDownloadR" $
    redirectWith status301 InstallR

getDownloadSnapshotsJsonR :: Handler Value
getDownloadSnapshotsJsonR = track "Hoogle.Download.getDownloadSnapshotsJsonR"
    getDownloadLtsSnapshotsJsonR

getDownloadLtsSnapshotsJsonR :: Handler Value
getDownloadLtsSnapshotsJsonR = track "Hoogle.Download.getDownloadLtsSnapshotsJsonR" snapshotsJSON

-- Print the ghc major version for the given snapshot.
ghcMajorVersionText :: Snapshot -> Text
ghcMajorVersionText = textDisplay . keepMajorVersion . ghcVersion . snapshotCompiler

getGhcMajorVersionR :: SnapName -> Handler Text
getGhcMajorVersionR name = track "Hoogle.Download.getGhcMajorVersionR" $ do
    snapshot <- lookupSnapshot name >>= maybe notFound return
    return $ ghcMajorVersionText $ entityVal snapshot

getDownloadGhcLinksR :: SupportedArch -> Text -> Handler TypedContent
getDownloadGhcLinksR arch fName =
    track "Hoogle.Download.getDownloadGhcLinksR" $ do
        ver <-
            maybe notFound return $
            stripPrefix "ghc-" >=> stripSuffix "-links.yaml" >=> ghcMajorVersionFromText $ fName
        ghcLinks <- getYesod >>= fmap wcGhcLinks . liftIO . grContent . appWebsiteContent
        case lookup (arch, ver) (ghcLinksMap ghcLinks) of
            Just text -> return $ TypedContent yamlMimeType $ toContent text
            Nothing -> notFound
  where
    yamlMimeType = "text/yaml"
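
-- A worked trace of the filename parse above (illustration only):
--
--   stripPrefix "ghc-" "ghc-8.4-links.yaml"    ~ Just "8.4-links.yaml"
--   stripSuffix "-links.yaml" "8.4-links.yaml" ~ Just "8.4"
--   ghcMajorVersionFromText "8.4"              ~ Just (the 8.4 major version)
--
-- Any file name not of the form "ghc-<major>-links.yaml" fails the
-- Kleisli chain and the handler responds with a 404.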

src/Handler/DownloadStack.hs (new file, 51 lines)
@@ -0,0 +1,51 @@
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Handler.DownloadStack
    ( getDownloadStackListR
    , getDownloadStackR
    , getLatestMatcher
    ) where

import Data.Aeson.Parser (json)
import Data.Conduit.Attoparsec (sinkParser)
import Data.WebsiteContent
import Import
import Yesod.GitRepo
import qualified Data.Aeson.KeyMap as Aeson

getDownloadStackListR :: Handler Html
getDownloadStackListR = track "Handler.DownloadStack.getDownloadStackListR" $ do
    releases <- getYesod >>= fmap wcStackReleases . liftIO . grContent . appWebsiteContent
    defaultLayout $ do
        setTitle "Download Stack"
        $(widgetFile "download-stack-list")

getDownloadStackR :: Text -> Handler ()
getDownloadStackR pattern' = track "Handler.DownloadStack.getDownloadStackR" $ do
    matcher <- getYesod >>= liftIO . appLatestStackMatcher
    maybe notFound redirect $ matcher pattern'

-- | Creates a function which will find the latest release for a given pattern.
getLatestMatcher :: Manager -> IO (Text -> Maybe Text)
getLatestMatcher man = do
    let req = "https://api.github.com/repos/commercialhaskell/stack/releases/latest"
            { requestHeaders = [("User-Agent", "Stackage Server")]
            }
    val <- flip runReaderT man $ withResponse req
        $ \res -> runConduit $ responseBody res .| sinkParser json
    return $ \pattern' -> do
        let pattern'' = pattern' ++ "."
        Object top <- return val
        Array assets <- Aeson.lookup "assets" top
        headMay $ preferZip $ catMaybes $ map (findMatch pattern'') assets
  where
    findMatch pattern' (Object o) = do
        String name <- Aeson.lookup "name" o
        guard $ not $ ".asc" `isSuffixOf` name
        guard $ pattern' `isInfixOf` name
        String url <- Aeson.lookup "browser_download_url" o
        Just url
    findMatch _ _ = Nothing

    preferZip = map snd . sortOn fst . map
        (\x -> (if ".zip" `isSuffixOf` x then 0 else 1 :: Int, x))
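
-- A small usage sketch (illustrative; 'man' is any http-client 'Manager',
-- and the release URL shown is a made-up example):
--
--   matcher <- getLatestMatcher man
--   matcher "linux-x86_64"  -- e.g. Just ".../stack-2.9.3-linux-x86_64.tar.gz"
--
-- The trailing '.' appended to the pattern means "linux-x86_64" matches
-- "stack-2.9.3-linux-x86_64.tar.gz" but not a longer variant such as
-- "...-linux-x86_64-static.tar.gz", and preferZip ranks ".zip" assets
-- ahead of tarballs when both match.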

src/Handler/Feed.hs (new file, 122 lines)
@@ -0,0 +1,122 @@
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes #-}
module Handler.Feed
    ( getFeedR
    , getBranchFeedR
    ) where

import Data.These
import Import
import RIO.Time (getCurrentTime)
import Stackage.Database
import Stackage.Snapshot.Diff
import Text.Blaze (text)

getFeedR :: Handler TypedContent
getFeedR = track "Handler.Feed.getBranchFeedR" $ getBranchFeed Nothing

getBranchFeedR :: SnapshotBranch -> Handler TypedContent
getBranchFeedR = track "Handler.Feed.getBranchFeedR" . getBranchFeed . Just

getBranchFeed :: Maybe SnapshotBranch -> Handler TypedContent
getBranchFeed mBranch = do
    cacheSeconds 3600
    mkFeed mBranch =<< getSnapshots mBranch 20 0

mkFeed :: Maybe SnapshotBranch -> [Entity Snapshot] -> Handler TypedContent
mkFeed _ [] = notFound
mkFeed mBranch snaps = do
    entries <- forM snaps $ \(Entity snapid snap) -> do
        showsDiff <- doesShowDiff
        content <-
            if showsDiff
                then getContent snapid snap
                else return mempty
        return FeedEntry
            { feedEntryLink = SnapshotR (snapshotName snap) StackageHomeR
            , feedEntryUpdated = UTCTime (snapshotCreated snap) 0
            , feedEntryTitle = snapshotTitle snap
            , feedEntryContent = content
            , feedEntryEnclosure = Nothing
            , feedEntryCategories = []
            }
    updated <-
        case entries of
            [] -> getCurrentTime
            x:_ -> return $ feedEntryUpdated x
    newsFeed Feed
        { feedTitle = title
        , feedLinkSelf = FeedR
        , feedLinkHome = HomeR
        , feedAuthor = "Stackage Project"
        , feedDescription = text title
        , feedLanguage = "en"
        , feedUpdated = updated
        , feedEntries = entries
        , feedLogo = Nothing
        }
  where
    branchTitle NightlyBranch = "Nightly"
    branchTitle LtsBranch = "LTS"
    branchTitle (LtsMajorBranch x) = "LTS-" <> tshow x
    title = "Recent Stackage " <> maybe "" branchTitle mBranch <> " snapshots"

    doesShowDiff =
        (fmap fromPathPiece <$> lookupGetParam "withDiff") >>= \case
            Just (Just False) -> return False
            Just (Just True) -> return True
            Just Nothing -> notFound
            Nothing -> return True


getContent :: SnapshotId -> Snapshot -> Handler Html
getContent sid2 snap = do
    mprev <- snapshotBefore $ snapshotName snap
    case mprev of
        Nothing -> return "No previous snapshot found for comparison"
        Just (sid1, name1) -> do
            snapDiff <- getSnapshotDiff sid1 sid2
            let name2 = snapshotName snap
            withUrlRenderer
                [hamlet|
                    <p>Difference between #{snapshotPrettyNameShort name1} and #{snapshotPrettyNameShort $ snapshotName snap}
                    <table border=1 cellpadding=5>
                        <thead>
                            <tr>
                                <th align=right>Package name
                                <th align=right>Old
                                <th align=left>New
                        <tbody>
                            $forall (pkgname, VersionChange change, versionDiff) <- toVersionedDiffList snapDiff
                                <tr>
                                    <th align=right>#{pkgname}
                                    $case change
                                        $of This old
                                            <td align=right>
                                                <a href=@{packageUrl name1 pkgname old}#changes>
                                                    #{old}
                                            <td>
                                        $of That new
                                            <td align=right>
                                            <td>
                                                <a href=@{packageUrl name2 pkgname new}#changes>
                                                    #{new}
                                        $of These old new
                                            $maybe (common, left, right) <- versionDiff
                                                <td align=right>
                                                    <a href=@{packageUrl name1 pkgname old}#changes>
                                                        #{common}#
                                                        <del style="background-color: #fcc">#{left}
                                                <td>
                                                    <a href=@{packageUrl name2 pkgname new}#changes>
                                                        #{common}#
                                                        <ins style="background-color: #cfc">#{right}
                                            $nothing
                                                <td align=right>
                                                    <a href=@{packageUrl name1 pkgname old}#changes>
                                                        #{old}
                                                <td>
                                                    <a href=@{packageUrl name2 pkgname new}#changes>
                                                        #{new}
                |]

src/Handler/Haddock.hs (new file, 150 lines)
@@ -0,0 +1,150 @@
{-# LANGUAGE NoImplicitPrelude #-}
module Handler.Haddock
    ( getHaddockR
    , getHaddockBackupR
    ) where

import Import
import qualified Data.Text as T (takeEnd)
import Stackage.Database

makeURL :: SnapName -> [Text] -> Handler Text
makeURL snapName rest = do
    bucketUrl <- getsYesod (appDownloadBucketUrl . appSettings)
    pure . concat
        $ bucketUrl
        : "/"
        : toPathPiece snapName
        : map (cons '/') rest

shouldRedirect :: Bool
shouldRedirect = False

data DocType = DocHtml | DocJson

getHaddockR :: SnapName -> [Text] -> Handler TypedContent
getHaddockR snapName rest
    | shouldRedirect = do
        result <- redirectWithVersion snapName rest
        case result of
            Just route -> redirect route
            Nothing -> redirect =<< makeURL snapName rest
    | Just docType <- mdocType = do
        cacheSeconds $ 60 * 60 * 24 * 7
        result <- redirectWithVersion snapName rest
        case result of
            Just route -> redirect route
            Nothing -> do
                (contentType, plain) <-
                    case docType of
                        DocHtml -> do
                            mstyle <- lookupGetParam "style"
                            return ("text/html; charset=utf-8", mstyle /= Just "stackage")
                        DocJson ->
                            return ("application/json; charset=utf-8", True)
                req <- parseRequest =<< unpack <$> makeURL snapName rest
                man <- getHttpManager <$> getYesod
                (_, res) <- runReaderT (acquireResponse req >>= allocateAcquire) man
                if plain
                    then respondSource contentType $ responseBody res .| mapC (Chunk . toBuilder)
                    else do
                        extra <- getExtra
                        respondSource contentType $
                            responseBody res .|
                            (do takeUntilChunk "</head>"
                                peekC >>= maybe (return ()) (const $ yield $ encodeUtf8 extra)
                                mapC id) .|
                            mapC (Chunk . toBuilder)
    | otherwise = redirect =<< makeURL snapName rest
  where
    mdocType =
        case T.takeEnd 5 <$> headMay (reverse rest) of
            Just ".html" -> Just DocHtml
            Just ".json" -> Just DocJson
            _ -> Nothing
    getExtra = do
        render <- getUrlRender
        return $
            concat
                [ "<link rel='stylesheet' href='https://fonts.googleapis.com/css?family=Open+Sans'>"
                , "<link rel='stylesheet' href='"
                , render $ StaticR haddock_style_css
                , "'>"
                ]


takeUntilChunk :: Monad m => ByteString -> ConduitM ByteString ByteString m ()
takeUntilChunk fullNeedle =
    start
  where
    start = await >>= mapM_ start'

    start' bs =
        case checkNeedle fullNeedle bs of
            CNNotFound -> yield bs >> start
            CNFound before after -> yield before >> leftover after
            CNPartial before after newNeedle -> yield before >> loop (after:) newNeedle

    loop front needle =
        await >>= mapM_ loop'
      where
        loop' bs =
            if needle `isPrefixOf` bs
                then leftover $ concat $ front [bs]
                else
                    case stripPrefix bs needle of
                        Just needle' -> loop (front . (bs:)) needle'
                        Nothing -> yieldMany (front [bs]) >> start

data CheckNeedle
    = CNNotFound
    | CNFound !ByteString
              !ByteString
    | CNPartial !ByteString
                !ByteString
                !ByteString

checkNeedle :: ByteString -> ByteString -> CheckNeedle
checkNeedle needle bs0 =
    loop 0
  where
    loop idx
        | idx >= length bs0 = CNNotFound
        | otherwise =
            case uncurry checkIndex $ splitAt idx bs0 of
                CNNotFound -> loop (idx + 1)
                res -> res

    checkIndex before bs
        | needle `isPrefixOf` bs = CNFound before bs
        | Just needle' <- stripPrefix bs needle = CNPartial before bs needle'
        | otherwise = CNNotFound
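
-- A worked example of the chunk-boundary handling above (illustration
-- only): streaming the chunks ["<head>x</he", "ad><body>"] through
-- takeUntilChunk "</head>" yields "<head>x" downstream. The first chunk
-- ends in "</he", a prefix of the needle, so it is buffered (CNPartial);
-- the remaining needle "ad>" matches the start of the second chunk, and
-- the reassembled "</head><body>" is pushed back as leftover. getHaddockR
-- relies on this to inject its extra <link> tags right before </head>.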
redirectWithVersion ::
       (GetStackageDatabase env m, MonadHandler m) => SnapName -> [Text] -> m (Maybe (Route App))
redirectWithVersion snapName rest =
    case rest of
        [pkg, file] | Just pname <- fromPathPiece pkg -> do
            mspi <- getSnapshotPackageInfo snapName pname
            case mspi of -- TODO: Should `Nothing` cause a 404 here, since haddock will fail?
                Nothing -> return Nothing -- error "That package is not part of this snapshot."
                Just spi -> do
                    return
                        (Just
                             (HaddockR
                                  snapName
                                  [toPathPiece $ PackageIdentifierP pname (spiVersion spi), file]))
        _ -> return Nothing

getHaddockBackupR :: [Text] -> Handler ()
getHaddockBackupR (snap':rest)
    | Just branch <- fromPathPiece snap' = track "Handler.Haddock.getHaddockBackupR" $ do
        snapName <- newestSnapshot branch >>= maybe notFound pure
        redirect $ HaddockR snapName rest
getHaddockBackupR rest = track "Handler.Haddock.getHaddockBackupR" $ do
    bucketUrl <- getsYesod (appDownloadBucketUrl . appSettings)
    redirect
        $ concat
        $ bucketUrl
        : map (cons '/') rest

src/Handler/Home.hs (new file, 66 lines)
@@ -0,0 +1,66 @@
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module Handler.Home
    ( getHomeR
    , getHealthzR
    , getAuthorsR
    , getInstallR
    , getOlderReleasesR
    ) where

import RIO.Time
import Import
import Stackage.Database
import Yesod.GitRepo (grContent)

getHealthzR :: Handler String
getHealthzR = return "This should never be used, we should use the middleware instead"

-- This is a handler function for the GET request method on the HomeR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getHomeR :: Handler Html
getHomeR = track "Handler.Snapshots.getAllSnapshotsR" $ do
    cacheSeconds $ 60 * 60
    now' <- getCurrentTime
    (map entityVal -> nightly) <-
        getSnapshots (Just NightlyBranch) 1 0
    let latestNightly = groupUp now' nightly
    latestLtsNameWithHoogle <- getLatestLtsNameWithHoogle
    latestLtsByGhc <- getLatestLtsByGhc
    let sixMonthsAgo = addUTCTime (-180 * nominalDay) now'
    mrecentBlog <- headMay . filter (\p -> postTime p > sixMonthsAgo) <$> getPosts

    defaultLayout $ do
        setTitle "Stackage Server"
        $(widgetFile "home")
  where uncrapify now' snapshot =
            ( snapshotName snapshot
            , snapshotTitle snapshot
            , dateDiff now' (snapshotCreated snapshot)
            )
        groupUp now' = groupBy (on (==) (\(_,_,uploaded) -> uploaded))
                     . map (uncrapify now')

getAuthorsR :: Handler Html
getAuthorsR = contentHelper "Library Authors" wcAuthors

getInstallR :: Handler ()
getInstallR = redirect ("https://haskell-lang.org/get-started" :: Text)

getOlderReleasesR :: Handler Html
getOlderReleasesR = contentHelper "Older Releases" wcOlderReleases

contentHelper :: Html -> (WebsiteContent -> Html) -> Handler Html
contentHelper title accessor = do
    homepage <- getYesod >>= fmap accessor . liftIO . grContent . appWebsiteContent
    defaultLayout $ do
        setTitle title
        toWidget homepage

src/Handler/Hoogle.hs (new file, 210 lines)
@@ -0,0 +1,210 @@
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BlockArguments #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Handler.Hoogle where

import qualified Data.Text as T
import Data.Text.Read (decimal)
import qualified Hoogle
import Import
import Stackage.Database
import Text.Blaze.Html (preEscapedToHtml)
import qualified Text.HTML.DOM
import Text.XML.Cursor (content, fromDocument, ($//))

getHoogleDB :: SnapName -> Handler (Maybe FilePath)
getHoogleDB name = track "Handler.Hoogle.getHoogleDB" do
    app <- getYesod
    liftIO $ appGetHoogleDB app name

getHoogleR :: SnapName -> Handler Html
getHoogleR name0 = track "Handler.Hoogle.getHoogleR" do
    let branch =
            case name0 of
                SNLts _ _ -> LtsBranch
                SNNightly _ -> NightlyBranch
    name <- newestSnapshot branch >>= maybe notFound return
    Entity _ snapshot <- lookupSnapshot name >>= maybe notFound return
    mquery <- lookupGetParam "q"
    mPackageName <- lookupGetParam "package"
    mpage <- lookupGetParam "page"
    exact <- isJust <$> lookupGetParam "exact"
    mresults' <- lookupGetParam "results"
    let count' =
            case decimal <$> mresults' of
                Just (Right (i, "")) -> min perPage i
                _ -> perPage
        page =
            case decimal <$> mpage of
                Just (Right (i, "")) -> i
                _ -> 1
        offset = (page - 1) * perPage
    mdatabasePath <- getHoogleDB name
    dbPath <- maybe (hoogleDatabaseNotAvailableFor name) return mdatabasePath

    urlRender <- getUrlRender
    HoogleQueryOutput results mtotalCount <-
        case mquery of
            Just query -> do
                let input = HoogleQueryInput
                        { hqiQueryInput =
                              case mPackageName of
                                  Nothing -> query
                                  Just pn -> concat ["+", pn, " ", query]
                        , hqiLimitTo = count'
                        , hqiOffsetBy = offset
                        , hqiExact = exact
                        }

                liftIO $ Hoogle.withDatabase dbPath
                    -- NB! I got a segfault when I didn't force with $!
                    $ \db -> return $! runHoogleQuery urlRender name db input
            Nothing -> return $ HoogleQueryOutput [] Nothing
    let queryText = fromMaybe "" mquery
        pageLink p = (SnapshotR name HoogleR
                     , (if exact then (("exact", "true"):) else id)
                       $ maybe id (\q' -> (("q", q'):)) mquery
                       [("page", tshow p)])
        snapshotLink = SnapshotR name StackageHomeR
        hoogleForm = $(widgetFile "hoogle-form")
    defaultLayout do
        setTitle "Hoogle Search"
        $(widgetFile "hoogle")

getHoogleDatabaseR :: SnapName -> Handler Html
getHoogleDatabaseR name =
    track "Handler.Hoogle.getHoogleDatabaseR" do
        mdatabasePath <- getHoogleDB name
        case mdatabasePath of
            Nothing -> hoogleDatabaseNotAvailableFor name
            Just path -> sendFile "application/octet-stream" path

hoogleDatabaseNotAvailableFor :: SnapName -> Handler a
hoogleDatabaseNotAvailableFor name =
    track "Handler.Hoogle.hoogleDatabaseNotAvailableFor" do
        sendResponse =<<
            defaultLayout
                (do setTitle "Hoogle database not available"
                    [whamlet|
                        <div .container>
                            <p>The given Hoogle database is not available.
                            <p>
                                <a href=@{SnapshotR name StackageHomeR}>Return to snapshot homepage
                    |])

getPageCount :: Int -> Int
getPageCount totalCount = 1 + div totalCount perPage

perPage :: Int
perPage = 10

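-- Worked values for the paging formula above (illustration only): with
-- perPage = 10,
--
--   getPageCount 0  == 1
--   getPageCount 25 == 3   -- 25 `div` 10 == 2, plus one for the first page
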
data HoogleQueryInput = HoogleQueryInput
    { hqiQueryInput :: !Text
    , hqiLimitTo :: !Int
    , hqiOffsetBy :: !Int
    , hqiExact :: !Bool
    }
    deriving (Eq, Show, Ord, Generic)

data HoogleQueryOutput = HoogleQueryOutput [HoogleResult] (Maybe Int) -- ^ Int == total count
    deriving (Show, Eq, Generic)
instance NFData HoogleQueryOutput

data HoogleResult = HoogleResult
    { hrURL :: !Text
    , hrSources :: ![(PackageLink, [ModuleLink])]
    , hrTitle :: !Text -- ^ HTML
    , hrBody :: !String -- ^ plain text
    }
    deriving (Eq, Show, Ord, Generic)

data PackageLink = PackageLink
    { plName :: !PackageNameP
    , plURL :: !Text
    }
    deriving (Eq, Show, Ord, Generic)

data ModuleLink = ModuleLink
    { mlName :: !ModuleNameP
    , mlURL :: !Text
    }
    deriving (Eq, Show, Ord, Generic)

instance NFData HoogleResult
instance NFData PackageLink
instance NFData ModuleLink

runHoogleQuery :: (Route App -> Text)
               -> SnapName
               -> Hoogle.Database
               -> HoogleQueryInput
               -> HoogleQueryOutput
runHoogleQuery renderUrl snapshot hoogledb HoogleQueryInput {..} = HoogleQueryOutput targets mcount
  where
    allTargets = Hoogle.searchDatabase hoogledb query
    targets = take (min 100 hqiLimitTo) $ drop hqiOffsetBy $ map fixResult allTargets
    query =
        unpack $
        hqiQueryInput ++
        if hqiExact
            then " is:exact"
            else ""
    mcount = limitedLength 0 allTargets
    limitedLength x [] = Just x
    limitedLength x (_:rest)
        | x >= 20 = Nothing
        | otherwise = limitedLength (x + 1) rest
    fixResult target@Hoogle.Target {..} =
        HoogleResult
            { hrURL =
                  case sources of
                      [(_, [ModuleLink _ m])] -> m <> haddockAnchorFromUrl targetURL
                      _ -> fromMaybe (T.pack targetURL) $ asum [mModuleLink, mPackageLink]
            , hrSources = sources
            , hrTitle
                  -- NOTE: from hoogle documentation:
                  -- HTML span of the item, using 0 for the name and 1 onwards for arguments
               = T.replace "<0>" "" $ T.replace "</0>" "" $ pack targetItem
            , hrBody = targetDocs
            }
      where
        sources =
            toList do
                (packageLink, mkModuleUrl) <- targetLinks renderUrl snapshot target
                modName <- parseModuleNameP . fst =<< targetModule
                Just (packageLink, [ModuleLink modName $ mkModuleUrl modName])
        item =
            let doc = Text.HTML.DOM.parseLBS $ encodeUtf8 $ pack targetItem
                cursor = fromDocument doc
             in T.concat $ cursor $// content
        mModuleLink = do
            "module" <- Just targetType
            (_packageLink, mkModuleUrl) <- targetLinks renderUrl snapshot target
            modName <- parseModuleNameP . T.unpack =<< T.stripPrefix "module " item
            pure $ mkModuleUrl modName
        mPackageLink = do
            guard $ isNothing targetPackage
            "package" <- Just targetType
            pnameTxt <- T.stripPrefix "package " item
            pname <- fromPathPiece pnameTxt
            return $ renderUrl $ SnapshotR snapshot $ StackageSdistR $ PNVName pname
        haddockAnchorFromUrl = T.pack . ('#' :) . reverse . takeWhile (/= '#') . reverse

targetLinks ::
       (Route App -> Text)
    -> SnapName
    -> Hoogle.Target
    -> Maybe (PackageLink, ModuleNameP -> Text)
targetLinks renderUrl snapName Hoogle.Target {..} = do
    (pname, _) <- targetPackage
    packageName <- parsePackageNameP pname
    let mkModuleUrl modName = renderUrl (hoogleHaddockUrl snapName packageName modName)
    return (makePackageLink packageName, mkModuleUrl)

makePackageLink :: PackageNameP -> PackageLink
makePackageLink packageName = PackageLink packageName ("/package/" <> toPathPiece packageName)

src/Handler/MirrorStatus.hs (new file, 183 lines)
@@ -0,0 +1,183 @@
{-# LANGUAGE QuasiQuotes #-}

module Handler.MirrorStatus
    ( getMirrorStatusR
    , mkUpdateMirrorStatus
    ) where

import Import
import Control.AutoUpdate
import Network.HTTP.Simple
import RIO.Time (diffUTCTime, addUTCTime, getCurrentTime)
import Text.XML.Stream.Parse
import Data.XML.Types (Event (EventContent), Content (ContentText))
import qualified Prelude
import qualified Data.Aeson.Key as Aeson
import qualified Data.Aeson.KeyMap as Aeson

getMirrorStatusR :: Handler Html
getMirrorStatusR = do
    (status, widget) <- getYesod >>= liftIO . appMirrorStatus
    defaultLayout widget >>= sendResponseStatus status

mkUpdateMirrorStatus :: IO (IO (Status, Widget))
mkUpdateMirrorStatus = mkAutoUpdate defaultUpdateSettings
    { updateAction = go
    , updateFreq = 1000 * 1000 * 60 * 10 -- every 10 minutes
    }
  where
    go = do
        -- Ignore updates in the past hour, to give the mirrors a
        -- chance to process them.
        now <- getCurrentTime
        let oneHourAgo = addUTCTime (negate $ 60 * 60) now

        mhackageTime <- getHackageRecent oneHourAgo

        case mhackageTime of
            Nothing -> return (status500, "No Hackage time found, could just be a lot of recent uploads")
            Just hackageTime -> goHT hackageTime

    goHT hackageTime = do
        gitMods <- mapM (\(x, y, z) -> getLastModifiedGit x y z)
            []
            {- FIXME unreliable, and 00-index catches this anyway
            [ ("commercialhaskell", "all-cabal-files", "current-hackage")
            , ("commercialhaskell", "all-cabal-hashes", "current-hackage")
            , ("commercialhaskell", "all-cabal-metadata", "master")
            ]
            -}
        tarballMods <- mapM getLastModifiedHTTP
            [ "http://hackage.fpcomplete.com/00-index.tar.gz"
            , "http://hackage.fpcomplete.com/01-index.tar.gz"
            ]
        otherMods <- mapM getLastModifiedHTTP
            []
            {- Dreamhost S3 is far too unstable
            [ "http://objects-us-west-1.dream.io/hackage-mirror/01-index.tar.gz"
            , "http://objects-us-west-1.dream.io/hackage-mirror/timestamp.json"
            ]
            -}
        let nonHackageMods = gitMods ++ tarballMods
            allMods = ("Hackage", hackageTime) : nonHackageMods ++ otherMods
            biggestDiff = Prelude.maximum $ map
                (\(_, other) -> diffUTCTime hackageTime other)
                nonHackageMods
            showLag x =
                case compare x 0 of
                    EQ -> ""
                    LT -> showDiff (abs x) ++ " (mirror newer)"
                    GT -> showDiff x ++ " (Hackage newer)"
            showDiff x =
                let (minutes', seconds) = floor x `divMod` (60 :: Int)
                    (hours, minutes) = minutes' `divMod` 60
                    showInt i
                        | i < 10 = '0' : show i
                        | otherwise = show i
                    showSuffix suffix i
                        | i == 0 = ""
                        | otherwise = showInt i ++ suffix
                 in unwords $ filter (not . null)
                        [ showSuffix "h" hours
                        , showSuffix "m" minutes
                        , showSuffix "s" seconds
                        ]
            widget = do
                setTitle "Mirror Status"
                [whamlet|
                    <h1>Mirror Status
                    <table border=1 cellpadding=1>
                        <tr>
                            <th>Name
                            <th>Last updated
                            <th>Lag
                        $forall (name, date) <- allMods
                            <tr>
                                <td>#{name}
                                <td>#{tshow date}
                                <td>#{showLag (diffUTCTime hackageTime date)}
                    $if biggestDiff > 0
                        <p>
                            Biggest lag: #{showLag biggestDiff}
                    $if isTooOld
                        <p style="color:red;font-size:300%">WARNING: Mirrors may be out of sync!
                |]
            isTooOld = biggestDiff > (60 * 60)
            status = if isTooOld then status500 else status200
        return (status, widget)

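-- Worked formatting examples for the lag display above (illustration
-- only; the arguments are lags in seconds, as NominalDiffTime values):
--
--   showDiff 59   == "59s"
--   showDiff 3725 == "01h 02m 05s"        -- 3725s = 62m 5s = 1h 2m 5s
--   showLag 3725  == "01h 02m 05s (Hackage newer)"
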
getLastModifiedHTTP :: Text -- ^ url
                    -> IO (Text, UTCTime)
getLastModifiedHTTP url = do
    req <- fmap (setRequestMethod "HEAD") $ parseUrlThrow $ unpack url
    res <- httpLBS req
    case getResponseHeader "last-modified" res of
        [x] -> do
            date <- parseTimeM
                True
                defaultTimeLocale
                "%a, %_d %b %Y %H:%M:%S %Z"
                (unpack $ decodeUtf8 x)
            return (url, date)
        x -> error $ "invalid last-modified for " ++ show (url, res, x)

getLastModifiedGit :: Text -- ^ org
                   -> Text -- ^ repo
                   -> Text -- ^ ref
                   -> IO (Text, UTCTime)
getLastModifiedGit org repo ref = do
    req <- parseUrlThrow $ unpack url
    res <- httpJSON $ addRequestHeader "User-Agent" "Stackage Server" req
    dateT <- lookupJ "commit" (getResponseBody res)
         >>= lookupJ "author"
         >>= lookupJ "date"
         >>= textJ
    date <- parseTimeM
        True
        defaultTimeLocale
        "%Y-%m-%dT%H:%M:%SZ"
        (unpack dateT)
    return (concat [org, "/", repo], date)
  where
    url = concat
        [ "https://api.github.com/repos/"
        , org
        , "/"
        , repo
        , "/commits/"
        , ref
        ]

lookupJ :: MonadThrow m => Text -> Value -> m Value
lookupJ key (Object o) =
    case Aeson.lookup (Aeson.fromText key) o of
        Nothing -> error $ "Key not found: " ++ show key
        Just x -> return x
lookupJ key val = error $ concat
    [ "Looking up key "
    , show key
    , " on non-object "
    , show val
    ]

textJ :: MonadThrow m => Value -> m Text
textJ (String t) = return t
textJ v = error $ "Invalid value for textJ: " ++ show v

getHackageRecent :: UTCTime -- ^ latest time to continue
                 -> IO (Maybe UTCTime)
getHackageRecent latestTime =
    httpSink "https://hackage.haskell.org/packages/recent" sink
  where
    sink _ = parseBytes def
          .| concatMapC getDate
          .| filterC (<= latestTime)
          .| headC

    getDate :: Event -> Maybe UTCTime
    getDate (EventContent (ContentText t)) = parseTimeM
        True
        defaultTimeLocale
        "%a %b %_d %H:%M:%S UTC %Y"
        (unpack t)
    getDate _ = Nothing

src/Handler/OldLinks.hs (new file, 64 lines)
@@ -0,0 +1,64 @@
{-# LANGUAGE NoImplicitPrelude #-}
module Handler.OldLinks
    ( getOldSnapshotBranchR
    , getOldSnapshotR
    ) where

import Import
import Stackage.Database
import qualified Data.Text.Read as Reader
import Network.Wai (rawQueryString)

data LtsSuffix = LSMajor !Int
                | LSMinor !Int !Int

parseLtsSuffix :: Text -> Maybe LtsSuffix
parseLtsSuffix t0 = do
    Right (x, t1) <- Just $ Reader.decimal t0
    if null t1
        then return $ LSMajor x
        else do
            t2 <- stripPrefix "." t1
            Right (y, "") <- Just $ Reader.decimal t2
            return $ LSMinor x y

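-- Accepted forms (worked examples; illustration only):
--
--   parseLtsSuffix "12"     ~ Just (LSMajor 12)
--   parseLtsSuffix "12.26"  ~ Just (LSMinor 12 26)
--   parseLtsSuffix "12.x"   ~ Nothing   -- failed pattern matches in the
--   parseLtsSuffix "12.3.4" ~ Nothing   -- Maybe monad fall through to Nothing
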
redirectWithQueryText :: Text -> Handler a
redirectWithQueryText url = do
    req <- waiRequest
    redirect $ url ++ decodeUtf8 (rawQueryString req)

getOldSnapshotBranchR :: SnapshotBranch -> [Text] -> Handler ()
getOldSnapshotBranchR LtsBranch pieces = track "Handler.OldLinks.getOldSnapshotBranchR@LtsBranch" $ do
    (x, y, pieces') <- case pieces of
        t:ts | Just suffix <- parseLtsSuffix t -> do
            (x, y) <- case suffix of
                LSMajor x -> do
                    y <- newestLTSMajor x >>= maybe notFound return
                    return (x, y)
                LSMinor x y -> return (x, y)
            return (x, y, ts)
        _ -> do
            (x, y) <- newestLTS >>= maybe notFound return
            return (x, y, pieces)
    let name = concat ["lts-", tshow x, ".", tshow y]
    redirectWithQueryText $ concatMap (cons '/') $ name : pieces'

getOldSnapshotBranchR (LtsMajorBranch x) pieces = track "Handler.OldLinks.getOldSnapshotBranchR@LtsMajorBranch" $ do
    y <- newestLTSMajor x >>= maybe notFound return
    let name = concat ["lts-", tshow x, ".", tshow y]
    redirectWithQueryText $ concatMap (cons '/') $ name : pieces

getOldSnapshotBranchR NightlyBranch pieces = track "Handler.OldLinks.getOldSnapshotBranchR@NightlyBranch" $ do
    (day, pieces') <- case pieces of
        t:ts | Just day <- fromPathPiece t -> return (day, ts)
        _ -> do
            day <- newestNightly >>= maybe notFound return
            return (day, pieces)
    let name = "nightly-" ++ tshow day
    redirectWithQueryText $ concatMap (cons '/') $ name : pieces'

getOldSnapshotR :: Text -> [Text] -> Handler ()
getOldSnapshotR t ts = track "Handler.OldLinks.getOldSnapshotR" $
    case fromPathPiece t :: Maybe SnapName of
        Just _ -> redirectWithQueryText $ concatMap (cons '/') $ t : ts
        Nothing -> notFound

Some files were not shown because too many files have changed in this diff.