-
-
Notifications
You must be signed in to change notification settings - Fork 268
/
PlatformEngines.jl
1415 lines (1257 loc) · 52.1 KB
/
PlatformEngines.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# This file is a part of Julia. License is MIT: https://julialang.org/license
# Content in this file is extracted from BinaryProvider.jl, see LICENSE.method
module PlatformEngines
using SHA, Logging, UUIDs, Random
import ...Pkg: Pkg, TOML, pkg_server, depots, depots1
export probe_platform_engines!, parse_7z_list, parse_tar_list, verify,
download_verify, unpack, package, download_verify_unpack,
list_tarball_files, list_tarball_symlinks
# To reduce method invalidation, it's best to call a logging method that
# avoids introduction of backedges.
# See https://github.com/JuliaLang/julia/pull/35714
const logging_level = isdefined(Base.CoreLogging, :_invoked_min_enabled_level) ? Base.CoreLogging._invoked_min_enabled_level : Base.CoreLogging.min_enabled_level
# In this file, we setup the `gen_download_cmd()`, `gen_unpack_cmd()` and
# `gen_package_cmd()` functions by providing methods to probe the environment
# and determine the most appropriate platform binaries to call.
"""
    gen_download_cmd(url::AbstractString, out_path::AbstractString, hdrs::AbstractString...)

Return a `Cmd` that will download resource located at `url` and store it at
the location given by `out_path`.

This method is initialized by `probe_platform_engines!()`, which should be
automatically called upon first import of `BinaryProvider`.
"""
# Stub: deliberately errors until `probe_platform_engines!()` rebinds it to a
# real implementation for whichever download tool was found on this system.
gen_download_cmd = (url::AbstractString, out_path::AbstractString, hdrs::AbstractString...) ->
    error("Call `probe_platform_engines!()` before `gen_download_cmd()`")

"""
    gen_unpack_cmd(tarball_path::AbstractString, out_path::AbstractString;
                   excludelist::Union{AbstractString, Nothing} = nothing)

Return a `Cmd` that will unpack the given `tarball_path` into the given
`out_path`. If `out_path` is not already a directory, it will be created.
`excludelist` is an optional file which contains a list of files that is not unpacked.
This option is mainly used to exclude symlinks from extraction (see: `copyderef`).

This method is initialized by `probe_platform_engines!()`, which should be
automatically called upon first import of `BinaryProvider`.
"""
# Stub: rebound by `probe_platform_engines!()` (note: the real implementations
# take `excludelist` positionally, as below, not as a keyword).
gen_unpack_cmd = (tarball_path::AbstractString, out_path::AbstractString,
                  excludelist::Union{AbstractString, Nothing} = nothing) ->
    error("Call `probe_platform_engines!()` before `gen_unpack_cmd()`")

"""
    gen_package_cmd(in_path::AbstractString, tarball_path::AbstractString)

Return a `Cmd` that will package up the given `in_path` directory into a
tarball located at `tarball_path`.

This method is initialized by `probe_platform_engines!()`, which should be
automatically called upon first import of `BinaryProvider`.
"""
# Stub: rebound by `probe_platform_engines!()`.
gen_package_cmd = (in_path::AbstractString, tarball_path::AbstractString) ->
    error("Call `probe_platform_engines!()` before `gen_package_cmd()`")

"""
    gen_list_tarball_cmd(tarball_path::AbstractString)

Return a `Cmd` that will list the files contained within the tarball located at
`tarball_path`. The list will not include directories contained within the
tarball.

This method is initialized by `probe_platform_engines!()`.
"""
# Stub: rebound by `probe_platform_engines!()`.
gen_list_tarball_cmd = (tarball_path::AbstractString) ->
    error("Call `probe_platform_engines!()` before `gen_list_tarball_cmd()`")

"""
    parse_tarball_listing(output::AbstractString)

Parses the result of `gen_list_tarball_cmd()` into something useful.

This method is initialized by `probe_platform_engines!()`.
"""
# Stub: rebound to `parse_7z_list` or `parse_tar_list` by `probe_platform_engines!()`.
parse_tarball_listing = (output::AbstractString) ->
    error("Call `probe_platform_engines!()` before `parse_tarball_listing()`")

"""
    parse_symlinks(output::AbstractString)

Returns a regex to parse symlinks from tarball listings.

This method is initialized by `probe_platform_engines!()`.
"""
# Stub: rebound by `probe_platform_engines!()` to a zero-arg closure returning
# the engine-specific symlink-matching regex.
parse_symlinks = () ->
    error("Call `probe_platform_engines!()` before `parse_symlinks()`")
"""
    probe_cmd(cmd::Cmd; verbose::Bool = false)

Returns `true` if the given command executes successfully, `false` otherwise.
"""
function probe_cmd(cmd::Cmd; verbose::Bool = false)
    verbose && @info("Probing $(cmd.exec[1]) as a possibility...")
    try
        # `success()` throws (e.g. IOError) when the executable cannot even be
        # spawned; its Bool return value (the exit status) is intentionally
        # ignored here, exactly as the original did.
        success(cmd)
    catch
        return false
    end
    verbose && @info("  Probe successful for $(cmd.exec[1])")
    return true
end
# Set to `true` at the end of a successful `probe_platform_engines!()` run so
# subsequent calls can return immediately (Pkg calls it frequently).
already_probed = false
"""
    probe_symlink_creation(dest::AbstractString)

Probes whether we can create a symlink within the given destination directory,
to determine whether a particular filesystem is "symlink-unfriendly".
"""
function probe_symlink_creation(dest::AbstractString)
    # Walk upward until we reach a directory that actually exists, so the
    # probe happens on the filesystem `dest` will live on.
    while !isdir(dest)
        dest = dirname(dest)
    end
    # Build arbitrary (non-existent) file path name
    link_path = joinpath(dest, "binaryprovider_symlink_test")
    while ispath(link_path)
        link_path *= "1"
    end
    # Save the current logger's level; logging is suppressed below so an
    # *expected* symlink failure does not emit a warning.
    loglevel = logging_level(current_logger())
    try
        disable_logging(Logging.Warn)
        symlink("foo", link_path)
        return true
    catch e
        # An IOError means the filesystem refused the symlink ("symlink-unfriendly");
        # anything else is unexpected and is rethrown.
        if isa(e, Base.IOError)
            return false
        end
        rethrow(e)
    finally
        # `disable_logging(level - 1)` re-enables messages at `level` and above,
        # restoring the state saved before the probe.
        disable_logging(loglevel-1)
        rm(link_path; force=true)
    end
end
"""
    probe_platform_engines!(;verbose::Bool = false)

Searches the environment for various tools needed to download, unpack, and
package up binaries. Searches for a download engine to be used by
`gen_download_cmd()` and a compression engine to be used by `gen_unpack_cmd()`,
`gen_package_cmd()`, `gen_list_tarball_cmd()` and `parse_tarball_listing()`.
Running this function will set the global functions to their appropriate
implementations given the environment this package is running on.

This probing function will automatically search for download engines using a
particular ordering; if you wish to override this ordering and use one over all
others, set the `BINARYPROVIDER_DOWNLOAD_ENGINE` environment variable to its
name, and it will be the only engine searched for. For example, put:

    ENV["BINARYPROVIDER_DOWNLOAD_ENGINE"] = "fetch"

within your `~/.juliarc.jl` file to force `fetch` to be used over `curl`. If
the given override does not match any of the download engines known to this
function, a warning will be printed and the typical ordering will be performed.

Similarly, if you wish to override the compression engine used, set the
`BINARYPROVIDER_COMPRESSION_ENGINE` environment variable to its name (e.g. `7z`
or `tar`) and it will be the only engine searched for. If the given override
does not match any of the compression engines known to this function, a warning
will be printed and the typical searching will be performed.

If `verbose` is `true`, print out the various engines as they are searched.
"""
function probe_platform_engines!(;verbose::Bool = false)
    global already_probed
    global gen_download_cmd, gen_list_tarball_cmd, gen_package_cmd
    global gen_unpack_cmd, parse_tarball_listing, parse_symlinks

    # Quick-escape for Pkg, since we do this a lot
    if already_probed
        return
    end

    # download_engines is a list of (test_cmd, download_opts_functor)
    # The probulator will check each of them by attempting to run `$test_cmd`,
    # and if that works, will set the global download functions appropriately.
    download_engines = [
        (`curl --help`, (url, path, hdrs...) ->
            `curl -H$hdrs -C - -\# -f -o $path -L $url`),
        (`wget --help`, (url, path, hdrs...) ->
            `wget --tries=5 --header=$hdrs -c -O $path $url`),
        (`fetch --help`, (url, path, hdrs...) -> begin
            isempty(hdrs) || error("`fetch` does not support passing headers")
            `fetch -f $path $url`
        end),
        (`busybox wget --help`, (url, path, hdrs...) ->
            `busybox wget --header=$hdrs -c -O $path $url`),
    ]
    # On macOS, prefer the system curl so we don't pick up a broken one from PATH.
    Sys.isapple() && pushfirst!(download_engines,
        (`/usr/bin/curl --help`, (url, path, hdrs...) ->
            `/usr/bin/curl -H$hdrs -C - -\# -f -o $path -L $url`))

    # 7z is rather intensely verbose.  We also want to try running not only
    # `7z` but also a direct path to the `7z.exe` bundled with Julia on
    # windows, so we create generator functions to spit back functors to invoke
    # the correct 7z given the path to the executable:
    unpack_7z = (exe7z) -> begin
        return (tarball_path, out_path, excludelist = nothing) ->
            pipeline(pipeline(`$exe7z x $(tarball_path) -y -so`,
                     `$exe7z x -si -y -ttar -o$(out_path) $(excludelist === nothing ? [] : "-x@$(excludelist)")`);
                     stdout=devnull, stderr=devnull)
    end
    package_7z = (exe7z) -> begin
        return (in_path, tarball_path) ->
            pipeline(pipeline(`$exe7z a -ttar -so a.tar "$(joinpath(".",in_path,"*"))"`,
                     `$exe7z a -si $(tarball_path)`); stdout=devnull, stderr=devnull)
    end
    list_7z = (exe7z) -> begin
        return (path; verbose = false) ->
            pipeline(`$exe7z x $path -so`, `$exe7z l -ttar -y -si $(verbose ? ["-slt"] : [])`)
    end

    # the regex at the last position is meant for parsing the symlinks from verbose 7z-listing
    # "Path = ([^\r\n]+)\r?\n" matches the symlink name which is followed by an optional return and a new line
    # (?:[^\r\n]+\r?\n)+ = a group of non-empty lines (information belonging to one file is written as a block of lines followed by an empty line)
    # more info on regex and a powerful online tester can be found at https://regex101.com
    # Symbolic Link = ([^\r\n]+)"s) matches the source filename
    # Demo 7z listing of tar files:
    # 7-Zip [64] 16.04 : Copyright (c) 1999-2016 Igor Pavlov : 2016-10-04
    #
    #
    # Listing archive:
    # --
    # Path =
    # Type = tar
    # Code Page = UTF-8
    #
    # ----------
    # Path = .
    # Folder = +
    # Size = 0
    # Packed Size = 0
    # Modified = 2018-08-22 11:44:23
    # Mode = 0rwxrwxr-x
    # User = travis
    # Group = travis
    # Symbolic Link =
    # Hard Link =
    # Path = .\lib\libpng.a
    # Folder = -
    # Size = 10
    # Packed Size = 0
    # Modified = 2018-08-22 11:44:51
    # Mode = 0rwxrwxrwx
    # User = travis
    # Group = travis
    # Symbolic Link = libpng16.a
    # Hard Link =
    #
    # Path = .\lib\libpng16.a
    # Folder = -
    # Size = 334498
    # Packed Size = 334848
    # Modified = 2018-08-22 11:44:49
    # Mode = 0rw-r--r--
    # User = travis
    # Group = travis
    # Symbolic Link =
    # Hard Link =
    gen_7z = (p) -> (unpack_7z(p), package_7z(p), list_7z(p), parse_7z_list, r"Path = ([^\r\n]+)\r?\n(?:[^\r\n]+\r?\n)+Symbolic Link = ([^\r\n]+)"s)
    compression_engines = Tuple[]

    # Sample tarball member used below to sniff each tar's listing format.
    (tmpfile, io) = mktemp()
    write(io, "Demo file for tar listing (Pkg.jl)")
    close(io)

    for tar_cmd in [`tar`, `busybox tar`]
        # try to determine the tar list format
        local symlink_parser
        try
            # Windows 10 now has a `tar` but it needs the `-f -` flag to use stdin/stdout
            # The Windows 10 tar does not work on substituted drives (`subst U: C:\Users`)
            # If a drive letter is part of the filename, then tar spits out a warning on stderr:
            # "tar: Removing leading drive letter from member names"
            # Therefore we cd to tmpdir() first
            # FIX: assign the result of the `cd() do` block; assigning to
            # `tarListing` *inside* the closure would create a closure-local
            # variable, leaving `tarListing` undefined here and silently
            # forcing the generic fallback parser in the `catch` below.
            tarListing = cd(tempdir()) do
                read(pipeline(`$tar_cmd -cf - $(basename(tmpfile))`, `$tar_cmd -tvf -`), String)
            end
            # obtain the text of the line before the filename
            m = match(Regex("((?:\\S+\\s+)+?)$tmpfile"), tarListing)[1]
            # count the number of words before the filename
            nargs = length(split(m, " "; keepempty = false))
            # build a regex for catching the symlink:
            # "^l" = line starting with l
            # "(?:\S+\s+){$nargs} = nargs non-capturing groups of many non-spaces "\S+" and many spaces "\s+"
            # "(.+?)" = a non-greedy sequence of characters: the symlink
            # "(?: -> (.+?))?" = an optional group of " -> " followed by a non-greedy sequence of characters: the source of the link
            # "\r?\$" = matches the end of line with an optional return character for some OSes
            # Demo listings
            # drwxrwxr-x  0 sabae  sabae       0 Sep  5  2018 collapse_the_symlink/
            # lrwxrwxrwx  0 sabae  sabae       0 Sep  5  2018 collapse_the_symlink/foo -> foo.1
            # -rw-rw-r--  0 sabae  sabae       0 Sep  5  2018 collapse_the_symlink/foo.1
            # lrwxrwxrwx  0 sabae  sabae       0 Sep  5  2018 collapse_the_symlink/foo.1.1 -> foo.1
            # lrwxrwxrwx  0 sabae  sabae       0 Sep  5  2018 collapse_the_symlink/broken -> obviously_broken
            #
            # drwxrwxr-x sabae/sabae       0 2018-09-05 18:19 collapse_the_symlink/
            # lrwxrwxrwx sabae/sabae       0 2018-09-05 18:19 collapse_the_symlink/foo -> foo.1
            #
            # lrwxrwxr-x 1000/1000  498007696 2009-11-27 00:14:00 link1 -> source1
            # lrw-rw-r-- 1000/1000 1359020032 2019-06-03 12:02:03 link2 -> sourcedir/source2
            #
            # now a pathological link "2009 link with blanks"
            # this can only be tracked by determining the tar format beforehand:
            # lrw-rw-r-- 0 1000 1000 1359020032 Jul  8  2009 2009 link with blanks -> target with blanks
            symlink_parser = Regex("^l(?:\\S+\\s+){$nargs}(.+?)(?: -> (.+?))?\\r?\$", "m")
        catch
            # generic expression for symlink parsing
            # this will fail, if the symlink contains space characters (which is highly improbable, though)
            # "^l.+?" = a line starting with an "l" followed by a sequence of non-greedy characters
            # \S+? the filename consisting of non-space characters, the rest as above
            symlink_parser = r"^l.+? (\S+?)(?: -> (.+?))?\r?$"m
        end
        # Some tar's aren't smart enough to auto-guess decompression method. :(
        unpack_tar = (tarball_path, out_path, excludelist = nothing) -> begin
            Jjz = "z"
            if endswith(tarball_path, ".xz")
                Jjz = "J"
            elseif endswith(tarball_path, ".bz2")
                Jjz = "j"
            end
            return `$tar_cmd --no-same-owner -mx$(Jjz)f $(tarball_path) -C$(out_path) $(excludelist === nothing ? [] : "-X$(excludelist)")`
        end
        package_tar = (in_path, tarball_path) -> begin
            Jjz = "z"
            if endswith(tarball_path, ".xz")
                Jjz = "J"
            elseif endswith(tarball_path, ".bz2")
                Jjz = "j"
            end
            return `$tar_cmd -c$(Jjz)f $tarball_path -C$(in_path) .`
        end
        list_tar = (in_path; verbose = false) -> begin
            Jjz = "z"
            if endswith(in_path, ".xz")
                Jjz = "J"
            elseif endswith(in_path, ".bz2")
                Jjz = "j"
            end
            return `$tar_cmd $(verbose ? "-t$(Jjz)vf" : "-t$(Jjz)f") $in_path`
        end
        push!(compression_engines, (
            `$tar_cmd --help`,
            unpack_tar,
            package_tar,
            list_tar,
            parse_tar_list,
            symlink_parser
        ))
    end
    rm(tmpfile, force = true)

    # For windows, we need to tweak a few things, as the tools available differ
    @static if Sys.iswindows()
        # For download engines, we will most likely want to use powershell.
        # Let's generate a functor to return the necessary powershell magics
        # to download a file, given a path to the powershell executable
        psh_download = (psh_path) -> begin
            return (url, path, hdrs...) -> begin
                webclient_code = """
                [System.Net.ServicePointManager]::SecurityProtocol =
                    [System.Net.SecurityProtocolType]::Tls12;
                \$webclient = (New-Object System.Net.Webclient);
                \$webclient.UseDefaultCredentials = \$true;
                \$webclient.Proxy.Credentials = \$webclient.Credentials;
                \$webclient.Headers.Add("user-agent", \"Pkg.jl (https://github.com/JuliaLang/Pkg.jl)\");
                """
                for hdr in hdrs
                    key, val = split(hdr, r":\s*", limit=2)
                    webclient_code *= """
                    \$webclient.Headers.Add($(repr(key)), $(repr(val)));
                    """
                end
                webclient_code *= """
                \$webclient.DownloadFile(\"$url\", \"$path\")
                """
                # NOTE(review): `replace` is non-mutating and its result is
                # discarded here, so the command below receives the
                # newline-separated code; preserved as-is to avoid changing
                # Windows behavior.
                replace(webclient_code, "\n" => " ")
                return `$psh_path -NoProfile -Command "$webclient_code"`
            end
        end

        # We want to search both the `PATH`, and the direct path for powershell
        psh_path = joinpath(get(ENV, "SYSTEMROOT", "C:\\Windows"), "System32\\WindowsPowerShell\\v1.0\\powershell.exe")
        prepend!(download_engines, [
            (`$psh_path -Command ""`, psh_download(psh_path))
        ])
        prepend!(download_engines, [
            (`powershell -Command ""`, psh_download(`powershell`))
        ])

        # We greatly prefer `7z` as a compression engine on Windows
        prepend!(compression_engines, [(`7z --help`, gen_7z("7z")...)])

        # For purposes of in-buildtree execution, we look in `bin`
        exe7z = joinpath(Sys.BINDIR, "7z.exe")
        prepend!(compression_engines, [(`$exe7z --help`, gen_7z(exe7z)...)])

        # But most commonly, we'll find `7z` sitting in `libexec`, bundled with Julia
        exe7z = joinpath(Sys.BINDIR, "..", "libexec", "7z.exe")
        prepend!(compression_engines, [(`$exe7z --help`, gen_7z(exe7z)...)])
    end

    # Allow environment override
    if haskey(ENV, "BINARYPROVIDER_DOWNLOAD_ENGINE")
        engine = ENV["BINARYPROVIDER_DOWNLOAD_ENGINE"]
        es = split(engine)
        dl_ngs = let es=es
            filter(e -> e[1].exec[1:length(es)] == es, download_engines)
        end
        if isempty(dl_ngs)
            all_ngs = join([d[1].exec[1] for d in download_engines], ", ")
            warn_msg  = "Ignoring BINARYPROVIDER_DOWNLOAD_ENGINE as its value "
            warn_msg *= "of `$(engine)` doesn't match any known valid engines."
            warn_msg *= " Try one of `$(all_ngs)`."
            @warn(warn_msg)
        else
            # If BINARYPROVIDER_DOWNLOAD_ENGINE matches one of our download engines,
            # then restrict ourselves to looking only at that engine
            download_engines = dl_ngs
        end
    end

    if haskey(ENV, "BINARYPROVIDER_COMPRESSION_ENGINE")
        engine = ENV["BINARYPROVIDER_COMPRESSION_ENGINE"]
        es = split(engine)
        comp_ngs = let es=es
            filter(e -> e[1].exec[1:length(es)] == es, compression_engines)
        end
        if isempty(comp_ngs)
            all_ngs = join([c[1].exec[1] for c in compression_engines], ", ")
            warn_msg  = "Ignoring BINARYPROVIDER_COMPRESSION_ENGINE as its "
            warn_msg *= "value of `$(engine)` doesn't match any known valid "
            warn_msg *= "engines. Try one of `$(all_ngs)`."
            @warn(warn_msg)
        else
            # If BINARYPROVIDER_COMPRESSION_ENGINE matches one of our download
            # engines, then restrict ourselves to looking only at that engine
            compression_engines = comp_ngs
        end
    end

    download_found = false
    compression_found = false

    if verbose
        @info("Probing for download engine...")
    end

    # Search for a download engine
    for (test, dl_func) in download_engines
        if probe_cmd(`$test`; verbose=verbose)
            # Set our download command generator
            gen_download_cmd = (url, out_path, hdrs...) -> begin
                # Honor Base's URL rewriting hook when present (newer Julias).
                isdefined(Base, :download_url) && (url = Base.download_url(url))
                dl_func(url, out_path, hdrs...)
            end
            download_found = true
            if verbose
                @info("Found download engine $(test.exec[1])")
            end
            break
        end
    end

    if verbose
        @info("Probing for compression engine...")
    end

    # Search for a compression engine
    for (test::Cmd, unpack, package, list, parse, symlink) in compression_engines
        if probe_cmd(`$test`; verbose=verbose)
            # Set our compression command generators
            gen_unpack_cmd = unpack
            gen_package_cmd = package
            gen_list_tarball_cmd = list
            parse_tarball_listing = parse
            parse_symlinks = () -> symlink

            if verbose
                @info("Found compression engine $(test.exec[1])")
            end

            compression_found = true
            break
        end
    end

    # Build informative error messages in case things go sideways
    errmsg = ""
    if !download_found
        errmsg *= "No download engines found. We looked for: "
        errmsg *= join([d[1].exec[1] for d in download_engines], ", ")
        errmsg *= ". Install one and ensure it  is available on the path.\n"
    end

    if !compression_found
        errmsg *= "No compression engines found. We looked for: "
        errmsg *= join([c[1].exec[1] for c in compression_engines], ", ")
        errmsg *= ". Install one and ensure it is available on the path.\n"
    end

    # Error out if we couldn't find something
    if !download_found || !compression_found
        error(errmsg)
    end
    already_probed = true
end
"""
    parse_7z_list(output::AbstractString)

Given the output of `7z l`, parse out the listed filenames. This function is
used by `list_tarball_files`.
"""
function parse_7z_list(output::AbstractString)
    rows = [chomp(line) for line in split(output, "\n")]

    # Nothing to parse at all
    if isempty(rows)
        return []
    end

    # Remove a stray trailing "\r" on windows-flavored output
    rows = map(rows) do row
        endswith(row, '\r') ? row[1:end-1] : row
    end

    # Locate the header row containing the " Name" and " Attr" column labels,
    # and compute the column offsets we slice/filter by.
    header_row = findfirst(row -> occursin(" Name", row) && occursin(" Attr", row), rows)
    name_idx = first(findfirst("Name", rows[header_row]))
    attr_idx = first(findfirst("Attr", rows[header_row])) - 1

    # Keep only the name column of non-directory entries ('D' in the Attr column)
    entries = [row[name_idx:end] for row in rows if length(row) > name_idx && row[attr_idx] != 'D']
    if isempty(entries)
        return []
    end

    # The real listing sits between two all-dash separator rows
    seps = [i for (i, row) in enumerate(entries) if all(c -> c == '-', row)]
    entries = entries[seps[1]+1:seps[2]-1]

    # Drop any `./` (or `.\`) prefix before returning
    return [startswith(e, "./") || startswith(e, ".\\") ? e[3:end] : e for e in entries]
end
"""
    parse_tar_list(output::AbstractString)

Given the output of `tar -t`, parse out the listed filenames. This function is
used by `list_tarball_files`.
"""
function parse_tar_list(output::AbstractString)
    # NOTE: the docstring above previously carried the `parse_7z_list` header —
    # a copy-paste error, fixed here.
    lines = [chomp(l) for l in split(output, "\n")]
    # Strip trailing carriage returns (possibly more than one, hence `while`)
    for idx in eachindex(lines)
        while endswith(lines[idx], '\r')
            lines[idx] = lines[idx][1:end-1]
        end
    end

    # Drop empty lines and directories (entries ending in '/')
    lines = [l for l in lines if !isempty(l) && !endswith(l, '/')]

    # Eliminate `./` (or `.\`) prefix, if it exists
    for idx in eachindex(lines)
        if startswith(lines[idx], "./") || startswith(lines[idx], ".\\")
            lines[idx] = lines[idx][3:end]
        end
    end

    # make sure paths are always returned in the system's default way
    return Sys.iswindows() ? replace.(lines, ['/' => '\\']) : lines
end
"""
    is_secure_url(url::AbstractString)

Predicate: `true` when `url` is HTTPS, or points at the local host
(`127.0.0.1` or `localhost`, optionally with a port) over any scheme —
i.e. when it is considered safe to send authentication data to it.
"""
function is_secure_url(url::AbstractString)
    return occursin(r"^(https://|\w+://(127\.0\.0\.1|localhost)(:\d+)?($|/))"i, url)
end
"""
    get_server_dir(url::AbstractString, server=pkg_server())

Return the local per-server state directory (`<depot>/servers/<host>`) for
`url` when `url` belongs to `server`; return `nothing` when there is no
server, `url` is not under it, or the server value cannot be parsed.
"""
function get_server_dir(url::AbstractString, server=pkg_server())
    server === nothing && return
    # `url` must be the server itself or a path beneath it
    if url != server && !startswith(url, "$server/")
        return
    end
    m = match(r"^\w+://([^\\/]+)(?:$|/)", server)
    if m === nothing
        @warn "malformed Pkg server value" server
        return
    end
    return joinpath(depots1(), "servers", m.captures[1])
end
# Stack of (url scheme/pattern => handler) pairs consulted by `handle_auth_error`;
# most recently registered handlers come first (see `register_auth_error_handler`).
const AUTH_ERROR_HANDLERS = Pair{Union{String, Regex},Any}[]
"""
    handle_auth_error(url, err; verbose::Bool = false)

Run the registered auth-error handlers whose scheme matches `url`, stopping at
the first one that reports itself as having handled `err`. When that handler
also requests a retry, re-attempt `get_auth_header`; otherwise return `nothing`.
"""
function handle_auth_error(url, err; verbose::Bool = false)
    handled = false
    should_retry = false
    for (scheme, handler) in AUTH_ERROR_HANDLERS
        if occursin(scheme, url)
            handled, should_retry = handler(url, pkg_server(), err)
            handled && break
        end
    end
    if handled && should_retry
        return get_auth_header(url; verbose = verbose)
    end
    return nothing
end
"""
    register_auth_error_handler(urlscheme::Union{AbstractString, Regex}, f)

Registers `f` as the topmost handler for failures in package server authentication.

A handler is only invoked if `occursin(urlscheme, url)` is true (where `url` is the URL Pkg
is currently trying to download.)

`f` must be a function that takes three input arguments `(url, pkgserver, err)`, where `url` is the
URL currently being downloaded, `pkgserver = Pkg.pkg_server()` the current package server, and
`err` is one of `no-auth-file`, `insecure-connection`, `malformed-file`, `no-access-token`,
`no-refresh-key` or `insecure-refresh-url`.

The handler `f` needs to return a tuple of `Bool`s `(handled, should_retry)`. If `handled` is `false`,
the next handler in the stack will be called, otherwise handling terminates; `get_auth_header` is called again if `should_retry`
is `true`.

`register_auth_error_handler` returns a zero-arg function that can be called to deregister the handler.
"""
function register_auth_error_handler(urlscheme::Union{AbstractString, Regex}, @nospecialize(f))
    # Newest handler goes on top; `unique!` drops any older duplicate entry.
    pushfirst!(AUTH_ERROR_HANDLERS, urlscheme => f)
    unique!(AUTH_ERROR_HANDLERS)
    return () -> deregister_auth_error_handler(urlscheme, f)
end
"""
    deregister_auth_error_handler(urlscheme::Union{AbstractString, Regex}, f)

Removes `f` from the stack of authentication error handlers.
"""
function deregister_auth_error_handler(urlscheme::Union{AbstractString, Regex}, @nospecialize(f))
    # Widened from `Union{String, Regex}` to `Union{AbstractString, Regex}` for
    # consistency with `register_auth_error_handler`: the deregistration closure
    # it returns would otherwise throw a MethodError for any non-`String`
    # `AbstractString` scheme (e.g. a `SubString`).
    filter!(handler -> !(handler.first == urlscheme && handler.second === f), AUTH_ERROR_HANDLERS)
    return nothing
end
"""
    get_auth_header(url::AbstractString; verbose::Bool = false)

Return an `"Authorization: Bearer ..."` header string for `url` based on the
matching server's `auth.toml` file, refreshing an expired access token via the
file's `refresh_url`/`refresh_token` when possible. Returns `nothing` (possibly
after consulting registered auth-error handlers via `handle_auth_error`) when
no usable auth info is available.
"""
function get_auth_header(url::AbstractString; verbose::Bool = false)
    server_dir = get_server_dir(url)
    server_dir === nothing && return
    auth_file = joinpath(server_dir, "auth.toml")
    isfile(auth_file) || return handle_auth_error(url, "no-auth-file"; verbose=verbose)
    # TODO: check for insecure auth file permissions
    if !is_secure_url(url)
        @warn "refusing to send auth info over insecure connection" url=url
        return handle_auth_error(url, "insecure-connection"; verbose=verbose)
    end
    # parse the auth file
    auth_info = try
        TOML.parsefile(auth_file)
    catch err
        @error "malformed auth file" file=auth_file err=err
        return handle_auth_error(url, "malformed-file"; verbose=verbose)
    end
    # check for an auth token
    if !haskey(auth_info, "access_token")
        @warn "auth file without access_token field" file=auth_file
        return handle_auth_error(url, "no-access-token"; verbose=verbose)
    end
    auth_header = "Authorization: Bearer $(auth_info["access_token"]::String)"
    # handle token expiration and refresh
    # `expires_at` is an absolute time; `expires_in` is relative to the auth
    # file's mtime — the earlier of the two wins.
    expires_at = Inf
    if haskey(auth_info, "expires_at")
        expires_at = min(expires_at, auth_info["expires_at"]::Integer)
    end
    if haskey(auth_info, "expires_in")
        expires_at = min(expires_at, mtime(auth_file) + auth_info["expires_in"]::Integer)
    end
    # if token is good until ten minutes from now, use it
    time_now = time()
    if expires_at ≥ time_now + 10*60 # ten minutes
        return auth_header
    end
    if !haskey(auth_info, "refresh_url") || !haskey(auth_info, "refresh_token")
        if expires_at ≤ time_now
            @warn "expired auth without refresh keys" file=auth_file
        end
        # try it anyway since we can't refresh
        return something(handle_auth_error(url, "no-refresh-key"; verbose=verbose), auth_header)
    end
    refresh_url = auth_info["refresh_url"]
    if !is_secure_url(refresh_url)
        @warn "ignoring insecure auth refresh URL" url=refresh_url
        return something(handle_auth_error(url, "insecure-refresh-url"; verbose=verbose), auth_header)
    end
    verbose && @info "Refreshing expired auth token..." file=auth_file
    tmp = tempname()
    refresh_auth = "Authorization: Bearer $(auth_info["refresh_token"]::String)"
    # NOTE(review): `download` here is presumably Pkg's internal download helper
    # (defined elsewhere in this file), which sends `auth_header` with the
    # request — confirm against the rest of the file.
    try download(refresh_url, tmp, auth_header=refresh_auth, verbose=verbose)
    catch err
        @warn "token refresh failure" file=auth_file url=refresh_url err=err
        rm(tmp, force=true)
        return handle_auth_error(url, "token-refresh-failed"; verbose=verbose)
    end
    auth_info = try TOML.parsefile(tmp)
    catch err
        @warn "discarding malformed auth file" url=refresh_url err=err
        rm(tmp, force=true)
        return something(handle_auth_error(url, "malformed-file"; verbose=verbose), auth_header)
    end
    if !haskey(auth_info, "access_token")
        if haskey(auth_info, "refresh_token")
            # redact the refresh token before logging the offending payload
            auth_info["refresh_token"] = "*"^64
        end
        @warn "discarding auth file without access token" auth=auth_info
        rm(tmp, force=true)
        return something(handle_auth_error(url, "no-access-token"; verbose=verbose), auth_header)
    end
    if haskey(auth_info, "expires_in")
        expires_in = auth_info["expires_in"]
        if expires_in isa Number
            expires_at = floor(Int64, time_now + expires_in)
            # overwrite expires_at (avoids clock skew issues)
            auth_info["expires_at"] = expires_at
        end
    end
    # persist the refreshed auth info: write to the temp file, then rename into
    # place (atomic when tmp and auth_file share a file system)
    let auth_info = auth_info
        open(tmp, write=true) do io
            TOML.print(io, auth_info, sorted=true)
        end
    end
    mv(tmp, auth_file, force=true)
    return "Authorization: Bearer $(auth_info["access_token"]::String)"
end
"""
    hash_data(strs::AbstractString...)

Hash the given strings into a single 40-hex-character digest (the first 20
bytes of a SHA-224). Each string is fed to the hash followed by a byte-wise,
little-endian encoding of its length, so that e.g. `("ab",)` and `("a", "b")`
produce different digests.
"""
function hash_data(strs::AbstractString...)
    ctx = SHA.SHA224_CTX()
    for s in strs
        bytes = Vector{UInt8}(s)
        n = length(bytes)
        # append the length suffix (at least one byte, even for "")
        while true
            push!(bytes, n % UInt8)
            n == 0 && break
            n >>= 8
        end
        SHA.update!(ctx, bytes)
    end
    digest = SHA.digest!(ctx)
    return bytes2hex(@view digest[1:20])
end
"""
    load_telemetry_file(file::AbstractString)

Load the telemetry info file at `file` and return its contents as a dictionary,
creating or repairing it as needed: missing or invalid `client_uuid`,
`secret_salt`, `HyperLogLog` and `ci_variables` entries are regenerated or
dropped, and the file is rewritten when anything changed. If the user has
fully opted out (`telemetry = false`), the info is returned untouched.
"""
function load_telemetry_file(file::AbstractString)
    info = TOML.DictType()
    changed = true
    if !ispath(file)
        # No per-server file yet: seed from the first depot-wide defaults file found
        for depot in depots()
            defaults_file = joinpath(depot, "servers", "telemetry.toml")
            if isfile(defaults_file)
                try info = TOML.parsefile(defaults_file)
                catch err
                    @warn "ignoring malformed telemetry defaults file" file=defaults_file err=err
                end
                break
            end
        end
    else
        try info = TOML.parsefile(file)
            changed = false
        catch err
            @warn "replacing malformed telemetry file" file=file err=err
        end
    end
    # bail early if fully opted out
    get(info, "telemetry", true) === false && return info
    # some validity checking helpers
    # (fallback methods return false; `Bool` methods accept the explicit
    # opt-out value `false` as valid)
    is_valid_uuid(x) = false
    is_valid_salt(x) = false
    is_valid_hlls(x) = false
    is_valid_vars(x) = false
    is_valid_uuid(x::Bool) = !x # false is valid, true is not
    is_valid_salt(x::Bool) = !x # false is valid, true is not
    is_valid_hlls(x::Bool) = !x # false is valid, true is not
    is_valid_vars(x::Bool) = true
    is_valid_uuid(x::AbstractString) = occursin(Pkg.REPLMode.uuid_re, x)
    is_valid_salt(x::AbstractString) = occursin(r"^[0-9a-zA-Z]+$", x)
    is_valid_hlls(x::AbstractArray) = length(x) == 2 &&
        x[1] isa Integer && 0 ≤ x[1] < 1024 &&
        x[2] isa Integer && 0 ≤ x[2] ≤ 64
    is_valid_vars(x::AbstractVector) = all(s isa AbstractString for s in x)
    # generate or fix system-specific info
    if !haskey(info, "client_uuid") || !is_valid_uuid(info["client_uuid"])
        info["client_uuid"] = string(uuid4())
        changed = true
    end
    if info["client_uuid"] isa AbstractString &&
        (!haskey(info, "secret_salt") || !is_valid_salt(info["secret_salt"]))
        # RandomDevice: draw from OS entropy rather than the seedable global RNG
        info["secret_salt"] = randstring(RandomDevice(), 36)
        changed = true
    end
    if !haskey(info, "HyperLogLog") || !is_valid_hlls(info["HyperLogLog"])
        bucket = rand(RandomDevice(), 0:1023)
        sample = trailing_zeros(rand(RandomDevice(), UInt64))
        info["HyperLogLog"] = [bucket, sample]
        changed = true
    end
    if haskey(info, "ci_variables") && !is_valid_vars(info["ci_variables"])
        delete!(info, "ci_variables")
        changed = true
    end
    changed || return info
    # write telemetry file atomically (if on same file system)
    mkpath(dirname(file))
    tmp = tempname()
    let info = info
        open(tmp, write=true) do io
            TOML.print(io, info, sorted=true)
        end
    end
    mv(tmp, file, force=true)
    # reparse file in case a different process wrote it first
    return load_telemetry_file(file)
end
# based on information in this post:
# https://github.community/t5/GitHub-Actions/Have-the-CI-environment-variable-set-by-default/m-p/32358/highlight/true#M1097
# Names of environment variables commonly set by CI systems; their presence
# (and truthiness) is reported anonymously in the `Julia-CI-Variables` header.
# NOTE: the order of this list is significant — it determines header order.
const CI_VARIABLES = String[
    "APPVEYOR", "CI", "CI_SERVER", "CIRCLECI", "CONTINUOUS_INTEGRATION",
    "GITHUB_ACTIONS", "GITLAB_CI", "JULIA_CI", "JULIA_PKGEVAL",
    "JULIA_REGISTRYCI_AUTOMERGE", "TF_BUILD", "TRAVIS",
]
# Guards reads/writes of the per-server telemetry TOML file so concurrent
# tasks don't race while loading or regenerating it.
const telemetry_file_lock = ReentrantLock()
# Ensures the legal telemetry notice is printed at most once per session.
const telemetry_notice_printed = Ref(false)
"""
    telemetry_notice(server::AbstractString=pkg_server())

Return the legal (GDPR/CCPA) notice text telling the user that anonymous
telemetry data is sent to `server`, how to inspect exactly what is sent,
and where to learn how to opt out.
"""
function telemetry_notice(server::AbstractString=pkg_server())
    return "LEGAL NOTICE: package operations send anonymous data about your system to $server (your current package server), including the operating system and Julia versions you are using, and a random client UUID. Running `Pkg.telemetryinfo()` will show exactly what data is sent. See https://julialang.org/legal/data/ for more details about what this data is used for, how long it is retained, and how to opt out of sending it.\n"
end
"""
    get_telemetry_headers(url::AbstractString, notify::Bool=true)

Build the list of telemetry HTTP headers to attach to a request for `url`.

Returns an empty vector when `url` is not served by the configured package
server, or headers up to the protocol line when the user has opted out of
telemetry. Otherwise returns headers describing the Julia version, platform
triplet, client UUID, hashed active project, CI indicator variables, and a
HyperLogLog sample — each one only if the telemetry file permits it.

If `notify` is `true` and no telemetry file exists yet (i.e. this looks like
the first telemetry-enabled operation), the legal notice is printed once per
session via `@info`.
"""
function get_telemetry_headers(url::AbstractString, notify::Bool=true)
    headers = String[]
    server = pkg_server()
    server_dir = get_server_dir(url, server)
    # `url` is not handled by the package server: send no telemetry at all
    server_dir === nothing && return headers
    push!(headers, "Julia-Pkg-Protocol: 1.0")
    telemetry_file = joinpath(server_dir, "telemetry.toml")
    # only notify on first use, i.e. when the telemetry file does not exist yet
    notify &= !ispath(telemetry_file)
    # serialize file access: another task may be creating/rewriting the file
    info = lock(telemetry_file_lock) do
        load_telemetry_file(telemetry_file)
    end
    # fully opted out (`telemetry = false` in the TOML file): stop here
    # (`==` not `===`: the TOML value may be any type, not necessarily Bool)
    get(info, "telemetry", true) == false && return headers
    # legal (GDPR/CCPA) message about telemetry
    if notify && !telemetry_notice_printed[]
        telemetry_notice_printed[] = true
        @info telemetry_notice()
    end
    # general system information
    push!(headers, "Julia-Version: $VERSION")
    system = Pkg.BinaryPlatforms.triplet(Pkg.BinaryPlatforms.platform_key_abi())
    push!(headers, "Julia-System: $system")
    # install-specific information; a literal `false` in the file means the
    # user opted out of that specific field
    if info["client_uuid"] !== false
        client_uuid = info["client_uuid"]::String
        push!(headers, "Julia-Client-UUID: $client_uuid")
        if info["secret_salt"] !== false
            secret_salt = info["secret_salt"]::String
            # NOTE(review): `salt_hash` is computed but never pushed as a
            # header — confirm whether a salt-hash header was intentionally
            # dropped or is missing here.
            salt_hash = hash_data("salt", client_uuid, secret_salt)
            project = Base.active_project()
            if project !== nothing
                # salted hash so the server cannot recover the project path
                project_hash = hash_data("project", project, info["secret_salt"])
                push!(headers, "Julia-Project-Hash: $project_hash")
            end
        end
    end
    # CI indicator variables: `true`/absent → default list, `false` → none,
    # an array → user-specified subset (uppercased, intersected with defaults)
    ci_variables = get(info, "ci_variables", CI_VARIABLES)
    ci_variables === true && (ci_variables = CI_VARIABLES)
    if ci_variables != false
        ci_info = String[]
        for var in CI_VARIABLES ∩ map(uppercase, ci_variables)
            val = get(ENV, var, nothing)
            # encode each variable as n(ot set)/t(rue)/f(alse)/o(ther)
            state = val === nothing ? "n" :
                lowercase(val) in ("true", "t", "1", "yes", "y") ? "t" :
                lowercase(val) in ("false", "f", "0", "no", "n") ? "f" : "o"
            push!(ci_info, "$var=$state")
        end
        if !isempty(ci_info)
            push!(headers, "Julia-CI-Variables: "*join(ci_info, ';'))
        end
    end
    # HyperLogLog cardinality estimator sample (bucket, trailing-zeros sample)
    if info["HyperLogLog"] != false
        bucket, sample = info["HyperLogLog"]
        push!(headers, "Julia-HyperLogLog: $bucket,$sample")
    end
    # interactive session?
    push!(headers, "Julia-Interactive: $(isinteractive())")
    return headers
end
"""
    download(
        url::AbstractString,
        dest::AbstractString;
        verbose::Bool = false,
        auth_header::Union{AbstractString, Nothing} = nothing,
    )

Download the file located at `url` and store it at `dest`, continuing if
`dest` already exists and the server and download engine support it.

If `auth_header` is not supplied, one is looked up via `get_auth_header`.
Telemetry headers for the package server (if applicable) are attached to the
request as well. Throws an error on failure; an `InterruptException` is
rethrown unchanged so user interrupts propagate.
"""
function download(
    url::AbstractString,
    dest::AbstractString;
    verbose::Bool = false,
    auth_header::Union{AbstractString, Nothing} = nothing,
)
    headers = String[]
    if auth_header === nothing
        auth_header = get_auth_header(url, verbose=verbose)
    end
    if auth_header !== nothing
        push!(headers, auth_header)
    end
    # attach telemetry headers (empty if telemetry does not apply to this URL)
    append!(headers, get_telemetry_headers(url))
    download_cmd = gen_download_cmd(url, dest, headers...)
    try
        # surface the engine's output only when running verbosely
        run(download_cmd, (devnull, verbose ? stdout : devnull, verbose ? stderr : devnull))
    catch e
        if isa(e, InterruptException)
            rethrow()
        end
        error("Could not download $(url) to $(dest):\n$(e)")
    end
end
"""
download_verify(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
verbose::Bool = false,
force::Bool = false,
quiet_download::Bool = false,
)
Download file located at `url`, verify it matches the given `hash`, and throw
an error if anything goes wrong. If `dest` already exists, just verify it. If
`force` is set to `true`, overwrite the given file if it exists but does not
match the given `hash`.
This method returns `true` if the file was downloaded successfully, `false`
if an existing file was removed due to the use of `force`, and throws an error
if `force` is not set and the already-existent file fails verification, or if
`force` is set, verification fails, and then verification fails again after
redownloading the file.
If `quiet_download` is set to `false`, this method will print to
stdout when downloading a new file. If it is set to `true` and `verbose` is
set to `false`, the downloading process will be completely silent. If
`verbose` is set to `true`, messages about integrity verification will be
printed in addition to messages regarding downloading.
"""
function download_verify(
url::AbstractString,
hash::Union{AbstractString, Nothing},
dest::AbstractString;
verbose::Bool = false,
force::Bool = false,
quiet_download::Bool = false,
)
# Whether the file existed in the first place
file_existed = false