fix incorrect folder name for julia-0.6.x

Former-commit-id: ef2c7401e0876f22d2f7762d182cfbcd5a7d9c70

julia-0.6.3/share/julia/base/pkg/cache.jl (new file, 93 lines)
@@ -0,0 +1,93 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license

module Cache

import ...LibGit2, ..Dir, ...Pkg.PkgError
using ..Types

rewrite_url_to = "https"

const GITHUB_REGEX =
    r"^(?:git@|git://|https://(?:[\w\.\+\-]+@)?)github.com[:/](([^/].+)/(.+?))(?:\.git)?$"i

path(pkg::AbstractString) = abspath(".cache", pkg)

function mkcachedir()
    cache = joinpath(realpath("."), ".cache")
    if isdir(cache)
        return
    end

    @static if is_unix()
        if Dir.isversioned(pwd())
            rootcache = joinpath(realpath(".."), ".cache")
            if !isdir(rootcache)
                mkdir(rootcache)
            end
            symlink(rootcache, cache)
            return
        end
    end
    mkdir(cache)
end

function prefetch(pkg::AbstractString, url::AbstractString, sha1s::Vector)
    isdir(".cache") || mkcachedir()

    cache = path(pkg)
    normalized_url = normalize_url(url)

    repo = if isdir(cache)
        LibGit2.GitRepo(cache) # open repo, free it at the end
    else
        info("Cloning cache of $pkg from $normalized_url")
        try
            # clone repo, free it at the end
            LibGit2.clone(normalized_url, cache, isbare = true, remote_cb = LibGit2.mirror_cb())
        catch err
            errmsg = if isa(err, LibGit2.Error.GitError)
                "Cannot clone $pkg from $normalized_url. $(err.msg)"
            elseif isa(err, InterruptException)
                "Package `$pkg` prefetching was interrupted."
            else
                "Unknown error: $err"
            end
            isdir(cache) && rm(cache, recursive=true)
            throw(PkgError(errmsg))
        end
    end
    try
        LibGit2.set_remote_url(repo, normalized_url)
        in_cache = BitArray(map(sha1->LibGit2.iscommit(sha1, repo), sha1s))
        if !all(in_cache)
            info("Updating cache of $pkg...")
            LibGit2.fetch(repo)
            in_cache = BitArray(map(sha1->LibGit2.iscommit(sha1, repo), sha1s))
        end
        sha1s[.!in_cache]
    finally
        close(repo) # closing repo opened/created above
    end
end
prefetch(pkg::AbstractString, url::AbstractString, sha1::AbstractString...) =
    prefetch(pkg, url, AbstractString[sha1...])

function setprotocol!(proto::AbstractString)
    global rewrite_url_to

    if length(proto) == 0
        rewrite_url_to = nothing
    else
        rewrite_url_to = proto
    end
end

function normalize_url(url::AbstractString)
    global rewrite_url_to

    m = match(GITHUB_REGEX,url)
    (m === nothing || rewrite_url_to === nothing) ?
        url : "$rewrite_url_to://github.com/$(m.captures[1]).git"
end

end # module
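A minimal usage sketch of the Cache module above (illustrative only, not part of the committed file; the package name, URL, and commit SHA-1 are hypothetical). Cache paths are relative, so the calls run inside the package directory via Dir.cd from dir.jl below.

# Illustrative sketch — hypothetical package, URL, and SHA-1.
Base.Pkg.Dir.cd() do
    missing_shas = Base.Pkg.Cache.prefetch("Example",
        "https://github.com/JuliaLang/Example.jl.git",
        ["0123456789abcdef0123456789abcdef01234567"])
    # prefetch returns the SHA-1s it could not fetch into the local bare-repo cache
    isempty(missing_shas) || warn("some requested commits are missing from the cache")
end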

julia-0.6.3/share/julia/base/pkg/dir.jl (new file, 82 lines)
@@ -0,0 +1,82 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license

module Dir

import ..Pkg: DEFAULT_META, META_BRANCH, PkgError
import ...LibGit2, ...LibGit2.with

const DIR_NAME = ".julia"

_pkgroot() = abspath(get(ENV,"JULIA_PKGDIR",joinpath(homedir(),DIR_NAME)))
isversioned(p::AbstractString) = ((x,y) = (VERSION.major, VERSION.minor); basename(p) == "v$x.$y")

function path()
    b = _pkgroot()
    x, y = VERSION.major, VERSION.minor
    d = joinpath(b,"v$x.$y")
    if isdir(d) || !isdir(b) || !isdir(joinpath(b, "METADATA"))
        return d
    end
    return b
end
path(pkg::AbstractString...) = normpath(path(),pkg...)

function cd(f::Function, args...; kws...)
    dir = path()
    metadata_dir = joinpath(dir, "METADATA")
    if !isdir(metadata_dir)
        !haskey(ENV,"JULIA_PKGDIR") ? init() :
            throw(PkgError("Package metadata directory $metadata_dir doesn't exist; run Pkg.init() to initialize it."))
    end
    if haskey(ENV,"JULIA_PKGDIR")
        withenv("JULIA_PKGDIR" => abspath(ENV["JULIA_PKGDIR"])) do
            Base.cd(()->f(args...; kws...), dir)
        end
    else
        Base.cd(()->f(args...; kws...), dir)
    end
end

function init(meta::AbstractString=DEFAULT_META, branch::AbstractString=META_BRANCH)
    dir = path()
    info("Initializing package repository $dir")
    metadata_dir = joinpath(dir, "METADATA")
    if isdir(metadata_dir)
        info("Package directory $dir is already initialized.")
        LibGit2.set_remote_url(metadata_dir, meta)
        return
    end
    local temp_dir = ""
    try
        mkpath(dir)
        temp_dir = mktempdir(dir)
        Base.cd(temp_dir) do
            info("Cloning METADATA from $meta")
            with(LibGit2.clone(meta, "METADATA", branch = branch)) do metadata_repo
                LibGit2.set_remote_url(metadata_repo, meta)
            end
            touch("REQUIRE")
            touch("META_BRANCH")
            write("META_BRANCH", branch)
        end
        #Move TEMP to METADATA
        Base.mv(joinpath(temp_dir,"METADATA"), metadata_dir)
        Base.mv(joinpath(temp_dir,"REQUIRE"), joinpath(dir,"REQUIRE"))
        Base.mv(joinpath(temp_dir,"META_BRANCH"), joinpath(dir,"META_BRANCH"))
        rm(temp_dir, recursive=true)
    catch err
        ispath(metadata_dir) && rm(metadata_dir, recursive=true)
        ispath(temp_dir) && rm(temp_dir, recursive=true)
        rethrow(err)
    end
end

function getmetabranch()
    try
        readline(joinpath(path(),"META_BRANCH"))
    catch err
        META_BRANCH
    end
end

end # module
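An illustrative sketch (not part of the committed file) of the Dir module above: resolve the versioned package directory and evaluate a thunk inside it.

# Illustrative sketch — assumes a standard ~/.julia layout or a JULIA_PKGDIR override.
pkgdir = Base.Pkg.Dir.path()        # e.g. ~/.julia/v0.6
Base.Pkg.Dir.cd() do
    isfile("REQUIRE") && print(readstring("REQUIRE"))
end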

julia-0.6.3/share/julia/base/pkg/entry.jl (new file, 765 lines)
@@ -0,0 +1,765 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module Entry
|
||||
|
||||
import Base: thispatch, nextpatch, nextminor, nextmajor, check_new_version
|
||||
import ..Reqs, ..Read, ..Query, ..Resolve, ..Cache, ..Write, ..Dir
|
||||
import ...LibGit2
|
||||
importall ...LibGit2
|
||||
import ...Pkg.PkgError
|
||||
using ..Types
|
||||
|
||||
macro recover(ex)
|
||||
quote
|
||||
try $(esc(ex))
|
||||
catch err
|
||||
show(err)
|
||||
print('\n')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function edit(f::Function, pkg::AbstractString, args...)
|
||||
r = Reqs.read("REQUIRE")
|
||||
reqs = Reqs.parse(r)
|
||||
avail = Read.available()
|
||||
!haskey(avail,pkg) && !haskey(reqs,pkg) && return false
|
||||
rʹ = f(r,pkg,args...)
|
||||
rʹ == r && return false
|
||||
reqsʹ = Reqs.parse(rʹ)
|
||||
reqsʹ != reqs && resolve(reqsʹ,avail)
|
||||
Reqs.write("REQUIRE",rʹ)
|
||||
info("Package database updated")
|
||||
return true
|
||||
end
|
||||
|
||||
function edit()
|
||||
editor = get(ENV,"VISUAL",get(ENV,"EDITOR",nothing))
|
||||
editor !== nothing ||
|
||||
throw(PkgError("set the EDITOR environment variable to an edit command"))
|
||||
editor = Base.shell_split(editor)
|
||||
reqs = Reqs.parse("REQUIRE")
|
||||
run(`$editor REQUIRE`)
|
||||
reqsʹ = Reqs.parse("REQUIRE")
|
||||
reqs == reqsʹ && return info("Nothing to be done")
|
||||
info("Computing changes...")
|
||||
resolve(reqsʹ)
|
||||
end
|
||||
|
||||
function add(pkg::AbstractString, vers::VersionSet)
|
||||
outdated = :maybe
|
||||
@sync begin
|
||||
@async if !edit(Reqs.add,pkg,vers)
|
||||
ispath(pkg) || throw(PkgError("unknown package $pkg"))
|
||||
info("Package $pkg is already installed")
|
||||
end
|
||||
branch = Dir.getmetabranch()
|
||||
outdated = with(GitRepo, "METADATA") do repo
|
||||
if LibGit2.branch(repo) == branch
|
||||
if LibGit2.isdiff(repo, "origin/$branch")
|
||||
outdated = :yes
|
||||
else
|
||||
try
|
||||
LibGit2.fetch(repo)
|
||||
outdated = LibGit2.isdiff(repo, "origin/$branch") ? (:yes) : (:no)
|
||||
end
|
||||
end
|
||||
else
|
||||
:no # user is doing something funky with METADATA
|
||||
end
|
||||
end
|
||||
end
|
||||
if outdated != :no
|
||||
is = outdated == :yes ? "is" : "might be"
|
||||
info("METADATA $is out-of-date — you may not have the latest version of $pkg")
|
||||
info("Use `Pkg.update()` to get the latest versions of your packages")
|
||||
end
|
||||
end
|
||||
add(pkg::AbstractString, vers::VersionNumber...) = add(pkg,VersionSet(vers...))
|
||||
|
||||
function rm(pkg::AbstractString)
|
||||
edit(Reqs.rm,pkg) && return
|
||||
ispath(pkg) || return info("Package $pkg is not installed")
|
||||
info("Removing $pkg (unregistered)")
|
||||
Write.remove(pkg)
|
||||
end
|
||||
|
||||
function available()
|
||||
all_avail = Read.available()
|
||||
avail = AbstractString[]
|
||||
for (pkg, vers) in all_avail
|
||||
any(x->Types.satisfies("julia", VERSION, x[2].requires), vers) && push!(avail, pkg)
|
||||
end
|
||||
sort!(avail, by=lowercase)
|
||||
end
|
||||
|
||||
function available(pkg::AbstractString)
|
||||
avail = Read.available(pkg)
|
||||
if !isempty(avail) || Read.isinstalled(pkg)
|
||||
return sort!(collect(keys(avail)))
|
||||
end
|
||||
throw(PkgError("$pkg is not a package (not registered or installed)"))
|
||||
end
|
||||
|
||||
function installed()
|
||||
pkgs = Dict{String,VersionNumber}()
|
||||
for (pkg,(ver,fix)) in Read.installed()
|
||||
pkgs[pkg] = ver
|
||||
end
|
||||
return pkgs
|
||||
end
|
||||
|
||||
function installed(pkg::AbstractString)
|
||||
avail = Read.available(pkg)
|
||||
if Read.isinstalled(pkg)
|
||||
res = typemin(VersionNumber)
|
||||
if ispath(joinpath(pkg,".git"))
|
||||
LibGit2.with(GitRepo, pkg) do repo
|
||||
res = Read.installed_version(pkg, repo, avail)
|
||||
end
|
||||
end
|
||||
return res
|
||||
end
|
||||
isempty(avail) && throw(PkgError("$pkg is not a package (not registered or installed)"))
|
||||
return nothing # registered but not installed
|
||||
end
|
||||
|
||||
function status(io::IO; pkgname::AbstractString = "")
|
||||
showpkg(pkg) = isempty(pkgname) ? true : (pkg == pkgname)
|
||||
reqs = Reqs.parse("REQUIRE")
|
||||
instd = Read.installed()
|
||||
required = sort!(collect(keys(reqs)))
|
||||
if !isempty(required)
|
||||
showpkg("") && println(io, "$(length(required)) required packages:")
|
||||
for pkg in required
|
||||
if !haskey(instd, pkg)
|
||||
showpkg(pkg) && status(io,pkg,"not found")
|
||||
else
|
||||
ver,fix = pop!(instd,pkg)
|
||||
showpkg(pkg) && status(io,pkg,ver,fix)
|
||||
end
|
||||
end
|
||||
end
|
||||
additional = sort!(collect(keys(instd)))
|
||||
if !isempty(additional)
|
||||
showpkg("") && println(io, "$(length(additional)) additional packages:")
|
||||
for pkg in additional
|
||||
ver,fix = instd[pkg]
|
||||
showpkg(pkg) && status(io,pkg,ver,fix)
|
||||
end
|
||||
end
|
||||
if isempty(required) && isempty(additional)
|
||||
println(io, "No packages installed")
|
||||
end
|
||||
end
|
||||
|
||||
status(io::IO, pkg::AbstractString) = status(io, pkgname = pkg)
|
||||
|
||||
function status(io::IO, pkg::AbstractString, ver::VersionNumber, fix::Bool)
|
||||
@printf io " - %-29s " pkg
|
||||
fix || return println(io,ver)
|
||||
@printf io "%-19s" ver
|
||||
if ispath(pkg,".git")
|
||||
prepo = GitRepo(pkg)
|
||||
try
|
||||
with(LibGit2.head(prepo)) do phead
|
||||
if LibGit2.isattached(prepo)
|
||||
print(io, LibGit2.shortname(phead))
|
||||
else
|
||||
print(io, string(LibGit2.GitHash(phead))[1:8])
|
||||
end
|
||||
end
|
||||
attrs = AbstractString[]
|
||||
isfile("METADATA",pkg,"url") || push!(attrs,"unregistered")
|
||||
LibGit2.isdirty(prepo) && push!(attrs,"dirty")
|
||||
isempty(attrs) || print(io, " (",join(attrs,", "),")")
|
||||
catch err
|
||||
print_with_color(Base.error_color(), io, " broken-repo (unregistered)")
|
||||
finally
|
||||
close(prepo)
|
||||
end
|
||||
else
|
||||
print_with_color(Base.warn_color(), io, "non-repo (unregistered)")
|
||||
end
|
||||
println(io)
|
||||
end
|
||||
|
||||
function status(io::IO, pkg::AbstractString, msg::AbstractString)
|
||||
@printf io " - %-29s %-19s\n" pkg msg
|
||||
end
|
||||
|
||||
function clone(url::AbstractString, pkg::AbstractString)
|
||||
info("Cloning $pkg from $url")
|
||||
ispath(pkg) && throw(PkgError("$pkg already exists"))
|
||||
try
|
||||
LibGit2.with(LibGit2.clone(url, pkg)) do repo
|
||||
LibGit2.set_remote_url(repo, url)
|
||||
end
|
||||
catch err
|
||||
isdir(pkg) && Base.rm(pkg, recursive=true)
|
||||
rethrow(err)
|
||||
end
|
||||
info("Computing changes...")
|
||||
if !edit(Reqs.add, pkg)
|
||||
isempty(Reqs.parse("$pkg/REQUIRE")) && return
|
||||
resolve()
|
||||
end
|
||||
end
|
||||
|
||||
function url_and_pkg(url_or_pkg::AbstractString)
|
||||
if !(':' in url_or_pkg)
|
||||
# no colon, could be a package name
|
||||
url_file = joinpath("METADATA", url_or_pkg, "url")
|
||||
isfile(url_file) && return readchomp(url_file), url_or_pkg
|
||||
end
|
||||
# try to parse as URL or local path
|
||||
m = match(r"(?:^|[/\\])(\w+?)(?:\.jl)?(?:\.git)?$", url_or_pkg)
|
||||
m === nothing && throw(PkgError("can't determine package name from URL: $url_or_pkg"))
|
||||
return url_or_pkg, m.captures[1]
|
||||
end
|
||||
|
||||
clone(url_or_pkg::AbstractString) = clone(url_and_pkg(url_or_pkg)...)
|
||||
|
||||
function checkout(pkg::AbstractString, branch::AbstractString, do_merge::Bool, do_pull::Bool)
|
||||
ispath(pkg,".git") || throw(PkgError("$pkg is not a git repo"))
|
||||
info("Checking out $pkg $branch...")
|
||||
with(GitRepo, pkg) do r
|
||||
LibGit2.transact(r) do repo
|
||||
LibGit2.isdirty(repo) && throw(PkgError("$pkg is dirty, bailing"))
|
||||
LibGit2.branch!(repo, branch, track=LibGit2.Consts.REMOTE_ORIGIN)
|
||||
do_merge && LibGit2.merge!(repo, fastforward=true) # merge changes
|
||||
if do_pull
|
||||
info("Pulling $pkg latest $branch...")
|
||||
LibGit2.fetch(repo)
|
||||
LibGit2.merge!(repo, fastforward=true)
|
||||
end
|
||||
resolve()
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function free(pkg::AbstractString)
|
||||
ispath(pkg,".git") || throw(PkgError("$pkg is not a git repo"))
|
||||
Read.isinstalled(pkg) || throw(PkgError("$pkg cannot be freed – not an installed package"))
|
||||
avail = Read.available(pkg)
|
||||
isempty(avail) && throw(PkgError("$pkg cannot be freed – not a registered package"))
|
||||
with(GitRepo, pkg) do repo
|
||||
LibGit2.isdirty(repo) && throw(PkgError("$pkg cannot be freed – repo is dirty"))
|
||||
info("Freeing $pkg")
|
||||
vers = sort!(collect(keys(avail)), rev=true)
|
||||
while true
|
||||
for ver in vers
|
||||
sha1 = avail[ver].sha1
|
||||
LibGit2.iscommit(sha1, repo) || continue
|
||||
return LibGit2.transact(repo) do r
|
||||
LibGit2.isdirty(repo) && throw(PkgError("$pkg is dirty, bailing"))
|
||||
LibGit2.checkout!(repo, sha1)
|
||||
resolve()
|
||||
end
|
||||
end
|
||||
isempty(Cache.prefetch(pkg, Read.url(pkg), [a.sha1 for (v,a)=avail])) && continue
|
||||
throw(PkgError("can't find any registered versions of $pkg to checkout"))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function free(pkgs)
|
||||
try
|
||||
for pkg in pkgs
|
||||
ispath(pkg,".git") || throw(PkgError("$pkg is not a git repo"))
|
||||
Read.isinstalled(pkg) || throw(PkgError("$pkg cannot be freed – not an installed package"))
|
||||
avail = Read.available(pkg)
|
||||
isempty(avail) && throw(PkgError("$pkg cannot be freed – not a registered package"))
|
||||
with(GitRepo, pkg) do repo
|
||||
LibGit2.isdirty(repo) && throw(PkgError("$pkg cannot be freed – repo is dirty"))
|
||||
info("Freeing $pkg")
|
||||
vers = sort!(collect(keys(avail)), rev=true)
|
||||
for ver in vers
|
||||
sha1 = avail[ver].sha1
|
||||
LibGit2.iscommit(sha1, repo) || continue
|
||||
LibGit2.checkout!(repo, sha1)
|
||||
break
|
||||
end
|
||||
end
|
||||
isempty(Cache.prefetch(pkg, Read.url(pkg), [a.sha1 for (v,a)=avail])) && continue
|
||||
throw(PkgError("Can't find any registered versions of $pkg to checkout"))
|
||||
end
|
||||
finally
|
||||
resolve()
|
||||
end
|
||||
end
|
||||
|
||||
function pin(pkg::AbstractString, head::AbstractString)
|
||||
ispath(pkg,".git") || throw(PkgError("$pkg is not a git repo"))
|
||||
should_resolve = true
|
||||
with(GitRepo, pkg) do repo
|
||||
id = if isempty(head) # get HEAD commit
|
||||
# no need to resolve, branch will be from HEAD
|
||||
should_resolve = false
|
||||
LibGit2.head_oid(repo)
|
||||
else
|
||||
LibGit2.revparseid(repo, head)
|
||||
end
|
||||
commit = LibGit2.GitCommit(repo, id)
|
||||
try
|
||||
# note: changing the following naming scheme requires a corresponding change in Read.ispinned()
|
||||
branch = "pinned.$(string(id)[1:8]).tmp"
|
||||
if LibGit2.isattached(repo) && LibGit2.branch(repo) == branch
|
||||
info("Package $pkg is already pinned" * (isempty(head) ? "" : " to the selected commit"))
|
||||
should_resolve = false
|
||||
return
|
||||
end
|
||||
ref = LibGit2.lookup_branch(repo, branch)
|
||||
try
|
||||
if !isnull(ref)
|
||||
if LibGit2.revparseid(repo, branch) != id
|
||||
throw(PkgError("Package $pkg: existing branch $branch has " *
|
||||
"been edited and doesn't correspond to its original commit"))
|
||||
end
|
||||
info("Package $pkg: checking out existing branch $branch")
|
||||
else
|
||||
info("Creating $pkg branch $branch")
|
||||
ref = Nullable(LibGit2.create_branch(repo, branch, commit))
|
||||
end
|
||||
|
||||
# checkout selected branch
|
||||
with(LibGit2.peel(LibGit2.GitTree, get(ref))) do btree
|
||||
LibGit2.checkout_tree(repo, btree)
|
||||
end
|
||||
# switch head to the branch
|
||||
LibGit2.head!(repo, get(ref))
|
||||
finally
|
||||
close(get(ref))
|
||||
end
|
||||
finally
|
||||
close(commit)
|
||||
end
|
||||
end
|
||||
should_resolve && resolve()
|
||||
nothing
|
||||
end
|
||||
pin(pkg::AbstractString) = pin(pkg, "")
|
||||
|
||||
function pin(pkg::AbstractString, ver::VersionNumber)
|
||||
ispath(pkg,".git") || throw(PkgError("$pkg is not a git repo"))
|
||||
Read.isinstalled(pkg) || throw(PkgError("$pkg cannot be pinned – not an installed package"))
|
||||
avail = Read.available(pkg)
|
||||
isempty(avail) && throw(PkgError("$pkg cannot be pinned – not a registered package"))
|
||||
haskey(avail,ver) || throw(PkgError("$pkg – $ver is not a registered version"))
|
||||
pin(pkg, avail[ver].sha1)
|
||||
end
|
||||
|
||||
function update(branch::AbstractString, upkgs::Set{String})
|
||||
info("Updating METADATA...")
|
||||
with(GitRepo, "METADATA") do repo
|
||||
try
|
||||
with(LibGit2.head(repo)) do h
|
||||
if LibGit2.branch(h) != branch
|
||||
if LibGit2.isdirty(repo)
|
||||
throw(PkgError("METADATA is dirty and not on $branch, bailing"))
|
||||
end
|
||||
if !LibGit2.isattached(repo)
|
||||
throw(PkgError("METADATA is detached not on $branch, bailing"))
|
||||
end
|
||||
LibGit2.fetch(repo)
|
||||
LibGit2.checkout_head(repo)
|
||||
LibGit2.branch!(repo, branch, track="refs/remotes/origin/$branch")
|
||||
LibGit2.merge!(repo)
|
||||
end
|
||||
end
|
||||
|
||||
LibGit2.fetch(repo)
|
||||
ff_succeeded = LibGit2.merge!(repo, fastforward=true)
|
||||
if !ff_succeeded
|
||||
LibGit2.rebase!(repo, "origin/$branch")
|
||||
end
|
||||
catch err
|
||||
cex = CapturedException(err, catch_backtrace())
|
||||
throw(PkgError("METADATA cannot be updated. Resolve problems manually in " *
|
||||
Pkg.dir("METADATA") * ".", cex))
|
||||
end
|
||||
end
|
||||
deferred_errors = CompositeException()
|
||||
avail = Read.available()
|
||||
# this has to happen before computing free/fixed
|
||||
for pkg in filter(Read.isinstalled, collect(keys(avail)))
|
||||
try
|
||||
Cache.prefetch(pkg, Read.url(pkg), [a.sha1 for (v,a)=avail[pkg]])
|
||||
catch err
|
||||
cex = CapturedException(err, catch_backtrace())
|
||||
push!(deferred_errors, PkgError("Package $pkg: unable to update cache.", cex))
|
||||
end
|
||||
end
|
||||
instd = Read.installed(avail)
|
||||
reqs = Reqs.parse("REQUIRE")
|
||||
if !isempty(upkgs)
|
||||
for (pkg, (v,f)) in instd
|
||||
satisfies(pkg, v, reqs) || throw(PkgError("Package $pkg: current " *
|
||||
"package status does not satisfy the requirements, cannot do " *
|
||||
"a partial update; use `Pkg.update()`"))
|
||||
end
|
||||
end
|
||||
dont_update = Query.partial_update_mask(instd, avail, upkgs)
|
||||
free = Read.free(instd,dont_update)
|
||||
for (pkg,ver) in free
|
||||
try
|
||||
Cache.prefetch(pkg, Read.url(pkg), [a.sha1 for (v,a)=avail[pkg]])
|
||||
catch err
|
||||
cex = CapturedException(err, catch_backtrace())
|
||||
push!(deferred_errors, PkgError("Package $pkg: unable to update cache.", cex))
|
||||
end
|
||||
end
|
||||
fixed = Read.fixed(avail,instd,dont_update)
|
||||
creds = LibGit2.CachedCredentials()
|
||||
try
|
||||
stopupdate = false
|
||||
for (pkg,ver) in fixed
|
||||
ispath(pkg,".git") || continue
|
||||
pkg in dont_update && continue
|
||||
with(GitRepo, pkg) do repo
|
||||
if LibGit2.isattached(repo)
|
||||
if LibGit2.isdirty(repo)
|
||||
warn("Package $pkg: skipping update (dirty)...")
|
||||
elseif Read.ispinned(repo)
|
||||
info("Package $pkg: skipping update (pinned)...")
|
||||
else
|
||||
prev_sha = string(LibGit2.head_oid(repo))
|
||||
success = true
|
||||
try
|
||||
LibGit2.fetch(repo, payload = Nullable(creds))
|
||||
LibGit2.reset!(creds)
|
||||
LibGit2.merge!(repo, fastforward=true)
|
||||
catch err
|
||||
cex = CapturedException(err, catch_backtrace())
|
||||
push!(deferred_errors, PkgError("Package $pkg cannot be updated.", cex))
|
||||
success = false
|
||||
stopupdate = isa(err, InterruptException)
|
||||
end
|
||||
if success
|
||||
post_sha = string(LibGit2.head_oid(repo))
|
||||
branch = LibGit2.branch(repo)
|
||||
info("Updating $pkg $branch...",
|
||||
prev_sha != post_sha ? " $(prev_sha[1:8]) → $(post_sha[1:8])" : "")
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
stopupdate && break
|
||||
if haskey(avail,pkg)
|
||||
try
|
||||
Cache.prefetch(pkg, Read.url(pkg), [a.sha1 for (v,a)=avail[pkg]])
|
||||
catch err
|
||||
cex = CapturedException(err, catch_backtrace())
|
||||
push!(deferred_errors, PkgError("Package $pkg: unable to update cache.", cex))
|
||||
end
|
||||
end
|
||||
end
|
||||
finally
|
||||
Base.securezero!(creds)
|
||||
end
|
||||
info("Computing changes...")
|
||||
resolve(reqs, avail, instd, fixed, free, upkgs)
|
||||
# Don't use instd here since it may have changed
|
||||
updatehook(sort!(collect(keys(installed()))))
|
||||
|
||||
# Print deferred errors
|
||||
length(deferred_errors) > 0 && throw(PkgError("Update finished with errors.", deferred_errors))
|
||||
nothing
|
||||
end
|
||||
|
||||
|
||||
function resolve(
|
||||
reqs :: Dict = Reqs.parse("REQUIRE"),
|
||||
avail :: Dict = Read.available(),
|
||||
instd :: Dict = Read.installed(avail),
|
||||
fixed :: Dict = Read.fixed(avail, instd),
|
||||
have :: Dict = Read.free(instd),
|
||||
upkgs :: Set{String} = Set{String}()
|
||||
)
|
||||
bktrc = Query.init_resolve_backtrace(reqs, fixed)
|
||||
orig_reqs = deepcopy(reqs)
|
||||
Query.check_fixed(reqs, fixed, avail)
|
||||
Query.propagate_fixed!(reqs, bktrc, fixed)
|
||||
deps, conflicts = Query.dependencies(avail, fixed)
|
||||
|
||||
for pkg in keys(reqs)
|
||||
if !haskey(deps,pkg)
|
||||
if "julia" in conflicts[pkg]
|
||||
throw(PkgError("$pkg can't be installed because it has no versions that support $VERSION " *
|
||||
"of julia. You may need to update METADATA by running `Pkg.update()`"))
|
||||
else
|
||||
sconflicts = join(conflicts[pkg], ", ", " and ")
|
||||
throw(PkgError("$pkg's requirements can't be satisfied because " *
|
||||
"of the following fixed packages: $sconflicts"))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
Query.check_requirements(reqs, deps, fixed)
|
||||
|
||||
deps = Query.prune_dependencies(reqs, deps, bktrc)
|
||||
want = Resolve.resolve(reqs, deps)
|
||||
|
||||
if !isempty(upkgs)
|
||||
orig_deps, _ = Query.dependencies(avail)
|
||||
Query.check_partial_updates(orig_reqs, orig_deps, want, fixed, upkgs)
|
||||
end
|
||||
|
||||
# compare what is installed with what should be
|
||||
changes = Query.diff(have, want, avail, fixed)
|
||||
isempty(changes) && return info("No packages to install, update or remove")
|
||||
|
||||
# prefetch phase isolates network activity, nothing to roll back
|
||||
missing = []
|
||||
for (pkg,(ver1,ver2)) in changes
|
||||
vers = String[]
|
||||
ver1 !== nothing && push!(vers,LibGit2.head(pkg))
|
||||
ver2 !== nothing && push!(vers,Read.sha1(pkg,ver2))
|
||||
append!(missing,
|
||||
map(sha1->(pkg,(ver1,ver2),sha1),
|
||||
Cache.prefetch(pkg, Read.url(pkg), vers)))
|
||||
end
|
||||
if !isempty(missing)
|
||||
msg = "Missing package versions (possible metadata misconfiguration):"
|
||||
for (pkg,ver,sha1) in missing
|
||||
msg *= " $pkg v$ver [$sha1[1:10]]\n"
|
||||
end
|
||||
throw(PkgError(msg))
|
||||
end
|
||||
|
||||
# try applying changes, roll back everything if anything fails
|
||||
changed = []
|
||||
imported = String[]
|
||||
try
|
||||
for (pkg,(ver1,ver2)) in changes
|
||||
if ver1 === nothing
|
||||
info("Installing $pkg v$ver2")
|
||||
Write.install(pkg, Read.sha1(pkg,ver2))
|
||||
elseif ver2 === nothing
|
||||
info("Removing $pkg v$ver1")
|
||||
Write.remove(pkg)
|
||||
else
|
||||
up = ver1 <= ver2 ? "Up" : "Down"
|
||||
info("$(up)grading $pkg: v$ver1 => v$ver2")
|
||||
Write.update(pkg, Read.sha1(pkg,ver2))
|
||||
pkgsym = Symbol(pkg)
|
||||
if Base.isbindingresolved(Main, pkgsym) && isa(getfield(Main, pkgsym), Module)
|
||||
push!(imported, "- $pkg")
|
||||
end
|
||||
end
|
||||
push!(changed,(pkg,(ver1,ver2)))
|
||||
end
|
||||
catch err
|
||||
for (pkg,(ver1,ver2)) in reverse!(changed)
|
||||
if ver1 === nothing
|
||||
info("Rolling back install of $pkg")
|
||||
@recover Write.remove(pkg)
|
||||
elseif ver2 === nothing
|
||||
info("Rolling back deleted $pkg to v$ver1")
|
||||
@recover Write.install(pkg, Read.sha1(pkg,ver1))
|
||||
else
|
||||
info("Rolling back $pkg from v$ver2 to v$ver1")
|
||||
@recover Write.update(pkg, Read.sha1(pkg,ver1))
|
||||
end
|
||||
end
|
||||
rethrow(err)
|
||||
end
|
||||
if !isempty(imported)
|
||||
warn(join(["The following packages have been updated but were already imported:",
|
||||
imported..., "Restart Julia to use the updated versions."], "\n"))
|
||||
end
|
||||
# re/build all updated/installed packages
|
||||
build(map(x->x[1], filter(x -> x[2][2] !== nothing, changes)))
|
||||
end
|
||||
|
||||
function warnbanner(msg...; label="[ WARNING ]", prefix="")
|
||||
cols = Base.displaysize(STDERR)[2]
|
||||
warn(prefix="", Base.cpad(label,cols,"="))
|
||||
println(STDERR)
|
||||
warn(prefix=prefix, msg...)
|
||||
println(STDERR)
|
||||
warn(prefix="", "="^cols)
|
||||
end
|
||||
|
||||
function build(pkg::AbstractString, build_file::AbstractString, errfile::AbstractString)
|
||||
# To isolate the build from the running Julia process, we execute each build.jl file in
|
||||
# a separate process. Errors are serialized to errfile for later reporting.
|
||||
# TODO: serialize the same way the load cache does, not with strings
|
||||
LOAD_PATH = filter(x -> x isa AbstractString, Base.LOAD_PATH)
|
||||
code = """
|
||||
empty!(Base.LOAD_PATH)
|
||||
append!(Base.LOAD_PATH, $(repr(LOAD_PATH)))
|
||||
empty!(Base.LOAD_CACHE_PATH)
|
||||
append!(Base.LOAD_CACHE_PATH, $(repr(Base.LOAD_CACHE_PATH)))
|
||||
empty!(Base.DL_LOAD_PATH)
|
||||
append!(Base.DL_LOAD_PATH, $(repr(Base.DL_LOAD_PATH)))
|
||||
open("$(escape_string(errfile))", "a") do f
|
||||
pkg, build_file = "$pkg", "$(escape_string(build_file))"
|
||||
try
|
||||
info("Building \$pkg")
|
||||
cd(dirname(build_file)) do
|
||||
evalfile(build_file)
|
||||
end
|
||||
catch err
|
||||
Base.Pkg.Entry.warnbanner(err, label="[ ERROR: \$pkg ]")
|
||||
serialize(f, pkg)
|
||||
serialize(f, err)
|
||||
end
|
||||
end
|
||||
"""
|
||||
cmd = ```
|
||||
$(Base.julia_cmd()) -O0
|
||||
--compilecache=$(Bool(Base.JLOptions().use_compilecache) ? "yes" : "no")
|
||||
--history-file=no
|
||||
--color=$(Base.have_color ? "yes" : "no")
|
||||
--eval $code
|
||||
```
|
||||
|
||||
success(pipeline(cmd, stdout=STDOUT, stderr=STDERR))
|
||||
end
|
||||
|
||||
function build!(pkgs::Vector, seen::Set, errfile::AbstractString)
|
||||
for pkg in pkgs
|
||||
pkg == "julia" && continue
|
||||
pkg in seen ? continue : push!(seen,pkg)
|
||||
Read.isinstalled(pkg) || throw(PkgError("$pkg is not an installed package"))
|
||||
build!(Read.requires_list(pkg), seen, errfile)
|
||||
path = abspath(pkg,"deps","build.jl")
|
||||
isfile(path) || continue
|
||||
build(pkg, path, errfile) || error("Build process failed.")
|
||||
end
|
||||
end
|
||||
|
||||
function build!(pkgs::Vector, errs::Dict, seen::Set=Set())
|
||||
errfile = tempname()
|
||||
touch(errfile) # create empty file
|
||||
try
|
||||
build!(pkgs, seen, errfile)
|
||||
open(errfile, "r") do f
|
||||
while !eof(f)
|
||||
pkg = deserialize(f)
|
||||
err = deserialize(f)
|
||||
errs[pkg] = err
|
||||
end
|
||||
end
|
||||
finally
|
||||
isfile(errfile) && Base.rm(errfile)
|
||||
end
|
||||
end
|
||||
|
||||
function build(pkgs::Vector)
|
||||
errs = Dict()
|
||||
build!(pkgs,errs)
|
||||
isempty(errs) && return
|
||||
println(STDERR)
|
||||
warnbanner(label="[ BUILD ERRORS ]", """
|
||||
WARNING: $(join(keys(errs),", "," and ")) had build errors.
|
||||
|
||||
- packages with build errors remain installed in $(pwd())
|
||||
- build the package(s) and all dependencies with `Pkg.build("$(join(keys(errs),"\", \""))")`
|
||||
- build a single package by running its `deps/build.jl` script
|
||||
""")
|
||||
end
|
||||
build() = build(sort!(collect(keys(installed()))))
|
||||
|
||||
function updatehook!(pkgs::Vector, errs::Dict, seen::Set=Set())
|
||||
for pkg in pkgs
|
||||
pkg in seen && continue
|
||||
updatehook!(Read.requires_list(pkg),errs,push!(seen,pkg))
|
||||
path = abspath(pkg,"deps","update.jl")
|
||||
isfile(path) || continue
|
||||
info("Running update script for $pkg")
|
||||
cd(dirname(path)) do
|
||||
try evalfile(path)
|
||||
catch err
|
||||
warnbanner(err, label="[ ERROR: $pkg ]")
|
||||
errs[pkg] = err
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function updatehook(pkgs::Vector)
|
||||
errs = Dict()
|
||||
updatehook!(pkgs,errs)
|
||||
isempty(errs) && return
|
||||
println(STDERR)
|
||||
warnbanner(label="[ UPDATE ERRORS ]", """
|
||||
WARNING: $(join(keys(errs),", "," and ")) had update errors.
|
||||
|
||||
- Unrelated packages are unaffected
|
||||
- To retry, run Pkg.update() again
|
||||
""")
|
||||
end
|
||||
|
||||
function test!(pkg::AbstractString,
|
||||
errs::Vector{AbstractString},
|
||||
nopkgs::Vector{AbstractString},
|
||||
notests::Vector{AbstractString}; coverage::Bool=false)
|
||||
reqs_path = abspath(pkg,"test","REQUIRE")
|
||||
if isfile(reqs_path)
|
||||
tests_require = Reqs.parse(reqs_path)
|
||||
if (!isempty(tests_require))
|
||||
info("Computing test dependencies for $pkg...")
|
||||
resolve(merge(Reqs.parse("REQUIRE"), tests_require))
|
||||
end
|
||||
end
|
||||
test_path = abspath(pkg,"test","runtests.jl")
|
||||
if !isdir(pkg)
|
||||
push!(nopkgs, pkg)
|
||||
elseif !isfile(test_path)
|
||||
push!(notests, pkg)
|
||||
else
|
||||
info("Testing $pkg")
|
||||
cd(dirname(test_path)) do
|
||||
try
|
||||
color = Base.have_color? "--color=yes" : "--color=no"
|
||||
codecov = coverage? ["--code-coverage=user"] : ["--code-coverage=none"]
|
||||
compilecache = "--compilecache=" * (Bool(Base.JLOptions().use_compilecache) ? "yes" : "no")
|
||||
julia_exe = Base.julia_cmd()
|
||||
run(`$julia_exe --check-bounds=yes $codecov $color $compilecache $test_path`)
|
||||
info("$pkg tests passed")
|
||||
catch err
|
||||
warnbanner(err, label="[ ERROR: $pkg ]")
|
||||
push!(errs,pkg)
|
||||
end
|
||||
end
|
||||
end
|
||||
isfile(reqs_path) && resolve()
|
||||
end
|
||||
|
||||
mutable struct PkgTestError <: Exception
|
||||
msg::String
|
||||
end
|
||||
|
||||
function Base.showerror(io::IO, ex::PkgTestError, bt; backtrace=true)
|
||||
print_with_color(Base.error_color(), io, ex.msg)
|
||||
end
|
||||
|
||||
function test(pkgs::Vector{AbstractString}; coverage::Bool=false)
|
||||
errs = AbstractString[]
|
||||
nopkgs = AbstractString[]
|
||||
notests = AbstractString[]
|
||||
for pkg in pkgs
|
||||
test!(pkg,errs,nopkgs,notests; coverage=coverage)
|
||||
end
|
||||
if !all(isempty, (errs, nopkgs, notests))
|
||||
messages = AbstractString[]
|
||||
if !isempty(errs)
|
||||
push!(messages, "$(join(errs,", "," and ")) had test errors")
|
||||
end
|
||||
if !isempty(nopkgs)
|
||||
msg = length(nopkgs) > 1 ? " are not installed packages" :
|
||||
" is not an installed package"
|
||||
push!(messages, string(join(nopkgs,", ", " and "), msg))
|
||||
end
|
||||
if !isempty(notests)
|
||||
push!(messages, "$(join(notests,", "," and ")) did not provide a test/runtests.jl file")
|
||||
end
|
||||
throw(PkgTestError(join(messages, "and")))
|
||||
end
|
||||
end
|
||||
|
||||
test(;coverage::Bool=false) = test(sort!(AbstractString[keys(installed())...]); coverage=coverage)
|
||||
|
||||
end # module
|
||||
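The Entry functions above are not called directly; the exported Pkg API (pkg.jl, next file) runs them inside the package directory through Dir.cd. An illustrative sketch, where "Example" is a hypothetical registered package:

# Illustrative sketch, not part of the committed file.
Pkg.status()                        # -> cd(Entry.status, STDOUT)
Pkg.add("Example")                  # -> cd(Entry.add, "Example")
Pkg.test("Example", coverage=true)  # -> cd(Entry.test, AbstractString["Example"]; coverage=true)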

julia-0.6.3/share/julia/base/pkg/pkg.jl (new file, 319 lines)
@@ -0,0 +1,319 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license

"""
    Pkg

The `Pkg` module provides package management for Julia.
Use
`Pkg.status()` for a list of installed packages,
`Pkg.add("<pkg name>")` to add a package,
`Pkg.update()` to update the installed packages.

Please see the manual section on packages for more information.
"""
module Pkg

export Dir, Types, Reqs, Cache, Read, Query, Resolve, Write, Entry
export dir, init, rm, add, available, installed, status, clone, checkout,
       update, resolve, test, build, free, pin, PkgError, setprotocol!

const DEFAULT_META = "https://github.com/JuliaLang/METADATA.jl"
const META_BRANCH = "metadata-v2"

mutable struct PkgError <: Exception
    msg::AbstractString
    ex::Nullable{Exception}
end
PkgError(msg::AbstractString) = PkgError(msg, Nullable{Exception}())
function Base.showerror(io::IO, pkgerr::PkgError)
    print(io, pkgerr.msg)
    if !isnull(pkgerr.ex)
        pkgex = get(pkgerr.ex)
        if isa(pkgex, CompositeException)
            for cex in pkgex
                print(io, "\n=> ")
                showerror(io, cex)
            end
        else
            print(io, "\n")
            showerror(io, pkgex)
        end
    end
end

for file in split("dir types reqs cache read query resolve write entry")
    include("$file.jl")
end
const cd = Dir.cd

dir(path...) = Dir.path(path...)

# remove extension .jl
const PKGEXT = ".jl"
splitjl(pkg::AbstractString) = endswith(pkg, PKGEXT) ? pkg[1:(end-length(PKGEXT))] : pkg

"""
    dir() -> AbstractString

Returns the absolute path of the package directory. This defaults to
`joinpath(homedir(),".julia","v\$(VERSION.major).\$(VERSION.minor)")` on all platforms (i.e.
`~/.julia/v$(VERSION.major).$(VERSION.minor)` in UNIX shell syntax). If the `JULIA_PKGDIR`
environment variable is set, then that path is used in the returned value as
`joinpath(ENV["JULIA_PKGDIR"],"v\$(VERSION.major).\$(VERSION.minor)")`. If `JULIA_PKGDIR` is
a relative path, it is interpreted relative to whatever the current working directory is.
"""
dir()

"""
    dir(names...) -> AbstractString

Equivalent to `normpath(Pkg.dir(),names...)` – i.e. it appends path components to the
package directory and normalizes the resulting path. In particular, `Pkg.dir(pkg)` returns
the path to the package `pkg`.
"""
dir(names...)

"""
    init(meta::AbstractString=DEFAULT_META, branch::AbstractString=META_BRANCH)

Initialize `Pkg.dir()` as a package directory. This will be done automatically when the
`JULIA_PKGDIR` is not set and `Pkg.dir()` uses its default value. As part of this process,
clones a local METADATA git repository from the site and branch specified by its arguments,
which are typically not provided. Explicit (non-default) arguments can be used to support a
custom METADATA setup.
"""
init(meta::AbstractString=DEFAULT_META, branch::AbstractString=META_BRANCH) = Dir.init(meta,branch)

function __init__()
    vers = "v$(VERSION.major).$(VERSION.minor)"
    vers = ccall(:jl_uses_cpuid_tag, Cint, ()) == 0 ? vers :
           joinpath(vers,hex(ccall(:jl_cpuid_tag, UInt64, ()), 2*sizeof(UInt64)))
    unshift!(Base.LOAD_CACHE_PATH, abspath(Dir._pkgroot(), "lib", vers))
end

"""
    edit()

Opens `Pkg.dir("REQUIRE")` in the editor specified by the `VISUAL` or `EDITOR` environment
variables; when the editor command returns, it runs `Pkg.resolve()` to determine and install
a new optimal set of installed package versions.
"""
edit() = cd(Entry.edit)

"""
    rm(pkg)

Remove all requirement entries for `pkg` from `Pkg.dir("REQUIRE")` and call `Pkg.resolve()`.
"""
rm(pkg::AbstractString) = cd(Entry.rm,splitjl(pkg))

"""
    add(pkg, vers...)

Add a requirement entry for `pkg` to `Pkg.dir("REQUIRE")` and call `Pkg.resolve()`. If
`vers` are given, they must be `VersionNumber` objects and they specify acceptable version
intervals for `pkg`.
"""
add(pkg::AbstractString, vers::VersionNumber...) = cd(Entry.add,splitjl(pkg),vers...)

"""
    available() -> Vector{String}

Returns the names of available packages.
"""
available() = cd(Entry.available)

"""
    available(pkg) -> Vector{VersionNumber}

Returns the version numbers available for package `pkg`.
"""
available(pkg::AbstractString) = cd(Entry.available,splitjl(pkg))

"""
    installed() -> Dict{String,VersionNumber}

Returns a dictionary mapping installed package names to the installed version number of each
package.
"""
installed() = cd(Entry.installed)

"""
    installed(pkg) -> Void | VersionNumber

If `pkg` is installed, return the installed version number. If `pkg` is registered,
but not installed, return `nothing`.
"""
installed(pkg::AbstractString) = cd(Entry.installed,splitjl(pkg))

"""
    status()

Prints out a summary of what packages are installed and what version and state they're in.
"""
status(io::IO=STDOUT) = cd(Entry.status,io)

"""
    status(pkg)

Prints out a summary of what version and state `pkg`, specifically, is in.
"""
status(pkg::AbstractString, io::IO=STDOUT) = cd(Entry.status,io,splitjl(pkg))

"""
    clone(pkg)

If `pkg` has a URL registered in `Pkg.dir("METADATA")`, clone it from that URL on the
default branch. The package does not need to have any registered versions.
"""
clone(url_or_pkg::AbstractString) = cd(Entry.clone,url_or_pkg)

"""
    clone(url, [pkg])

Clone a package directly from the git URL `url`. The package does not need to be registered
in `Pkg.dir("METADATA")`. The package repo is cloned by the name `pkg` if provided; if not
provided, `pkg` is determined automatically from `url`.
"""
clone(url::AbstractString, pkg::AbstractString) = cd(Entry.clone,url,splitjl(pkg))

"""
    checkout(pkg, [branch="master"]; merge=true, pull=true)

Checkout the `Pkg.dir(pkg)` repo to the branch `branch`. Defaults to checking out the
"master" branch. To go back to using the newest compatible released version, use
`Pkg.free(pkg)`. Changes are merged (fast-forward only) if the keyword argument `merge ==
true`, and the latest version is pulled from the upstream repo if `pull == true`.
"""
checkout(pkg::AbstractString, branch::AbstractString="master"; merge::Bool=true, pull::Bool=true) =
    cd(Entry.checkout,splitjl(pkg),branch,merge,pull)

"""
    free(pkg)

Free the package `pkg` to be managed by the package manager again. It calls `Pkg.resolve()`
to determine optimal package versions after. This is an inverse for both `Pkg.checkout` and
`Pkg.pin`.

You can also supply an iterable collection of package names, e.g., `Pkg.free(("Pkg1",
"Pkg2"))` to free multiple packages at once.
"""
free(pkg) = cd(Entry.free,splitjl.(pkg))

"""
    pin(pkg)

Pin `pkg` at the current version. To go back to using the newest compatible released
version, use `Pkg.free(pkg)`
"""
pin(pkg::AbstractString) = cd(Entry.pin,splitjl(pkg))

"""
    pin(pkg, version)

Pin `pkg` at registered version `version`.
"""
pin(pkg::AbstractString, ver::VersionNumber) = cd(Entry.pin,splitjl(pkg),ver)

"""
    update(pkgs...)

Update the metadata repo – kept in `Pkg.dir("METADATA")` – then update any fixed packages
that can safely be pulled from their origin; then call `Pkg.resolve()` to determine a new
optimal set of packages versions.

Without arguments, updates all installed packages. When one or more package names are provided as
arguments, only those packages and their dependencies are updated.
"""
update(upkgs::AbstractString...) = cd(Entry.update,Dir.getmetabranch(),Set{String}(splitjl.([upkgs...])))

"""
    resolve()

Determines an optimal, consistent set of package versions to install or upgrade to. The
optimal set of package versions is based on the contents of `Pkg.dir("REQUIRE")` and the
state of installed packages in `Pkg.dir()`, Packages that are no longer required are moved
into `Pkg.dir(".trash")`.
"""
resolve() = cd(Entry.resolve)

"""
    build()

Run the build scripts for all installed packages in depth-first recursive order.
"""
build() = cd(Entry.build)

"""
    build(pkgs...)

Run the build script in `deps/build.jl` for each package in `pkgs` and all of their
dependencies in depth-first recursive order. This is called automatically by `Pkg.resolve()`
on all installed or updated packages.
"""
build(pkgs::AbstractString...) = cd(Entry.build,[splitjl.(pkgs)...])

"""
    test(; coverage=false)

Run the tests for all installed packages ensuring that each package's test dependencies are
installed for the duration of the test. A package is tested by running its
`test/runtests.jl` file and test dependencies are specified in `test/REQUIRE`.
Coverage statistics for the packages may be generated by passing `coverage=true`.
The default behavior is not to run coverage.
"""
test(;coverage::Bool=false) = cd(Entry.test; coverage=coverage)

"""
    test(pkgs...; coverage=false)

Run the tests for each package in `pkgs` ensuring that each package's test dependencies are
installed for the duration of the test. A package is tested by running its
`test/runtests.jl` file and test dependencies are specified in `test/REQUIRE`.
Coverage statistics for the packages may be generated by passing `coverage=true`.
The default behavior is not to run coverage.
"""
test(pkgs::AbstractString...; coverage::Bool=false) = cd(Entry.test,AbstractString[splitjl.(pkgs)...]; coverage=coverage)

"""
    dependents(pkg)

List the packages that have `pkg` as a dependency.
"""
dependents(pkg::AbstractString) = Reqs.dependents(splitjl(pkg))

"""
    setprotocol!(proto)

Set the protocol used to access GitHub-hosted packages. Defaults to 'https', with a blank
`proto` delegating the choice to the package developer.
"""
setprotocol!(proto::AbstractString) = Cache.setprotocol!(proto)


# point users to PkgDev
register(args...) =
    error("Pkg.register(pkg,[url]) has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

tag(pkg, ver=nothing, commit=nothing) =
    error("Pkg.tag(pkg, [ver, [commit]]) has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

publish() =
    error("Pkg.publish() has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

generate(pkg, license) =
    error("Pkg.generate(pkg, license) has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

license(lic=nothing) =
    error("Pkg.license([lic]) has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

submit(pkg, commit=nothing) =
    error("Pkg.submit(pkg[, commit]) has been moved to the package PkgDev.jl.\n",
          "Run Pkg.add(\"PkgDev\") to install PkgDev on Julia v0.5-")

end # module
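An illustrative session against the API documented above (the package name and version are hypothetical):

# Illustrative sketch, not part of the committed file.
Pkg.init()                      # clone METADATA into Pkg.dir()
Pkg.add("Example")              # hypothetical registered package
Pkg.pin("Example", v"0.4.1")    # pin at a hypothetical registered version
Pkg.free("Example")             # hand it back to the resolver
Pkg.update("Example")           # partial update of this package and its dependencies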

julia-0.6.3/share/julia/base/pkg/query.jl (new file, 588 lines)
@@ -0,0 +1,588 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module Query
|
||||
|
||||
import ...Pkg.PkgError
|
||||
using ..Types
|
||||
|
||||
function init_resolve_backtrace(reqs::Requires, fix::Dict{String,Fixed} = Dict{String,Fixed}())
|
||||
bktrc = ResolveBacktrace()
|
||||
for (p,f) in fix
|
||||
bktrc[p] = ResolveBacktraceItem(:fixed, f.version)
|
||||
end
|
||||
for (p,vs) in reqs
|
||||
bktrcp = get!(bktrc, p) do; ResolveBacktraceItem() end
|
||||
push!(bktrcp, :required, vs)
|
||||
end
|
||||
return bktrc
|
||||
end
|
||||
|
||||
function check_fixed(reqs::Requires, fix::Dict{String,Fixed}, avail::Dict)
|
||||
for (p1,f1) in fix
|
||||
for p2 in keys(f1.requires)
|
||||
haskey(avail, p2) || haskey(fix, p2) || throw(PkgError("unknown package $p2 required by $p1"))
|
||||
end
|
||||
satisfies(p1, f1.version, reqs) ||
|
||||
warn("$p1 is fixed at $(f1.version) conflicting with top-level requirement: $(reqs[p1])")
|
||||
for (p2,f2) in fix
|
||||
satisfies(p1, f1.version, f2.requires) ||
|
||||
warn("$p1 is fixed at $(f1.version) conflicting with requirement for $p2: $(f2.requires[p1])")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
function propagate_fixed!(reqs::Requires, bktrc::ResolveBacktrace, fix::Dict{String,Fixed})
|
||||
for (p,f) in fix
|
||||
merge_requires!(reqs, f.requires)
|
||||
for (rp,rvs) in f.requires
|
||||
bktrcp = get!(bktrc, rp) do; ResolveBacktraceItem() end
|
||||
push!(bktrcp, p=>bktrc[p], rvs)
|
||||
end
|
||||
end
|
||||
for (p,f) in fix
|
||||
delete!(reqs, p)
|
||||
end
|
||||
reqs
|
||||
end
|
||||
|
||||
# Specialized copy for the avail argument below because the deepcopy is slow
|
||||
function availcopy(avail)
|
||||
new_avail = similar(avail)
|
||||
for (pkg, vers_avail) in avail
|
||||
new_vers_avail = similar(vers_avail)
|
||||
for (version, pkg_avail) in vers_avail
|
||||
new_vers_avail[version] = copy(pkg_avail)
|
||||
end
|
||||
new_avail[pkg] = new_vers_avail
|
||||
end
|
||||
return new_avail
|
||||
end
|
||||
|
||||
# Generate a reverse dependency graph (package names only)
|
||||
function gen_backdeps(avail::Dict)
|
||||
backdeps = Dict{String,Set{String}}()
|
||||
for (ap,av) in avail, (v,a) in av, rp in keys(a.requires)
|
||||
s = get!(backdeps, rp) do; Set{String}() end
|
||||
push!(s, ap)
|
||||
end
|
||||
return backdeps
|
||||
end
|
||||
|
||||
function dependencies(avail::Dict, fix::Dict = Dict{String,Fixed}("julia"=>Fixed(VERSION)))
|
||||
avail = availcopy(avail)
|
||||
conflicts = Dict{String,Set{String}}()
|
||||
to_expunge = VersionNumber[]
|
||||
emptied = String[]
|
||||
backdeps = gen_backdeps(avail)
|
||||
|
||||
for (fp,fx) in fix
|
||||
delete!(avail, fp)
|
||||
haskey(backdeps, fp) || continue
|
||||
# for (ap,av) in avail
|
||||
for ap in backdeps[fp]
|
||||
haskey(avail, ap) || continue
|
||||
av = avail[ap]
|
||||
empty!(to_expunge)
|
||||
for (v,a) in av
|
||||
if satisfies(fp, fx.version, a.requires)
|
||||
delete!(a.requires, fp)
|
||||
else
|
||||
conflicts_ap = get!(conflicts, ap) do; Set{String}() end
|
||||
push!(conflicts_ap, fp)
|
||||
# don't delete v from av right away so as not to screw up iteration
|
||||
push!(to_expunge, v)
|
||||
end
|
||||
end
|
||||
for v in to_expunge
|
||||
delete!(av, v)
|
||||
end
|
||||
isempty(av) && push!(emptied, ap)
|
||||
end
|
||||
end
|
||||
while !isempty(emptied)
|
||||
deleted_pkgs = String[]
|
||||
for ap in emptied
|
||||
delete!(avail, ap)
|
||||
push!(deleted_pkgs, ap)
|
||||
end
|
||||
empty!(emptied)
|
||||
|
||||
for dp in deleted_pkgs
|
||||
haskey(backdeps, dp) || continue
|
||||
for ap in backdeps[dp]
|
||||
haskey(avail, ap) || continue
|
||||
av = avail[ap]
|
||||
empty!(to_expunge)
|
||||
for (v,a) in av
|
||||
haskey(a.requires, dp) || continue
|
||||
conflicts_ap = get!(conflicts, ap) do; Set{String}() end
|
||||
union!(conflicts_ap, conflicts[dp])
|
||||
push!(to_expunge, v)
|
||||
end
|
||||
for v in to_expunge
|
||||
delete!(av, v)
|
||||
end
|
||||
isempty(av) && push!(emptied, ap)
|
||||
end
|
||||
end
|
||||
end
|
||||
avail, conflicts
|
||||
end
|
||||
|
||||
function partial_update_mask(instd::Dict{String,Tuple{VersionNumber,Bool}},
|
||||
avail::Dict{String,Dict{VersionNumber,Available}}, upkgs::Set{String})
|
||||
dont_update = Set{String}()
|
||||
isempty(upkgs) && return dont_update
|
||||
avail_new = deepcopy(avail)
|
||||
for p in upkgs
|
||||
haskey(instd, p) || throw(PkgError("Package $p is not installed"))
|
||||
v = instd[p][1]
|
||||
if haskey(avail, p)
|
||||
for vn in keys(avail[p])
|
||||
vn < v && delete!(avail_new[p], vn)
|
||||
end
|
||||
end
|
||||
end
|
||||
avail_new = dependencies_subset(avail_new, upkgs)
|
||||
|
||||
for p in keys(avail)
|
||||
!haskey(avail_new, p) && push!(dont_update, p)
|
||||
end
|
||||
for p in keys(instd)
|
||||
!haskey(avail_new, p) && p ∉ upkgs && push!(dont_update, p)
|
||||
end
|
||||
return dont_update
|
||||
end
|
||||
|
||||
# Try to produce some helpful message in case of a partial update which does not go all the way
|
||||
# (Does not do a full analysis, it only checks requirements and direct dependents.)
|
||||
function check_partial_updates(reqs::Requires,
|
||||
deps::Dict{String,Dict{VersionNumber,Available}},
|
||||
want::Dict{String,VersionNumber},
|
||||
fixed::Dict{String,Fixed},
|
||||
upkgs::Set{String})
|
||||
for p in upkgs
|
||||
if !haskey(want, p)
|
||||
if !haskey(fixed, p)
|
||||
warn("Something went wrong with the update of package $p, please submit a bug report")
|
||||
continue
|
||||
end
|
||||
v = fixed[p].version
|
||||
else
|
||||
v = want[p]
|
||||
if haskey(fixed, p) && v != fixed[p].version
|
||||
warn("Something went wrong with the update of package $p, please submit a bug report")
|
||||
continue
|
||||
end
|
||||
end
|
||||
haskey(deps, p) || continue
|
||||
vers = sort!(collect(keys(deps[p])))
|
||||
higher_vers = vers[vers .> v]
|
||||
isempty(higher_vers) && continue # package p has been set to the highest available version
|
||||
|
||||
# Determine if there are packages which depend on `p` and somehow prevent its update to
|
||||
# the latest version
|
||||
blocking_parents = Set{String}()
|
||||
for (p1,d1) in deps
|
||||
p1 in upkgs && continue # package `p1` is among the ones to be updated, skip the check
|
||||
haskey(fixed, p1) || continue # if package `p1` is not fixed, it can't be blocking
|
||||
r1 = fixed[p1].requires # get `p1` requirements
|
||||
haskey(r1, p) || continue # check if package `p1` requires `p`
|
||||
vs1 = r1[p] # get the versions of `p` allowed by `p1` requirements
|
||||
any(hv in vs1 for hv in higher_vers) && continue # package `p1` would allow some of the higher versions,
|
||||
# therefore it's not responsible for blocking `p`
|
||||
push!(blocking_parents, p1) # package `p1` is blocking the update of `p`
|
||||
end
|
||||
|
||||
# Determine if the update of `p` is prevented by explicit user-provided requirements
|
||||
blocking_reqs = (haskey(reqs, p) && all(hv ∉ reqs[p] for hv in higher_vers))
|
||||
|
||||
# Determine if the update of `p` is prevented by it being fixed (e.g. it's dirty, or pinned...)
|
||||
isfixed = haskey(fixed, p)
|
||||
|
||||
msg = "Package $p was set to version $v, but a higher version $(vers[end]) exists.\n"
|
||||
if isfixed
|
||||
msg *= " The package is fixed. You can try using `Pkg.free(\"$p\")` to update it."
|
||||
elseif blocking_reqs
|
||||
msg *= " The update is prevented by explicit requirements constraints. Edit your REQUIRE file to change this."
|
||||
elseif !isempty(blocking_parents)
|
||||
msg *= string(" To install the latest version, you could try updating these packages as well: ", join(blocking_parents, ", ", " and "), ".")
|
||||
else
|
||||
msg *= " To install the latest version, you could try doing a full update with `Pkg.update()`."
|
||||
end
|
||||
info(msg)
|
||||
end
|
||||
end
|
||||
|
||||
const PackageState = Union{Void,VersionNumber}
|
||||
|
||||
function diff(have::Dict, want::Dict, avail::Dict, fixed::Dict)
|
||||
change = Vector{Tuple{String,Tuple{PackageState,PackageState}}}(0)
|
||||
remove = Vector{Tuple{String,Tuple{PackageState,PackageState}}}(0)
|
||||
|
||||
for pkg in collect(union(keys(have),keys(want)))
|
||||
h, w = haskey(have,pkg), haskey(want,pkg)
|
||||
if h && w
|
||||
if have[pkg] != want[pkg]
|
||||
push!(change, (pkg,(have[pkg], want[pkg])))
|
||||
end
|
||||
elseif h
|
||||
push!(remove, (pkg,(have[pkg],nothing)))
|
||||
elseif w
|
||||
push!(change, (pkg,(nothing,want[pkg])))
|
||||
end
|
||||
end
|
||||
append!(sort!(change), sort!(remove))
|
||||
end
|
||||
|
||||
function check_requirements(reqs::Requires, deps::Dict{String,Dict{VersionNumber,Available}}, fix::Dict)
|
||||
for (p,vs) in reqs
|
||||
if !any(vn->(vn in vs), keys(deps[p]))
|
||||
remaining_vs = VersionSet()
|
||||
err_msg = "fixed packages introduce conflicting requirements for $p: \n"
|
||||
available_list = sort!(collect(keys(deps[p])))
|
||||
for (p1,f1) in fix
|
||||
f1r = f1.requires
|
||||
haskey(f1r, p) || continue
|
||||
err_msg *= " $p1 requires versions $(f1r[p])"
|
||||
if !any([vn in f1r[p] for vn in available_list])
|
||||
err_msg *= " [none of the available versions can satisfy this requirement]"
|
||||
end
|
||||
err_msg *= "\n"
|
||||
remaining_vs = intersect(remaining_vs, f1r[p])
|
||||
end
|
||||
if isempty(remaining_vs)
|
||||
err_msg *= " the requirements are unsatisfiable because their intersection is empty"
|
||||
else
|
||||
err_msg *= " available versions are $(join(available_list, ", ", " and "))"
|
||||
end
|
||||
throw(PkgError(err_msg))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# If there are explicitly required packages, dicards all versions outside
|
||||
# the allowed range.
|
||||
# It also propagates requirements: when all allowed versions of a required package
|
||||
# require some other package, this creates a new implicit requirement.
|
||||
# The propagation is tracked so that in case a contradiction is detected the error
|
||||
# message allows to determine the cause.
|
||||
# This is a pre-pruning step, so it also creates some structures which are later used by pruning
|
||||
function filter_versions(reqs::Requires, deps::Dict{String,Dict{VersionNumber,Available}}, bktrc::ResolveBacktrace)
|
||||
allowed = Dict{String,Dict{VersionNumber,Bool}}()
|
||||
staged = copy(reqs)
|
||||
while !isempty(staged)
|
||||
staged_next = Requires()
|
||||
for (p,vs) in staged
|
||||
# Parse requirements and store allowed versions.
|
||||
depsp = deps[p]
|
||||
if !haskey(allowed, p)
|
||||
allowedp = Dict{VersionNumber,Bool}(vn=>true for vn in keys(depsp))
|
||||
allowed[p] = allowedp
|
||||
seen = false
|
||||
else
|
||||
allowedp = allowed[p]
|
||||
oldallowedp = copy(allowedp)
|
||||
seen = true
|
||||
end
|
||||
for vn in keys(depsp)
|
||||
allowedp[vn] &= vn ∈ vs
|
||||
end
|
||||
@assert !isempty(allowedp)
|
||||
if !any(values(allowedp))
|
||||
err_msg = "Unsatisfiable requirements detected for package $p:\n"
|
||||
err_msg *= string(bktrc[p])
|
||||
err_msg *= """The intersection of the requirements is $(bktrc[p].versionreq).
|
||||
None of the available versions can satisfy this requirement."""
|
||||
throw(PkgError(err_msg))
|
||||
end
|
||||
|
||||
# If we've seen this package already and nothing has changed since
|
||||
# the last time, we stop here.
|
||||
seen && allowedp == oldallowedp && continue
|
||||
|
||||
# Propagate requirements:
|
||||
# if all allowed versions of a required package require some other package,
|
||||
# then compute the union of the allowed versions for that other package, and
|
||||
# treat that as a new requirement.
|
||||
# Start by filtering out the non-allowed versions
|
||||
fdepsp = Dict{VersionNumber,Available}(vn=>depsp[vn] for vn in keys(depsp) if allowedp[vn])
|
||||
# Collect all required packages
|
||||
isreq = Dict{String,Bool}(rp=>true for a in values(fdepsp) for rp in keys(a.requires))
|
||||
# Compute whether a required package appears in all requirements
|
||||
for rp in keys(isreq)
|
||||
isreq[rp] = all(haskey(a.requires, rp) for a in values(fdepsp))
|
||||
end
|
||||
|
||||
# Create a list of candidates for new implicit requirements
|
||||
staged_new = Set{String}()
|
||||
for a in values(fdepsp), (rp,rvs) in a.requires
|
||||
# Skip packages that may not be required
|
||||
isreq[rp] || continue
|
||||
# Compute the union of the version sets
|
||||
if haskey(staged_next, rp)
|
||||
snvs = staged_next[rp]
|
||||
union!(snvs, rvs)
|
||||
else
|
||||
snvs = copy(rvs)
|
||||
staged_next[rp] = snvs
|
||||
end
|
||||
push!(staged_new, rp)
|
||||
end
|
||||
for rp in staged_new
|
||||
@assert isreq[rp]
|
||||
srvs = staged_next[rp]
|
||||
bktrcp = get!(bktrc, rp) do; ResolveBacktraceItem(); end
|
||||
push!(bktrcp, p=>bktrc[p], srvs)
|
||||
if isa(bktrcp.versionreq, VersionSet) && isempty(bktrcp.versionreq)
|
||||
err_msg = "Unsatisfiable requirements detected for package $rp:\n"
|
||||
err_msg *= string(bktrcp)
|
||||
err_msg *= "The intersection of the requirements is empty."
|
||||
throw(PkgError(err_msg))
|
||||
end
|
||||
end
|
||||
end
|
||||
staged = staged_next
|
||||
end
|
||||
|
||||
filtered_deps = Dict{String,Dict{VersionNumber,Available}}()
|
||||
for (p,depsp) in deps
|
||||
filtered_deps[p] = Dict{VersionNumber,Available}()
|
||||
allowedp = get(allowed, p) do; Dict{VersionNumber,Bool}() end
|
||||
fdepsp = filtered_deps[p]
|
||||
for (vn,a) in depsp
|
||||
get(allowedp, vn, true) || continue
|
||||
fdepsp[vn] = a
|
||||
end
|
||||
end
|
||||
|
||||
return filtered_deps, allowed
|
||||
end
|
||||
|
||||
# Reduce the number of versions by creating equivalence classes, and retaining
|
||||
# only the highest version for each equivalence class.
|
||||
# Two versions are equivalent if:
|
||||
# 1) They appear together as dependencies of another package (i.e. for each
|
||||
# dependency relation, they are both required or both not required)
|
||||
# 2) They have the same dependencies
|
||||
# Preliminarily calls filter_versions.
|
||||
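# Returns the pruned dependency graph together with eq_classes, which maps each
# retained ("top") version of a package to the full list of versions it stands for.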
function prune_versions(reqs::Requires, deps::Dict{String,Dict{VersionNumber,Available}}, bktrc::ResolveBacktrace)
|
||||
filtered_deps, allowed = filter_versions(reqs, deps, bktrc)
|
||||
if !isempty(reqs)
|
||||
filtered_deps = dependencies_subset(filtered_deps, Set{String}(keys(reqs)))
|
||||
end
|
||||
|
||||
# To each version in each package, we associate a BitVector.
|
||||
# It is going to hold a pattern such that all versions with
|
||||
# the same pattern are equivalent.
|
||||
vmask = Dict{String,Dict{VersionNumber, BitVector}}()
|
||||
|
||||
# For each package, we examine the dependencies of its versions
|
||||
# and put together those which are equal.
|
||||
# While we're at it, we also collect all dependencies into alldeps
|
||||
alldeps = Dict{String,Set{VersionSet}}()
|
||||
for (p,fdepsp) in filtered_deps
|
||||
# Extract unique dependency lists (aka classes), thereby
|
||||
# assigning an index to each class.
|
||||
uniqdepssets = unique(a.requires for a in values(fdepsp))
|
||||
|
||||
# Store all dependencies seen so far for later use
|
||||
for r in uniqdepssets, (rp,rvs) in r
|
||||
get!(alldeps, rp) do; Set{VersionSet}() end
|
||||
push!(alldeps[rp], rvs)
|
||||
end
|
||||
|
||||
# If the package has just one version, it's uninteresting
|
||||
length(deps[p]) == 1 && continue
|
||||
|
||||
# Grow the pattern by the number of classes
|
||||
luds = length(uniqdepssets)
|
||||
@assert !haskey(vmask, p)
|
||||
vmask[p] = Dict{VersionNumber,BitVector}()
|
||||
vmaskp = vmask[p]
|
||||
for vn in keys(fdepsp)
|
||||
vmaskp[vn] = falses(luds)
|
||||
end
|
||||
for (vn,a) in fdepsp
|
||||
vmind = findfirst(uniqdepssets, a.requires)
|
||||
@assert vmind > 0
|
||||
vm = vmaskp[vn]
|
||||
vm[vmind] = true
|
||||
end
|
||||
end
|
||||
|
||||
# Produce dependency patterns.
|
||||
for (p,vss) in alldeps, vs in vss
|
||||
# packages with just one version, or dependencies
|
||||
# which do not distinguish between versions, are not
|
||||
# interesting
|
||||
(length(deps[p]) == 1 || vs == VersionSet()) && continue
|
||||
|
||||
# Store the dependency info in the patterns
|
||||
@assert haskey(vmask, p)
|
||||
for (vn,vm) in vmask[p]
|
||||
push!(vm, vn in vs)
|
||||
end
|
||||
end
|
||||
|
||||
# At this point, the vmask patterns are computed. We divide them into
|
||||
# classes so that we can keep just one version for each class.
|
||||
pruned_vers = Dict{String,Vector{VersionNumber}}()
|
||||
eq_classes = Dict{String,Dict{VersionNumber,Vector{VersionNumber}}}()
|
||||
for (p, vmaskp) in vmask
|
||||
vmask0_uniq = unique(values(vmaskp))
|
||||
nc = length(vmask0_uniq)
|
||||
classes = [VersionNumber[] for c0 = 1:nc]
|
||||
for (vn,vm) in vmaskp
|
||||
c0 = findfirst(vmask0_uniq, vm)
|
||||
push!(classes[c0], vn)
|
||||
end
|
||||
map(sort!, classes)
|
||||
|
||||
# For each nonempty class, we store only the highest version.
|
||||
pruned_vers[p] = VersionNumber[]
|
||||
prunedp = pruned_vers[p]
|
||||
eq_classes[p] = Dict{VersionNumber,Vector{VersionNumber}}()
|
||||
eqclassp = eq_classes[p]
|
||||
for cl in classes
|
||||
if !isempty(cl)
|
||||
vtop = maximum(cl)
|
||||
push!(prunedp, vtop)
|
||||
@assert !haskey(eqclassp, vtop)
|
||||
eqclassp[vtop] = cl
|
||||
end
|
||||
end
|
||||
sort!(prunedp)
|
||||
end
|
||||
# Put non-allowed versions into eq_classes
|
||||
for (p, allowedp) in allowed
|
||||
haskey(eq_classes, p) || continue
|
||||
eqclassp = eq_classes[p]
|
||||
for (vn, a) in allowedp
|
||||
a && continue
|
||||
eqclassp[vn] = [vn]
|
||||
end
|
||||
end
|
||||
# Put all remaining packages into eq_classes
|
||||
for (p, depsp) in deps
|
||||
haskey(eq_classes, p) && continue
|
||||
eq_classes[p] = Dict{VersionNumber,Vector{VersionNumber}}()
|
||||
eqclassp = eq_classes[p]
|
||||
for vn in keys(depsp)
|
||||
eqclassp[vn] = [vn]
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# Recompute deps. We could simplify them, but it's not worth it
|
||||
new_deps = Dict{String,Dict{VersionNumber,Available}}()
|
||||
|
||||
for (p,depsp) in filtered_deps
|
||||
@assert !haskey(new_deps, p)
|
||||
if !haskey(pruned_vers, p)
|
||||
new_deps[p] = depsp
|
||||
continue
|
||||
end
|
||||
new_deps[p] = Dict{VersionNumber,Available}()
|
||||
pruned_versp = pruned_vers[p]
|
||||
for (vn,a) in depsp
|
||||
vn ∈ pruned_versp || continue
|
||||
new_deps[p][vn] = a
|
||||
end
|
||||
end
|
||||
|
||||
#println("pruning stats:")
|
||||
#numvers = 0
|
||||
#numdeps = 0
|
||||
#for (p,d) in deps, (vn,a) in d
|
||||
# numvers += 1
|
||||
# for r in a.requires
|
||||
# numdeps += 1
|
||||
# end
|
||||
#end
|
||||
#numnewvers = 0
|
||||
#numnewdeps = 0
|
||||
#for (p,d) in new_deps, (vn,a) in d
|
||||
# numnewvers += 1
|
||||
# for r in a.requires
|
||||
# numnewdeps += 1
|
||||
# end
|
||||
#end
|
||||
#println(" before: vers=$numvers deps=$numdeps")
|
||||
#println(" after: vers=$numnewvers deps=$numnewdeps")
|
||||
#println()
|
||||
|
||||
return new_deps, eq_classes
|
||||
end
|
||||
prune_versions(deps::Dict{String,Dict{VersionNumber,Available}}) =
|
||||
prune_versions(Dict{String,VersionSet}(), deps, ResolveBacktrace())
|
||||
prune_versions(deps::Dict{String,Dict{VersionNumber,Available}}, bktrc::ResolveBacktrace) =
|
||||
prune_versions(Dict{String,VersionSet}(), deps, bktrc)
|
||||
|
||||
# Build a graph restricted to a subset of the packages
|
||||
function subdeps(deps::Dict{String,Dict{VersionNumber,Available}}, pkgs::Set{String})
|
||||
sub_deps = Dict{String,Dict{VersionNumber,Available}}()
|
||||
for p in pkgs
|
||||
haskey(sub_deps, p) || (sub_deps[p] = Dict{VersionNumber,Available}())
|
||||
sub_depsp = sub_deps[p]
|
||||
for (vn,a) in deps[p]
|
||||
sub_depsp[vn] = a
|
||||
end
|
||||
end
|
||||
|
||||
return sub_deps
|
||||
end
|
||||
|
||||
# Build a subgraph including only the (direct and indirect) dependencies
|
||||
# of a given package set
|
||||
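# (the "julia" pseudo-package is never traversed, since it is treated separately
# as a fixed package)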
function dependencies_subset(deps::Dict{String,Dict{VersionNumber,Available}}, pkgs::Set{String})
|
||||
staged::Set{String} = filter(p->p in keys(deps), pkgs)
|
||||
allpkgs = copy(staged)
|
||||
while !isempty(staged)
|
||||
staged_next = Set{String}()
|
||||
for p in staged, a in values(get(deps, p, Dict{VersionNumber,Available}())), rp in keys(a.requires)
|
||||
rp ∉ allpkgs && rp ≠ "julia" && push!(staged_next, rp)
|
||||
end
|
||||
union!(allpkgs, staged_next)
|
||||
staged = staged_next
|
||||
end
|
||||
|
||||
return subdeps(deps, allpkgs)
|
||||
end
|
||||
|
||||
# Build a subgraph including only the (direct and indirect) dependencies and dependants
|
||||
# of a given package set
|
||||
function undirected_dependencies_subset(deps::Dict{String,Dict{VersionNumber,Available}}, pkgs::Set{String})
|
||||
graph = Dict{String, Set{String}}()
|
||||
|
||||
for (p,d) in deps
|
||||
haskey(graph, p) || (graph[p] = Set{String}())
|
||||
for a in values(d), rp in keys(a.requires)
|
||||
push!(graph[p], rp)
|
||||
haskey(graph, rp) || (graph[rp] = Set{String}())
|
||||
push!(graph[rp], p)
|
||||
end
|
||||
end
|
||||
|
||||
staged = pkgs
|
||||
allpkgs = copy(pkgs)
|
||||
while !isempty(staged)
|
||||
staged_next = Set{String}()
|
||||
for p in staged, rp in graph[p]
|
||||
rp ∉ allpkgs && push!(staged_next, rp)
|
||||
end
|
||||
union!(allpkgs, staged_next)
|
||||
staged = staged_next
|
||||
end
|
||||
|
||||
return subdeps(deps, allpkgs)
|
||||
end
|
||||
|
||||
function prune_dependencies(reqs::Requires,
|
||||
deps::Dict{String,Dict{VersionNumber,Available}},
|
||||
bktrc::ResolveBacktrace = init_resolve_backtrace(reqs))
|
||||
deps, _ = prune_versions(reqs, deps, bktrc)
|
||||
return deps
|
||||
end
|
||||
|
||||
end # module
|
||||
251
julia-0.6.3/share/julia/base/pkg/read.jl
Normal file
@@ -0,0 +1,251 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module Read
|
||||
|
||||
import ...LibGit2, ..Cache, ..Reqs, ...Pkg.PkgError, ..Dir
|
||||
using ..Types
|
||||
|
||||
readstrip(path...) = strip(readstring(joinpath(path...)))
|
||||
|
||||
url(pkg::AbstractString) = readstrip(Dir.path("METADATA"), pkg, "url")
|
||||
sha1(pkg::AbstractString, ver::VersionNumber) =
|
||||
readstrip(Dir.path("METADATA"), pkg, "versions", string(ver), "sha1")
|
||||
|
||||
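# Scan METADATA and return a Dict mapping each registered package name to a
# Dict of its published versions (VersionNumber => Available).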
function available(names=readdir("METADATA"))
|
||||
pkgs = Dict{String,Dict{VersionNumber,Available}}()
|
||||
for pkg in names
|
||||
isfile("METADATA", pkg, "url") || continue
|
||||
versdir = joinpath("METADATA", pkg, "versions")
|
||||
isdir(versdir) || continue
|
||||
for ver in readdir(versdir)
|
||||
ismatch(Base.VERSION_REGEX, ver) || continue
|
||||
isfile(versdir, ver, "sha1") || continue
|
||||
haskey(pkgs,pkg) || (pkgs[pkg] = Dict{VersionNumber,Available}())
|
||||
pkgs[pkg][convert(VersionNumber,ver)] = Available(
|
||||
readchomp(joinpath(versdir,ver,"sha1")),
|
||||
Reqs.parse(joinpath(versdir,ver,"requires"))
|
||||
)
|
||||
end
|
||||
end
|
||||
return pkgs
|
||||
end
|
||||
available(pkg::AbstractString) = get(available([pkg]),pkg,Dict{VersionNumber,Available}())
|
||||
|
||||
function latest(names=readdir("METADATA"))
|
||||
pkgs = Dict{String,Available}()
|
||||
for pkg in names
|
||||
isfile("METADATA", pkg, "url") || continue
|
||||
versdir = joinpath("METADATA", pkg, "versions")
|
||||
isdir(versdir) || continue
|
||||
pkgversions = VersionNumber[]
|
||||
for ver in readdir(versdir)
|
||||
ismatch(Base.VERSION_REGEX, ver) || continue
|
||||
isfile(versdir, ver, "sha1") || continue
|
||||
push!(pkgversions, convert(VersionNumber,ver))
|
||||
end
|
||||
isempty(pkgversions) && continue
|
||||
ver = string(maximum(pkgversions))
|
||||
pkgs[pkg] = Available(
|
||||
readchomp(joinpath(versdir,ver,"sha1")),
|
||||
Reqs.parse(joinpath(versdir,ver,"requires"))
|
||||
)
|
||||
end
|
||||
return pkgs
|
||||
end
|
||||
|
||||
isinstalled(pkg::AbstractString) =
|
||||
pkg != "METADATA" && pkg != "REQUIRE" && pkg[1] != '.' && isdir(pkg)
|
||||
|
||||
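# A package counts as "fixed" (not updatable by the resolver) if it is unregistered,
# not a git checkout, dirty, attached to a branch (e.g. pinned), or checked out at a
# commit that is not an ancestor of any registered version.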
function isfixed(pkg::AbstractString, prepo::LibGit2.GitRepo, avail::Dict=available(pkg))
|
||||
isinstalled(pkg) || throw(PkgError("$pkg is not an installed package."))
|
||||
isfile("METADATA", pkg, "url") || return true
|
||||
ispath(pkg, ".git") || return true
|
||||
|
||||
LibGit2.isdirty(prepo) && return true
|
||||
LibGit2.isattached(prepo) && return true
|
||||
LibGit2.need_update(prepo)
|
||||
if isnull(find("REQUIRE", LibGit2.GitIndex(prepo)))
|
||||
isfile(pkg,"REQUIRE") && return true
|
||||
end
|
||||
head = string(LibGit2.head_oid(prepo))
|
||||
for (ver,info) in avail
|
||||
head == info.sha1 && return false
|
||||
end
|
||||
|
||||
cache = Cache.path(pkg)
|
||||
cache_has_head = if isdir(cache)
|
||||
crepo = LibGit2.GitRepo(cache)
|
||||
LibGit2.iscommit(head, crepo)
|
||||
else
|
||||
false
|
||||
end
|
||||
res = true
|
||||
try
|
||||
for (ver,info) in avail
|
||||
if cache_has_head && LibGit2.iscommit(info.sha1, crepo)
|
||||
if LibGit2.is_ancestor_of(head, info.sha1, crepo)
|
||||
res = false
|
||||
break
|
||||
end
|
||||
elseif LibGit2.iscommit(info.sha1, prepo)
|
||||
if LibGit2.is_ancestor_of(head, info.sha1, prepo)
|
||||
res = false
|
||||
break
|
||||
end
|
||||
else
|
||||
Base.warn_once("unknown $pkg commit $(info.sha1[1:8]), metadata may be ahead of package cache")
|
||||
end
|
||||
end
|
||||
finally
|
||||
cache_has_head && LibGit2.close(crepo)
|
||||
end
|
||||
return res
|
||||
end
|
||||
|
||||
function ispinned(pkg::AbstractString)
|
||||
ispath(pkg,".git") || return false
|
||||
LibGit2.with(LibGit2.GitRepo, pkg) do repo
|
||||
return ispinned(repo)
|
||||
end
|
||||
end
|
||||
|
||||
function ispinned(prepo::LibGit2.GitRepo)
|
||||
LibGit2.isattached(prepo) || return false
|
||||
br = LibGit2.branch(prepo)
|
||||
# note: regex is based on the naming scheme used in Entry.pin()
|
||||
return ismatch(r"^pinned\.[0-9a-f]{8}\.tmp$", br)
|
||||
end
|
||||
|
||||
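# Determine the version of an installed package from its git HEAD: an exact match
# against a registered sha1 wins; otherwise a version carrying prerelease/build
# annotations is synthesized from the nearest registered descendants/ancestors.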
function installed_version(pkg::AbstractString, prepo::LibGit2.GitRepo, avail::Dict=available(pkg))
|
||||
ispath(pkg,".git") || return typemin(VersionNumber)
|
||||
|
||||
# get package repo head hash
|
||||
local head
|
||||
try
|
||||
head = string(LibGit2.head_oid(prepo))
|
||||
catch ex
|
||||
# refs/heads/master does not exist
|
||||
if isa(ex,LibGit2.GitError) &&
|
||||
ex.code == LibGit2.Error.EUNBORNBRANCH
|
||||
head = ""
|
||||
else
|
||||
rethrow(ex)
|
||||
end
|
||||
end
|
||||
isempty(head) && return typemin(VersionNumber)
|
||||
|
||||
vers = collect(keys(filter((ver,info)->info.sha1==head, avail)))
|
||||
!isempty(vers) && return maximum(vers)
|
||||
|
||||
cache = Cache.path(pkg)
|
||||
cache_has_head = if isdir(cache)
|
||||
crepo = LibGit2.GitRepo(cache)
|
||||
LibGit2.iscommit(head, crepo)
|
||||
else
|
||||
false
|
||||
end
|
||||
ancestors = VersionNumber[]
|
||||
descendants = VersionNumber[]
|
||||
try
|
||||
for (ver,info) in avail
|
||||
sha1 = info.sha1
|
||||
base = if cache_has_head && LibGit2.iscommit(sha1, crepo)
|
||||
LibGit2.merge_base(crepo, head, sha1)
|
||||
elseif LibGit2.iscommit(sha1, prepo)
|
||||
LibGit2.merge_base(prepo, head, sha1)
|
||||
else
|
||||
Base.warn_once("unknown $pkg commit $(sha1[1:8]), metadata may be ahead of package cache")
|
||||
continue
|
||||
end
|
||||
string(base) == sha1 && push!(ancestors,ver)
|
||||
string(base) == head && push!(descendants,ver)
|
||||
end
|
||||
finally
|
||||
cache_has_head && LibGit2.close(crepo)
|
||||
end
|
||||
both = sort!(intersect(ancestors,descendants))
|
||||
isempty(both) || warn("$pkg: some versions are both ancestors and descendants of head: $both")
|
||||
if !isempty(descendants)
|
||||
v = minimum(descendants)
|
||||
return VersionNumber(v.major, v.minor, v.patch, ("",), ())
|
||||
elseif !isempty(ancestors)
|
||||
v = maximum(ancestors)
|
||||
return VersionNumber(v.major, v.minor, v.patch, (), ("",))
|
||||
else
|
||||
return typemin(VersionNumber)
|
||||
end
|
||||
end
|
||||
|
||||
function requires_path(pkg::AbstractString, avail::Dict=available(pkg))
|
||||
pkgreq = joinpath(pkg,"REQUIRE")
|
||||
ispath(pkg,".git") || return pkgreq
|
||||
repo = LibGit2.GitRepo(pkg)
|
||||
head = LibGit2.with(LibGit2.GitRepo, pkg) do repo
|
||||
LibGit2.isdirty(repo, "REQUIRE") && return pkgreq
|
||||
LibGit2.need_update(repo)
|
||||
if isnull(find("REQUIRE", LibGit2.GitIndex(repo)))
|
||||
isfile(pkgreq) && return pkgreq
|
||||
end
|
||||
string(LibGit2.head_oid(repo))
|
||||
end
|
||||
for (ver,info) in avail
|
||||
if head == info.sha1
|
||||
return joinpath("METADATA", pkg, "versions", string(ver), "requires")
|
||||
end
|
||||
end
|
||||
return pkgreq
|
||||
end
|
||||
|
||||
requires_list(pkg::AbstractString, avail::Dict=available(pkg)) =
|
||||
collect(keys(Reqs.parse(requires_path(pkg,avail))))
|
||||
|
||||
requires_dict(pkg::AbstractString, avail::Dict=available(pkg)) =
|
||||
Reqs.parse(requires_path(pkg,avail))
|
||||
|
||||
function installed(avail::Dict=available())
|
||||
pkgs = Dict{String,Tuple{VersionNumber,Bool}}()
|
||||
for pkg in readdir()
|
||||
isinstalled(pkg) || continue
|
||||
ap = get(avail,pkg,Dict{VersionNumber,Available}())
|
||||
if ispath(pkg,".git")
|
||||
LibGit2.with(LibGit2.GitRepo, pkg) do repo
|
||||
ver = installed_version(pkg, repo, ap)
|
||||
fixed = isfixed(pkg, repo, ap)
|
||||
pkgs[pkg] = (ver, fixed)
|
||||
end
|
||||
else
|
||||
pkgs[pkg] = (typemin(VersionNumber), true)
|
||||
end
|
||||
end
|
||||
return pkgs
|
||||
end
|
||||
|
||||
function fixed(avail::Dict=available(), inst::Dict=installed(avail), dont_update::Set{String}=Set{String}(),
|
||||
julia_version::VersionNumber=VERSION)
|
||||
pkgs = Dict{String,Fixed}()
|
||||
for (pkg,(ver,fix)) in inst
|
||||
(fix || pkg in dont_update) || continue
|
||||
ap = get(avail,pkg,Dict{VersionNumber,Available}())
|
||||
pkgs[pkg] = Fixed(ver,requires_dict(pkg,ap))
|
||||
end
|
||||
pkgs["julia"] = Fixed(julia_version)
|
||||
return pkgs
|
||||
end
|
||||
|
||||
function free(inst::Dict=installed(), dont_update::Set{String}=Set{String}())
|
||||
pkgs = Dict{String,VersionNumber}()
|
||||
for (pkg,(ver,fix)) in inst
|
||||
(fix || pkg in dont_update) && continue
|
||||
pkgs[pkg] = ver
|
||||
end
|
||||
return pkgs
|
||||
end
|
||||
|
||||
function issue_url(pkg::AbstractString)
|
||||
ispath(pkg,".git") || return ""
|
||||
m = match(LibGit2.GITHUB_REGEX, url(pkg))
|
||||
m === nothing && return ""
|
||||
return "https://github.com/" * m.captures[1] * "/issues"
|
||||
end
|
||||
|
||||
end # module
|
||||
145
julia-0.6.3/share/julia/base/pkg/reqs.jl
Normal file
@@ -0,0 +1,145 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module Reqs
|
||||
|
||||
import Base: ==
|
||||
import ...Pkg.PkgError
|
||||
using ..Types
|
||||
|
||||
# representing lines of REQUIRE files
|
||||
|
||||
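# A requirement line consists of optional @system flags, a package name, and an
# ascending list of version bounds, e.g. (hypothetical): "@unix Foo 0.2.1 0.4",
# meaning "on Unix, Foo at least 0.2.1 and below 0.4".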
abstract type Line end
|
||||
struct Comment <: Line
|
||||
content::AbstractString
|
||||
end
|
||||
struct Requirement <: Line
|
||||
content::AbstractString
|
||||
package::AbstractString
|
||||
versions::VersionSet
|
||||
system::Vector{AbstractString}
|
||||
|
||||
function Requirement(content::AbstractString)
|
||||
fields = split(replace(content, r"#.*$", ""))
|
||||
system = AbstractString[]
|
||||
while !isempty(fields) && fields[1][1] == '@'
|
||||
push!(system,shift!(fields)[2:end])
|
||||
end
|
||||
isempty(fields) && throw(PkgError("invalid requires entry: $content"))
|
||||
package = shift!(fields)
|
||||
all(field->ismatch(Base.VERSION_REGEX, field), fields) ||
|
||||
throw(PkgError("invalid requires entry for $package: $content"))
|
||||
versions = VersionNumber[fields...]
|
||||
issorted(versions) || throw(PkgError("invalid requires entry for $package: $content"))
|
||||
new(content, package, VersionSet(versions), system)
|
||||
end
|
||||
function Requirement(package::AbstractString, versions::VersionSet, system::Vector{AbstractString}=AbstractString[])
|
||||
content = ""
|
||||
for os in system
|
||||
content *= "@$os "
|
||||
end
|
||||
content *= package
|
||||
if versions != VersionSet()
|
||||
for ival in versions.intervals
|
||||
(content *= " $(ival.lower)")
|
||||
ival.upper < typemax(VersionNumber) &&
|
||||
(content *= " $(ival.upper)")
|
||||
end
|
||||
end
|
||||
new(content, package, versions, system)
|
||||
end
|
||||
end
|
||||
|
||||
==(a::Line, b::Line) = a.content == b.content
|
||||
hash(s::Line, h::UInt) = hash(s.content, h + (0x3f5a631add21cb1a % UInt))
|
||||
|
||||
# general machinery for parsing REQUIRE files
|
||||
|
||||
function read(readable::Vector{<:AbstractString})
|
||||
lines = Line[]
|
||||
for line in readable
|
||||
line = chomp(line)
|
||||
push!(lines, ismatch(r"^\s*(?:#|$)", line) ? Comment(line) : Requirement(line))
|
||||
end
|
||||
return lines
|
||||
end
|
||||
|
||||
function read(readable::Union{IO,Base.AbstractCmd})
|
||||
lines = Line[]
|
||||
for line in eachline(readable)
|
||||
push!(lines, ismatch(r"^\s*(?:#|$)", line) ? Comment(line) : Requirement(line))
|
||||
end
|
||||
return lines
|
||||
end
|
||||
read(file::AbstractString) = isfile(file) ? open(read,file) : Line[]
|
||||
|
||||
function write(io::IO, lines::Vector{Line})
|
||||
for line in lines
|
||||
println(io, line.content)
|
||||
end
|
||||
end
|
||||
function write(io::IO, reqs::Requires)
|
||||
for pkg in sort!(collect(keys(reqs)), by=lowercase)
|
||||
println(io, Requirement(pkg, reqs[pkg]).content)
|
||||
end
|
||||
end
|
||||
write(file::AbstractString, r::Union{Vector{Line},Requires}) = open(io->write(io,r), file, "w")
|
||||
|
||||
function parse(lines::Vector{Line})
|
||||
reqs = Requires()
|
||||
for line in lines
|
||||
if isa(line,Requirement)
|
||||
if !isempty(line.system)
|
||||
applies = false
|
||||
if is_windows(); applies |= ("windows" in line.system); end
|
||||
if is_unix(); applies |= ("unix" in line.system); end
|
||||
if is_apple(); applies |= ("osx" in line.system); end
|
||||
if is_linux(); applies |= ("linux" in line.system); end
|
||||
if is_bsd(); applies |= ("bsd" in line.system); end
|
||||
if is_windows(); applies &= !("!windows" in line.system); end
|
||||
if is_unix(); applies &= !("!unix" in line.system); end
|
||||
if is_apple(); applies &= !("!osx" in line.system); end
|
||||
if is_linux(); applies &= !("!linux" in line.system); end
|
||||
if is_bsd(); applies &= !("!bsd" in line.system); end
|
||||
applies || continue
|
||||
end
|
||||
reqs[line.package] = haskey(reqs, line.package) ?
|
||||
intersect(reqs[line.package], line.versions) : line.versions
|
||||
end
|
||||
end
|
||||
return reqs
|
||||
end
|
||||
parse(x) = parse(read(x))
|
||||
|
||||
function dependents(packagename::AbstractString)
|
||||
pkgs = AbstractString[]
|
||||
cd(Pkg.dir()) do
|
||||
for (pkg,latest) in Pkg.Read.latest()
|
||||
if haskey(latest.requires, packagename)
|
||||
push!(pkgs, pkg)
|
||||
end
|
||||
end
|
||||
end
|
||||
pkgs
|
||||
end
|
||||
|
||||
# add & rm – edit the content a requires file
|
||||
|
||||
function add(lines::Vector{Line}, pkg::AbstractString, versions::VersionSet=VersionSet())
|
||||
v = VersionSet[]
|
||||
filtered = filter(lines) do line
|
||||
if !isa(line,Comment) && line.package == pkg && isempty(line.system)
|
||||
push!(v, line.versions)
|
||||
return false
|
||||
end
|
||||
return true
|
||||
end
|
||||
length(v) == 1 && v[1] == intersect(v[1],versions) && return copy(lines)
|
||||
versions = reduce(intersect, versions, v)
|
||||
push!(filtered, Requirement(pkg, versions))
|
||||
end
|
||||
|
||||
rm(lines::Vector{Line}, pkg::AbstractString) = filter(lines) do line
|
||||
isa(line,Comment) || line.package != pkg
|
||||
end
|
||||
|
||||
end # module
|
||||
164
julia-0.6.3/share/julia/base/pkg/resolve.jl
Normal file
@@ -0,0 +1,164 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module Resolve
|
||||
|
||||
include(joinpath("resolve", "versionweight.jl"))
|
||||
include(joinpath("resolve", "interface.jl"))
|
||||
include(joinpath("resolve", "maxsum.jl"))
|
||||
|
||||
using ..Types, ..Query, .PkgToMaxSumInterface, .MaxSum
|
||||
import ...Pkg.PkgError
|
||||
|
||||
export resolve, sanity_check
|
||||
|
||||
# Use the max-sum algorithm to resolve package dependencies
|
||||
function resolve(reqs::Requires, deps::Dict{String,Dict{VersionNumber,Available}})
|
||||
# init interface structures
|
||||
interface = Interface(reqs, deps)
|
||||
|
||||
# attempt trivial solution first
|
||||
ok, sol = greedysolver(interface)
|
||||
if !ok
|
||||
# trivial solution failed, use maxsum solver
|
||||
graph = Graph(interface)
|
||||
msgs = Messages(interface, graph)
|
||||
|
||||
try
|
||||
sol = maxsum(graph, msgs)
|
||||
catch err
|
||||
isa(err, UnsatError) || rethrow(err)
|
||||
p = interface.pkgs[err.info]
|
||||
# TODO: build tools to analyze the problem, and suggest using them here.
|
||||
msg =
|
||||
"""
|
||||
resolve is unable to satisfy package requirements.
|
||||
The problem was detected when trying to find a feasible version
|
||||
for package $p.
|
||||
However, this only means that package $p is involved in an
|
||||
unsatisfiable or difficult dependency relation, and the root of
|
||||
the problem may be elsewhere.
|
||||
"""
|
||||
if msgs.num_nondecimated != graph.np
|
||||
msg *= """
|
||||
(you may try increasing the value of the JULIA_PKGRESOLVE_ACCURACY
|
||||
environment variable)
|
||||
"""
|
||||
end
|
||||
## info("ERROR MESSAGE:\n" * msg)
|
||||
throw(PkgError(msg))
|
||||
end
|
||||
|
||||
# verify solution (debug code) and enforce its optimality
|
||||
@assert verify_solution(sol, interface)
|
||||
enforce_optimality!(sol, interface)
|
||||
@assert verify_solution(sol, interface)
|
||||
end
|
||||
|
||||
# return the solution as a Dict mapping package_name => version number
|
||||
return compute_output_dict(sol, interface)
|
||||
end
|
||||
|
||||
# Scan dependencies for (explicit or implicit) contradictions
|
||||
function sanity_check(deps::Dict{String,Dict{VersionNumber,Available}},
|
||||
pkgs::Set{String} = Set{String}())
|
||||
isempty(pkgs) || (deps = Query.undirected_dependencies_subset(deps, pkgs))
|
||||
|
||||
deps, eq_classes = Query.prune_versions(deps)
|
||||
|
||||
ndeps = Dict{String,Dict{VersionNumber,Int}}()
|
||||
|
||||
for (p,depsp) in deps
|
||||
ndeps[p] = ndepsp = Dict{VersionNumber,Int}()
|
||||
for (vn,a) in depsp
|
||||
ndepsp[vn] = length(a.requires)
|
||||
end
|
||||
end
|
||||
|
||||
vers = [(p,vn) for (p,d) in deps for vn in keys(d)]
|
||||
sort!(vers, by=pvn->(-ndeps[pvn[1]][pvn[2]]))
|
||||
|
||||
nv = length(vers)
|
||||
|
||||
svdict = Dict{Tuple{String,VersionNumber},Int}(vers[i][1:2]=>i for i = 1:nv)
|
||||
|
||||
checked = falses(nv)
|
||||
|
||||
problematic = Vector{Tuple{String,VersionNumber,String}}(0)
|
||||
|
||||
i = 1
|
||||
for (p,vn) in vers
|
||||
ndeps[p][vn] == 0 && break
|
||||
checked[i] && (i += 1; continue)
|
||||
|
||||
fixed = Dict{String,Fixed}(p=>Fixed(vn, deps[p][vn].requires), "julia"=>Fixed(VERSION))
|
||||
sub_reqs = Dict{String,VersionSet}()
|
||||
bktrc = Query.init_resolve_backtrace(sub_reqs, fixed)
|
||||
Query.propagate_fixed!(sub_reqs, bktrc, fixed)
|
||||
sub_deps = Query.dependencies_subset(deps, Set{String}([p]))
|
||||
sub_deps, conflicts = Query.dependencies(sub_deps, fixed)
|
||||
|
||||
try
|
||||
for pkg in keys(sub_reqs)
|
||||
if !haskey(sub_deps, pkg)
|
||||
if "julia" in conflicts[pkg]
|
||||
throw(PkgError("$pkg can't be installed because it has no versions that support $VERSION " *
|
||||
"of julia. You may need to update METADATA by running `Pkg.update()`"))
|
||||
else
|
||||
sconflicts = join(conflicts[pkg], ", ", " and ")
|
||||
throw(PkgError("$pkg's requirements can't be satisfied because " *
|
||||
"of the following fixed packages: $sconflicts"))
|
||||
end
|
||||
end
|
||||
end
|
||||
Query.check_requirements(sub_reqs, sub_deps, fixed)
|
||||
sub_deps = Query.prune_dependencies(sub_reqs, sub_deps, bktrc)
|
||||
catch err
|
||||
isa(err, PkgError) || rethrow(err)
|
||||
## info("ERROR MESSAGE:\n" * err.msg)
|
||||
for vneq in eq_classes[p][vn]
|
||||
push!(problematic, (p, vneq, ""))
|
||||
end
|
||||
i += 1
|
||||
continue
|
||||
end
|
||||
interface = Interface(sub_reqs, sub_deps)
|
||||
|
||||
red_pkgs = interface.pkgs
|
||||
red_np = interface.np
|
||||
red_spp = interface.spp
|
||||
red_pvers = interface.pvers
|
||||
|
||||
ok, sol = greedysolver(interface)
|
||||
|
||||
if !ok
|
||||
try
|
||||
graph = Graph(interface)
|
||||
msgs = Messages(interface, graph)
|
||||
sol = maxsum(graph, msgs)
|
||||
ok = verify_solution(sol, interface)
|
||||
@assert ok
|
||||
catch err
|
||||
isa(err, UnsatError) || rethrow(err)
|
||||
pp = red_pkgs[err.info]
|
||||
for vneq in eq_classes[p][vn]
|
||||
push!(problematic, (p, vneq, pp))
|
||||
end
|
||||
end
|
||||
end
|
||||
if ok
|
||||
for p0 = 1:red_np
|
||||
s0 = sol[p0]
|
||||
if s0 != red_spp[p0]
|
||||
j = svdict[(red_pkgs[p0], red_pvers[p0][s0])]
|
||||
checked[j] = true
|
||||
end
|
||||
end
|
||||
checked[i] = true
|
||||
end
|
||||
i += 1
|
||||
end
|
||||
|
||||
return sort!(problematic)
|
||||
end
|
||||
|
||||
end # module
|
||||
129
julia-0.6.3/share/julia/base/pkg/resolve/fieldvalue.jl
Normal file
@@ -0,0 +1,129 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module FieldValues
|
||||
|
||||
using ...VersionWeights
|
||||
importall .....Base.Operators
|
||||
|
||||
export FieldValue, Field, validmax, secondmax
|
||||
|
||||
# FieldValue is a hierarchical numeric type with 6 levels.
|
||||
# When summing two FieldValues, the levels are summed independently.
|
||||
# When comparing them, lower levels take precedence.
|
||||
# The levels are used as such:
|
||||
# l0 : for hard constraints (dependencies and requirements)
|
||||
# l1 : for favoring higher versions of the explicitly required
|
||||
# packages
|
||||
# l2 : for favoring higher versions of all other packages
|
||||
# l3 : for favoring uninstallation of non-needed packages
|
||||
# l4 : for favoring dependants over dependencies
|
||||
# l5 : for symmetry-breaking random noise
|
||||
#
|
||||
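# For instance, any value with l0 < 0 sorts below every value with l0 >= 0, no
# matter what the higher levels contain; this is how hard-constraint violations
# dominate the comparison.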
struct FieldValue
|
||||
l0::Int
|
||||
l1::VersionWeight
|
||||
l2::VersionWeight
|
||||
l3::Int
|
||||
l4::Int
|
||||
l5::Int128
|
||||
end
|
||||
FieldValue(l0::Integer, l1::VersionWeight, l2::VersionWeight, l3::Integer, l4::Integer) = FieldValue(l0, l1, l2, l3, l4, Int128(0))
|
||||
FieldValue(l0::Integer, l1::VersionWeight, l2::VersionWeight, l3::Integer) = FieldValue(l0, l1, l2, l3, 0)
|
||||
FieldValue(l0::Integer, l1::VersionWeight, l2::VersionWeight) = FieldValue(l0, l1, l2, 0)
|
||||
FieldValue(l0::Integer, l1::VersionWeight) = FieldValue(l0, l1, zero(VersionWeight))
|
||||
FieldValue(l0::Integer) = FieldValue(l0, zero(VersionWeight))
|
||||
FieldValue() = FieldValue(0)
|
||||
|
||||
# This isn't nice, but it's for debugging only anyway
|
||||
function Base.show(io::IO, a::FieldValue)
|
||||
print(io, a.l0)
|
||||
a == FieldValue(a.l0) && return
|
||||
print(io, ".", a.l1)
|
||||
a == FieldValue(a.l0, a.l1) && return
|
||||
print(io, ".", a.l2)
|
||||
a == FieldValue(a.l0, a.l1, a.l2) && return
|
||||
print(io, ".", a.l3)
|
||||
a == FieldValue(a.l0, a.l1, a.l2, a.l3) && return
|
||||
print(io, ".", a.l4)
|
||||
a == FieldValue(a.l0, a.l1, a.l2, a.l3, a.l4) && return
|
||||
print(io, ".", a.l5)
|
||||
return
|
||||
end
|
||||
|
||||
const Field = Vector{FieldValue}
|
||||
|
||||
Base.zero(::Type{FieldValue}) = FieldValue()
|
||||
|
||||
Base.typemin(::Type{FieldValue}) = (x=typemin(Int); y=typemin(VersionWeight); FieldValue(x, y, y, x, x, typemin(Int128)))
|
||||
|
||||
Base.:-(a::FieldValue, b::FieldValue) = FieldValue(a.l0-b.l0, a.l1-b.l1, a.l2-b.l2, a.l3-b.l3, a.l4-b.l4, a.l5-b.l5)
|
||||
Base.:+(a::FieldValue, b::FieldValue) = FieldValue(a.l0+b.l0, a.l1+b.l1, a.l2+b.l2, a.l3+b.l3, a.l4+b.l4, a.l5+b.l5)
|
||||
|
||||
function Base.isless(a::FieldValue, b::FieldValue)
|
||||
a.l0 < b.l0 && return true
|
||||
a.l0 > b.l0 && return false
|
||||
c = cmp(a.l1, b.l1)
|
||||
c < 0 && return true
|
||||
c > 0 && return false
|
||||
c = cmp(a.l2, b.l2)
|
||||
c < 0 && return true
|
||||
c > 0 && return false
|
||||
a.l3 < b.l3 && return true
|
||||
a.l3 > b.l3 && return false
|
||||
a.l4 < b.l4 && return true
|
||||
a.l4 > b.l4 && return false
|
||||
a.l5 < b.l5 && return true
|
||||
return false
|
||||
end
|
||||
|
||||
Base.:(==)(a::FieldValue, b::FieldValue) =
|
||||
a.l0 == b.l0 && a.l1 == b.l1 && a.l2 == b.l2 && a.l3 == b.l3 && a.l4 == b.l4 && a.l5 == b.l5
|
||||
|
||||
Base.abs(a::FieldValue) = FieldValue(abs(a.l0), abs(a.l1), abs(a.l2), abs(a.l3), abs(a.l4), abs(a.l5))
|
||||
|
||||
Base.copy(a::FieldValue) = FieldValue(a.l0, copy(a.l1), copy(a.l2), a.l3, a.l4, a.l5)
|
||||
|
||||
function Base.unsafe_copy!(dest::Field, doffs, src::Field, soffs, n)
|
||||
for i = 1:n
|
||||
dest[doffs+i-1] = copy(src[soffs+i-1])
|
||||
end
|
||||
return dest
|
||||
end
|
||||
|
||||
# if the maximum field has l0 < 0, it means that
|
||||
# some hard constraint is being violated
|
||||
validmax(a::FieldValue) = a.l0 >= 0
|
||||
|
||||
# like usual indmax, but favors the highest indices
|
||||
# in case of a tie
|
||||
function Base.indmax(f::Field)
|
||||
m = typemin(FieldValue)
|
||||
mi = 0
|
||||
for j = length(f):-1:1
|
||||
if f[j] > m
|
||||
m = f[j]
|
||||
mi = j
|
||||
end
|
||||
end
|
||||
@assert mi != 0
|
||||
return mi
|
||||
end
|
||||
|
||||
# secondmax returns the (normalized) value of the second maximum in a
|
||||
# field. It's used to determine the most polarized field.
|
||||
function secondmax(f::Field)
|
||||
m = typemin(FieldValue)
|
||||
m2 = typemin(FieldValue)
|
||||
for i = 1:length(f)
|
||||
a = f[i]
|
||||
if a > m
|
||||
m2 = m
|
||||
m = a
|
||||
elseif a > m2
|
||||
m2 = a
|
||||
end
|
||||
end
|
||||
return m2 - m
|
||||
end
|
||||
|
||||
end
|
||||
364
julia-0.6.3/share/julia/base/pkg/resolve/interface.jl
Normal file
@@ -0,0 +1,364 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module PkgToMaxSumInterface
|
||||
|
||||
using ...Types, ...Query, ..VersionWeights
|
||||
|
||||
export Interface, compute_output_dict, greedysolver,
|
||||
verify_solution, enforce_optimality!
|
||||
|
||||
# A collection of objects which allow interfacing external (Pkg) and
|
||||
# internal (MaxSum) representations
|
||||
mutable struct Interface
|
||||
# requirements and dependencies, in external representation
|
||||
reqs::Requires
|
||||
deps::Dict{String,Dict{VersionNumber,Available}}
|
||||
|
||||
# packages list
|
||||
pkgs::Vector{String}
|
||||
|
||||
# number of packages
|
||||
np::Int
|
||||
|
||||
# states per package: one per version + uninstalled
|
||||
spp::Vector{Int}
|
||||
|
||||
# package dict: associates an index to each package name
|
||||
pdict::Dict{String,Int}
|
||||
|
||||
# package versions: for each package, keep the list of the
|
||||
# possible version numbers; this defines a
|
||||
# mapping from version numbers of a package
|
||||
# to indices
|
||||
pvers::Vector{Vector{VersionNumber}}
|
||||
|
||||
# versions dict: associates a version index to each package
|
||||
# version; such that
|
||||
# pvers[p0][vdict[p0][vn]] = vn
|
||||
vdict::Vector{Dict{VersionNumber,Int}}
|
||||
|
||||
# version weights: the weight for each version of each package
|
||||
# (versions include the uninstalled state; the
|
||||
# higher the weight, the more favored the version)
|
||||
vweight::Vector{Vector{VersionWeight}}
|
||||
|
||||
function Interface(reqs::Requires, deps::Dict{String,Dict{VersionNumber,Available}})
|
||||
# generate pkgs
|
||||
pkgs = sort!(String[keys(deps)...])
|
||||
|
||||
np = length(pkgs)
|
||||
|
||||
# generate pdict
|
||||
pdict = Dict{String,Int}(pkgs[i] => i for i = 1:np)
|
||||
|
||||
# generate spp and pvers
|
||||
spp = Vector{Int}(np)
|
||||
|
||||
pvers = [VersionNumber[] for i = 1:np]
|
||||
|
||||
for (p,depsp) in deps, vn in keys(depsp)
|
||||
p0 = pdict[p]
|
||||
push!(pvers[p0], vn)
|
||||
end
|
||||
for p0 = 1:np
|
||||
sort!(pvers[p0])
|
||||
spp[p0] = length(pvers[p0]) + 1
|
||||
end
|
||||
|
||||
# generate vdict
|
||||
vdict = [Dict{VersionNumber,Int}() for p0 = 1:np]
|
||||
for (p,depsp) in deps
|
||||
p0 = pdict[p]
|
||||
vdict0 = vdict[p0]
|
||||
pvers0 = pvers[p0]
|
||||
for vn in keys(depsp)
|
||||
for v0 in 1:length(pvers0)
|
||||
if pvers0[v0] == vn
|
||||
vdict0[vn] = v0
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
## generate vweights:
|
||||
vweight = Vector{Vector{VersionWeight}}(np)
|
||||
for p0 = 1:np
|
||||
pvers0 = pvers[p0]
|
||||
spp0 = spp[p0]
|
||||
vweight0 = vweight[p0] = Vector{VersionWeight}(spp0)
|
||||
for v0 = 1:spp0-1
|
||||
vweight0[v0] = VersionWeight(pvers0[v0])
|
||||
end
|
||||
vweight0[spp0] = VersionWeight(v"0") # last version means uninstalled
|
||||
end
|
||||
|
||||
return new(reqs, deps, pkgs, np, spp, pdict, pvers, vdict, vweight)
|
||||
end
|
||||
end
|
||||
|
||||
# The output format is a Dict which associates a VersionNumber to each installed package name
|
||||
function compute_output_dict(sol::Vector{Int}, interface::Interface)
|
||||
pkgs = interface.pkgs
|
||||
np = interface.np
|
||||
pvers = interface.pvers
|
||||
spp = interface.spp
|
||||
|
||||
want = Dict{String,VersionNumber}()
|
||||
for p0 = 1:np
|
||||
p = pkgs[p0]
|
||||
s = sol[p0]
|
||||
if s != spp[p0]
|
||||
v = pvers[p0][s]
|
||||
want[p] = v
|
||||
end
|
||||
end
|
||||
|
||||
return want
|
||||
end
|
||||
|
||||
# Produce a trivial solution: try to maximize each version;
|
||||
# bail out as soon as some non-trivial requirements are detected.
|
||||
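# Returns (true, sol) if the greedy pass succeeds; returns (false, Int[]) as soon
# as no admissible version exists for a dependency or two dependency paths pick
# different versions of the same package.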
function greedysolver(interface::Interface)
|
||||
reqs = interface.reqs
|
||||
deps = interface.deps
|
||||
spp = interface.spp
|
||||
pdict = interface.pdict
|
||||
pvers = interface.pvers
|
||||
np = interface.np
|
||||
|
||||
# initialize solution: all uninstalled
|
||||
sol = [spp[p0] for p0 = 1:np]
|
||||
|
||||
# set up required packages to their highest allowed versions
|
||||
for (rp,rvs) in reqs
|
||||
rp0 = pdict[rp]
|
||||
# look for the highest version which satisfies the requirements
|
||||
rv = spp[rp0] - 1
|
||||
while rv > 0
|
||||
rvn = pvers[rp0][rv]
|
||||
rvn ∈ rvs && break
|
||||
rv -= 1
|
||||
end
|
||||
@assert rv > 0
|
||||
sol[rp0] = rv
|
||||
end
|
||||
|
||||
# we start from required packages and explore the graph
|
||||
# following dependencies
|
||||
staged = Set{String}(keys(reqs))
|
||||
seen = copy(staged)
|
||||
|
||||
while !isempty(staged)
|
||||
staged_next = Set{String}()
|
||||
for p in staged
|
||||
p0 = pdict[p]
|
||||
@assert sol[p0] < spp[p0]
|
||||
vn = pvers[p0][sol[p0]]
|
||||
a = deps[p][vn]
|
||||
|
||||
# scan dependencies
|
||||
for (rp,rvs) in a.requires
|
||||
rp0 = pdict[rp]
|
||||
# look for the highest version which satisfies the requirements
|
||||
rv = spp[rp0] - 1
|
||||
while rv > 0
|
||||
rvn = pvers[rp0][rv]
|
||||
rvn ∈ rvs && break
|
||||
rv -= 1
|
||||
end
|
||||
# if we found a version, and the package was uninstalled
|
||||
# or the same version was already selected, we're ok;
|
||||
# otherwise we can't be sure what the optimal configuration is
|
||||
# and we bail out
|
||||
if rv > 0 && (sol[rp0] == spp[rp0] || sol[rp0] == rv)
|
||||
sol[rp0] = rv
|
||||
else
|
||||
return (false, Int[])
|
||||
end
|
||||
|
||||
rp ∈ seen || push!(staged_next, rp)
|
||||
end
|
||||
end
|
||||
union!(seen, staged_next)
|
||||
staged = staged_next
|
||||
end
|
||||
|
||||
@assert verify_solution(sol, interface)
|
||||
|
||||
return true, sol
|
||||
end
|
||||
|
||||
# verifies that the solution fulfills all hard constraints
|
||||
# (requirements and dependencies)
|
||||
function verify_solution(sol::Vector{Int}, interface::Interface)
|
||||
reqs = interface.reqs
|
||||
deps = interface.deps
|
||||
spp = interface.spp
|
||||
pdict = interface.pdict
|
||||
pvers = interface.pvers
|
||||
vdict = interface.vdict
|
||||
|
||||
# verify requirements
|
||||
for (p,vs) in reqs
|
||||
p0 = pdict[p]
|
||||
sol[p0] != spp[p0] || return false
|
||||
vn = pvers[p0][sol[p0]]
|
||||
vn ∈ vs || return false
|
||||
end
|
||||
|
||||
# verify dependencies
|
||||
for (p,d) in deps
|
||||
p0 = pdict[p]
|
||||
vdict0 = vdict[p0]
|
||||
for (vn,a) in d
|
||||
v0 = vdict0[vn]
|
||||
if sol[p0] == v0
|
||||
for (rp, rvs) in a.requires
|
||||
p1 = pdict[rp]
|
||||
if sol[p1] == spp[p1]
|
||||
println("""
|
||||
VERIFICATION ERROR: REQUIRED DEPENDENCY NOT INSTALLED
|
||||
package p=$p (p0=$p0) version=$vn (v0=$v0) requires package rp=$rp in version set rvs=$rvs
|
||||
but package $rp is not being installed (p1=$p1 sol[p1]=$(sol[p1]) == spp[p1]=$(spp[p1]))
|
||||
""")
|
||||
return false
|
||||
end
|
||||
vn1 = pvers[p1][sol[p1]]
|
||||
if vn1 ∉ rvs
|
||||
println("""
|
||||
VERIFICATION ERROR: INVALID VERSION
|
||||
package p=$p (p0=$p0) version=$vn (v0=$v0) requires package rp=$rp in version set rvs=$rvs
|
||||
but package $rp version is being set to $vn1 (p1=$p1 sol[p1]=$(sol[p1]) spp[p1]=$(spp[p1]))
|
||||
""")
|
||||
return false
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
||||
# Push the given solution to a local optimum if needed
|
||||
function enforce_optimality!(sol::Vector{Int}, interface::Interface)
|
||||
np = interface.np
|
||||
|
||||
reqs = interface.reqs
|
||||
deps = interface.deps
|
||||
pkgs = interface.pkgs
|
||||
spp = interface.spp
|
||||
pdict = interface.pdict
|
||||
pvers = interface.pvers
|
||||
vdict = interface.vdict
|
||||
|
||||
# prepare some useful structures
|
||||
# pdeps[p0][v0] has all dependencies of package p0 version v0
|
||||
pdeps = [Vector{Requires}(spp[p0]-1) for p0 = 1:np]
|
||||
# prevdeps[p1][p0][v0] is the VersionSet of package p1 which package p0 version v0
|
||||
# depends upon
|
||||
prevdeps = [Dict{Int,Dict{Int,VersionSet}}() for p0 = 1:np]
|
||||
|
||||
for (p,d) in deps
|
||||
p0 = pdict[p]
|
||||
vdict0 = vdict[p0]
|
||||
for (vn,a) in d
|
||||
v0 = vdict0[vn]
|
||||
pdeps[p0][v0] = a.requires
|
||||
for (rp, rvs) in a.requires
|
||||
p1 = pdict[rp]
|
||||
if !haskey(prevdeps[p1], p0)
|
||||
prevdeps[p1][p0] = Dict{Int,VersionSet}()
|
||||
end
|
||||
prevdeps[p1][p0][v0] = rvs
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
restart = true
|
||||
while restart
|
||||
restart = false
|
||||
for p0 = 1:np
|
||||
s0 = sol[p0]
|
||||
if s0 >= spp[p0] - 1
|
||||
# either the package is not installed,
|
||||
# or it's already at the maximum version
|
||||
continue
|
||||
end
|
||||
viol = false
|
||||
# check if the higher version has a dependency which
|
||||
# would be violated by the state of the remaining packages
|
||||
for (p,vs) in pdeps[p0][s0+1]
|
||||
p1 = pdict[p]
|
||||
if sol[p1] == spp[p1]
|
||||
# the dependency is violated because
|
||||
# the other package is not being installed
|
||||
viol = true
|
||||
break
|
||||
end
|
||||
vn = pvers[p1][sol[p1]]
|
||||
if vn ∉ vs
|
||||
# the dependency is violated because
|
||||
# the other package version is invalid
|
||||
viol = true
|
||||
break
|
||||
end
|
||||
end
|
||||
viol && continue
|
||||
|
||||
# check if bumping the version would violate some
|
||||
# dependency of another package
|
||||
for (p1,d) in prevdeps[p0]
|
||||
vs = get(d, sol[p1], nothing)
|
||||
vs === nothing && continue
|
||||
vn = pvers[p0][s0+1]
|
||||
if vn ∉ vs
|
||||
# bumping the version would violate
|
||||
# the dependency
|
||||
viol = true
|
||||
break
|
||||
end
|
||||
end
|
||||
viol && continue
|
||||
# So the solution is non-optimal: we bump it manually
|
||||
#warn("nonoptimal solution for package $(interface.pkgs[p0]): sol=$s0")
|
||||
sol[p0] += 1
|
||||
restart = true
|
||||
end
|
||||
end
|
||||
|
||||
# Finally uninstall unneeded packages:
|
||||
# start from the required ones and keep only
|
||||
# the packages reachable from them along the graph
|
||||
uninst = trues(np)
|
||||
staged = Set{String}(keys(reqs))
|
||||
seen = copy(staged)
|
||||
|
||||
while !isempty(staged)
|
||||
staged_next = Set{String}()
|
||||
for p in staged
|
||||
p0 = pdict[p]
|
||||
uninst[p0] = false
|
||||
@assert sol[p0] < spp[p0]
|
||||
vn = pvers[p0][sol[p0]]
|
||||
a = deps[p][vn]
|
||||
|
||||
# scan dependencies
|
||||
for (rp,rvs) in a.requires
|
||||
rp0 = pdict[rp]
|
||||
@assert sol[rp0] < spp[rp0] && pvers[rp0][sol[rp0]] ∈ rvs
|
||||
rp ∈ seen || push!(staged_next, rp)
|
||||
end
|
||||
end
|
||||
union!(seen, staged_next)
|
||||
staged = staged_next
|
||||
end
|
||||
|
||||
for p0 in find(uninst)
|
||||
sol[p0] = spp[p0]
|
||||
end
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
end
|
||||
524
julia-0.6.3/share/julia/base/pkg/resolve/maxsum.jl
Normal file
@@ -0,0 +1,524 @@
|
||||
# This file is a part of Julia. License is MIT: https://julialang.org/license
|
||||
|
||||
module MaxSum
|
||||
|
||||
include("fieldvalue.jl")
|
||||
|
||||
using .FieldValues, ..VersionWeights, ..PkgToMaxSumInterface
|
||||
|
||||
export UnsatError, Graph, Messages, maxsum
|
||||
|
||||
# An exception type used internally to signal that an unsatisfiable
|
||||
# constraint was detected
|
||||
mutable struct UnsatError <: Exception
|
||||
info
|
||||
end
|
||||
|
||||
# Some parameters to drive the decimation process
|
||||
mutable struct MaxSumParams
|
||||
nondec_iterations # number of initial iterations before starting
|
||||
# decimation
|
||||
dec_interval # number of iterations between decimations
|
||||
dec_fraction # fraction of nodes to decimate at every decimation
|
||||
# step
|
||||
|
||||
function MaxSumParams()
|
||||
accuracy = parse(Int, get(ENV, "JULIA_PKGRESOLVE_ACCURACY", "1"))
|
||||
if accuracy <= 0
|
||||
error("JULIA_PKGRESOLVE_ACCURACY must be > 0")
|
||||
end
|
||||
nondec_iterations = accuracy * 20
|
||||
dec_interval = accuracy * 10
|
||||
dec_fraction = 0.05 / accuracy
|
||||
return new(nondec_iterations, dec_interval, dec_fraction)
|
||||
end
|
||||
end
|
||||
|
||||
# Graph holds the graph structure onto which max-sum is run, in
|
||||
# sparse format
|
||||
mutable struct Graph
|
||||
# adjacency matrix:
|
||||
# for each package, has the list of neighbors
|
||||
# indices (both dependencies and dependants)
|
||||
gadj::Vector{Vector{Int}}
|
||||
|
||||
# compatibility mask:
|
||||
# for each package p0 has a list of bool masks.
|
||||
# Each entry in the list gmsk[p0] is relative to the
|
||||
# package p1 as read from gadj[p0].
|
||||
# Each mask has dimension spp1 x spp0, where
|
||||
# spp0 is the number of states of p0, and
|
||||
# spp1 is the number of states of p1.
|
||||
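# gmsk[p0][j0][v1,v0] is true iff state v1 of the neighbor gadj[p0][j0] is
# compatible with state v0 of p0 (the last state index means "uninstalled").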
gmsk::Vector{Vector{BitMatrix}}
|
||||
|
||||
# dependency direction:
|
||||
# keeps track of which direction the dependency goes
|
||||
# takes 3 values:
|
||||
# 1 = dependant
|
||||
# -1 = dependency
|
||||
# 0 = both
|
||||
# Used to break symmetry between dependants and
|
||||
# dependencies (introduces a FieldValue at level l3).
|
||||
# The "both" case is for when there are dependency
|
||||
# relations which go both ways, in which case the
|
||||
# noise is left to discriminate in case of ties
|
||||
gdir::Vector{Vector{Int}}
|
||||
|
||||
# adjacency dict:
|
||||
# allows one to retrieve the indices in gadj, so that
|
||||
# gadj[p0][adjdict[p1][p0]] = p1
|
||||
# ("At which index does package p1 appear in gadj[p0]?")
|
||||
adjdict::Vector{Dict{Int,Int}}
|
||||
|
||||
# states per package: same as in Interface
|
||||
spp::Vector{Int}
|
||||
|
||||
# update order: shuffled at each iteration
|
||||
perm::Vector{Int}
|
||||
|
||||
# number of packages (all Vectors above have this length)
|
||||
np::Int
|
||||
|
||||
function Graph(interface::Interface)
|
||||
deps = interface.deps
|
||||
np = interface.np
|
||||
|
||||
spp = interface.spp
|
||||
pdict = interface.pdict
|
||||
pvers = interface.pvers
|
||||
vdict = interface.vdict
|
||||
|
||||
gadj = [Int[] for i = 1:np]
|
||||
gmsk = [BitMatrix[] for i = 1:np]
|
||||
gdir = [Int[] for i = 1:np]
|
||||
adjdict = [Dict{Int,Int}() for i = 1:np]
|
||||
|
||||
for (p,d) in deps
|
||||
p0 = pdict[p]
|
||||
vdict0 = vdict[p0]
|
||||
for (vn,a) in d
|
||||
v0 = vdict0[vn]
|
||||
for (rp, rvs) in a.requires
|
||||
p1 = pdict[rp]
|
||||
|
||||
j0 = 1
|
||||
while j0 <= length(gadj[p0]) && gadj[p0][j0] != p1
|
||||
j0 += 1
|
||||
end
|
||||
j1 = 1
|
||||
while j1 <= length(gadj[p1]) && gadj[p1][j1] != p0
|
||||
j1 += 1
|
||||
end
|
||||
@assert (j0 > length(gadj[p0]) && j1 > length(gadj[p1])) ||
|
||||
(j0 <= length(gadj[p0]) && j1 <= length(gadj[p1]))
|
||||
|
||||
if j0 > length(gadj[p0])
|
||||
push!(gadj[p0], p1)
|
||||
push!(gadj[p1], p0)
|
||||
j0 = length(gadj[p0])
|
||||
j1 = length(gadj[p1])
|
||||
|
||||
adjdict[p1][p0] = j0
|
||||
adjdict[p0][p1] = j1
|
||||
|
||||
bm = trues(spp[p1], spp[p0])
|
||||
bmt = bm'
|
||||
|
||||
push!(gmsk[p0], bm)
|
||||
push!(gmsk[p1], bmt)
|
||||
|
||||
push!(gdir[p0], 1)
|
||||
push!(gdir[p1], -1)
|
||||
else
|
||||
bm = gmsk[p0][j0]
|
||||
bmt = gmsk[p1][j1]
|
||||
if gdir[p0][j0] == -1
|
||||
gdir[p0][j0] = 0
|
||||
gdir[p1][j1] = 0
|
||||
end
|
||||
end
|
||||
|
||||
for v1 = 1:length(pvers[p1])
|
||||
if pvers[p1][v1] ∉ rvs
|
||||
bm[v1, v0] = false
|
||||
bmt[v0, v1] = false
|
||||
end
|
||||
end
|
||||
bm[end,v0] = false
|
||||
bmt[v0,end] = false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
perm = [1:np;]
|
||||
|
||||
return new(gadj, gmsk, gdir, adjdict, spp, perm, np)
|
||||
end
|
||||
end
|
||||
|
||||
# Messages has the cavity messages and the total fields, and
|
||||
# gets updated iteratively (and occasionally decimated) until
|
||||
# convergence
|
||||
mutable struct Messages
|
||||
# cavity incoming messages: for each package p0,
|
||||
# for each neighbor p1 of p0,
|
||||
# msg[p0][p1] is a vector of length spp[p0]
|
||||
# messages are normalized (i.e. the max is always 0)
|
||||
msg::Vector{Vector{Field}}
|
||||
|
||||
# overall fields: for each package p0,
|
||||
# fld[p0] is a vector of length spp[p0]
|
||||
# fields are not normalized
|
||||
fld::Vector{Field}
|
||||
|
||||
# backup of the initial value of fld, to be used when resetting
|
||||
initial_fld::Vector{Field}
|
||||
|
||||
# keep track of which variables have been decimated
|
||||
decimated::BitVector
|
||||
num_nondecimated::Int
|
||||
|
||||
function Messages(interface::Interface, graph::Graph)
|
||||
reqs = interface.reqs
|
||||
pkgs = interface.pkgs
|
||||
np = interface.np
|
||||
spp = interface.spp
|
||||
pvers = interface.pvers
|
||||
pdict = interface.pdict
|
||||
vweight = interface.vweight
|
||||
|
||||
# a "deterministic noise" function based on hashes
|
||||
function noise(p0::Int, v0::Int)
|
||||
s = pkgs[p0] * string(v0 == spp[p0] ? "UNINST" : pvers[p0][v0])
|
||||
Int128(hash(s))
|
||||
end
|
||||
|
||||
# external fields: there are 2 terms, a noise to break potential symmetries
|
||||
# and one to favor newest versions over older, and no-version over all
|
||||
fld = [[FieldValue(0, zero(VersionWeight), vweight[p0][v0], (v0==spp[p0]), 0, noise(p0,v0)) for v0 = 1:spp[p0]] for p0 = 1:np]
|
||||
|
||||
# enforce requirements
|
||||
for (rp, rvs) in reqs
|
||||
p0 = pdict[rp]
|
||||
pvers0 = pvers[p0]
|
||||
fld0 = fld[p0]
|
||||
for v0 = 1:spp[p0]-1
|
||||
vn = pvers0[v0]
|
||||
if !in(vn, rvs)
|
||||
# the state is forbidden by requirements
|
||||
fld0[v0] = FieldValue(-1)
|
||||
else
|
||||
# the state is one of those explicitly requested:
|
||||
# favor it at a higher level than normal (upgrade
|
||||
# FieldValue from l2 to l1)
|
||||
fld0[v0] += FieldValue(0, vweight[p0][v0], -vweight[p0][v0])
|
||||
end
|
||||
end
|
||||
# the uninstalled state is forbidden by requirements
|
||||
fld0[spp[p0]] = FieldValue(-1)
|
||||
end
|
||||
# normalize fields
|
||||
for p0 = 1:np
|
||||
m = maximum(fld[p0])
|
||||
for v0 = 1:spp[p0]
|
||||
fld[p0][v0] -= m
|
||||
end
|
||||
end
|
||||
|
||||
initial_fld = deepcopy(fld)
|
||||
|
||||
# initialize cavity messages to 0
|
||||
gadj = graph.gadj
|
||||
msg = [[zeros(FieldValue, spp[p0]) for p1 = 1:length(gadj[p0])] for p0 = 1:np]
|
||||
|
||||
return new(msg, fld, initial_fld, falses(np), np)
|
||||
end
|
||||
end
|
||||
|
||||
function getsolution(msgs::Messages)
|
||||
# the solution is just the location of the maximum in
|
||||
# each field
|
||||
|
||||
fld = msgs.fld
|
||||
np = length(fld)
|
||||
sol = Vector{Int}(np)
|
||||
for p0 = 1:np
|
||||
fld0 = fld[p0]
|
||||
s0 = indmax(fld0)
|
||||
if !validmax(fld0[s0])
|
||||
throw(UnsatError(p0))
|
||||
end
|
||||
sol[p0] = s0
|
||||
end
|
||||
return sol
|
||||
end
|
||||
|
||||
# This is the core of the max-sum solver:
|
||||
# for a given node p0 (i.e. a package) updates all
|
||||
# input cavity messages and fields of its neighbors
|
||||
function update(p0::Int, graph::Graph, msgs::Messages)
|
||||
gadj = graph.gadj
|
||||
gmsk = graph.gmsk
|
||||
gdir = graph.gdir
|
||||
adjdict = graph.adjdict
|
||||
spp = graph.spp
|
||||
np = graph.np
|
||||
msg = msgs.msg
|
||||
fld = msgs.fld
|
||||
decimated = msgs.decimated
|
||||
|
||||
maxdiff = zero(FieldValue)
|
||||
|
||||
gadj0 = gadj[p0]
|
||||
msg0 = msg[p0]
|
||||
fld0 = fld[p0]
|
||||
spp0 = spp[p0]
|
||||
adjdict0 = adjdict[p0]
|
||||
|
||||
# iterate over all neighbors of p0
|
||||
for j0 in 1:length(gadj0)
|
||||
|
||||
p1 = gadj0[j0]
|
||||
decimated[p1] && continue
|
||||
j1 = adjdict0[p1]
|
||||
#@assert j0 == adjdict[p1][p0]
|
||||
bm1 = gmsk[p1][j1]
|
||||
dir1 = gdir[p1][j1]
|
||||
spp1 = spp[p1]
|
||||
msg1 = msg[p1]
|
||||
|
||||
# compute the output cavity message p0->p1
|
||||
cavmsg = fld0 - msg0[j0]
|
||||
|
||||
if dir1 == -1
|
||||
# p0 depends on p1
|
||||
for v0 = 1:spp0-1
|
||||
cavmsg[v0] += FieldValue(0, VersionWeight(0), VersionWeight(0), 0, v0)
|
||||
end
|
||||
end
|
||||
|
||||
# keep the old input cavity message p0->p1
|
||||
oldmsg = msg1[j1]
|
||||
|
||||
# init the new message to minus infinity
|
||||
newmsg = [FieldValue(-1) for v1 = 1:spp1]
|
||||
|
||||
# compute the new message by passing cavmsg
|
||||
# through the constraint encoded in the bitmask
|
||||
# (nearly equivalent to:
|
||||
# newmsg = [maximum(cavmsg[bm1[:,v1]]) for v1 = 1:spp1]
|
||||
#    except for the extra dir1-dependent term)
|
||||
m = FieldValue(-1)
|
||||
for v1 = 1:spp1
|
||||
for v0 = 1:spp0
|
||||
if bm1[v0, v1]
|
||||
newmsg[v1] = max(newmsg[v1], cavmsg[v0])
|
||||
end
|
||||
end
|
||||
if dir1 == 1 && v1 != spp1
|
||||
# p1 depends on p0
|
||||
newmsg[v1] += FieldValue(0, VersionWeight(0), VersionWeight(0), 0, v1)
|
||||
end
|
||||
m = max(m, newmsg[v1])
|
||||
end
|
||||
if !validmax(m)
|
||||
# No state available without violating some
|
||||
# hard constraint
|
||||
throw(UnsatError(p1))
|
||||
end
|
||||
|
||||
# normalize the new message
|
||||
for v1 = 1:spp1
|
||||
newmsg[v1] -= m
|
||||
end
|
||||
|
||||
diff = newmsg - oldmsg
|
||||
maxdiff = max(maxdiff, maximum(abs.(diff)))
|
||||
|
||||
# update the field of p1
|
||||
fld1 = fld[p1]
|
||||
for v1 = 1:spp1
|
||||
fld1[v1] += diff[v1]
|
||||
end
|
||||
|
||||
# put the newly computed message in place
|
||||
msg1[j1] = newmsg
|
||||
end
|
||||
return maxdiff
|
||||
end
|
||||
|
||||
# A simple shuffling machinery for the update order in iterate()
|
||||
# (wouldn't pass any random quality test but it's arguably enough)
|
||||
let step=1
|
||||
global shuffleperm, shuffleperminit
|
||||
shuffleperminit() = (step = 1)
|
||||
function shuffleperm(graph::Graph)
|
||||
perm = graph.perm
|
||||
np = graph.np
|
||||
for j = np:-1:2
|
||||
k = mod(step,j)+1
|
||||
perm[j], perm[k] = perm[k], perm[j]
|
||||
step += isodd(j) ? 1 : k
|
||||
end
|
||||
#@assert isperm(perm)
|
||||
end
|
||||
end
|
||||
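# --- Illustration (not part of the original source) ---
# The loop above is a Fisher-Yates-style pass where the "random" index is
# derived deterministically from `step`; a rough standalone sketch with a
# plain vector and an arbitrary fixed step value:
let perm = collect(1:5), step = 7
    for j = 5:-1:2
        k = mod(step, j) + 1
        perm[j], perm[k] = perm[k], perm[j]
        step += isodd(j) ? 1 : k
    end
    @assert isperm(perm)   # still a permutation of 1:5, just reordered
end
# -------------------------------------------------------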

# Call update for all nodes (i.e. packages) in
# random order
function iterate(graph::Graph, msgs::Messages)
    np = graph.np

    maxdiff = zero(FieldValue)
    shuffleperm(graph)
    perm = graph.perm
    for p0 in perm
        maxdiff0 = update(p0, graph, msgs)
        maxdiff = max(maxdiff, maxdiff0)
    end
    return maxdiff
end

function decimate1(p0::Int, graph::Graph, msgs::Messages)
    decimated = msgs.decimated
    fld = msgs.fld
    adjdict = graph.adjdict
    gmsk = graph.gmsk

    @assert !decimated[p0]
    fld0 = fld[p0]
    s0 = indmax(fld0)
    # only do the decimation if it is consistent with
    # the previously decimated nodes
    for p1 in find(decimated)
        haskey(adjdict[p0], p1) || continue
        s1 = indmax(fld[p1])
        j1 = adjdict[p0][p1]
        gmsk[p1][j1][s0,s1] || return false
    end
    #println("DECIMATING $p0 (s0=$s0 fld=$fld0)")
    for v0 = 1:length(fld0)
        v0 == s0 && continue
        fld0[v0] = FieldValue(-1)
    end
    msgs.decimated[p0] = true
    msgs.num_nondecimated -= 1
    return true
end

function reset_messages!(msgs::Messages)
    msg = msgs.msg
    fld = msgs.fld
    initial_fld = msgs.initial_fld
    decimated = msgs.decimated
    np = length(fld)
    for p0 = 1:np
        map(m->fill!(m, zero(FieldValue)), msg[p0])
        decimated[p0] && continue
        fld[p0] = copy(initial_fld[p0])
    end
    return msgs
end

# If normal convergence fails (or is too slow) fix the most
# polarized packages by adding extra infinite fields on every state
# but the maximum
function decimate(n::Int, graph::Graph, msgs::Messages)
    #println("DECIMATING $n NODES")
    adjdict = graph.adjdict
    fld = msgs.fld
    decimated = msgs.decimated
    fldorder = sortperm(fld, by=secondmax)
    did_dec = false
    for p0 in fldorder
        decimated[p0] && continue
        did_dec |= decimate1(p0, graph, msgs)
        n -= 1
        n == 0 && break
    end
    @assert n == 0
    if !did_dec
        # did not succeed in decimating anything;
        # try to decimate at least one node
        for p0 in fldorder
            decimated[p0] && continue
            if decimate1(p0, graph, msgs)
                did_dec = true
                break
            end
        end
    end
    if !did_dec
        # still didn't succeed, give up
        p0 = first(fldorder[.~(decimated)])
        throw(UnsatError(p0))
    end

    reset_messages!(msgs)
    return
end

# In case ties still exist at convergence, break them and
# keep converging
function break_ties(msgs::Messages)
    fld = msgs.fld
    unbroken_ties = Int[]
    for p0 = 1:length(fld)
        fld0 = fld[p0]
        z = 0
        m = typemin(FieldValue)
        for v0 = 1:length(fld0)
            if fld0[v0] > m
                m = fld0[v0]
                z = 1
            elseif fld0[v0] == m
                z += 1
            end
        end
        if z > 1
            #println("TIE! p0=$p0")
            decimate1(p0, msgs) && return false
            push!(unbroken_ties, p0)
        end
    end
    # If there were ties, but none were broken, bail out
    isempty(unbroken_ties) || throw(PkgError(first(unbroken_ties)))
    return true
end

# Iterative solver: run iterate() until convergence
# (occasionally calling decimate())
function maxsum(graph::Graph, msgs::Messages)
    params = MaxSumParams()

    it = 0
    shuffleperminit()
    while true
        it += 1
        maxdiff = iterate(graph, msgs)
        #println("it = $it maxdiff = $maxdiff")

        if maxdiff == zero(FieldValue)
            break_ties(msgs) && break
            continue
        end
        if it >= params.nondec_iterations &&
           (it - params.nondec_iterations) % params.dec_interval == 0
            numdec = clamp(floor(Int, params.dec_fraction * graph.np), 1, msgs.num_nondecimated)
            decimate(numdec, graph, msgs)
            msgs.num_nondecimated == 0 && break
        end
    end

    # Finally, decimate everything just to
    # check against inconsistencies
    # (old_numnondec is saved just to prevent
    # wrong messages about accuracy)
    old_numnondec = msgs.num_nondecimated
    decimate(msgs.num_nondecimated, graph, msgs)
    msgs.num_nondecimated = old_numnondec

    return getsolution(msgs)
end

end
230
julia-0.6.3/share/julia/base/pkg/resolve/versionweight.jl
Normal file
@@ -0,0 +1,230 @@

# This file is a part of Julia. License is MIT: https://julialang.org/license

module VersionWeights

importall ....Base.Operators

export VersionWeight

struct HierarchicalValue{T}
    v::Vector{T}
    rest::T
end

HierarchicalValue{T}(v::Vector{T}) = HierarchicalValue{T}(v, zero(T))
HierarchicalValue(T::Type) = HierarchicalValue(T[])

Base.zero(::Type{HierarchicalValue{T}}) where {T} = HierarchicalValue(T)

Base.typemin(::Type{HierarchicalValue{T}}) where {T} = HierarchicalValue(T[], typemin(T))

for f in (:-, :+)
    @eval function Base.$f(a::HierarchicalValue{T}, b::HierarchicalValue{T}) where T
        av = a.v
        bv = b.v
        la = length(a.v)
        lb = length(b.v)
        l0 = min(la, lb)
        l1 = max(la, lb)
        ld = la - lb
        rv = Vector{T}(l1)
        rf = ($f)(a.rest, b.rest)
        @inbounds for i = 1:l0
            rv[i] = ($f)(av[i], bv[i])
        end
        @inbounds for i = l0+1:l0+ld
            rv[i] = ($f)(av[i], b.rest)
        end
        @inbounds for i = l0+1:l0-ld
            rv[i] = ($f)(a.rest, bv[i])
        end
        return HierarchicalValue(rv, rf)
    end
end
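# --- Worked example (not part of the original source) ---
# For vectors of different lengths the shorter side is padded with its `rest`
# field, so (assuming the module is loaded):
#   HierarchicalValue([1,2,3], 0) - HierarchicalValue([1,1], 0)
# combines element-wise as (1-1, 2-1, 3-0) and yields
#   HierarchicalValue([0,1,3], 0)
# ---------------------------------------------------------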

Base.:-(a::HierarchicalValue) = HierarchicalValue(-a.v, -a.rest)

function Base.cmp(a::HierarchicalValue{T}, b::HierarchicalValue{T}) where T
    av = a.v
    bv = b.v
    la = length(a.v)
    lb = length(b.v)
    l0 = min(la, lb)
    l1 = max(la, lb)
    ld = la - lb
    @inbounds for i = 1:l0
        c = cmp(av[i], bv[i]); c != 0 && return c
    end
    @inbounds for i = l0+1:l0+ld
        c = cmp(av[i], b.rest); c != 0 && return c
    end
    @inbounds for i = l0+1:l0-ld
        c = cmp(a.rest, bv[i]); c != 0 && return c
    end
    return cmp(a.rest, b.rest)
end
Base.isless(a::HierarchicalValue{T}, b::HierarchicalValue{T}) where {T} = cmp(a,b) < 0
Base.:(==)(a::HierarchicalValue{T}, b::HierarchicalValue{T}) where {T} = cmp(a,b) == 0

Base.abs(a::HierarchicalValue{T}) where {T} = HierarchicalValue(T[abs(x) for x in a.v], abs(a.rest))

Base.copy(a::HierarchicalValue{T}) where {T} = HierarchicalValue(T[copy(x) for x in a.v], copy(a.rest))

struct VWPreBuildItem
    nonempty::Int
    s::HierarchicalValue{Int}
    i::Int
end
VWPreBuildItem() = VWPreBuildItem(0, HierarchicalValue(Int), 0)
VWPreBuildItem(i::Int) = VWPreBuildItem(1, HierarchicalValue(Int), i)
VWPreBuildItem(s::String) = VWPreBuildItem(1, HierarchicalValue(Int[s...]), 0)

Base.zero(::Type{VWPreBuildItem}) = VWPreBuildItem()

Base.typemin(::Type{VWPreBuildItem}) = (x=typemin(Int); VWPreBuildItem(x, typemin(HierarchicalValue{Int}), x))

Base.:-(a::VWPreBuildItem, b::VWPreBuildItem) = VWPreBuildItem(a.nonempty-b.nonempty, a.s-b.s, a.i-b.i)
Base.:+(a::VWPreBuildItem, b::VWPreBuildItem) = VWPreBuildItem(a.nonempty+b.nonempty, a.s+b.s, a.i+b.i)

Base.:-(a::VWPreBuildItem) = VWPreBuildItem(-a.nonempty, -a.s, -a.i)

function Base.cmp(a::VWPreBuildItem, b::VWPreBuildItem)
    c = cmp(a.nonempty, b.nonempty); c != 0 && return c
    c = cmp(a.s, b.s); c != 0 && return c
    return cmp(a.i, b.i)
end
Base.isless(a::VWPreBuildItem, b::VWPreBuildItem) = cmp(a,b) < 0
Base.:(==)(a::VWPreBuildItem, b::VWPreBuildItem) = cmp(a,b) == 0

Base.abs(a::VWPreBuildItem) = VWPreBuildItem(abs(a.nonempty), abs(a.s), abs(a.i))

Base.copy(a::VWPreBuildItem) = VWPreBuildItem(a.nonempty, copy(a.s), a.i)

struct VWPreBuild
    nonempty::Int
    w::HierarchicalValue{VWPreBuildItem}
end

const _vwprebuild_zero = VWPreBuild(0, HierarchicalValue(VWPreBuildItem))

function VWPreBuild(ispre::Bool, desc::Tuple{Vararg{Union{Int,String}}})
    isempty(desc) && return _vwprebuild_zero
    desc == ("",) && return VWPreBuild(ispre ? -1 : 1, HierarchicalValue(VWPreBuildItem[]))
    hv = HierarchicalValue([VWPreBuildItem(item) for item in desc])
    return VWPreBuild(ispre ? -1 : 0, hv)
end
VWPreBuild() = _vwprebuild_zero

Base.zero(::Type{VWPreBuild}) = VWPreBuild()

const _vwprebuild_min = VWPreBuild(typemin(Int), typemin(HierarchicalValue{VWPreBuildItem}))
Base.typemin(::Type{VWPreBuild}) = _vwprebuild_min

function Base.:(-)(a::VWPreBuild, b::VWPreBuild)
    b === _vwprebuild_zero && return a
    a === _vwprebuild_zero && return -b
    VWPreBuild(a.nonempty-b.nonempty, a.w-b.w)
end
function Base.:(+)(a::VWPreBuild, b::VWPreBuild)
    b === _vwprebuild_zero && return a
    a === _vwprebuild_zero && return b
    VWPreBuild(a.nonempty+b.nonempty, a.w+b.w)
end

function Base.:(-)(a::VWPreBuild)
    a === _vwprebuild_zero && return a
    VWPreBuild(-a.nonempty, -a.w)
end

@inline function Base.cmp(a::VWPreBuild, b::VWPreBuild)
    a === _vwprebuild_zero && b === _vwprebuild_zero && return 0
    c = cmp(a.nonempty, b.nonempty); c != 0 && return c
    return cmp(a.w, b.w)
end
Base.isless(a::VWPreBuild, b::VWPreBuild) = cmp(a,b) < 0
Base.:(==)(a::VWPreBuild, b::VWPreBuild) = cmp(a,b) == 0

function Base.abs(a::VWPreBuild)
    a === _vwprebuild_zero && return a
    VWPreBuild(abs(a.nonempty), abs(a.w))
end

function Base.copy(a::VWPreBuild)
    a === _vwprebuild_zero && return a
    VWPreBuild(a.nonempty, copy(a.w))
end

function Base.deepcopy_internal(a::VWPreBuild, dict::ObjectIdDict)
    haskey(dict, a) && return dict[a]
    b = (a === _vwprebuild_zero) ? _vwprebuild_zero : VWPreBuild(a.nonempty, Base.deepcopy_internal(a.w, dict))
    dict[a] = b
    return b
end

# The numeric type used to determine how the different
# versions of a package should be weighed
struct VersionWeight
    major::Int
    minor::Int
    patch::Int
    prerelease::VWPreBuild
    build::VWPreBuild
end
VersionWeight(major::Int, minor::Int, patch::Int, prerelease::VWPreBuild) = VersionWeight(major, minor, patch, prerelease, zero(VWPreBuild))
VersionWeight(major::Int, minor::Int, patch::Int) = VersionWeight(major, minor, patch, zero(VWPreBuild))
VersionWeight(major::Int, minor::Int) = VersionWeight(major, minor, 0)
VersionWeight(major::Int) = VersionWeight(major, 0)
VersionWeight() = VersionWeight(0)

VersionWeight(vn::VersionNumber) =
    VersionWeight(vn.major, vn.minor, vn.patch,
                  VWPreBuild(true, vn.prerelease), VWPreBuild(false, vn.build))
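# --- Worked example (not part of the original source) ---
# Pre-release tags get a negative `nonempty` weight in VWPreBuild, so a
# pre-release sorts below the corresponding release (assuming the module is
# loaded):
#   VersionWeight(v"1.2.3-rc1") < VersionWeight(v"1.2.3")   # true
# ---------------------------------------------------------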

Base.zero(::Type{VersionWeight}) = VersionWeight()

Base.typemin(::Type{VersionWeight}) = (x=typemin(Int); y=typemin(VWPreBuild); VersionWeight(x, x, x, y, y))

Base.:(-)(a::VersionWeight, b::VersionWeight) =
    VersionWeight(a.major-b.major, a.minor-b.minor, a.patch-b.patch,
                  a.prerelease-b.prerelease, a.build-b.build)

Base.:(+)(a::VersionWeight, b::VersionWeight) =
    VersionWeight(a.major+b.major, a.minor+b.minor, a.patch+b.patch,
                  a.prerelease+b.prerelease, a.build+b.build)

Base.:(-)(a::VersionWeight) =
    VersionWeight(-a.major, -a.minor, -a.patch,
                  -a.prerelease, -a.build)

function Base.cmp(a::VersionWeight, b::VersionWeight)
    c = cmp(a.major, b.major); c != 0 && return c
    c = cmp(a.minor, b.minor); c != 0 && return c
    c = cmp(a.patch, b.patch); c != 0 && return c
    c = cmp(a.prerelease, b.prerelease); c != 0 && return c
    return cmp(a.build, b.build)
end
Base.isless(a::VersionWeight, b::VersionWeight) = cmp(a,b) < 0
Base.:(==)(a::VersionWeight, b::VersionWeight) = cmp(a,b) == 0

Base.abs(a::VersionWeight) =
    VersionWeight(abs(a.major), abs(a.minor), abs(a.patch),
                  abs(a.prerelease), abs(a.build))

Base.copy(a::VersionWeight) =
    VersionWeight(a.major, a.minor, a.patch,
                  copy(a.prerelease), copy(a.build))

# This isn't nice, but it's for debugging only anyway
function Base.show(io::IO, a::VersionWeight)
    print(io, "(", a.major)
    a == VersionWeight(a.major) && @goto done
    print(io, ".", a.minor)
    a == VersionWeight(a.major, a.minor) && @goto done
    print(io, ".", a.patch)
    a.prerelease ≠ _vwprebuild_zero && print(io, "-", a.prerelease)
    a.build ≠ _vwprebuild_zero && print(io, "+", a.build)
    @label done
    print(io, ")")
end
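# --- Worked example (not part of the original source) ---
# Trailing zero components are suppressed by the early `@goto done` exits,
# so (assuming the module is loaded) the debug output looks like:
#   show(STDOUT, VersionWeight(v"1.0.0"))   # prints "(1)"
#   show(STDOUT, VersionWeight(v"0.6.3"))   # prints "(0.6.3)"
# ---------------------------------------------------------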

end
256
julia-0.6.3/share/julia/base/pkg/types.jl
Normal file
@@ -0,0 +1,256 @@

# This file is a part of Julia. License is MIT: https://julialang.org/license

module Types

export VersionInterval, VersionSet, Requires, Available, Fixed, merge_requires!, satisfies,
       ResolveBacktraceItem, ResolveBacktrace
import Base: show, isempty, in, intersect, union!, union, ==, hash, copy, deepcopy_internal, push!

struct VersionInterval
    lower::VersionNumber
    upper::VersionNumber
end
VersionInterval(lower::VersionNumber) = VersionInterval(lower,typemax(VersionNumber))
VersionInterval() = VersionInterval(typemin(VersionNumber))

show(io::IO, i::VersionInterval) = print(io, "[$(i.lower),$(i.upper))")
isempty(i::VersionInterval) = i.upper <= i.lower
in(v::VersionNumber, i::VersionInterval) = i.lower <= v < i.upper
intersect(a::VersionInterval, b::VersionInterval) = VersionInterval(max(a.lower,b.lower), min(a.upper,b.upper))
==(a::VersionInterval, b::VersionInterval) = a.lower == b.lower && a.upper == b.upper
hash(i::VersionInterval, h::UInt) = hash((i.lower, i.upper), h + (0x0f870a92db508386 % UInt))

function normalize!(ivals::Vector{VersionInterval})
    # VersionSet internal normalization:
    # removes empty intervals and fuses intervals without gaps
    # e.g.:
    #     [0.0.0,1.0.0) ∪ [1.0.0,1.5.0) ∪ [1.6.0,1.6.0) ∪ [2.0.0,∞)
    # becomes:
    #     [0.0.0,1.5.0) ∪ [2.0.0,∞)
    # (still assumes that lower bounds are sorted, and intervals do
    # not overlap)
    l = length(ivals)
    l == 0 && return ivals

    lo, up, k0 = ivals[1].lower, ivals[1].upper, 1
    fusing = false
    for k = 2:l
        lo1, up1 = ivals[k].lower, ivals[k].upper
        if lo1 == up
            up = up1
            fusing = true
            continue
        end
        if lo < up
            # The only purpose of the "fusing" check is to avoid
            # extra allocations
            ivals[k0] = fusing ? VersionInterval(lo, up) : ivals[k-1]
            k0 += 1
        end
        fusing = false
        lo, up = lo1, up1
    end
    if lo < up
        ivals[k0] = fusing ? VersionInterval(lo, up) : ivals[l]
        k0 += 1
    end
    resize!(ivals, k0 - 1)
    return ivals
end

struct VersionSet
    intervals::Vector{VersionInterval}
    VersionSet(intervals::Vector{VersionInterval}) = new(normalize!(intervals))
    # copy is defined inside the struct block to call `new` directly
    # without going through `normalize!`
    Base.copy(vset::VersionSet) = new(copy(vset.intervals))
end
function VersionSet(versions::Vector{VersionNumber})
    intervals = VersionInterval[]
    if isempty(versions)
        push!(intervals, VersionInterval())
    else
        isodd(length(versions)) && push!(versions, typemax(VersionNumber))
        while !isempty(versions)
            push!(intervals, VersionInterval(shift!(versions), shift!(versions)))
        end
    end
    VersionSet(intervals)
end
VersionSet(versions::VersionNumber...) = VersionSet(VersionNumber[versions...])
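# --- Worked example (not part of the original source) ---
# Versions are paired up into half-open intervals; an odd count leaves the
# last interval unbounded:
#   VersionSet(v"0.2", v"0.4")   # one interval: 0.2.0 <= v < 0.4.0
#   VersionSet(v"1.0")           # odd count: upper bound is typemax, i.e. v >= 1.0.0
#   VersionSet()                 # unconstrained: any version is in the set
# ---------------------------------------------------------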

const empty_versionset = VersionSet(VersionInterval[])

# Windows console doesn't like Unicode
const _empty_symbol = @static is_windows() ? "empty" : "∅"
const _union_symbol = @static is_windows() ? " or " : " ∪ "
show(io::IO, s::VersionSet) = isempty(s) ? print(io, _empty_symbol) :
                              join(io, s.intervals, _union_symbol)
isempty(s::VersionSet) = all(isempty, s.intervals)
in(v::VersionNumber, s::VersionSet) = any(i->in(v,i), s.intervals)
function intersect(A::VersionSet, B::VersionSet)
    (isempty(A) || isempty(B)) && return copy(empty_versionset)
    ivals = [intersect(a,b) for a in A.intervals for b in B.intervals]
    sort!(ivals, by=i->i.lower)
    VersionSet(ivals)
end

union(A::VersionSet, B::VersionSet) = union!(copy(A), B)
function union!(A::VersionSet, B::VersionSet)
    A == B && return A
    ivals = A.intervals
    for intB in B.intervals
        lB, uB = intB.lower, intB.upper
        k0 = findfirst(i->(i.upper > lB), ivals)
        if k0 == 0
            push!(ivals, intB)
            continue
        end
        lB = min(lB, ivals[k0].lower)
        for k1 = k0:length(ivals)
            intA = ivals[k1]
            if uB < intA.lower
                splice!(ivals, k0:(k1-1), (VersionInterval(lB, uB),))
                break
            elseif uB ∈ intA || k1 == length(ivals)
                splice!(ivals, k0:k1, (VersionInterval(lB, max(uB, intA.upper)),))
                break
            end
        end
    end
    normalize!(ivals)
    return A
end
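# --- Worked example (not part of the original source) ---
# Overlapping or adjacent intervals are spliced together and then normalized,
# so filling the gap between two intervals merges everything:
#   A = VersionSet(v"1", v"2", v"3", v"4")   # [1.0.0,2.0.0) ∪ [3.0.0,4.0.0)
#   union(A, VersionSet(v"2", v"3"))         # -> [1.0.0,4.0.0)
# ---------------------------------------------------------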

==(A::VersionSet, B::VersionSet) = A.intervals == B.intervals
hash(s::VersionSet, h::UInt) = hash(s.intervals, h + (0x2fd2ca6efa023f44 % UInt))
deepcopy_internal(vs::VersionSet, ::ObjectIdDict) = copy(vs)

const Requires = Dict{String,VersionSet}

function merge_requires!(A::Requires, B::Requires)
    for (pkg,vers) in B
        A[pkg] = haskey(A,pkg) ? intersect(A[pkg],vers) : vers
    end
    return A
end

satisfies(pkg::AbstractString, ver::VersionNumber, reqs::Requires) =
    !haskey(reqs, pkg) || in(ver, reqs[pkg])
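# --- Worked example (not part of the original source) ---
# Merging intersects the version sets per package, and `satisfies` treats
# packages without an entry as unconstrained:
#   A = Requires("Foo" => VersionSet(v"0.2"))           # Foo >= 0.2.0
#   B = Requires("Foo" => VersionSet(v"0.1", v"0.5"))   # 0.1.0 <= Foo < 0.5.0
#   merge_requires!(A, B)        # A["Foo"] is now [0.2.0,0.5.0)
#   satisfies("Bar", v"1.0", A)  # true: A has no entry for Bar
# ---------------------------------------------------------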

struct Available
    sha1::String
    requires::Requires
end

==(a::Available, b::Available) = a.sha1 == b.sha1 && a.requires == b.requires
hash(a::Available, h::UInt) = hash((a.sha1, a.requires), h + (0xbc8ae0de9d11d972 % UInt))
copy(a::Available) = Available(a.sha1, copy(a.requires))

show(io::IO, a::Available) = isempty(a.requires) ?
    print(io, "Available(", repr(a.sha1), ")") :
    print(io, "Available(", repr(a.sha1), ",", a.requires, ")")

struct Fixed
    version::VersionNumber
    requires::Requires
end
Fixed(v::VersionNumber) = Fixed(v,Requires())

==(a::Fixed, b::Fixed) = a.version == b.version && a.requires == b.requires
hash(f::Fixed, h::UInt) = hash((f.version, f.requires), h + (0x68628b809fd417ca % UInt))

show(io::IO, f::Fixed) = isempty(f.requires) ?
    print(io, "Fixed(", repr(f.version), ")") :
    print(io, "Fixed(", repr(f.version), ",", f.requires, ")")

# TODO: Available & Fixed are almost the same – merge them?
# Free could include the same information too, it just isn't
# required by anything that processes these things.


const VersionReq = Union{VersionNumber,VersionSet}
const WhyReq = Tuple{VersionReq,Any}

# This is used to keep track of dependency relations when propagating
# requirements, so as to emit useful information in case of unsatisfiable
# conditions.
# The `versionreq` field keeps track of the remaining allowed versions,
# intersecting all requirements.
# The `why` field is a Vector which keeps track of the requirements. Each
# entry is a Tuple of two elements:
# 1) the first element is the version requirement (can be a single VersionNumber
#    or a VersionSet).
# 2) the second element can be either :fixed (for requirements induced by
#    fixed packages), :required (for requirements induced by explicitly
#    required packages), or a Pair p=>backtrace_item (for requirements induced
#    indirectly, where `p` is the package name and `backtrace_item` is
#    another ResolveBacktraceItem).
mutable struct ResolveBacktraceItem
    versionreq::VersionReq
    why::Vector{WhyReq}
    ResolveBacktraceItem() = new(VersionSet(), WhyReq[])
    ResolveBacktraceItem(reason, versionreq::VersionReq) = new(versionreq, WhyReq[(versionreq,reason)])
end

function push!(ritem::ResolveBacktraceItem, reason, versionset::VersionSet)
    if isa(ritem.versionreq, VersionSet)
        ritem.versionreq = ritem.versionreq ∩ versionset
    elseif ritem.versionreq ∉ versionset
        ritem.versionreq = copy(empty_versionset)
    end
    push!(ritem.why, (versionset,reason))
end

function push!(ritem::ResolveBacktraceItem, reason, version::VersionNumber)
    if isa(ritem.versionreq, VersionSet)
        if version ∈ ritem.versionreq
            ritem.versionreq = version
        else
            ritem.versionreq = copy(empty_versionset)
        end
    elseif ritem.versionreq ≠ version
        ritem.versionreq = copy(empty_versionset)
    end
    push!(ritem.why, (version,reason))
end
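# --- Worked example (not part of the original source) ---
# Each push! records the requirement and narrows `versionreq`:
#   ritem = ResolveBacktraceItem()                       # starts unconstrained
#   push!(ritem, :required, VersionSet(v"0.2", v"0.5"))  # versionreq = [0.2.0,0.5.0)
#   push!(ritem, :fixed, v"0.3")                         # versionreq = v"0.3.0"
#   push!(ritem, :required, VersionSet(v"0.6"))          # incompatible -> empty set
# ---------------------------------------------------------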


show(io::IO, ritem::ResolveBacktraceItem) = _show(io, ritem, "", Set{ResolveBacktraceItem}([ritem]))

function _show(io::IO, ritem::ResolveBacktraceItem, indent::String, seen::Set{ResolveBacktraceItem})
    l = length(ritem.why)
    for (i,(vs,w)) in enumerate(ritem.why)
        print(io, indent, (i==l ? '└' : '├'), '─')
        if w ≡ :fixed
            @assert isa(vs, VersionNumber)
            println(io, "version $vs set by fixed requirement (package is checked out, dirty or pinned)")
        elseif w ≡ :required
            @assert isa(vs, VersionSet)
            println(io, "version range $vs set by an explicit requirement")
        else
            @assert isa(w, Pair{<:AbstractString,ResolveBacktraceItem})
            if isa(vs, VersionNumber)
                print(io, "version $vs ")
            else
                print(io, "version range $vs ")
            end
            print(io, "required by package $(w[1]), ")
            if isa(w[2].versionreq, VersionSet)
                println(io, "whose allowed version range is $(w[2].versionreq):")
            else
                println(io, "whose only allowed version is $(w[2].versionreq):")
            end
            if w[2] ∈ seen
                println(io, (i==l ? "  " : "│ ") * indent, "└─[see above for $(w[1]) backtrace]")
                continue
            end
            push!(seen, w[2])
            _show(io, w[2], (i==l ? "  " : "│ ") * indent, seen)
        end
    end
end

const ResolveBacktrace = Dict{AbstractString,ResolveBacktraceItem}

end # module
64
julia-0.6.3/share/julia/base/pkg/write.jl
Normal file
@@ -0,0 +1,64 @@

# This file is a part of Julia. License is MIT: https://julialang.org/license

module Write

import ...LibGit2, ..Cache, ..Read, ...Pkg.PkgError
importall ...LibGit2

function prefetch(pkg::AbstractString, sha1::AbstractString)
    isempty(Cache.prefetch(pkg, Read.url(pkg), sha1)) && return
    throw(PkgError("$pkg: couldn't find commit $(sha1[1:10])"))
end

function fetch(repo::GitRepo, pkg::AbstractString, sha1::AbstractString)
    cache = Cache.path(pkg)
    LibGit2.fetch(repo, remoteurl=cache, refspecs=["+refs/*:refs/remotes/cache/*"])
    LibGit2.need_update(repo)
    LibGit2.iscommit(sha1, repo) && return
    f = with(GitRepo, cache) do repo
        LibGit2.iscommit(sha1, repo)
    end ? "fetch" : "prefetch"
    url = Read.issue_url(pkg)
    if isempty(url)
        throw(PkgError("$pkg: $f failed to get commit $(sha1[1:10]), please file a bug report with the package author."))
    else
        throw(PkgError("$pkg: $f failed to get commit $(sha1[1:10]), please file an issue at $url"))
    end
end

function checkout(repo::GitRepo, pkg::AbstractString, sha1::AbstractString)
    LibGit2.set_remote_url(repo, Cache.normalize_url(Read.url(pkg)))
    LibGit2.checkout!(repo, sha1)
end

function install(pkg::AbstractString, sha1::AbstractString)
    prefetch(pkg, sha1)
    repo = if isdir(".trash/$pkg")
        mv(".trash/$pkg", "./$pkg") #TODO check for newer version in cache before moving
        GitRepo(pkg)
    else
        LibGit2.clone(Cache.path(pkg), pkg)
    end
    try
        fetch(repo, pkg, sha1)
        checkout(repo, pkg, sha1)
    finally
        close(repo)
    end
end
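# --- Usage sketch (not part of the original source) ---
# Hypothetical call from within a package directory; the package name and
# sha1 below are placeholders:
#   Write.install("Example", "0123456789abcdef0123456789abcdef01234567")
# install() reuses a checkout previously parked in .trash by `remove`
# (defined below) when one exists, otherwise it clones from the local cache,
# then fetches and checks out the requested commit.
# ------------------------------------------------------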

function update(pkg::AbstractString, sha1::AbstractString)
    prefetch(pkg, sha1)
    with(GitRepo, pkg) do repo
        fetch(repo, pkg, sha1)
        checkout(repo, pkg, sha1)
    end
end

function remove(pkg::AbstractString)
    isdir(".trash") || mkdir(".trash")
    ispath(".trash/$pkg") && rm(".trash/$pkg", recursive=true)
    mv(pkg, ".trash/$pkg")
end

end # module