Merge branch 'develop' into features/fastmath

Todd Gamblin 2015-06-06 18:15:52 -07:00
commit 8215c85619
153 changed files with 8979 additions and 1468 deletions

1 .gitignore vendored

@@ -4,6 +4,7 @@
*~
.DS_Store
.idea
/etc/spack/*
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules


@@ -49,25 +49,39 @@ except OSError:
# clean up the scope and start using spack package instead.
del SPACK_FILE, SPACK_PREFIX, SPACK_LIB_PATH
import llnl.util.tty as tty
from llnl.util.tty.color import *
import spack
from spack.error import SpackError
from external import argparse
# Command parsing
parser = argparse.ArgumentParser(
description='Spack: the Supercomputing PACKage Manager.')
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
formatter_class=argparse.RawTextHelpFormatter,
description="Spack: the Supercomputing PACKage Manager." + colorize("""
spec expressions:
PACKAGE [CONSTRAINTS]
CONSTRAINTS:
@c{@version}
@g{%compiler @compiler_version}
@B{+variant}
@r{-variant} or @r{~variant}
@m{=architecture}
[^DEPENDENCY [CONSTRAINTS] ...]"""))
parser.add_argument('-d', '--debug', action='store_true',
help="Write out debug logs during compile")
parser.add_argument('-k', '--insecure', action='store_true',
help="Do not check ssl certificates when downloading archives.")
help="Do not check ssl certificates when downloading.")
parser.add_argument('-m', '--mock', action='store_true',
help="Use mock packages instead of real ones.")
parser.add_argument('-p', '--profile', action='store_true',
help="Profile execution using cProfile.")
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
# each command module implements a parser() function, to which we pass its
# subparser for setup.
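As a rough sketch of the pattern that comment describes (the names
below are hypothetical, not Spack's actual command modules, which live
under spack.cmd):

    import argparse

    # A command module exposes a setup function that fills in its own
    # subparser; the driver only wires the subparsers together.
    def setup_parser(subparser):
        subparser.add_argument('specs', nargs='+',
                               help="specs of packages to install")

    demo = argparse.ArgumentParser(description="demo driver")
    subcommands = demo.add_subparsers(metavar='SUBCOMMAND', dest='command')
    setup_parser(subcommands.add_parser('install', help="build and install"))

    args = demo.parse_args(['install', 'mpileaks'])
    print(args.command, args.specs)   # -> install ['mpileaks']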
@@ -93,6 +107,10 @@ def main():
tty.set_debug(args.debug)
spack.debug = args.debug
if spack.debug:
import spack.util.debug as debug
debug.register_interrupt_handler()
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB


@@ -5,7 +5,7 @@
"""
import os
VERSION = (0, 1, 5)
VERSION = (0, 1, 8)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__


@@ -1,15 +1,23 @@
<ul class="wy-breadcrumbs">
<li><a href="{{ pathto(master_doc) }}">Docs</a> &raquo;</li>
<li><a href="">{{ title }}</a></li>
<li class="wy-breadcrumbs-aside">
{% if display_github %}
<a href="https://github.com/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst" class="icon icon-github"> Edit on GitHub</a>
{% elif display_bitbucket %}
<a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}.rst'" class="icon icon-bitbucket"> Edit on Bitbucket</a>
{% elif show_source and has_source and sourcename %}
<a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a>
{% endif %}
</li>
</ul>
<hr/>
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="{{ pathto(master_doc) }}">Docs</a> &raquo;</li>
{% for doc in parents %}
<li><a href="{{ doc.link|e }}">{{ doc.title }}</a> &raquo;</li>
{% endfor %}
<li>{{ title }}</li>
<li class="wy-breadcrumbs-aside">
{% if pagename != "search" %}
{% if display_github %}
<a href="https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}{{ source_suffix }}" class="fa fa-github"> Edit on GitHub</a>
{% elif display_bitbucket %}
<a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}{{ source_suffix }}" class="fa fa-bitbucket"> Edit on Bitbucket</a>
{% elif show_source and source_url_prefix %}
<a href="{{ source_url_prefix }}{{ pagename }}{{ source_suffix }}">View page source</a>
{% elif show_source and has_source and sourcename %}
<a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a>
{% endif %}
{% endif %}
</li>
</ul>
<hr/>
</div>


@@ -1,28 +1,40 @@
<footer>
{% if next or prev %}
<div class="rst-footer-buttons">
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
{% if next %}
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}" accesskey="n">Next <span class="fa fa-arrow-circle-right"></span></a>
{% endif %}
{% if prev %}
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}" accesskey="p"><span class="fa fa-arrow-circle-left"></span> Previous</a>
{% endif %}
</div>
{% endif %}
<hr/>
<p>
&copy; Copyright 2013-2014,
<a href="https://scalability.llnl.gov/">Lawrence Livermore National Laboratory</a>.
<br/>
Written by Todd Gamblin, <a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>, LLNL-CODE-647188
<br/>
<div role="contentinfo">
<p>
{%- if show_copyright %}
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
{%- else %}
{% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
{%- endif %}
{%- endif %}
{%- if last_updated %}
{% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
<br/>
Written by Todd Gamblin (<a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>) and
many contributors. LLNL-CODE-647188.
{%- if last_updated %}
<br/>
{% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
{%- endif %}
</p>
</div>
{%- if show_sphinx %}
{% trans %}Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>{% endtrans %}.
{%- endif %}
&nbsp;&nbsp;
{% trans %}<br/><a href="https://www.github.com/snide/sphinx_rtd_theme">Sphinx theme</a> provided by <a href="http://readthedocs.org">Read the Docs</a>{% endtrans %}
</p>
</footer>


@@ -12,6 +12,7 @@
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
{{ metatags }}
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block htmltitle %}
<title>{{ title|striptags|e }}{{ titlesuffix }}</title>
@@ -23,40 +24,28 @@
{% endif %}
{# CSS #}
<link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
{# JS #}
{# OPENSEARCH #}
{% if not embedded %}
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT:'{{ url_root }}',
VERSION:'{{ release|e }}',
COLLAPSE_INDEX:false,
FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: {{ has_source|lower }}
};
</script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
{% if use_opensearch %}
<link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/>
{% endif %}
{% endif %}
{# RTD hosts these file themselves, so just load on non RTD builds #}
{# RTD hosts this file, so just load on non RTD builds #}
{% if not READTHEDOCS %}
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
<script type="text/javascript" src="_static/js/theme.js"></script>
{% endif %}
{% for cssfile in css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{% for cssfile in extra_css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{%- block linktags %}
{%- if hasdoc('about') %}
<link rel="author" title="{{ _('About these documents') }}"
@@ -85,40 +74,47 @@
{%- endblock %}
{%- block extrahead %} {% endblock %}
<script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/2.6.2/modernizr.min.js"></script>
<!-- Google Analytics -->
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-55665759-2', 'auto');
ga('send', 'pageview');
</script>
{# Keep modernizr in head - http://modernizr.com/docs/#installing #}
<script src="_static/js/modernizr.min.js"></script>
</head>
<body class="wy-body-for-nav">
<body class="wy-body-for-nav" role="document">
<div class="wy-grid-for-nav">
{# SIDE NAV, TOGGLES ON MOBILE #}
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-nav-search">
<a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}</a>
{% block sidebartitle %}
{% if logo and theme_logo_only %}
<a href="{{ pathto(master_doc) }}">
{% else %}
<a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}
{% endif %}
{% if logo %}
{# Not strictly valid HTML, but it's the only way to display/scale it properly, without weird scripting or heaps of work #}
<img src="{{ pathto('_static/' + logo, 1) }}" class="logo" />
{% endif %}
</a>
{% include "searchbox.html" %}
{% endblock %}
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix">
{% set toctree = toctree(maxdepth=2, collapse=False, includehidden=True) %}
{% if toctree %}
{{ toctree }}
{% else %}
<!-- Local TOC -->
<div class="local-toc">{{ toc }}</div>
{% endif %}
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
{% block menu %}
{% set toctree = toctree(maxdepth=4, collapse=False, includehidden=True) %}
{% if toctree %}
{{ toctree }}
{% else %}
<!-- Local TOC -->
<div class="local-toc">{{ toc }}</div>
{% endif %}
{% endblock %}
</div>
&nbsp;
</nav>
@@ -126,8 +122,8 @@
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
{# MOBILE NAV, TOGGLES SIDE NAV ON TOGGLE #}
<nav class="wy-nav-top">
<i data-toggle="wy-nav-top" class="icon icon-reorder"></i>
<nav class="wy-nav-top" role="navigation" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="{{ pathto(master_doc) }}">{{ project }}</a>
</nav>
@@ -136,7 +132,9 @@
<div class="wy-nav-content">
<div class="rst-content">
{% include "breadcrumbs.html" %}
{% block body %}{% endblock %}
<div role="main" class="document">
{% block body %}{% endblock %}
</div>
{% include "footer.html" %}
</div>
</div>
@@ -145,5 +143,39 @@
</div>
{% include "versions.html" %}
{% if not embedded %}
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT:'{{ url_root }}',
VERSION:'{{ release|e }}',
COLLAPSE_INDEX:false,
FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: {{ has_source|lower }}
};
</script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
{% endif %}
{# RTD hosts this file, so just load on non RTD builds #}
{% if not READTHEDOCS %}
<script type="text/javascript" src="{{ pathto('_static/js/theme.js', 1) }}"></script>
{% endif %}
{# STICKY NAVIGATION #}
{% if theme_sticky_navigation %}
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.StickyNav.enable();
});
</script>
{% endif %}
{%- block footer %} {% endblock %}
</body>
</html>


@@ -10,7 +10,7 @@
{%- extends "layout.html" %}
{% set title = _('Search') %}
{% set script_files = script_files + ['_static/searchtools.js'] %}
{% block extrahead %}
{% block footer %}
<script type="text/javascript">
jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); });
</script>


@@ -1,5 +1,9 @@
<form id ="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
{%- if builder != 'singlehtml' %}
<div role="search">
<form id="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
{%- endif %}


@@ -1 +1,2 @@
.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}
.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:0.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:"\f02d"}.icon-book:before{content:"\f02d"}.fa-caret-down:before{content:"\f0d7"}.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before{content:"\f0d8"}.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before{content:"\f0d9"}.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}
/*# sourceMappingURL=badge_only.css.map */


@@ -0,0 +1,7 @@
{
"version": 3,
"mappings": "CAyDA,SAAY,EACV,qBAAsB,EAAE,UAAW,EAqDrC,QAAS,EARP,IAAK,EAAE,AAAC,EACR,+BAAS,EAEP,MAAO,EAAE,IAAK,EACd,MAAO,EAAE,CAAE,EACb,cAAO,EACL,IAAK,EAAE,GAAI,EC1Gb,SAkBC,EAjBC,UAAW,ECFJ,UAAW,EDGlB,UAAW,EAHqC,KAAM,EAItD,SAAU,EAJsD,KAAM,EAapE,EAAG,EAAE,qCAAwB,EAC7B,EAAG,EAAE,0PAAyE,ECZpF,SAAU,EACR,MAAO,EAAE,WAAY,EACrB,UAAW,EAAE,UAAW,EACxB,SAAU,EAAE,KAAM,EAClB,UAAW,EAAE,KAAM,EACnB,UAAW,EAAE,AAAC,EACd,cAAe,EAAE,MAAO,EAG1B,IAAK,EACH,MAAO,EAAE,WAAY,EACrB,cAAe,EAAE,MAAO,EAIxB,KAAG,EACD,MAAO,EAAE,WAAY,EACvB,sCAAiB,EAGf,IAAK,EAAE,MAAY,EAEvB,KAAM,EACJ,cAAe,EAAE,GAAI,EACrB,UAAW,EAAE,EAAG,EAChB,UAAW,EAAE,KAAM,EAEjB,YAAG,EACD,IAAK,EAAE,IAAI,EACb,oDAAiB,EAGf,aAAc,EAAE,OAAQ,EAG9B,cAAe,EACb,MAAO,EAAE,EAAO,EAElB,gBAAiB,EACf,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,sBAAuB,EACrB,MAAO,EAAE,EAAO,EAElB,kBAAmB,EACjB,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,sBAAuB,EACrB,MAAO,EAAE,EAAO,EAElB,qBAAsB,EACpB,MAAO,EAAE,EAAO,EAElB,uBAAwB,EACtB,MAAO,EAAE,EAAO,ECnElB,YAAa,EACX,OAAQ,EAAE,IAAK,EACf,KAAM,EAAE,AAAC,EACT,GAAI,EAAE,AAAC,EACP,IAAK,EC6E+B,IAAK,ED5EzC,IAAK,ECE+B,MAAyB,EDD7D,SAAU,EAAE,MAAkC,EAC9C,SAAU,EAAE,iBAAiC,EAC7C,UAAW,EEAyB,sDAAM,EFC1C,MAAO,EC+E6B,EAAG,ED9EvC,cAAC,EACC,IAAK,ECqE6B,MAAW,EDpE7C,cAAe,EAAE,GAAI,EACvB,6BAAgB,EACd,MAAO,EAAE,GAAI,EACf,iCAAoB,EAClB,MAAO,EAAE,GAAqB,EAC9B,eAAgB,EAAE,MAAkC,EACpD,MAAO,EAAE,IAAK,EACd,SAAU,EAAE,IAAK,EACjB,QAAS,EAAE,EAAG,EACd,KAAM,EAAE,MAAO,EACf,IAAK,ECiD6B,MAAM,EJgC1C,IAAK,EAAE,AAAC,EACR,iFAAS,EAEP,MAAO,EAAE,IAAK,EACd,MAAO,EAAE,CAAE,EACb,uCAAO,EACL,IAAK,EAAE,GAAI,EGrFX,qCAAG,EACD,IAAK,EClB2B,MAAyB,EDmB3D,0CAAQ,EACN,IAAK,EAAE,GAAI,EACb,4CAAU,EACR,IAAK,EAAE,GAAI,EACb,iDAAiB,EACf,eAAgB,ECQgB,MAAI,EDPpC,IAAK,EC0B2B,GAAM,EDzBxC,wDAAwB,EACtB,eAAgB,ECXgB,MAAO,EDYvC,IAAK,ECzB2B,GAAI,ED0BxC,yCAA8B,EAC5B,MAAO,EAAE,IAAK,EAChB,gCAAmB,EACjB,QAAS,EAAE,EAAG,EACd,MAAO,EAAE,GAAqB,EAC9B,IAAK,ECE6B,GAAwB,EDD1D,MAAO,EAAE,GAAI,EACb,mCAAE,EACA,MAAO,EAAE,IAAK,EACd,KAAM,EAAE,EAAG,EACX,KAAM,EAAE,AAAC,EACT,KAAM,EAAE,KAAM,EACd,MAAO,EAAE,AAAC,EACV,SAAU,EAAE,gBAA6C,EAC3D,mCAAE,EACA,MAAO,EAAE,WAAY,EACrB,KAAM,EAAE,AAAC,EACT,qCAAC,EACC,MAAO,EAAE,WAAY,EACrB,MAAO,EAAE,EAAqB,EAC9B,IAAK,ECjDyB,MAAyB,EDkD7D,sBAAW,EACT,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI,EACZ,IAAK,EAAE,GAAI,EACX,GAAI,EAAE,GAAI,EACV,KAAM,EAAE,GAAI,EACZ,QAAS,ECkByB,IAAK,EDjBvC,iCAAU,EACR,IAAK,EAAE,GAAI,EACb,+BAAQ,EACN,IAAK,EAAE,GAAI,EACb,oDAA+B,EAC7B,SAAU,EAAE,IAAK,EACjB,6DAAQ,EACN,IAAK,EAAE,GAAI,EACb,+DAAU,EACR,IAAK,EAAE,GAAI,EACf,2CAAoB,EAClB,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI,EACZ,UAAW,EAAE,GAAI,EACjB,MAAO,EAAE,IAAuB,EAChC,MAAO,EAAE,IAAK,EACd,SAAU,EAAE,KAAM,EGhDpB,mCAAsB,EHmDxB,YAAa,EACX,IAAK,EAAE,EAAG,EACV,MAAO,EAAE,GAAI,EACb,kBAAO,EACL,MAAO,EAAE,IAAK,EAClB,EAAG,EACD,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI",
"sources": ["../../../bower_components/wyrm/sass/wyrm_core/_mixin.sass","../../../bower_components/bourbon/dist/css3/_font-face.scss","../../../sass/_theme_badge_fa.sass","../../../sass/_theme_badge.sass","../../../bower_components/wyrm/sass/wyrm_core/_wy_variables.sass","../../../sass/_theme_variables.sass","../../../bower_components/neat/app/assets/stylesheets/grid/_media.scss"],
"names": [],
"file": "badge_only.css"
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -280,8 +280,8 @@
<glyph unicode="&#xf113;" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" />
<glyph unicode="&#xf114;" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
<glyph unicode="&#xf115;" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " />
<glyph unicode="&#xf116;" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" />
<glyph unicode="&#xf117;" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" />
<glyph unicode="&#xf116;" horiz-adv-x="1792" />
<glyph unicode="&#xf117;" horiz-adv-x="1792" />
<glyph unicode="&#xf118;" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf119;" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf11a;" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
@@ -310,7 +310,7 @@
<glyph unicode="&#xf133;" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" />
<glyph unicode="&#xf134;" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" />
<glyph unicode="&#xf135;" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" />
<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" />
<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M1745 763l-164 -763h-334l178 832q13 56 -15 88q-27 33 -83 33h-169l-204 -953h-334l204 953h-286l-204 -953h-334l204 953l-153 327h1276q101 0 189.5 -40.5t147.5 -113.5q60 -73 81 -168.5t0 -194.5z" />
<glyph unicode="&#xf137;" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf138;" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf139;" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
@@ -342,7 +342,7 @@
<glyph unicode="&#xf155;" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" />
<glyph unicode="&#xf156;" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf157;" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" />
<glyph unicode="&#xf158;" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf158;" horiz-adv-x="1280" d="M1043 971q0 100 -65 162t-171 62h-320v-448h320q106 0 171 62t65 162zM1280 971q0 -193 -126.5 -315t-326.5 -122h-340v-118h505q14 0 23 -9t9 -23v-128q0 -14 -9 -23t-23 -9h-505v-192q0 -14 -9.5 -23t-22.5 -9h-167q-14 0 -23 9t-9 23v192h-224q-14 0 -23 9t-9 23v128 q0 14 9 23t23 9h224v118h-224q-14 0 -23 9t-9 23v149q0 13 9 22.5t23 9.5h224v629q0 14 9 23t23 9h539q200 0 326.5 -122t126.5 -315z" />
<glyph unicode="&#xf159;" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf15a;" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" />
<glyph unicode="&#xf15b;" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
@@ -390,10 +390,25 @@
<glyph unicode="&#xf188;" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" />
<glyph unicode="&#xf189;" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" />
<glyph unicode="&#xf18a;" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" />
<glyph unicode="&#xf18b;" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" />
<glyph unicode="&#xf18c;" horiz-adv-x="1792" />
<glyph unicode="&#xf18d;" horiz-adv-x="1792" />
<glyph unicode="&#xf18e;" horiz-adv-x="1792" />
<glyph unicode="&#xf18b;" d="M1133 -34q-171 -94 -368 -94q-196 0 -367 94q138 87 235.5 211t131.5 268q35 -144 132.5 -268t235.5 -211zM638 1394v-485q0 -252 -126.5 -459.5t-330.5 -306.5q-181 215 -181 495q0 187 83.5 349.5t229.5 269.5t325 137zM1536 638q0 -280 -181 -495 q-204 99 -330.5 306.5t-126.5 459.5v485q179 -30 325 -137t229.5 -269.5t83.5 -349.5z" />
<glyph unicode="&#xf18c;" horiz-adv-x="1408" d="M1402 433q-32 -80 -76 -138t-91 -88.5t-99 -46.5t-101.5 -14.5t-96.5 8.5t-86.5 22t-69.5 27.5t-46 22.5l-17 10q-113 -228 -289.5 -359.5t-384.5 -132.5q-19 0 -32 13t-13 32t13 31.5t32 12.5q173 1 322.5 107.5t251.5 294.5q-36 -14 -72 -23t-83 -13t-91 2.5t-93 28.5 t-92 59t-84.5 100t-74.5 146q114 47 214 57t167.5 -7.5t124.5 -56.5t88.5 -77t56.5 -82q53 131 79 291q-7 -1 -18 -2.5t-46.5 -2.5t-69.5 0.5t-81.5 10t-88.5 23t-84 42.5t-75 65t-54.5 94.5t-28.5 127.5q70 28 133.5 36.5t112.5 -1t92 -30t73.5 -50t56 -61t42 -63t27.5 -56 t16 -39.5l4 -16q12 122 12 195q-8 6 -21.5 16t-49 44.5t-63.5 71.5t-54 93t-33 112.5t12 127t70 138.5q73 -25 127.5 -61.5t84.5 -76.5t48 -85t20.5 -89t-0.5 -85.5t-13 -76.5t-19 -62t-17 -42l-7 -15q1 -5 1 -50.5t-1 -71.5q3 7 10 18.5t30.5 43t50.5 58t71 55.5t91.5 44.5 t112 14.5t132.5 -24q-2 -78 -21.5 -141.5t-50 -104.5t-69.5 -71.5t-81.5 -45.5t-84.5 -24t-80 -9.5t-67.5 1t-46.5 4.5l-17 3q-23 -147 -73 -283q6 7 18 18.5t49.5 41t77.5 52.5t99.5 42t117.5 20t129 -23.5t137 -77.5z" />
<glyph unicode="&#xf18d;" horiz-adv-x="1280" d="M1259 283v-66q0 -85 -57.5 -144.5t-138.5 -59.5h-57l-260 -269v269h-529q-81 0 -138.5 59.5t-57.5 144.5v66h1238zM1259 609v-255h-1238v255h1238zM1259 937v-255h-1238v255h1238zM1259 1077v-67h-1238v67q0 84 57.5 143.5t138.5 59.5h846q81 0 138.5 -59.5t57.5 -143.5z " />
<glyph unicode="&#xf18e;" d="M1152 640q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192h-352q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h352v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198 t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf190;" d="M1152 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-352v-192q0 -14 -9 -23t-23 -9q-12 0 -24 10l-319 319q-9 9 -9 23t9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h352q13 0 22.5 -9.5t9.5 -22.5zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198 t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf191;" d="M1024 960v-640q0 -26 -19 -45t-45 -19q-20 0 -37 12l-448 320q-27 19 -27 52t27 52l448 320q17 12 37 12q26 0 45 -19t19 -45zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5z M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf192;" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5 t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf193;" horiz-adv-x="1664" d="M1023 349l102 -204q-58 -179 -210 -290t-339 -111q-156 0 -288.5 77.5t-210 210t-77.5 288.5q0 181 104.5 330t274.5 211l17 -131q-122 -54 -195 -165.5t-73 -244.5q0 -185 131.5 -316.5t316.5 -131.5q126 0 232.5 65t165 175.5t49.5 236.5zM1571 249l58 -114l-256 -128 q-13 -7 -29 -7q-40 0 -57 35l-239 477h-472q-24 0 -42.5 16.5t-21.5 40.5l-96 779q-2 16 6 42q14 51 57 82.5t97 31.5q66 0 113 -47t47 -113q0 -69 -52 -117.5t-120 -41.5l37 -289h423v-128h-407l16 -128h455q40 0 57 -35l228 -455z" />
<glyph unicode="&#xf194;" d="M1254 899q16 85 -21 132q-52 65 -187 45q-17 -3 -41 -12.5t-57.5 -30.5t-64.5 -48.5t-59.5 -70t-44.5 -91.5q80 7 113.5 -16t26.5 -99q-5 -52 -52 -143q-43 -78 -71 -99q-44 -32 -87 14q-23 24 -37.5 64.5t-19 73t-10 84t-8.5 71.5q-23 129 -34 164q-12 37 -35.5 69 t-50.5 40q-57 16 -127 -25q-54 -32 -136.5 -106t-122.5 -102v-7q16 -8 25.5 -26t21.5 -20q21 -3 54.5 8.5t58 10.5t41.5 -30q11 -18 18.5 -38.5t15 -48t12.5 -40.5q17 -46 53 -187q36 -146 57 -197q42 -99 103 -125q43 -12 85 -1.5t76 31.5q131 77 250 237 q104 139 172.5 292.5t82.5 226.5zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf195;" horiz-adv-x="1152" d="M1152 704q0 -191 -94.5 -353t-256.5 -256.5t-353 -94.5h-160q-14 0 -23 9t-9 23v611l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26v128q0 23 23 31l233 71v93l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26v128q0 23 23 31l233 71v250q0 14 9 23t23 9h160 q14 0 23 -9t9 -23v-181l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31l-393 -121v-93l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31l-393 -121v-487q188 13 318 151t130 328q0 14 9 23t23 9h160q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf196;" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-352v-352q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v352h-352q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h352v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-352h352q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832 q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf197;" horiz-adv-x="1792" />
<glyph unicode="&#xf198;" horiz-adv-x="1792" />
<glyph unicode="&#xf199;" horiz-adv-x="1792" />
<glyph unicode="&#xf19a;" horiz-adv-x="1792" />
<glyph unicode="&#xf19b;" horiz-adv-x="1792" />
<glyph unicode="&#xf19c;" horiz-adv-x="1792" />
<glyph unicode="&#xf19d;" horiz-adv-x="1792" />
<glyph unicode="&#xf19e;" horiz-adv-x="1792" />
<glyph unicode="&#xf500;" horiz-adv-x="1792" />
</font>
</defs></svg>

Before: 193 KiB | After: 197 KiB

File diff suppressed because one or more lines are too long


@@ -1,16 +1,113 @@
$( document ).ready(function() {
// Shift nav in mobile when clicking the menu.
$("[data-toggle='wy-nav-top']").click(function() {
$("[data-toggle='wy-nav-shift']").toggleClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
});
// Close menu when you click a link.
$(".wy-menu-vertical .current ul li a").click(function() {
$("[data-toggle='wy-nav-shift']").removeClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
});
$("[data-toggle='rst-current-version']").click(function() {
$("[data-toggle='rst-versions']").toggleClass("shift-up");
});
$("table.docutils:not(.field-list").wrap("<div class='wy-table-responsive'></div>");
function toggleCurrent (elem) {
var parent_li = elem.closest('li');
parent_li.siblings('li.current').removeClass('current');
parent_li.siblings().find('li.current').removeClass('current');
parent_li.find('> ul li.current').removeClass('current');
parent_li.toggleClass('current');
}
$(document).ready(function() {
// Shift nav in mobile when clicking the menu.
$(document).on('click', "[data-toggle='wy-nav-top']", function() {
$("[data-toggle='wy-nav-shift']").toggleClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
});
// Nav menu link click operations
$(document).on('click', ".wy-menu-vertical .current ul li a", function() {
var target = $(this);
// Close menu when you click a link.
$("[data-toggle='wy-nav-shift']").removeClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
// Handle dynamic display of l3 and l4 nav lists
toggleCurrent(target);
if (typeof(window.SphinxRtdTheme) != 'undefined') {
window.SphinxRtdTheme.StickyNav.hashChange();
}
});
$(document).on('click', "[data-toggle='rst-current-version']", function() {
$("[data-toggle='rst-versions']").toggleClass("shift-up");
});
// Make tables responsive
$("table.docutils:not(.field-list)").wrap("<div class='wy-table-responsive'></div>");
// Add expand links to all parents of nested ul
$('.wy-menu-vertical ul').siblings('a').each(function () {
var link = $(this);
expand = $('<span class="toctree-expand"></span>');
expand.on('click', function (ev) {
toggleCurrent(link);
ev.stopPropagation();
return false;
});
link.prepend(expand);
});
});
// Sphinx theme state
window.SphinxRtdTheme = (function (jquery) {
var stickyNav = (function () {
var navBar,
win,
winScroll = false,
linkScroll = false,
winPosition = 0,
enable = function () {
init();
reset();
win.on('hashchange', reset);
// Set scrolling
win.on('scroll', function () {
if (!linkScroll) {
winScroll = true;
}
});
setInterval(function () {
if (winScroll) {
winScroll = false;
var newWinPosition = win.scrollTop(),
navPosition = navBar.scrollTop(),
newNavPosition = navPosition + (newWinPosition - winPosition);
navBar.scrollTop(newNavPosition);
winPosition = newWinPosition;
}
}, 25);
},
init = function () {
navBar = jquery('nav.wy-nav-side:first');
win = jquery(window);
},
reset = function () {
// Get anchor from URL and open up nested nav
var anchor = encodeURI(window.location.hash);
if (anchor) {
try {
var link = $('.wy-menu-vertical')
.find('[href="' + anchor + '"]');
$('.wy-menu-vertical li.toctree-l1 li.current')
.removeClass('current');
link.closest('li.toctree-l2').addClass('current');
link.closest('li.toctree-l3').addClass('current');
link.closest('li.toctree-l4').addClass('current');
}
catch (err) {
console.log("Error expanding nav for anchor", err);
}
}
},
hashChange = function () {
linkScroll = true;
win.one('hashchange', function () {
linkScroll = false;
});
};
jquery(init);
return {
enable: enable,
hashChange: hashChange
};
}());
return {
StickyNav: stickyNav
};
}($));


@@ -5,3 +5,5 @@ stylesheet = css/theme.css
[options]
typekit_id = hiw1hhg
analytics_id =
sticky_navigation = False
logo_only =


@@ -1,10 +1,10 @@
{% if READTHEDOCS %}
{# Add rst-badge after rst-versions for small badge style. #}
<div class="rst-versions" data-toggle="rst-versions">
<div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions">
<span class="rst-current-version" data-toggle="rst-current-version">
<span class="icon icon-book"> Read the Docs</span>
v: {{ current_version }}
<span class="icon icon-caret-down"></span>
<span class="fa fa-book"> Read the Docs</span>
v: {{ current_version }}
<span class="fa fa-caret-down"></span>
</span>
<div class="rst-other-versions">
<dl>


@@ -3,18 +3,16 @@
Basic usage
=====================
Spack is implemented as a single command (``spack``) with many
*subcommands*. Only a small subset of commands is needed for typical
usage.
The ``spack`` command has many *subcommands*. You'll only need a
small subset of them for typical usage.
Listing available packages
------------------------------
The first thing you likely want to do with spack is to install some
software. Before that, you need to know what's available. You can
see available package names either using the :ref:`package-list`, or
using the commands below.
To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
:ref:`package-list` webpage, or using the ``spack list`` command.
.. _spack-list:
@@ -43,54 +41,55 @@ To get more information on a particular package from `spack list`, use
.. command-output:: spack info mpich
Most of the information is self-explanatory. *Safe versions* are
versions that Spack has a checksum for, and Spack will use the
checksum to ensure they downloaded without any errors or malicious
attacks. :ref:`Dependencies <sec-specs>` and :ref:`virtual
dependencies <sec-virtual-dependencies>`, are described in more detail
later.
Most of the information is self-explanatory. The *safe versions* are
versions that Spack knows the checksum for, and it will use the
checksum to verify that these versions download without errors or
viruses.
:ref:`Dependencies <sec-specs>` and :ref:`virtual dependencies
<sec-virtual-dependencies>` are described in more detail later.
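As a minimal sketch of what "using the checksum to verify" means (MD5
is an assumption for illustration here, not necessarily the digest
Spack uses):

.. code-block:: python

   import hashlib

   def verify(path, expected_md5):
       # Compare the downloaded archive's digest to the known-good value.
       with open(path, 'rb') as f:
           return hashlib.md5(f.read()).hexdigest() == expected_md5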
.. _spack-versions:
``spack versions``
~~~~~~~~~~~~~~~~~~~~~~~~
To see *more* available versions of a package, run ``spack versions``,
for example:
To see *more* available versions of a package, run ``spack versions``.
For example:
.. command-output:: spack versions libelf
There are two sections in the output. *Safe versions* are ones that
have already been checksummed. Spack goes a step further, though, and
also shows you what versions are available out on the web---these are
*remote versions*. Spack gets this information by scraping it
directly from web pages. Depending on the package, Spack may or may
not be able to find any remote versions.
There are two sections in the output. *Safe versions* are versions
for which Spack has a checksum on file. It can verify that these
versions are downloaded correctly.
In many cases, Spack can also show you what versions are available out
on the web---these are *remote versions*. Spack gets this information
by scraping it directly from package web pages. Depending on the
package and how its releases are organized, Spack may or may not be
able to find remote versions.
Installing and uninstalling
------------------------------
Now that you know how to list available packages and versions, you're
ready to start installing things.
.. _spack-install:
``spack install``
~~~~~~~~~~~~~~~~~~~~~
``spack install`` will install any package shown by ``spack list``.
To install the latest version of a package, along with all of its
dependencies, simply give it a package name:
For example, to install the latest version of the ``mpileaks``
package, you might type this:
.. code-block:: sh
$ spack install mpileaks
If `mpileaks` depends on other packages, Spack will install those
first. It then fetches the tarball for ``mpileaks``, expands it,
verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_HOME/opt``. You'll see
If ``mpileaks`` depends on other packages, Spack will install the
dependencies first. It then fetches the ``mpileaks`` tarball, expands
it, verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_ROOT/opt``. You'll see
a number of messages from Spack, a lot of build output, and a message
that the package is installed:
@@ -139,11 +138,11 @@ an installation. Spack is unique in that it can also configure the
configurations of the same version of a package, one built with boost
1.39.0, and the other version built with version 1.43.0, can coexist.
This can all be done on the command line using special syntax. Spack
calls the descriptor used to refer to a particular package
configuration a **spec**. In the command lines above, both
``mpileaks`` and ``mpileaks@3.0.4`` are specs. Specs are described in
detail in :ref:`sec-specs`.
This can all be done on the command line using the *spec* syntax.
Spack calls the descriptor used to refer to a particular package
configuration a **spec**. In the commands above, ``mpileaks`` and
``mpileaks@3.0.4`` are both specs. We'll talk more about how you can
use them to customize an installation in :ref:`sec-specs`.
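As a hedged sketch of working with specs programmatically (``Spec``
lives in ``spack.spec``; treat the exact constructor behavior as an
assumption here):

.. code-block:: python

   from spack.spec import Spec

   # Parse a spec string using the constraint syntax summarized in the
   # command-line help: @version, %compiler, +variant, ^dependency.
   s = Spec('mpileaks@3.0.4%gcc+debug^mpich')
   print(s.name)   # -> mpileaks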
.. _spack-uninstall:


@@ -87,14 +87,14 @@
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Spack'
copyright = u'2013-2014, Lawrence Livermore National Laboratory'
copyright = u'2013-2015, Lawrence Livermore National Laboratory.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -149,7 +149,7 @@
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = []
#html_theme_options = [('show_copyright', False)]
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes"]
@@ -203,7 +203,7 @@
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
#html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True


@@ -12,20 +12,30 @@ Getting spack is easy. You can clone it from the `github repository
$ git clone https://github.com/scalability-llnl/spack.git
This will create a directory called ``spack``. We'll assume that the
full path to this directory is in some environment called
``SPACK_HOME``. Add ``$SPACK_HOME/bin`` to your path and you're ready
to go:
full path to this directory is in the ``SPACK_ROOT`` environment
variable. Add ``$SPACK_ROOT/bin`` to your path and you're ready to
go:
.. code-block:: sh
$ export PATH=spack/bin:$PATH
$ export PATH=$SPACK_ROOT/bin:$PATH
$ spack install libelf
In general, most of your interactions with Spack will be through the
``spack`` command.
For a richer experience, use Spack's `shell support
<http://scalability-llnl.github.io/spack/basic_usage.html#environment-modules>`_:
.. code-block:: sh
# For bash users
$ . $SPACK_ROOT/share/spack/setup-env.sh
# For tcsh or csh users (note you must set SPACK_ROOT)
$ setenv SPACK_ROOT /path/to/spack
$ source $SPACK_ROOT/share/spack/setup-env.csh
This automatically adds Spack to your ``PATH``.
Installation
--------------------
You don't need to install Spack; it's ready to run as soon as you
@ -39,6 +49,7 @@ functionality. To install spack in a new directory, simply type:
$ spack bootstrap /my/favorite/prefix
This will install a new spack script in /my/favorite/prefix/bin, which
you can use just like you would the regular spack script. Each copy
of spack installs packages into its own ``$PREFIX/opt`` directory.
This will install a new spack script in ``/my/favorite/prefix/bin``,
which you can use just like you would the regular spack script. Each
copy of spack installs packages into its own ``$PREFIX/opt``
directory.


@ -18,8 +18,8 @@ configurations can coexist on the same system.
Most importantly, Spack is *simple*. It offers a simple *spec* syntax
so that users can specify versions and configuration options
concisely. Spack is also simple for package authors: package files
are written in pure Python, and specs allow package authors to write a
single build script for many different builds of the same package.
are written in pure Python, and specs allow package authors to
maintain a single file for many different builds of the same package.
See the :doc:`features` for examples and highlights.

10 lib/spack/env/cc vendored

@ -86,19 +86,19 @@ done
#
command=$(basename "$0")
case "$command" in
cc|gcc|c89|c99|clang)
cc|gcc|c89|c99|clang|xlc)
command="$SPACK_CC"
language="C"
;;
c++|CC|g++|clang++)
c++|CC|g++|clang++|xlC)
command="$SPACK_CXX"
language="C++"
;;
f77)
f77|xlf)
command="$SPACK_F77"
language="Fortran 77"
;;
fc|f90|f95)
fc|f90|f95|xlf90)
command="$SPACK_FC"
language="Fortran 90"
;;
@ -262,7 +262,7 @@ done
# Include all -L's and prefix/whatever dirs in rpath
for dir in "${libraries[@]}"; do
[ "$dir" != "." ] && rpaths+=("$dir")
[[ $dir = $SPACK_INSTALL* ]] && rpaths+=("$dir")
done
rpaths+=("$SPACK_PREFIX/lib")
rpaths+=("$SPACK_PREFIX/lib64")


@ -28,6 +28,11 @@
So far:
argparse: We include our own version to be Python 2.6 compatible.
pyqver2: External script to query required python version of python source code.
Used for ensuring 2.6 compatibility.
pyqver2: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
functools: Used for implementation of total_ordering.
yaml: Used for config files.
"""

19 lib/spack/external/yaml/LICENSE vendored Normal file

@ -0,0 +1,19 @@
Copyright (c) 2006 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

35 lib/spack/external/yaml/README vendored Normal file

@ -0,0 +1,35 @@
PyYAML - The next generation YAML parser and emitter for Python.
To install, type 'python setup.py install'.
By default, the setup.py script checks whether LibYAML is installed
and if so, builds and installs LibYAML bindings. To skip the check
and force installation of LibYAML bindings, use the option '--with-libyaml':
'python setup.py --with-libyaml install'. To disable the check and
skip building and installing LibYAML bindings, use '--without-libyaml':
'python setup.py --without-libyaml install'.
When LibYAML bindings are installed, you may use fast LibYAML-based
parser and emitter as follows:
>>> yaml.load(stream, Loader=yaml.CLoader)
>>> yaml.dump(data, Dumper=yaml.CDumper)
PyYAML includes a comprehensive test suite. To run the tests,
type 'python setup.py test'.
For more information, check the PyYAML homepage:
'http://pyyaml.org/wiki/PyYAML'.
For PyYAML tutorial and reference, see:
'http://pyyaml.org/wiki/PyYAMLDocumentation'.
Post your questions and opinions to the YAML-Core mailing list:
'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
Submit bug reports and feature requests to the PyYAML bug tracker:
'http://pyyaml.org/newticket?component=pyyaml'.
PyYAML is written by Kirill Simonov <xi@resolvent.net>. It is released
under the MIT license. See the file LICENSE for more details.

315 lib/spack/external/yaml/__init__.py vendored Normal file

@ -0,0 +1,315 @@
from error import *
from tokens import *
from events import *
from nodes import *
from loader import *
from dumper import *
__version__ = '3.10'
try:
from cyaml import *
__with_libyaml__ = True
except ImportError:
__with_libyaml__ = False
def scan(stream, Loader=Loader):
"""
Scan a YAML stream and produce scanning tokens.
"""
loader = Loader(stream)
try:
while loader.check_token():
yield loader.get_token()
finally:
loader.dispose()
def parse(stream, Loader=Loader):
"""
Parse a YAML stream and produce parsing events.
"""
loader = Loader(stream)
try:
while loader.check_event():
yield loader.get_event()
finally:
loader.dispose()
def compose(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding representation tree.
"""
loader = Loader(stream)
try:
return loader.get_single_node()
finally:
loader.dispose()
def compose_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding representation trees.
"""
loader = Loader(stream)
try:
while loader.check_node():
yield loader.get_node()
finally:
loader.dispose()
def load(stream, Loader=Loader):
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
"""
loader = Loader(stream)
try:
return loader.get_single_data()
finally:
loader.dispose()
def load_all(stream, Loader=Loader):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
"""
loader = Loader(stream)
try:
while loader.check_data():
yield loader.get_data()
finally:
loader.dispose()
def safe_load(stream):
"""
Parse the first YAML document in a stream
and produce the corresponding Python object.
Resolve only basic YAML tags.
"""
return load(stream, SafeLoader)
def safe_load_all(stream):
"""
Parse all YAML documents in a stream
and produce corresponding Python objects.
Resolve only basic YAML tags.
"""
return load_all(stream, SafeLoader)
def emit(events, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None):
"""
Emit YAML parsing events into a stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
from StringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
try:
for event in events:
dumper.emit(event)
finally:
dumper.dispose()
if getvalue:
return getvalue()
def serialize_all(nodes, stream=None, Dumper=Dumper,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding='utf-8', explicit_start=None, explicit_end=None,
version=None, tags=None):
"""
Serialize a sequence of representation trees into a YAML stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
if encoding is None:
from StringIO import StringIO
else:
from cStringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break,
encoding=encoding, version=version, tags=tags,
explicit_start=explicit_start, explicit_end=explicit_end)
try:
dumper.open()
for node in nodes:
dumper.serialize(node)
dumper.close()
finally:
dumper.dispose()
if getvalue:
return getvalue()
def serialize(node, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a representation tree into a YAML stream.
If stream is None, return the produced string instead.
"""
return serialize_all([node], stream, Dumper=Dumper, **kwds)
def dump_all(documents, stream=None, Dumper=Dumper,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding='utf-8', explicit_start=None, explicit_end=None,
version=None, tags=None):
"""
Serialize a sequence of Python objects into a YAML stream.
If stream is None, return the produced string instead.
"""
getvalue = None
if stream is None:
if encoding is None:
from StringIO import StringIO
else:
from cStringIO import StringIO
stream = StringIO()
getvalue = stream.getvalue
dumper = Dumper(stream, default_style=default_style,
default_flow_style=default_flow_style,
canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break,
encoding=encoding, version=version, tags=tags,
explicit_start=explicit_start, explicit_end=explicit_end)
try:
dumper.open()
for data in documents:
dumper.represent(data)
dumper.close()
finally:
dumper.dispose()
if getvalue:
return getvalue()
def dump(data, stream=None, Dumper=Dumper, **kwds):
"""
Serialize a Python object into a YAML stream.
If stream is None, return the produced string instead.
"""
return dump_all([data], stream, Dumper=Dumper, **kwds)
def safe_dump_all(documents, stream=None, **kwds):
"""
Serialize a sequence of Python objects into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
def safe_dump(data, stream=None, **kwds):
"""
Serialize a Python object into a YAML stream.
Produce only basic YAML tags.
If stream is None, return the produced string instead.
"""
return dump_all([data], stream, Dumper=SafeDumper, **kwds)
def add_implicit_resolver(tag, regexp, first=None,
Loader=Loader, Dumper=Dumper):
"""
Add an implicit scalar detector.
If an implicit scalar value matches the given regexp,
the corresponding tag is assigned to the scalar.
first is a sequence of possible initial characters or None.
"""
Loader.add_implicit_resolver(tag, regexp, first)
Dumper.add_implicit_resolver(tag, regexp, first)
def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
"""
Add a path based resolver for the given tag.
A path is a list of keys that forms a path
to a node in the representation tree.
Keys can be string values, integers, or None.
"""
Loader.add_path_resolver(tag, path, kind)
Dumper.add_path_resolver(tag, path, kind)
def add_constructor(tag, constructor, Loader=Loader):
"""
Add a constructor for the given tag.
Constructor is a function that accepts a Loader instance
and a node object and produces the corresponding Python object.
"""
Loader.add_constructor(tag, constructor)
def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
"""
Add a multi-constructor for the given tag prefix.
Multi-constructor is called for a node if its tag starts with tag_prefix.
Multi-constructor accepts a Loader instance, a tag suffix,
and a node object and produces the corresponding Python object.
"""
Loader.add_multi_constructor(tag_prefix, multi_constructor)
def add_representer(data_type, representer, Dumper=Dumper):
"""
Add a representer for the given type.
Representer is a function accepting a Dumper instance
and an instance of the given data type
and producing the corresponding representation node.
"""
Dumper.add_representer(data_type, representer)
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
"""
Add a representer for the given type.
Multi-representer is a function accepting a Dumper instance
and an instance of the given data type or subtype
and producing the corresponding representation node.
"""
Dumper.add_multi_representer(data_type, multi_representer)
class YAMLObjectMetaclass(type):
"""
The metaclass for YAMLObject.
"""
def __init__(cls, name, bases, kwds):
super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
cls.yaml_dumper.add_representer(cls, cls.to_yaml)
class YAMLObject(object):
"""
An object that can dump itself to a YAML stream
and load itself from a YAML stream.
"""
__metaclass__ = YAMLObjectMetaclass
__slots__ = () # no direct instantiation, so allow immutable subclasses
yaml_loader = Loader
yaml_dumper = Dumper
yaml_tag = None
yaml_flow_style = None
def from_yaml(cls, loader, node):
"""
Convert a representation node to a Python object.
"""
return loader.construct_yaml_object(node, cls)
from_yaml = classmethod(from_yaml)
def to_yaml(cls, dumper, data):
"""
Convert a Python object to a representation node.
"""
return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
flow_style=cls.yaml_flow_style)
to_yaml = classmethod(to_yaml)
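A short usage sketch for the module-level API defined above, assuming the
package is importable as ``yaml``:

    import yaml

    # safe_load/safe_dump resolve only basic YAML tags.
    data = yaml.safe_load("name: mpileaks\nversions: ['3.0.4', 3.1]")
    print(data['versions'][1])          # 3.1
    print(yaml.safe_dump(data))         # returns a str when stream is None

    # Lower-level entry points expose each pipeline stage directly.
    for token in yaml.scan("a: 1"):
        print(token.__class__.__name__) # StreamStartToken ... StreamEndToken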

139 lib/spack/external/yaml/composer.py vendored Normal file

@ -0,0 +1,139 @@
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
from events import *
from nodes import *
class ComposerError(MarkedYAMLError):
pass
class Composer(object):
def __init__(self):
self.anchors = {}
def check_node(self):
# Drop the STREAM-START event.
if self.check_event(StreamStartEvent):
self.get_event()
# Check whether more documents are available.
return not self.check_event(StreamEndEvent)
def get_node(self):
# Get the root node of the next document.
if not self.check_event(StreamEndEvent):
return self.compose_document()
def get_single_node(self):
# Drop the STREAM-START event.
self.get_event()
# Compose a document if the stream is not empty.
document = None
if not self.check_event(StreamEndEvent):
document = self.compose_document()
# Ensure that the stream contains no more documents.
if not self.check_event(StreamEndEvent):
event = self.get_event()
raise ComposerError("expected a single document in the stream",
document.start_mark, "but found another document",
event.start_mark)
# Drop the STREAM-END event.
self.get_event()
return document
def compose_document(self):
# Drop the DOCUMENT-START event.
self.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.get_event()
self.anchors = {}
return node
def compose_node(self, parent, index):
if self.check_event(AliasEvent):
event = self.get_event()
anchor = event.anchor
if anchor not in self.anchors:
raise ComposerError(None, None, "found undefined alias %r"
% anchor.encode('utf-8'), event.start_mark)
return self.anchors[anchor]
event = self.peek_event()
anchor = event.anchor
if anchor is not None:
if anchor in self.anchors:
raise ComposerError("found duplicate anchor %r; first occurence"
% anchor.encode('utf-8'), self.anchors[anchor].start_mark,
"second occurence", event.start_mark)
self.descend_resolver(parent, index)
if self.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
elif self.check_event(SequenceStartEvent):
node = self.compose_sequence_node(anchor)
elif self.check_event(MappingStartEvent):
node = self.compose_mapping_node(anchor)
self.ascend_resolver()
return node
def compose_scalar_node(self, anchor):
event = self.get_event()
tag = event.tag
if tag is None or tag == u'!':
tag = self.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(tag, event.value,
event.start_mark, event.end_mark, style=event.style)
if anchor is not None:
self.anchors[anchor] = node
return node
def compose_sequence_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
index = 0
while not self.check_event(SequenceEndEvent):
node.value.append(self.compose_node(node, index))
index += 1
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
def compose_mapping_node(self, anchor):
start_event = self.get_event()
tag = start_event.tag
if tag is None or tag == u'!':
tag = self.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(tag, [],
start_event.start_mark, None,
flow_style=start_event.flow_style)
if anchor is not None:
self.anchors[anchor] = node
while not self.check_event(MappingEndEvent):
#key_event = self.peek_event()
item_key = self.compose_node(node, None)
#if item_key in node.value:
# raise ComposerError("while composing a mapping", start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
#node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.get_event()
node.end_mark = end_event.end_mark
return node
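The anchor bookkeeping above is easiest to see through ``yaml.compose``,
which returns this node tree; a sketch, assuming the package is importable
as ``yaml``:

    import yaml

    node = yaml.compose("first: &x {a: 1}\nsecond: *x")
    (k1, v1), (k2, v2) = node.value
    # The alias resolves to the very same node object, via self.anchors above.
    print(v1 is v2)                     # True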

678 lib/spack/external/yaml/constructor.py vendored Normal file

@ -0,0 +1,678 @@
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError']
from error import *
from nodes import *
import datetime
import binascii, re, sys, types
class ConstructorError(MarkedYAMLError):
pass
class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
self.state_generators = []
self.deep_construct = False
def check_data(self):
# Check whether more documents are available.
return self.check_node()
def get_data(self):
# Construct and return the next document.
if self.check_node():
return self.construct_document(self.get_node())
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node, deep=False):
if node in self.constructed_objects:
return self.constructed_objects[node]
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.recursive_objects:
raise ConstructorError(None, None,
"found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
if node.tag in self.yaml_constructors:
constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = node.tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
if self.deep_construct:
for dummy in generator:
pass
else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
return mapping
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
def add_constructor(cls, tag, constructor):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == u'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node):
merge = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == u'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
node.start_mark,
"expected a mapping for merging, but found %s"
% subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
"expected a mapping or list of mappings for merging, but found %s"
% value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if merge:
node.value = merge + node.value
def construct_mapping(self, node, deep=False):
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
bool_values = {
u'yes': True,
u'no': False,
u'true': True,
u'false': False,
u'on': True,
u'off': False,
}
def construct_yaml_bool(self, node):
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '0':
return 0
elif value.startswith('0b'):
return sign*int(value[2:], 2)
elif value.startswith('0x'):
return sign*int(value[2:], 16)
elif value[0] == '0':
return sign*int(value, 8)
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*int(value)
inf_value = 1e300
while inf_value != inf_value*inf_value:
inf_value *= inf_value
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
return sign*self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
digits = [float(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
try:
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
"failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
(?P<hour>[0-9][0-9]?)
:(?P<minute>[0-9][0-9])
:(?P<second>[0-9][0-9])
(?:\.(?P<fraction>[0-9]*))?
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
def construct_yaml_timestamp(self, node):
value = self.construct_scalar(node)
match = self.timestamp_regexp.match(node.value)
values = match.groupdict()
year = int(values['year'])
month = int(values['month'])
day = int(values['day'])
if not values['hour']:
return datetime.date(year, month, day)
hour = int(values['hour'])
minute = int(values['minute'])
second = int(values['second'])
fraction = 0
if values['fraction']:
fraction = values['fraction'][:6]
while len(fraction) < 6:
fraction += '0'
fraction = int(fraction)
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
tz_minute = int(values['tz_minute'] or 0)
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
if delta:
data -= delta
return data
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
omap = []
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
omap.append((key, value))
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node):
data = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
return value.encode('ascii')
except UnicodeEncodeError:
return value
def construct_yaml_seq(self, node):
data = []
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = {}
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node, cls):
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node):
raise ConstructorError(None, None,
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
node.start_mark)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:null',
SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:bool',
SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:int',
SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:float',
SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:binary',
SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:timestamp',
SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:omap',
SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:pairs',
SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:set',
SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:str',
SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:seq',
SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:map',
SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
def construct_python_unicode(self, node):
return self.construct_scalar(node)
def construct_python_long(self, node):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
"expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
"expected non-empty name appended to the tag", mark)
if u'.' in name:
module_name, object_name = name.rsplit('.', 1)
else:
module_name = '__builtin__'
object_name = name
try:
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_module(suffix, node.start_mark)
class classobj: pass
def make_python_instance(self, suffix, node,
args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
return instance
elif newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance, state):
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
setattr(instance, key, value)
def construct_python_object(self, suffix, node):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if state:
self.set_python_instance_state(instance, state)
if listitems:
instance.extend(listitems)
if dictitems:
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/bool',
Constructor.construct_yaml_bool)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/str',
Constructor.construct_python_str)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/int',
Constructor.construct_yaml_int)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/long',
Constructor.construct_python_long)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/float',
Constructor.construct_yaml_float)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/list',
Constructor.construct_yaml_seq)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/dict',
Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)
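The SafeConstructor above is what turns tagged scalars into Python values;
a few examples of the conversions implemented here, assuming the package is
importable as ``yaml``:

    import yaml

    print(yaml.safe_load("0x1A"))       # 26 (hexadecimal integer)
    print(yaml.safe_load("1:30"))       # 90 (base-60 integer: 1*60 + 30)
    print(yaml.safe_load("yes"))        # True (see bool_values above)
    print(yaml.safe_load("2015-06-06")) # datetime.date(2015, 6, 6)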

62 lib/spack/external/yaml/dumper.py vendored Normal file

@ -0,0 +1,62 @@
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
from emitter import *
from serializer import *
from representer import *
from resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
SafeRepresenter.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=None,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style)
Resolver.__init__(self)
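The three dumpers differ only in which representer they mix in; a sketch of
the practical difference, assuming the package is importable as ``yaml``:

    import yaml

    print(yaml.safe_dump({'a': [1, 2]}))  # basic tags only

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    # The full Dumper can represent arbitrary objects with python/ tags;
    # SafeDumper would raise a RepresenterError for this object instead.
    print(yaml.dump(Point(1, 2)))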

1140 lib/spack/external/yaml/emitter.py vendored Normal file

File diff suppressed because it is too large

75 lib/spack/external/yaml/error.py vendored Normal file

@ -0,0 +1,75 @@
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
class Mark(object):
def __init__(self, name, index, line, column, buffer, pointer):
self.name = name
self.index = index
self.line = line
self.column = column
self.buffer = buffer
self.pointer = pointer
def get_snippet(self, indent=4, max_length=75):
if self.buffer is None:
return None
head = ''
start = self.pointer
while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029':
start -= 1
if self.pointer-start > max_length/2-1:
head = ' ... '
start += 5
break
tail = ''
end = self.pointer
while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029':
end += 1
if end-self.pointer > max_length/2-1:
tail = ' ... '
end -= 5
break
snippet = self.buffer[start:end].encode('utf-8')
return ' '*indent + head + snippet + tail + '\n' \
+ ' '*(indent+self.pointer-start+len(head)) + '^'
def __str__(self):
snippet = self.get_snippet()
where = " in \"%s\", line %d, column %d" \
% (self.name, self.line+1, self.column+1)
if snippet is not None:
where += ":\n"+snippet
return where
class YAMLError(Exception):
pass
class MarkedYAMLError(YAMLError):
def __init__(self, context=None, context_mark=None,
problem=None, problem_mark=None, note=None):
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
def __str__(self):
lines = []
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None \
and (self.problem is None or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
if self.note is not None:
lines.append(self.note)
return '\n'.join(lines)
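These marks are what make YAML errors point at the offending input; a
sketch of catching one, assuming the package is importable as ``yaml``:

    import yaml

    try:
        yaml.safe_load("a: [1, 2")           # unterminated flow sequence
    except yaml.MarkedYAMLError, exc:        # Python 2 syntax, as above
        print(exc.problem)                   # e.g. "expected ',' or ']', ..."
        print(exc.problem_mark.line + 1)     # 1-based line of the failure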

86 lib/spack/external/yaml/events.py vendored Normal file

@ -0,0 +1,86 @@
# Abstract classes.
class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
self.anchor = anchor
self.start_mark = start_mark
self.end_mark = end_mark
class CollectionStartEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
flow_style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class CollectionEndEvent(Event):
pass
# Implementations.
class StreamStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None, encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndEvent(Event):
pass
class DocumentStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None, version=None, tags=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
class AliasEvent(NodeEvent):
pass
class ScalarEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, value,
start_mark=None, end_mark=None, style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class SequenceStartEvent(CollectionStartEvent):
pass
class SequenceEndEvent(CollectionEndEvent):
pass
class MappingStartEvent(CollectionStartEvent):
pass
class MappingEndEvent(CollectionEndEvent):
pass
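A sketch of the event stream these classes model, via ``yaml.parse``
(assuming the package is importable as ``yaml``):

    import yaml

    for event in yaml.parse("a: 1"):
        print(event.__class__.__name__)
    # StreamStartEvent, DocumentStartEvent, MappingStartEvent,
    # ScalarEvent (key), ScalarEvent (value), MappingEndEvent,
    # DocumentEndEvent, StreamEndEvent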

40 lib/spack/external/yaml/loader.py vendored Normal file

@ -0,0 +1,40 @@
__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
from reader import *
from scanner import *
from parser import *
from composer import *
from constructor import *
from resolver import *
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
BaseConstructor.__init__(self)
BaseResolver.__init__(self)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
SafeConstructor.__init__(self)
Resolver.__init__(self)
class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
def __init__(self, stream):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
Constructor.__init__(self)
Resolver.__init__(self)
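Because the loaders are assembled from mixins, extending one is just a
subclass away; a sketch that registers a custom tag on a private
``SafeLoader`` subclass (the ``!point`` tag and callback are hypothetical):

    import yaml

    class MyLoader(yaml.SafeLoader):
        pass

    def construct_point(loader, node):
        x, y = loader.construct_sequence(node)
        return (x, y)

    # add_constructor copies the constructor table, so yaml.SafeLoader
    # itself is left untouched.
    MyLoader.add_constructor(u'!point', construct_point)

    print(yaml.load("p: !point [1, 2]", Loader=MyLoader))  # {'p': (1, 2)}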

49 lib/spack/external/yaml/nodes.py vendored Normal file

@ -0,0 +1,49 @@
class Node(object):
def __init__(self, tag, value, start_mark, end_mark):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
value = self.value
#if isinstance(value, list):
# if len(value) == 0:
# value = '<empty>'
# elif len(value) == 1:
# value = '<1 item>'
# else:
# value = '<%d items>' % len(value)
#else:
# if len(value) > 75:
# value = repr(value[:70]+u' ... ')
# else:
# value = repr(value)
value = repr(value)
return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value)
class ScalarNode(Node):
id = 'scalar'
def __init__(self, tag, value,
start_mark=None, end_mark=None, style=None):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class CollectionNode(Node):
def __init__(self, tag, value,
start_mark=None, end_mark=None, flow_style=None):
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class SequenceNode(CollectionNode):
id = 'sequence'
class MappingNode(CollectionNode):
id = 'mapping'
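A sketch of what these node classes look like coming out of
``yaml.compose`` (assuming the package is importable as ``yaml``):

    import yaml

    node = yaml.compose("a: [1, 2]")
    print(node.id)            # 'mapping'
    print(node.tag)           # u'tag:yaml.org,2002:map'
    key, value = node.value[0]
    print(key.value)          # u'a'
    print(value.id)           # 'sequence'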

589 lib/spack/external/yaml/parser.py vendored Normal file

@ -0,0 +1,589 @@
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
__all__ = ['Parser', 'ParserError']
from error import MarkedYAMLError
from tokens import *
from events import *
from scanner import *
class ParserError(MarkedYAMLError):
pass
class Parser(object):
# Since writing a recursive-descent parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {
u'!': u'!',
u'!!': u'tag:yaml.org,2002:',
}
def __init__(self):
self.current_event = None
self.yaml_version = None
self.tag_handles = {}
self.states = []
self.marks = []
self.state = self.parse_stream_start
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def check_event(self, *choices):
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# Parse the stream start.
token = self.get_token()
event = StreamStartEvent(token.start_mark, token.end_mark,
encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self):
# Parse an implicit document.
if not self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
token = self.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self):
# Parse any extra document end indicators.
while self.check_token(DocumentEndToken):
self.get_token()
# Parse an explicit document.
if not self.check_token(StreamEndToken):
token = self.peek_token()
start_mark = token.start_mark
version, tags = self.process_directives()
if not self.check_token(DocumentStartToken):
raise ParserError(None, None,
"expected '<document start>', but found %r"
% self.peek_token().id,
self.peek_token().start_mark)
token = self.get_token()
end_mark = token.end_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=True, version=version, tags=tags)
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.get_token()
event = StreamEndEvent(token.start_mark, token.end_mark)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self):
# Parse the document end.
token = self.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.check_token(DocumentEndToken):
token = self.get_token()
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark,
explicit=explicit)
# Prepare the next state.
self.state = self.parse_document_start
return event
def parse_document_content(self):
if self.check_token(DirectiveToken,
DocumentStartToken, DocumentEndToken, StreamEndToken):
event = self.process_empty_scalar(self.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self):
self.yaml_version = None
self.tag_handles = {}
while self.check_token(DirectiveToken):
token = self.get_token()
if token.name == u'YAML':
if self.yaml_version is not None:
raise ParserError(None, None,
"found duplicate YAML directive", token.start_mark)
major, minor = token.value
if major != 1:
raise ParserError(None, None,
"found incompatible YAML document (version 1.* is required)",
token.start_mark)
self.yaml_version = token.value
elif token.name == u'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(None, None,
"duplicate tag handle %r" % handle.encode('utf-8'),
token.start_mark)
self.tag_handles[handle] = prefix
if self.tag_handles:
value = self.yaml_version, self.tag_handles.copy()
else:
value = self.yaml_version, None
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self):
return self.parse_node(block=True)
def parse_flow_node(self):
return self.parse_node()
def parse_block_node_or_indentless_sequence(self):
return self.parse_node(block=True, indentless_sequence=True)
def parse_node(self, block=False, indentless_sequence=False):
if self.check_token(AliasToken):
token = self.get_token()
event = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
else:
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.check_token(AnchorToken):
token = self.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.check_token(TagToken):
token = self.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
elif self.check_token(TagToken):
token = self.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
if self.check_token(AnchorToken):
token = self.get_token()
end_mark = token.end_mark
anchor = token.value
if tag is not None:
handle, suffix = tag
if handle is not None:
if handle not in self.tag_handles:
raise ParserError("while parsing a node", start_mark,
"found undefined tag handle %r" % handle.encode('utf-8'),
tag_mark)
tag = self.tag_handles[handle]+suffix
else:
tag = suffix
#if tag == u'!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
if start_mark is None:
start_mark = end_mark = self.peek_token().start_mark
event = None
implicit = (tag is None or tag == u'!')
if indentless_sequence and self.check_token(BlockEntryToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
else:
if self.check_token(ScalarToken):
token = self.get_token()
end_mark = token.end_mark
if (token.plain and tag is None) or tag == u'!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
else:
implicit = (False, False)
event = ScalarEvent(anchor, tag, implicit, token.value,
start_mark, end_mark, style=token.style)
self.state = self.states.pop()
elif self.check_token(FlowSequenceStartToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_sequence_first_entry
elif self.check_token(FlowMappingStartToken):
end_mark = self.peek_token().end_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_mapping_first_key
elif block and self.check_token(BlockSequenceStartToken):
end_mark = self.peek_token().start_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_sequence_first_entry
elif block and self.check_token(BlockMappingStartToken):
end_mark = self.peek_token().start_mark
event = MappingStartEvent(anchor, tag, implicit,
start_mark, end_mark, flow_style=False)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(anchor, tag, (implicit, False), u'',
start_mark, end_mark)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.peek_token()
raise ParserError("while parsing a %s node" % node, start_mark,
"expected the node content, but found %r" % token.id,
token.start_mark)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
def parse_block_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block collection", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
def parse_indentless_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
if not self.check_token(BlockEntryToken,
KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.peek_token()
event = SequenceEndEvent(token.start_mark, token.start_mark)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
def parse_block_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self):
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError("while parsing a block mapping", self.marks[-1],
"expected <block end>, but found %r" % token.id, token.start_mark)
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_block_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
    # Note that while the production rules for flow_sequence_entry and
    # flow_mapping_entry are the same, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).
def parse_flow_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first=False):
if not self.check_token(FlowSequenceEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow sequence", self.marks[-1],
"expected ',' or ']', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.peek_token()
event = MappingStartEvent(None, None, True,
token.start_mark, token.end_mark,
flow_style=True)
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self):
self.state = self.parse_flow_sequence_entry
token = self.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
def parse_flow_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first=False):
if not self.check_token(FlowMappingEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError("while parsing a flow mapping", self.marks[-1],
"expected ',' or '}', but got %r" % token.id, token.start_mark)
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif not self.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self):
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.peek_token().start_mark)
def process_empty_scalar(self, mark):
return ScalarEvent(None, None, (True, False), u'', mark, mark)
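
For orientation, here is a minimal sketch of the event stream this parser produces, driven through the package's PyYAML-style entry point (assuming the vendored package is imported as `yaml`):

    import yaml

    # yaml.parse() runs the scanner and this parser, yielding the events
    # constructed above: stream/document brackets, Start/End pairs for
    # collections, and one ScalarEvent per scalar.
    for event in yaml.parse("colors: [red, green]"):
        print event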

lib/spack/external/yaml/reader.py (vendored, new file)

@@ -0,0 +1,189 @@
# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position `length` characters forward.
# reader.index - the index of the current character.
# reader.line, reader.column - the line and the column of the current character.
__all__ = ['Reader', 'ReaderError']
from error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, str):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to unicode,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `str` object,
# - a `unicode` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
def __init__(self, stream, name=None):
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = u''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, unicode):
self.name = "<unicode string>" if name is None else name
self.check_printable(stream)
self.buffer = stream+u'\0'
elif isinstance(stream, str):
self.name = "<string>" if name is None else name
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>") if name is None else name
self.eof = False
self.raw_buffer = ''
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in u'\n\x85\u2028\u2029' \
or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
self.line += 1
self.column = 0
elif ch != u'\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and len(self.raw_buffer) < 2:
self.update_raw()
if not isinstance(self.raw_buffer, unicode):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError, exc:
character = exc.object[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += u'\0'
self.raw_buffer = None
break
def update_raw(self, size=1024):
data = self.stream.read(size)
if data:
self.raw_buffer += data
self.stream_pointer += len(data)
else:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
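
A small driver sketch for the Reader interface documented above (hypothetical input, not part of this diff):

    from yaml.reader import Reader

    r = Reader(u"foo: bar\n")
    print r.peek()       # u'f' -- the current character
    print r.prefix(3)    # u'foo'
    r.forward(4)         # consume 'foo:' and advance the bookkeeping
    print r.index, r.line, r.column   # 4 0 4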

lib/spack/external/yaml/representer.py (vendored, new file)

@@ -0,0 +1,484 @@
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
'RepresenterError']
from error import *
from nodes import *
import datetime
import sys, copy_reg, types
class RepresenterError(YAMLError):
pass
class BaseRepresenter(object):
yaml_representers = {}
yaml_multi_representers = {}
def __init__(self, default_style=None, default_flow_style=None):
self.default_style = default_style
self.default_flow_style = default_flow_style
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def represent(self, data):
node = self.represent_data(data)
self.serialize(node)
self.represented_objects = {}
self.object_keeper = []
self.alias_key = None
def get_classobj_bases(self, cls):
bases = [cls]
for base in cls.__bases__:
bases.extend(self.get_classobj_bases(base))
return bases
def represent_data(self, data):
if self.ignore_aliases(data):
self.alias_key = None
else:
self.alias_key = id(data)
if self.alias_key is not None:
if self.alias_key in self.represented_objects:
node = self.represented_objects[self.alias_key]
#if node is None:
# raise RepresenterError("recursive objects are not allowed: %r" % data)
return node
#self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
if type(data) is types.InstanceType:
data_types = self.get_classobj_bases(data.__class__)+list(data_types)
if data_types[0] in self.yaml_representers:
node = self.yaml_representers[data_types[0]](self, data)
else:
for data_type in data_types:
if data_type in self.yaml_multi_representers:
node = self.yaml_multi_representers[data_type](self, data)
break
else:
if None in self.yaml_multi_representers:
node = self.yaml_multi_representers[None](self, data)
elif None in self.yaml_representers:
node = self.yaml_representers[None](self, data)
else:
node = ScalarNode(None, unicode(data))
#if alias_key is not None:
# self.represented_objects[alias_key] = node
return node
def add_representer(cls, data_type, representer):
if not 'yaml_representers' in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
add_representer = classmethod(add_representer)
def add_multi_representer(cls, data_type, representer):
if not 'yaml_multi_representers' in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
add_multi_representer = classmethod(add_multi_representer)
def represent_scalar(self, tag, value, style=None):
if style is None:
style = self.default_style
node = ScalarNode(tag, value, style=style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
return node
def represent_sequence(self, tag, sequence, flow_style=None):
value = []
node = SequenceNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
for item in sequence:
node_item = self.represent_data(item)
if not (isinstance(node_item, ScalarNode) and not node_item.style):
best_style = False
value.append(node_item)
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = True
if hasattr(mapping, 'items'):
mapping = mapping.items()
mapping.sort()
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
if not (isinstance(node_value, ScalarNode) and not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def ignore_aliases(self, data):
return False
class SafeRepresenter(BaseRepresenter):
def ignore_aliases(self, data):
if data in [None, ()]:
return True
if isinstance(data, (str, unicode, bool, int, float)):
return True
def represent_none(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:null',
u'null')
def represent_str(self, data):
tag = None
style = None
try:
data = unicode(data, 'ascii')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
try:
data = unicode(data, 'utf-8')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
data = data.encode('base64')
tag = u'tag:yaml.org,2002:binary'
style = '|'
return self.represent_scalar(tag, data, style=style)
def represent_unicode(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:str', data)
def represent_bool(self, data):
if data:
value = u'true'
else:
value = u'false'
return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
def represent_int(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
def represent_long(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))
inf_value = 1e300
while repr(inf_value) != repr(inf_value*inf_value):
inf_value *= inf_value
    def represent_float(self, data):
        # `data != data` catches NaN; the second comparison guards platforms
        # where a broken NaN compares equal to everything.
        if data != data or (data == 0.0 and data == 1.0):
value = u'.nan'
elif data == self.inf_value:
value = u'.inf'
elif data == -self.inf_value:
value = u'-.inf'
else:
value = unicode(repr(data)).lower()
# Note that in some cases `repr(data)` represents a float number
# without the decimal parts. For instance:
# >>> repr(1e17)
# '1e17'
# Unfortunately, this is not a valid float representation according
# to the definition of the `!!float` tag. We fix this by adding
# '.0' before the 'e' symbol.
if u'.' not in value and u'e' in value:
value = value.replace(u'e', u'.0e', 1)
return self.represent_scalar(u'tag:yaml.org,2002:float', value)
def represent_list(self, data):
#pairs = (len(data) > 0 and isinstance(data, list))
#if pairs:
# for item in data:
# if not isinstance(item, tuple) or len(item) != 2:
# pairs = False
# break
#if not pairs:
return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
#value = []
#for item_key, item_value in data:
# value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
# [(item_key, item_value)]))
#return SequenceNode(u'tag:yaml.org,2002:pairs', value)
def represent_dict(self, data):
return self.represent_mapping(u'tag:yaml.org,2002:map', data)
def represent_set(self, data):
value = {}
for key in data:
value[key] = None
return self.represent_mapping(u'tag:yaml.org,2002:set', value)
def represent_date(self, data):
value = unicode(data.isoformat())
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
def represent_datetime(self, data):
value = unicode(data.isoformat(' '))
return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)
def represent_yaml_object(self, tag, data, cls, flow_style=None):
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__.copy()
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data):
raise RepresenterError("cannot represent an object: %s" % data)
SafeRepresenter.add_representer(type(None),
SafeRepresenter.represent_none)
SafeRepresenter.add_representer(str,
SafeRepresenter.represent_str)
SafeRepresenter.add_representer(unicode,
SafeRepresenter.represent_unicode)
SafeRepresenter.add_representer(bool,
SafeRepresenter.represent_bool)
SafeRepresenter.add_representer(int,
SafeRepresenter.represent_int)
SafeRepresenter.add_representer(long,
SafeRepresenter.represent_long)
SafeRepresenter.add_representer(float,
SafeRepresenter.represent_float)
SafeRepresenter.add_representer(list,
SafeRepresenter.represent_list)
SafeRepresenter.add_representer(tuple,
SafeRepresenter.represent_list)
SafeRepresenter.add_representer(dict,
SafeRepresenter.represent_dict)
SafeRepresenter.add_representer(set,
SafeRepresenter.represent_set)
SafeRepresenter.add_representer(datetime.date,
SafeRepresenter.represent_date)
SafeRepresenter.add_representer(datetime.datetime,
SafeRepresenter.represent_datetime)
SafeRepresenter.add_representer(None,
SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
def represent_str(self, data):
tag = None
style = None
try:
data = unicode(data, 'ascii')
tag = u'tag:yaml.org,2002:str'
except UnicodeDecodeError:
try:
data = unicode(data, 'utf-8')
tag = u'tag:yaml.org,2002:python/str'
except UnicodeDecodeError:
data = data.encode('base64')
tag = u'tag:yaml.org,2002:binary'
style = '|'
return self.represent_scalar(tag, data, style=style)
def represent_unicode(self, data):
tag = None
try:
data.encode('ascii')
tag = u'tag:yaml.org,2002:python/unicode'
except UnicodeEncodeError:
tag = u'tag:yaml.org,2002:str'
return self.represent_scalar(tag, data)
def represent_long(self, data):
tag = u'tag:yaml.org,2002:int'
if int(data) is not data:
tag = u'tag:yaml.org,2002:python/long'
return self.represent_scalar(tag, unicode(data))
def represent_complex(self, data):
if data.imag == 0.0:
data = u'%r' % data.real
elif data.real == 0.0:
data = u'%rj' % data.imag
elif data.imag > 0:
data = u'%r+%rj' % (data.real, data.imag)
else:
data = u'%r%rj' % (data.real, data.imag)
return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)
def represent_tuple(self, data):
return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
def represent_name(self, data):
name = u'%s.%s' % (data.__module__, data.__name__)
return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
def represent_module(self, data):
return self.represent_scalar(
u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
def represent_instance(self, data):
# For instances of classic classes, we use __getinitargs__ and
# __getstate__ to serialize the data.
# If data.__getinitargs__ exists, the object must be reconstructed by
        # calling cls(*args), where args is a tuple returned by
# __getinitargs__. Otherwise, the cls.__init__ method should never be
# called and the class instance is created by instantiating a trivial
# class and assigning to the instance's __class__ variable.
# If data.__getstate__ exists, it returns the state of the object.
# Otherwise, the state of the object is data.__dict__.
# We produce either a !!python/object or !!python/object/new node.
# If data.__getinitargs__ does not exist and state is a dictionary, we
        # produce a !!python/object node. Otherwise we produce a
# !!python/object/new node.
cls = data.__class__
class_name = u'%s.%s' % (cls.__module__, cls.__name__)
args = None
state = None
if hasattr(data, '__getinitargs__'):
args = list(data.__getinitargs__())
if hasattr(data, '__getstate__'):
state = data.__getstate__()
else:
state = data.__dict__
if args is None and isinstance(state, dict):
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+class_name, state)
if isinstance(state, dict) and not state:
return self.represent_sequence(
u'tag:yaml.org,2002:python/object/new:'+class_name, args)
value = {}
if args:
value['args'] = args
value['state'] = state
return self.represent_mapping(
u'tag:yaml.org,2002:python/object/new:'+class_name, value)
def represent_object(self, data):
# We use __reduce__ API to save the data. data.__reduce__ returns
# a tuple of length 2-5:
# (function, args, state, listitems, dictitems)
        # For reconstructing, we call function(*args), then set its state,
# listitems, and dictitems if they are not None.
# A special case is when function.__name__ == '__newobj__'. In this
# case we create the object with args[0].__new__(*args).
# Another special case is when __reduce__ returns a string - we don't
# support it.
# We produce a !!python/object, !!python/object/new or
# !!python/object/apply node.
cls = type(data)
if cls in copy_reg.dispatch_table:
reduce = copy_reg.dispatch_table[cls](data)
elif hasattr(data, '__reduce_ex__'):
reduce = data.__reduce_ex__(2)
elif hasattr(data, '__reduce__'):
reduce = data.__reduce__()
else:
raise RepresenterError("cannot represent object: %r" % data)
reduce = (list(reduce)+[None]*5)[:5]
function, args, state, listitems, dictitems = reduce
args = list(args)
if state is None:
state = {}
if listitems is not None:
listitems = list(listitems)
if dictitems is not None:
dictitems = dict(dictitems)
if function.__name__ == '__newobj__':
function = args[0]
args = args[1:]
tag = u'tag:yaml.org,2002:python/object/new:'
newobj = True
else:
tag = u'tag:yaml.org,2002:python/object/apply:'
newobj = False
function_name = u'%s.%s' % (function.__module__, function.__name__)
if not args and not listitems and not dictitems \
and isinstance(state, dict) and newobj:
return self.represent_mapping(
u'tag:yaml.org,2002:python/object:'+function_name, state)
if not listitems and not dictitems \
and isinstance(state, dict) and not state:
return self.represent_sequence(tag+function_name, args)
value = {}
if args:
value['args'] = args
if state or not isinstance(state, dict):
value['state'] = state
if listitems:
value['listitems'] = listitems
if dictitems:
value['dictitems'] = dictitems
return self.represent_mapping(tag+function_name, value)
Representer.add_representer(str,
Representer.represent_str)
Representer.add_representer(unicode,
Representer.represent_unicode)
Representer.add_representer(long,
Representer.represent_long)
Representer.add_representer(complex,
Representer.represent_complex)
Representer.add_representer(tuple,
Representer.represent_tuple)
Representer.add_representer(type,
Representer.represent_name)
Representer.add_representer(types.ClassType,
Representer.represent_name)
Representer.add_representer(types.FunctionType,
Representer.represent_name)
Representer.add_representer(types.BuiltinFunctionType,
Representer.represent_name)
Representer.add_representer(types.ModuleType,
Representer.represent_module)
Representer.add_multi_representer(types.InstanceType,
Representer.represent_instance)
Representer.add_multi_representer(object,
Representer.represent_object)
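
add_representer() above is the extension point; a sketch of registering a custom representer through the PyYAML-style top-level API (the Point class and !Point tag are invented for illustration):

    import yaml

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    def represent_point(dumper, point):
        # represent_mapping() builds a MappingNode as shown above
        return dumper.represent_mapping(u'!Point',
                                        {'x': point.x, 'y': point.y})

    yaml.add_representer(Point, represent_point)
    print yaml.dump(Point(1, 2))   # => !Point {x: 1, y: 2}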

lib/spack/external/yaml/resolver.py (vendored, new file)

@@ -0,0 +1,224 @@
__all__ = ['BaseResolver', 'Resolver']
from error import *
from nodes import *
import re
class ResolverError(YAMLError):
pass
class BaseResolver(object):
DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
yaml_implicit_resolvers = {}
yaml_path_resolvers = {}
def __init__(self):
self.resolver_exact_paths = []
self.resolver_prefix_paths = []
def add_implicit_resolver(cls, tag, regexp, first):
if not 'yaml_implicit_resolvers' in cls.__dict__:
cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
add_implicit_resolver = classmethod(add_implicit_resolver)
def add_path_resolver(cls, tag, path, kind=None):
# Note: `add_path_resolver` is experimental. The API could be changed.
        # `path` is a pattern that is matched against the path from the
        # root to the node that is being considered. Path elements are
        # tuples `(node_check, index_check)`. `node_check` is a node class:
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
# matches any kind of a node. `index_check` could be `None`, a boolean
# value, a string value, or a number. `None` and `False` match against
# any _value_ of sequence and mapping nodes. `True` matches against
# any _key_ of a mapping node. A string `index_check` matches against
        # a mapping value that corresponds to a scalar key whose content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
if not 'yaml_path_resolvers' in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
node_check, index_check = element
elif len(element) == 1:
node_check = element[0]
index_check = True
else:
raise ResolverError("Invalid path element: %s" % element)
else:
node_check = None
index_check = element
if node_check is str:
node_check = ScalarNode
elif node_check is list:
node_check = SequenceNode
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
and not isinstance(node_check, basestring) \
and node_check is not None:
raise ResolverError("Invalid node checker: %s" % node_check)
if not isinstance(index_check, (basestring, int)) \
and index_check is not None:
raise ResolverError("Invalid index checker: %s" % index_check)
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
elif kind is list:
kind = SequenceNode
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
and kind is not None:
raise ResolverError("Invalid node kind: %s" % kind)
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
add_path_resolver = classmethod(add_path_resolver)
def descend_resolver(self, current_node, current_index):
if not self.yaml_path_resolvers:
return
exact_paths = {}
prefix_paths = []
if current_node:
depth = len(self.resolver_prefix_paths)
for path, kind in self.resolver_prefix_paths[-1]:
if self.check_resolver_prefix(depth, path, kind,
current_node, current_index):
if len(path) > depth:
prefix_paths.append((path, kind))
else:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
for path, kind in self.yaml_path_resolvers:
if not path:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
prefix_paths.append((path, kind))
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
def ascend_resolver(self):
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
if isinstance(node_check, basestring):
if current_node.tag != node_check:
return
elif node_check is not None:
if not isinstance(current_node, node_check):
return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
and current_index is None:
return
if isinstance(index_check, basestring):
if not (isinstance(current_index, ScalarNode)
and index_check == current_index.value):
return
elif isinstance(index_check, int) and not isinstance(index_check, bool):
if index_check != current_index:
return
return True
def resolve(self, kind, value, implicit):
if kind is ScalarNode and implicit[0]:
if value == u'':
resolvers = self.yaml_implicit_resolvers.get(u'', [])
else:
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
resolvers += self.yaml_implicit_resolvers.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return tag
implicit = implicit[1]
if self.yaml_path_resolvers:
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return exact_paths[kind]
if None in exact_paths:
return exact_paths[None]
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
class Resolver(BaseResolver):
pass
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:bool',
re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list(u'yYnNtTfFoO'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:float',
re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
|\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
|[-+]?\.(?:inf|Inf|INF)
|\.(?:nan|NaN|NAN))$''', re.X),
list(u'-+0123456789.'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:int',
re.compile(ur'''^(?:[-+]?0b[0-1_]+
|[-+]?0[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
list(u'-+0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:merge',
re.compile(ur'^(?:<<)$'),
[u'<'])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:null',
re.compile(ur'''^(?: ~
|null|Null|NULL
| )$''', re.X),
[u'~', u'n', u'N', u''])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:timestamp',
re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
(?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list(u'0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:value',
re.compile(ur'^(?:=)$'),
[u'='])
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:yaml',
re.compile(ur'^(?:!|&|\*)$'),
list(u'!&*'))

lib/spack/external/yaml/scanner.py (vendored, new file, 1457 lines)

File diff suppressed because it is too large.

lib/spack/external/yaml/serializer.py (vendored, new file)

@@ -0,0 +1,111 @@
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
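
A sketch of the open/serialize/close protocol implemented above, driven through the PyYAML-style top-level helpers (illustrative only):

    import yaml

    # yaml.compose() builds a node graph; yaml.serialize() feeds it back
    # through Serializer.serialize(), anchoring shared nodes and emitting
    # events in document order.
    node = yaml.compose("a: [1, 2]")
    print yaml.serialize(node)   # roughly: a: [1, 2]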

lib/spack/external/yaml/tokens.py (vendored, new file)

@@ -0,0 +1,104 @@
class Token(object):
def __init__(self, start_mark, end_mark):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in self.__dict__
if not key.endswith('_mark')]
attributes.sort()
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
#class BOMToken(Token):
# id = '<byte order mark>'
class DirectiveToken(Token):
id = '<directive>'
def __init__(self, name, value, start_mark, end_mark):
self.name = name
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class DocumentStartToken(Token):
id = '<document start>'
class DocumentEndToken(Token):
id = '<document end>'
class StreamStartToken(Token):
id = '<stream start>'
def __init__(self, start_mark=None, end_mark=None,
encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndToken(Token):
id = '<stream end>'
class BlockSequenceStartToken(Token):
id = '<block sequence start>'
class BlockMappingStartToken(Token):
id = '<block mapping start>'
class BlockEndToken(Token):
id = '<block end>'
class FlowSequenceStartToken(Token):
id = '['
class FlowMappingStartToken(Token):
id = '{'
class FlowSequenceEndToken(Token):
id = ']'
class FlowMappingEndToken(Token):
id = '}'
class KeyToken(Token):
id = '?'
class ValueToken(Token):
id = ':'
class BlockEntryToken(Token):
id = '-'
class FlowEntryToken(Token):
id = ','
class AliasToken(Token):
id = '<alias>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class AnchorToken(Token):
id = '<anchor>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class TagToken(Token):
id = '<tag>'
def __init__(self, value, start_mark, end_mark):
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
class ScalarToken(Token):
id = '<scalar>'
def __init__(self, value, plain, start_mark, end_mark, style=None):
self.value = value
self.plain = plain
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
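
These token classes are what the scanner (whose diff is suppressed above) emits; a quick sketch of inspecting the token stream through the PyYAML-style `yaml.scan()` entry point:

    import yaml

    for token in yaml.scan("a: [1, 2]"):
        print token
    # StreamStartToken(), BlockMappingStartToken(), KeyToken(),
    # ScalarToken(plain=True, style=None, value=u'a'), ValueToken(), ...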

lib/spack/llnl/util/filesystem.py

@@ -152,7 +152,7 @@ def copy_mode(src, dest):
def install(src, dest):
"""Manually install a file to a particular location."""
tty.info("Installing %s to %s" % (src, dest))
tty.debug("Installing %s to %s" % (src, dest))
shutil.copy(src, dest)
set_install_permissions(dest)
copy_mode(src, dest)
@@ -160,7 +160,7 @@ def install(src, dest):
def install_tree(src, dest, **kwargs):
"""Manually install a file to a particular location."""
tty.info("Installing %s to %s" % (src, dest))
tty.debug("Installing %s to %s" % (src, dest))
shutil.copytree(src, dest, **kwargs)
for s, d in traverse_tree(src, dest, follow_nonexisting=False):

lib/spack/llnl/util/lang.py

@@ -126,22 +126,20 @@ def caller_locals():
del stack
def get_calling_package_name():
def get_calling_module_name():
"""Make sure that the caller is a class definition, and return the
module's name.
enclosing module's name.
"""
stack = inspect.stack()
try:
# get calling function name (the relation)
relation = stack[1][3]
# Make sure locals contain __module__
caller_locals = stack[2][0].f_locals
finally:
del stack
if not '__module__' in caller_locals:
raise ScopeError(relation)
raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
module_name = caller_locals['__module__']
base_name = module_name.split('.')[-1]
@@ -322,6 +320,24 @@ def match(string):
return match
def DictWrapper(dictionary):
"""Returns a class that wraps a dictionary and enables it to be used
like an object."""
class wrapper(object):
def __getattr__(self, name): return dictionary[name]
def __setattr__(self, name, value): dictionary[name] = value
def setdefault(self, *args): return dictionary.setdefault(*args)
def get(self, *args): return dictionary.get(*args)
def keys(self): return dictionary.keys()
def values(self): return dictionary.values()
def items(self): return dictionary.items()
def __iter__(self): return iter(dictionary)
return wrapper()
class RequiredAttributeError(ValueError):
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
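
A usage sketch for the new DictWrapper helper (the dict contents and the `llnl.util.lang` import path are assumptions for illustration):

    from llnl.util.lang import DictWrapper

    d = {'name': 'libelf', 'version': '0.8.13'}
    w = DictWrapper(d)
    print w.name            # attribute reads come from the dict: libelf
    w.version = '0.8.12'    # attribute writes go back into the dict
    print d['version']      # 0.8.12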

lib/spack/llnl/util/tty/__init__.py

@@ -36,6 +36,14 @@
_verbose = False
indent = " "
def is_verbose():
return _verbose
def is_debug():
return _debug
def set_debug(flag):
global _debug
_debug = flag

lib/spack/llnl/util/tty/color.py

@@ -99,6 +99,10 @@ def __init__(self, message):
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
# Force color even if stdout is not a tty.
_force_color = False
class match_to_ansi(object):
def __init__(self, color=True):
self.color = color
@@ -162,7 +166,7 @@ def cwrite(string, stream=sys.stdout, color=None):
then it will be set based on stream.isatty().
"""
if color is None:
color = stream.isatty()
color = stream.isatty() or _force_color
stream.write(colorize(string, color=color))
@@ -189,7 +193,7 @@ def write(self, string, **kwargs):
if raw:
color=True
else:
color = self._stream.isatty()
color = self._stream.isatty() or _force_color
raw_write(colorize(string, color=color))
def writelines(self, sequence, **kwargs):
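
A sketch of the new force-color escape hatch these hunks add (the import path matches the one used in log.py below):

    import llnl.util.tty.color as color

    color._force_color = True      # colorize even when output is piped
    color.cwrite("@g{OK}\n")       # '@g{...}' renders green per color_re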

lib/spack/llnl/util/tty/log.py (new file)

@@ -0,0 +1,178 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Utility classes for logging the output of blocks of code.
"""
import sys
import os
import re
import select
import inspect
import llnl.util.tty as tty
import llnl.util.tty.color as color
# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
def _strip(line):
"""Strip color and control characters from a line."""
return _escape.sub('', line)
class _SkipWithBlock():
"""Special exception class used to skip a with block."""
pass
class log_output(object):
"""Redirects output and error of enclosed block to a file.
Usage:
with log_output(open('logfile.txt', 'w')):
# do things ... output will be logged.
or:
with log_output(open('logfile.txt', 'w'), echo=True):
# do things ... output will be logged
# and also printed to stdout.
Closes the provided stream when done with the block.
If echo is True, also prints the output to stdout.
"""
def __init__(self, stream, echo=False, force_color=False, debug=False):
self.stream = stream
# various output options
self.echo = echo
self.force_color = force_color
self.debug = debug
def trace(self, frame, event, arg):
"""Jumps to __exit__ on the child process."""
raise _SkipWithBlock()
def __enter__(self):
"""Redirect output from the with block to a file.
This forks the with block as a separate process, with stdout
and stderr redirected back to the parent via a pipe. If
echo is set, also writes to standard out.
"""
# remember these values for later.
self._force_color = color._force_color
self._debug = tty._debug
read, write = os.pipe()
self.pid = os.fork()
if self.pid:
# Parent: read from child, skip the with block.
os.close(write)
read_file = os.fdopen(read, 'r', 0)
with self.stream as log_file:
while True:
rlist, w, x = select.select([read_file], [], [])
if not rlist:
break
line = read_file.readline()
if not line:
break
# Echo to stdout if requested.
if self.echo:
sys.stdout.write(line)
# Stripped output to log file.
log_file.write(_strip(line))
read_file.flush()
read_file.close()
# Set a trace function to skip the with block.
sys.settrace(lambda *args, **keys: None)
frame = inspect.currentframe(1)
frame.f_trace = self.trace
else:
# Child: redirect output, execute the with block.
os.close(read)
# Save old stdout and stderr
self._stdout = os.dup(sys.stdout.fileno())
self._stderr = os.dup(sys.stderr.fileno())
# redirect to the pipe.
os.dup2(write, sys.stdout.fileno())
os.dup2(write, sys.stderr.fileno())
if self.force_color:
color._force_color = True
if self.debug:
tty._debug = True
def __exit__(self, exc_type, exception, traceback):
"""Exits on child, handles skipping the with block on parent."""
# Child should just exit here.
if self.pid == 0:
# Flush the log to disk.
sys.stdout.flush()
sys.stderr.flush()
if exception:
# Restore stdout on the child if there's an exception,
# and let it be raised normally.
#
# This assumes that even if the exception is caught,
# the child will exit with a nonzero return code. If
# it doesn't, the child process will continue running.
#
# TODO: think about how this works outside install.
# TODO: ideally would propagate exception to parent...
os.dup2(self._stdout, sys.stdout.fileno())
os.dup2(self._stderr, sys.stderr.fileno())
return False
else:
# Die quietly if there was no exception.
os._exit(0)
else:
# If the child exited badly, parent also should exit.
pid, returncode = os.waitpid(self.pid, 0)
if returncode != 0:
os._exit(1)
# restore output options.
color._force_color = self._force_color
tty._debug = self._debug
# Suppresses exception if it's our own.
return exc_type is _SkipWithBlock

lib/spack/spack/__init__.py

@@ -42,7 +42,8 @@
hooks_path = join_path(module_path, "hooks")
var_path = join_path(prefix, "var", "spack")
stage_path = join_path(var_path, "stage")
install_path = join_path(prefix, "opt")
opt_path = join_path(prefix, "opt")
install_path = join_path(opt_path, "spack")
share_path = join_path(prefix, "share", "spack")
#
@@ -65,8 +66,8 @@
# This controls how spack lays out install prefixes and
# stage directories.
#
from spack.directory_layout import SpecHashDirectoryLayout
install_layout = SpecHashDirectoryLayout(install_path)
from spack.directory_layout import YamlDirectoryLayout
install_layout = YamlDirectoryLayout(install_path)
#
# This controls how things are concretized in spack.
@@ -146,9 +147,9 @@
from llnl.util.filesystem import *
__all__ += llnl.util.filesystem.__all__
import spack.relations
from spack.relations import *
__all__ += spack.relations.__all__
import spack.directives
from spack.directives import *
__all__ += spack.directives.__all__
import spack.util.executable
from spack.util.executable import *

lib/spack/spack/build_environment.py

@@ -52,6 +52,7 @@
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_INSTALL = 'SPACK_INSTALL'
SPACK_DEBUG = 'SPACK_DEBUG'
SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
@@ -67,19 +68,19 @@ class MakeExecutable(Executable):
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
everything.
"""
def __init__(self, name, parallel):
def __init__(self, name, jobs):
super(MakeExecutable, self).__init__(name)
self.parallel = parallel
self.jobs = jobs
def __call__(self, *args, **kwargs):
parallel = kwargs.get('parallel', self.parallel)
disable_parallel = env_flag(SPACK_NO_PARALLEL_MAKE)
disable = env_flag(SPACK_NO_PARALLEL_MAKE)
parallel = not disable and kwargs.get('parallel', self.jobs > 1)
if parallel and not disable_parallel:
jobs = "-j%d" % multiprocessing.cpu_count()
if parallel:
jobs = "-j%d" % self.jobs
args = (jobs,) + args
super(MakeExecutable, self).__call__(*args, **kwargs)
return super(MakeExecutable, self).__call__(*args, **kwargs)
def set_compiler_environment_variables(pkg):
@@ -125,6 +126,9 @@ def set_build_environment_variables(pkg):
# Install prefix
os.environ[SPACK_PREFIX] = pkg.prefix
# Install root prefix
os.environ[SPACK_INSTALL] = spack.install_path
    # Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
@@ -159,15 +163,21 @@
"""
m = pkg.module
m.make = MakeExecutable('make', pkg.parallel)
m.gmake = MakeExecutable('gmake', pkg.parallel)
    # Number of jobs spack will build with.
jobs = multiprocessing.cpu_count()
if not pkg.parallel:
jobs = 1
elif pkg.make_jobs:
jobs = pkg.make_jobs
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable('make', jobs)
m.gmake = MakeExecutable('gmake', jobs)
# easy shortcut to os.environ
m.env = os.environ
# number of jobs spack prefers to build with.
m.make_jobs = multiprocessing.cpu_count()
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
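
A sketch of how the reworked MakeExecutable behaves (the jobs value is hypothetical; SPACK_NO_PARALLEL_MAKE still overrides everything, per the code above):

    from spack.build_environment import MakeExecutable

    make = MakeExecutable('make', jobs=8)
    make()                 # runs `make -j8`
    make('install')        # runs `make -j8 install`
    make(parallel=False)   # runs a plain serial `make`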

lib/spack/spack/cmd/activate.py

@@ -38,17 +38,11 @@ def setup_parser(subparser):
def activate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues.
specs = spack.cmd.parse_specs(args.spec, concretize=True)
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("activate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored in dir layout.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0])
if not spec.package.is_extension:
tty.die("%s is not an extension." % spec.name)

lib/spack/spack/cmd/compiler.py

@@ -68,7 +68,7 @@ def compiler_add(args):
spack.compilers.add_compilers_to_config('user', *compilers)
n = len(compilers)
tty.msg("Added %d new compiler%s to %s" % (
n, 's' if n > 1 else '', spack.config.get_filename('user')))
n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else:
tty.msg("Found no new compilers")

lib/spack/spack/cmd/config.py

@@ -43,42 +43,27 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
set_parser = sp.add_parser('set', help='Set configuration values.')
set_parser.add_argument('key', help="Key to set value for.")
set_parser.add_argument('value', nargs='?', default=None,
help="Value to associate with key")
get_parser = sp.add_parser('get', help='Get configuration values.')
get_parser.add_argument('key', help="Key to get value for.")
get_parser = sp.add_parser('get', help='Print configuration values.')
get_parser.add_argument('category', help="Configuration category to print.")
edit_parser = sp.add_parser('edit', help='Edit configuration file.')
def config_set(args):
# default scope for writing is 'user'
if not args.scope:
args.scope = 'user'
config = spack.config.get_config(args.scope)
config.set_value(args.key, args.value)
config.write()
edit_parser.add_argument('category', help="Configuration category to edit")
def config_get(args):
config = spack.config.get_config(args.scope)
print config.get_value(args.key)
spack.config.print_category(args.category)
def config_edit(args):
if not args.scope:
args.scope = 'user'
config_file = spack.config.get_filename(args.scope)
if not args.category:
args.category = None
config_file = spack.config.get_config_scope_filename(args.scope, args.category)
spack.editor(config_file)
def config(parser, args):
action = { 'set' : config_set,
'get' : config_get,
action = { 'get' : config_get,
'edit' : config_edit }
action[args.config_command](args)

lib/spack/spack/cmd/create.py

@@ -26,7 +26,6 @@
import os
import hashlib
import re
from contextlib import closing
from external.ordereddict import OrderedDict
import llnl.util.tty as tty
@@ -192,7 +191,7 @@ def create(parser, args):
tty.die("Could not fetch any tarballs for %s." % name)
# Write out a template for the file
with closing(open(pkg_path, "w")) as pkg_file:
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
package_template.substitute(
name=name,

lib/spack/spack/cmd/deactivate.py

@@ -44,15 +44,10 @@ def setup_parser(subparser):
def deactivate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues.
specs = spack.cmd.parse_specs(args.spec, concretize=True)
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("deactivate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored properly.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0])
pkg = spec.package
@@ -67,9 +62,6 @@ def deactivate(parser, args):
ext_pkg.do_deactivate(force=True)
elif pkg.is_extension:
# TODO: store DAG info properly (see above)
spec.normalize()
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
@@ -81,10 +73,6 @@ def deactivate(parser, args):
for name in topo_order:
espec = index[name]
epkg = espec.package
# TODO: store DAG info properly (see above)
epkg.spec.normalize()
if epkg.extends(pkg.extendee_spec):
if epkg.activated or args.force:

lib/spack/spack/cmd/diy.py (new file)

@@ -0,0 +1,93 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import os
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.cmd.edit import edit_package
from spack.stage import DIYStage
description = "Do-It-Yourself: build from an existing source directory."
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'--keep-prefix', action='store_true',
help="Don't remove the install prefix if installation fails.")
subparser.add_argument(
'--skip-patch', action='store_true',
help="Skip patching for the DIY build.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND verison.")
def diy(self, args):
if not args.spec:
tty.die("spack diy requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack diy only takes one spec.")
spec = specs[0]
if not spack.db.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, True)
return
if not spec.version.concrete:
tty.die("spack diy spec must have a single, concrete version.")
spec.concretize()
package = spack.db.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
package.do_install(
keep_prefix=args.keep_prefix,
ignore_deps=args.ignore_deps,
keep_stage=True) # don't remove source dir for DIY.
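A quick usage sketch (package name and version are hypothetical); run from inside the source tree you want to build:

    cd ~/src/mylib-1.0                  # existing, already-expanded sources
    spack diy -i mylib@1.0              # -i skips installing dependencies
    spack diy --skip-patch mylib@1.0    # rebuild without re-applying patches

The spec must pin a single, concrete version, since there is no archive to fetch or checksum.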

View file

@@ -24,7 +24,6 @@
##############################################################################
import os
import string
from contextlib import closing
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, join_path
@@ -54,6 +53,27 @@ def install(self, spec, prefix):
""")
def edit_package(name, force=False):
path = spack.db.filename_for_package_name(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
"to edit a new file." % name)
else:
mkdirp(os.path.dirname(path))
with open(path, "w") as pkg_file:
pkg_file.write(
package_template.substitute(
name=name, class_name=mod_to_class(name)))
spack.editor(path)
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', dest='force', action='store_true',
@@ -75,27 +95,12 @@ def edit(parser, args):
path = join_path(spack.cmd.command_path, name + ".py")
if not os.path.exists(path):
tty.die("No command named '%s'." % name)
spack.editor(path)
else:
# By default open the directory where packages or commands live.
if not name:
path = spack.packages_path
spack.editor(path)
else:
path = spack.db.filename_for_package_name(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not args.force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
"to edit a new file." % name)
else:
mkdirp(os.path.dirname(path))
with closing(open(path, "w")) as pkg_file:
pkg_file.write(
package_template.substitute(name=name, class_name=mod_to_class(name)))
# If everything checks out, go ahead and edit.
spack.editor(path)
edit_package(name, args.force)

View file

@@ -40,9 +40,6 @@
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
'-l', '--long', action='store_const', dest='mode', const='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories')
@@ -50,13 +47,30 @@ def setup_parser(subparser):
'-d', '--deps', action='store_const', dest='mode', const='deps',
help='Show full dependency DAG of installed packages')
subparser.add_argument(
'-l', '--long', action='store_true', dest='long',
help='Show dependency hashes as well as versions.')
subparser.add_argument(
'-L', '--very-long', action='store_true', dest='very_long',
help='Show dependency hashes as well as versions.')
subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results')
def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
hlen = 7
if kwargs.get('very_long', False):
hashes = True
hlen = None
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
@@ -81,17 +95,26 @@ def display_specs(specs, **kwargs):
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
if hashes:
print gray_hash(spec, hlen),
print format % (abbrv, spec.prefix)
elif mode == 'deps':
for spec in specs:
print spec.tree(indent=4, format='$_$@$+$#', color=True),
print spec.tree(
format='$_$@$+',
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)
elif mode in ('short', 'long'):
fmt = '$-_$@$+'
if mode == 'long':
fmt += '$#'
colify(s.format(fmt, color=True) for s in specs)
elif mode == 'short':
def fmt(s):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
string += s.format('$-_$@$+', color=True)
return string
colify(fmt(s) for s in specs)
else:
raise ValueError(
@@ -125,5 +148,6 @@ def find(parser, args):
if sys.stdout.isatty():
tty.msg("%d installed packages." % len(specs))
display_specs(specs, mode=args.mode)
display_specs(specs, mode=args.mode,
long=args.long,
very_long=args.very_long)

View file

@@ -22,12 +22,22 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import textwrap
from llnl.util.tty.colify import *
import spack
import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
def pad(string):
string = str(string)
padding = max(0, length - len(string))
return string + (padding * ' ')
return pad
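A small illustration of padder (values hypothetical): every padded string is as long as the longest input plus the extra margin.

    pad = padder(['1.0', '10.2.1', '2.4'], 4)
    print pad('1.0') + '|'      # '1.0       |' -- 6 + 4 characters wide
    print pad('10.2.1') + '|'   # '10.2.1    |'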
def setup_parser(subparser):
subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
@@ -42,13 +52,24 @@ def print_text_info(pkg):
print "Safe versions: "
if not pkg.versions:
print("None.")
print("None")
else:
maxlen = max(len(str(v)) for v in pkg.versions)
fmt = "%%-%ss" % maxlen
pad = padder(pkg.versions, 4)
for v in reversed(sorted(pkg.versions)):
f = fs.for_package_version(pkg, v)
print " " + (fmt % v) + " " + str(f)
print " %s%s" % (pad(v), str(f))
print
print "Variants:"
if not pkg.variants:
print "None"
else:
pad = padder(pkg.variants, 4)
for name in sorted(pkg.variants):
v = pkg.variants[name]
print " %s%s" % (
pad(('+' if v.default else '-') + name + ':'),
"\n".join(textwrap.wrap(v.description)))
print
print "Dependencies:"

View file

@@ -22,9 +22,10 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
@@ -34,6 +35,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument(
'--keep-prefix', action='store_true', dest='keep_prefix',
help="Don't remove the install prefix if installation fails.")
@@ -43,6 +47,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="Display verbose build output while installing.")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
help="Fake install. Just remove the prefix and touch a fake file in it.")
@@ -54,13 +61,20 @@ def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False
spack.do_checksum = False # TODO: remove this global.
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
package.do_install(keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
fake=args.fake)
package.do_install(
keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
verbose=args.verbose,
fake=args.fake)

View file

@@ -54,6 +54,8 @@ def setup_parser(subparser):
help="Top-level packages directory for Spack.")
directories.add_argument(
'-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
directories.add_argument(
'-S', '--stages', action='store_true', help="Top level Stage directory.")
directories.add_argument(
'-b', '--build-dir', action='store_true',
help="Checked out or expanded source directory for a spec (requires it to be staged first).")
@@ -72,6 +74,9 @@ def location(parser, args):
elif args.packages:
print spack.db.root
elif args.stages:
print spack.stage_path
else:
specs = spack.cmd.parse_specs(args.spec)
if not specs:

View file

@@ -75,27 +75,22 @@ def mirror_add(args):
if url.startswith('/'):
url = 'file://' + url
config = spack.config.get_config('user')
config.set_value('mirror', args.name, 'url', url)
config.write()
mirror_dict = { args.name : url }
spack.config.add_to_mirror_config({ args.name : url })
def mirror_remove(args):
"""Remove a mirror by name."""
config = spack.config.get_config('user')
name = args.name
if not config.has_named_section('mirror', name):
rmd_something = spack.config.remove_from_config('mirrors', name)
if not rmd_something:
tty.die("No such mirror: %s" % name)
config.remove_named_section('mirror', name)
config.write()
def mirror_list(args):
"""Print out available mirrors to the console."""
config = spack.config.get_config()
sec_names = config.get_section_names('mirror')
sec_names = spack.config.get_mirror_config()
if not sec_names:
tty.msg("No mirrors configured.")
return
@@ -103,13 +98,12 @@ def mirror_list(args):
max_len = max(len(s) for s in sec_names)
fmt = "%%-%ds%%s" % (max_len + 4)
for name in sec_names:
val = config.get_value('mirror', name, 'url')
for name, val in sec_names.iteritems():
print fmt % (name, val)
def _read_specs_from_file(filename):
with closing(open(filename, "r")) as stream:
with open(filename, "r") as stream:
for i, string in enumerate(stream):
try:
s = Spec(string)

View file

@@ -27,7 +27,6 @@
import code
from external import argparse
import platform
from contextlib import closing
import spack
@@ -44,13 +43,13 @@ def python(parser, args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with closing(open(startup_file)) as startup:
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, 'exec')
python_args = args.python_args
if python_args:
sys.argv = python_args
with closing(open(python_args[0])) as file:
with open(python_args[0]) as file:
console.runsource(file.read(), python_args[0], 'exec')
else:
console.interact("Spack version %s\nPython %s, %s %s"""

View file

@@ -36,7 +36,6 @@ def setup_parser(subparser):
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
dir_parser = subparser.add_mutually_exclusive_group()
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to stage")
@@ -52,4 +51,3 @@ def stage(parser, args):
for spec in specs:
package = spack.db.get(spec)
package.do_stage()

View file

@@ -45,7 +45,7 @@
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_default_order = ['gcc', 'intel', 'pgi', 'clang']
_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc']
def _auto_compiler_spec(function):
def converter(cspec_like):
@@ -60,24 +60,25 @@ def _get_config():
first."""
# If any configuration file has compilers, just stick with the
# ones already configured.
config = spack.config.get_config()
config = spack.config.get_compilers_config()
existing = [spack.spec.CompilerSpec(s)
for s in config.get_section_names('compiler')]
for s in config]
if existing:
return config
compilers = find_compilers(*get_path('PATH'))
new_compilers = [
c for c in compilers if c.spec not in existing]
add_compilers_to_config('user', *new_compilers)
add_compilers_to_config('user', *compilers)
# After writing compilers to the user config, return a full config
# from all files.
return spack.config.get_config(refresh=True)
return spack.config.get_compilers_config()
@memoized
_cached_default_compiler = None
def default_compiler():
global _cached_default_compiler
if _cached_default_compiler:
return _cached_default_compiler
versions = []
for name in _default_order: # TODO: customize order.
versions = find(name)
@@ -86,7 +87,8 @@ def default_compiler():
if not versions:
raise NoCompilersError()
return sorted(versions)[-1]
_cached_default_compiler = sorted(versions)[-1]
return _cached_default_compiler
def find_compilers(*path):
@@ -122,20 +124,18 @@ def find_compilers(*path):
def add_compilers_to_config(scope, *compilers):
config = spack.config.get_config(scope)
compiler_config_tree = {}
for compiler in compilers:
add_compiler(config, compiler)
config.write()
compiler_entry = {}
for c in _required_instance_vars:
val = getattr(compiler, c)
if not val:
val = "None"
compiler_entry[c] = val
compiler_config_tree[str(compiler.spec)] = compiler_entry
spack.config.add_to_compiler_config(compiler_config_tree, scope)
def add_compiler(config, compiler):
def setup_field(cspec, name, exe):
path = exe if exe else "None"
config.set_value('compiler', cspec, name, path)
for c in _required_instance_vars:
setup_field(compiler.spec, c, getattr(compiler, c))
def supported_compilers():
"""Return a set of names of compilers supported by Spack.
@@ -157,8 +157,7 @@ def all_compilers():
available to build with. These are instances of CompilerSpec.
"""
configuration = _get_config()
return [spack.spec.CompilerSpec(s)
for s in configuration.get_section_names('compiler')]
return [spack.spec.CompilerSpec(s) for s in configuration]
@_auto_compiler_spec
@@ -176,7 +175,7 @@ def compilers_for_spec(compiler_spec):
config = _get_config()
def get_compiler(cspec):
items = dict((k,v) for k,v in config.items('compiler "%s"' % cspec))
items = config[str(cspec)]
if not all(n in items for n in _required_instance_vars):
raise InvalidCompilerConfigurationError(cspec)

View file

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by François Bissey, francois.bissey@canterbury.ac.nz, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
class Xl(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['xlc','xlc_r']
# Subclasses use possible names of C++ compiler
cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
# Subclasses use possible names of Fortran 77 compiler
f77_names = ['xlf','xlf_r']
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
@property
def cxx11_flag(self):
if self.version < ver('13.1'):
tty.die("Only xlC 13.1 and above have some c++11 support.")
else:
return "-qlanglvl=extended0x"
@classmethod
def default_version(self, comp):
"""The '-qversion' is the standard option fo XL compilers.
Output looks like this::
IBM XL C/C++ for Linux, V11.1 (5724-X14)
Version: 11.01.0000.0000
or::
IBM XL Fortran for Linux, V13.1 (5724-X16)
Version: 13.01.0000.0000
or::
IBM XL C/C++ for AIX, V11.1 (5724-X13)
Version: 11.01.0000.0009
or::
IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0
Version: 09.00.0000.0017
"""
return get_compiler_version(
comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
@classmethod
def fc_version(cls, fc):
"""The fortran and C/C++ versions of the XL compiler are always two units apart.
By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
Having such a difference in version number is confusing spack quite a lot.
Most notably if you keep the versions as is the default xl compiler will only
have fortran and no C/C++.
So we associate the Fortran compiler with the version associated to the C/C++
compiler.
One last stumble. Version numbers over 10 have at least a .1 those under 10
a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
such a compiler mix and possibly older version of AIX and linux on power.
"""
fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
cver = float(fver) - 2
if cver < 10:
cver = cver - 0.1
return str(cver)
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
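A worked example of the offset, assuming the rules in the docstring hold:

    # xlf reports 15.1 -> cver = 15.1 - 2 = 13.1   (>= 10, so the .1 stays)
    # xlf reports 11.1 -> cver = 11.1 - 2 = 9.1, then 9.1 - 0.1 = 9.0
    # so xlf 11.1 is registered as 9.0, the version of its C/C++ sibling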

View file

@@ -101,6 +101,16 @@ def concretize_architecture(self, spec):
spec.architecture = spack.architecture.sys_type()
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
the default variants from the package specification.
"""
for name, variant in spec.package.variants.items():
if name not in spec.variants:
spec.variants[name] = spack.spec.VariantSpec(
name, variant.default)
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler

View file

@@ -28,452 +28,313 @@
===============================
When Spack runs, it pulls configuration data from several config
files, much like bash shells. In Spack, there are two configuration
scopes:
directories, each of which contains configuration files. In Spack,
there are two configuration scopes:
1. ``site``: Spack loads site-wide configuration options from
``$(prefix)/etc/spackconfig``.
``$(prefix)/etc/spack/``.
2. ``user``: Spack next loads per-user configuration options from
~/.spackconfig.
If user options have the same names as site options, the user options
take precedence.
~/.spack/.
Spack may read configuration files from both of these locations. When
configurations conflict, the user config options take precedence over
the site configurations. Each configuration directory may contain
several configuration files, such as compilers.yaml or mirrors.yaml.
Configuration file format
===============================
Configuration files are formatted using .gitconfig syntax, which is
much like Windows .INI format. This format is implemented by Python's
ConfigParser class, and it's easy to read and versatile.
Configuration files are formatted using YAML syntax.
This format is parsed with Python's
yaml module, and it's easy to read and versatile.
The file is divided into sections, like this ``compiler`` section::
The config files are structured as trees, like this ``compiler`` section::
[compiler]
cc = /usr/bin/gcc
compilers:
chaos_5_x86_64_ib:
gcc@4.4.7:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
bgqos_0:
xlc@12.1:
cc: /usr/local/bin/mpixlc
...
In each section there are options (cc), and each option has a value
(/usr/bin/gcc).
In this example, entries like ``compilers`` and ``xlc@12.1`` are used to
categorize entries beneath them in the tree. At the leaves of the tree,
entries like ``cc`` and ``cxx`` are specified as name/value pairs.
Borrowing from git, we also allow named sections, e.g.:
Spack returns these trees as nested dicts. The dict for the above example
would look like:
[compiler "gcc@4.7.3"]
cc = /usr/bin/gcc
{ 'compilers' :
{ 'chaos_5_x86_64_ib' :
{ 'gcc@4.4.7' :
{ 'cc' : '/usr/bin/gcc',
'cxx' : '/usr/bin/g++'
'f77' : '/usr/bin/gfortran'
'fc' : '/usr/bin/gfortran' }
}
{ 'bgqos_0' :
{ 'cc' : '/usr/local/bin/mpixlc' }
}
}
This is a compiler section, but it's for the specific compiler,
``gcc@4.7.3``. ``gcc@4.7.3`` is the name.
Keys
===============================
Together, the section, name, and option, separated by periods, are
called a ``key``. Keys can be used on the command line to set
configuration options explicitly (this is also borrowed from git).
For example, to change the C compiler used by gcc@4.7.3, you could do
this:
spack config compiler.gcc@4.7.3.cc /usr/local/bin/gcc
That will create a named compiler section in the user's .spackconfig
like the one shown above.
Some routines, like get_mirror_config and get_compilers_config, may strip
off the top levels of the tree and return subtrees.
"""
import os
import re
import inspect
import ConfigParser as cp
import exceptions
import sys
from external.ordereddict import OrderedDict
from llnl.util.lang import memoized
import spack.error
__all__ = [
'SpackConfigParser', 'get_config', 'SpackConfigurationError',
'InvalidConfigurationScopeError', 'InvalidSectionNameError',
'ReadOnlySpackConfigError', 'ConfigParserError', 'NoOptionError',
'NoSectionError']
from external import yaml
from external.yaml.error import MarkedYAMLError
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
_named_section_re = r'([^ ]+) "([^"]+)"'
_config_sections = {}
class _ConfigCategory:
name = None
filename = None
merge = True
def __init__(self, n, f, m):
self.name = n
self.filename = f
self.merge = m
self.files_read_from = []
self.result_dict = {}
_config_sections[n] = self
_ConfigCategory('compilers', 'compilers.yaml', True)
_ConfigCategory('mirrors', 'mirrors.yaml', True)
_ConfigCategory('view', 'views.yaml', True)
_ConfigCategory('order', 'orders.yaml', True)
"""Names of scopes and their corresponding configuration files."""
_scopes = OrderedDict({
'site' : os.path.join(spack.etc_path, 'spackconfig'),
'user' : os.path.expanduser('~/.spackconfig')
})
config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
('user', os.path.expanduser('~/.spack'))]
_field_regex = r'^([\w-]*)' \
r'(?:\.(.*(?=.)))?' \
r'(?:\.([\w-]+))?$'
_compiler_by_arch = {}
_read_config_file_result = {}
def _read_config_file(filename):
"""Read a given YAML configuration file"""
global _read_config_file_result
if filename in _read_config_file_result:
return _read_config_file_result[filename]
_section_regex = r'^([\w-]*)\s*' \
r'\"([^"]*\)\"$'
try:
with open(filename) as f:
ydict = yaml.load(f)
except MarkedYAMLError, e:
tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
except exceptions.IOError, e:
_read_config_file_result[filename] = None
return None
_read_config_file_result[filename] = ydict
return ydict
# Cache of configs -- we memoize this for performance.
_config = {}
def clear_config_caches():
"""Clears the caches for configuration files, which will cause them
to be re-read upon the next request"""
for key,s in _config_sections.iteritems():
s.files_read_from = []
s.result_dict = {}
spack.config._read_config_file_result = {}
spack.config._compiler_by_arch = {}
spack.compilers._cached_default_compiler = None
def get_config(scope=None, **kwargs):
"""Get a Spack configuration object, which can be used to set options.
With no arguments, this returns a SpackConfigParser with config
options loaded from all config files. This is how client code
should read Spack configuration options.
def _merge_dicts(d1, d2):
"""Recursively merges two configuration trees, with entries
in d2 taking precedence over d1"""
if not d1:
return d2.copy()
if not d2:
return d1
Optionally, a scope parameter can be provided. Valid scopes
are ``site`` and ``user``. If a scope is provided, only the
options from that scope's configuration file are loaded. The
caller can set or unset options, then call ``write()`` on the
config object to write it back out to the original config file.
for key2, val2 in d2.iteritems():
if not key2 in d1:
d1[key2] = val2
continue
val1 = d1[key2]
if isinstance(val1, dict) and isinstance(val2, dict):
d1[key2] = _merge_dicts(val1, val2)
continue
if isinstance(val1, list) and isinstance(val2, list):
val1.extend(val2)
seen = set()
d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
continue
d1[key2] = val2
return d1
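For example (values hypothetical), user entries win on conflicting leaves while unrelated site entries survive:

    site = {'mirrors': {'a': 'http://site/a', 'b': 'http://site/b'}}
    user = {'mirrors': {'b': 'http://user/b'}}
    merged = _merge_dicts(site, user)
    # -> {'mirrors': {'a': 'http://site/a', 'b': 'http://user/b'}}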
By default, this will cache configurations and return the last
read version of the config file. If the config file is
modified and you need to refresh, call get_config with the
refresh=True keyword argument. This will force all files to be
re-read.
"""
refresh = kwargs.get('refresh', False)
if refresh:
_config.clear()
if scope not in _config:
if scope is None:
_config[scope] = SpackConfigParser([path for path in _scopes.values()])
elif scope not in _scopes:
raise UnknownConfigurationScopeError(scope)
def get_config(category_name):
"""Get the confguration tree for the names category. Strips off the
top-level category entry from the dict"""
global config_scopes
category = _config_sections[category_name]
if category.result_dict:
return category.result_dict
category.result_dict = {}
for scope, scope_path in config_scopes:
path = os.path.join(scope_path, category.filename)
result = _read_config_file(path)
if not result:
continue
if not category_name in result:
continue
category.files_read_from.insert(0, path)
result = result[category_name]
if category.merge:
category.result_dict = _merge_dicts(category.result_dict, result)
else:
_config[scope] = SpackConfigParser(_scopes[scope])
return _config[scope]
category.result_dict = result
return category.result_dict
def get_filename(scope):
"""Get the filename for a particular config scope."""
if not scope in _scopes:
raise UnknownConfigurationScopeError(scope)
return _scopes[scope]
def get_compilers_config(arch=None):
"""Get the compiler configuration from config files for the given
architecture. Strips off the architecture component of the
configuration"""
global _compiler_by_arch
if not arch:
arch = spack.architecture.sys_type()
if arch in _compiler_by_arch:
return _compiler_by_arch[arch]
def _parse_key(key):
"""Return the section, name, and option the field describes.
Values are returned in a 3-tuple.
e.g.:
The field name ``compiler.gcc@4.7.3.cc`` refers to the 'cc' key
in a section that looks like this:
[compiler "gcc@4.7.3"]
cc = /usr/local/bin/gcc
* The section is ``compiler``
* The name is ``gcc@4.7.3``
* The key is ``cc``
"""
match = re.search(_field_regex, key)
if match:
return match.groups()
cc_config = get_config('compilers')
if arch in cc_config and 'all' in cc_config:
arch_compiler = dict(cc_config[arch])
_compiler_by_arch[arch] = _merge_dicts(arch_compiler, cc_config['all'])
elif arch in cc_config:
_compiler_by_arch[arch] = cc_config[arch]
elif 'all' in cc_config:
_compiler_by_arch[arch] = cc_config['all']
else:
raise InvalidSectionNameError(key)
_compiler_by_arch[arch] = {}
return _compiler_by_arch[arch]
def _make_section_name(section, name):
if not name:
return section
return '%s "%s"' % (section, name)
def get_mirror_config():
"""Get the mirror configuration from config files"""
return get_config('mirrors')
def _autokey(fun):
"""Allow a function to be called with a string key like
'compiler.gcc.cc', or with the section, name, and option
separated. Function should take at least three args, e.g.:
fun(self, section, name, option, [...])
This will allow the function above to be called normally or
with a string key, e.g.:
fun(self, key, [...])
"""
argspec = inspect.getargspec(fun)
fun_nargs = len(argspec[0])
def string_key_func(*args):
nargs = len(args)
if nargs == fun_nargs - 2:
section, name, option = _parse_key(args[1])
return fun(args[0], section, name, option, *args[2:])
elif nargs == fun_nargs:
return fun(*args)
else:
raise TypeError(
"%s takes %d or %d args (found %d)."
% (fun.__name__, fun_nargs - 2, fun_nargs, len(args)))
return string_key_func
def get_config_scope_dirname(scope):
"""For a scope return the config directory"""
global config_scopes
for s,p in config_scopes:
if s == scope:
return p
tty.die("Unknown scope %s. Valid options are %s" %
(scope, ", ".join([s for s,p in config_scopes])))
class SpackConfigParser(cp.RawConfigParser):
"""Slightly modified from Python's raw config file parser to accept
leading whitespace and preserve comments.
"""
# Slightly modify Python option expressions to allow leading whitespace
OPTCRE = re.compile(r'\s*' + cp.RawConfigParser.OPTCRE.pattern)
def __init__(self, file_or_files):
cp.RawConfigParser.__init__(self, dict_type=OrderedDict)
if isinstance(file_or_files, basestring):
self.read([file_or_files])
self.filename = file_or_files
else:
self.read(file_or_files)
self.filename = None
def get_config_scope_filename(scope, category_name):
"""For some scope and category, get the name of the configuration file"""
if not category_name in _config_sections:
tty.die("Unknown config category %s. Valid options are: %s" %
(category_name, ", ".join([s for s in _config_sections])))
return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
@_autokey
def set_value(self, section, name, option, value):
"""Set the value for a key. If the key is in a section or named
section that does not yet exist, add that section.
"""
sn = _make_section_name(section, name)
if not self.has_section(sn):
self.add_section(sn)
# Allow valueless config options to be set like this:
# spack config set mirror https://foo.bar.com
#
# Instead of this, which parses incorrectly:
# spack config set mirror.https://foo.bar.com
#
if option is None:
option = value
value = None
self.set(sn, option, value)
@_autokey
def get_value(self, section, name, option):
"""Get the value for a key. Raises NoOptionError or NoSectionError if
the key is not present."""
sn = _make_section_name(section, name)
def add_to_config(category_name, addition_dict, scope=None):
"""Merge a new dict into a configuration tree and write the new
configuration to disk"""
global _read_config_file_result
get_config(category_name)
category = _config_sections[category_name]
#If scope is specified, use it. Otherwise use the last config scope that
#we successfully parsed data from.
file = None
path = None
if not scope and not category.files_read_from:
scope = 'user'
if scope:
try:
if not option:
# TODO: format this better
return self.items(sn)
dir = get_config_scope_dirname(scope)
if not os.path.exists(dir):
mkdirp(dir)
path = os.path.join(dir, category.filename)
file = open(path, 'w')
except exceptions.IOError, e:
pass
else:
for p in category.files_read_from:
try:
file = open(p, 'w')
except exceptions.IOError, e:
pass
if file:
path = p
break
if not file:
tty.die('Unable to write to config file %s' % path)
return self.get(sn, option)
#Merge the new information into the existing file info, then write to disk
new_dict = _read_config_file_result[path]
if new_dict and category_name in new_dict:
new_dict = new_dict[category_name]
new_dict = _merge_dicts(new_dict, addition_dict)
new_dict = { category_name : new_dict }
_read_config_file_result[path] = new_dict
yaml.dump(new_dict, stream=file, default_flow_style=False)
file.close()
# Wrap ConfigParser exceptions in SpackExceptions
except cp.NoOptionError, e: raise NoOptionError(e)
except cp.NoSectionError, e: raise NoSectionError(e)
except cp.Error, e: raise ConfigParserError(e)
#Merge the new information into the cached results
category.result_dict = _merge_dicts(category.result_dict, addition_dict)
@_autokey
def has_value(self, section, name, option):
"""Return whether the configuration file has a value for a
particular key."""
sn = _make_section_name(section, name)
return self.has_option(sn, option)
def add_to_mirror_config(addition_dict, scope=None):
"""Add mirrors to the configuration files"""
add_to_config('mirrors', addition_dict, scope)
def has_named_section(self, section, name):
sn = _make_section_name(section, name)
return self.has_section(sn)
def add_to_compiler_config(addition_dict, scope=None, arch=None):
"""Add compilerss to the configuration files"""
if not arch:
arch = spack.architecture.sys_type()
add_to_config('compilers', { arch : addition_dict }, scope)
clear_config_caches()
def remove_named_section(self, section, name):
sn = _make_section_name(section, name)
self.remove_section(sn)
def remove_from_config(category_name, key_to_rm, scope=None):
"""Remove a configuration key and write a new configuration to disk"""
global config_scopes
get_config(category_name)
scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
category = _config_sections[category_name]
rmd_something = False
for s in scopes_to_rm_from:
path = get_config_scope_filename(s, category_name)
result = _read_config_file(path)
if not result:
continue
if not key_to_rm in result[category_name]:
continue
with open(path, 'w') as f:
result[category_name].pop(key_to_rm, None)
yaml.dump(result, stream=f, default_flow_style=False)
category.result_dict.pop(key_to_rm, None)
rmd_something = True
return rmd_something
def get_section_names(self, sectype):
"""Get all named sections with the specified type.
A named section looks like this:
[compiler "gcc@4.7"]
Names of sections are returned as a list, e.g.:
['gcc@4.7', 'intel@12.3', 'pgi@4.2']
You can get items in the sections like this:
"""
sections = []
for secname in self.sections():
match = re.match(_named_section_re, secname)
if match:
t, name = match.groups()
if t == sectype:
sections.append(name)
return sections
def write(self, path_or_fp=None):
"""Write this configuration out to a file.
If called with no arguments, this will write the
configuration out to the file from which it was read. If
this config was read from multiple files, e.g. site
configuration and then user configuration, write will
simply raise an error.
If called with a path or file object, this will write the
configuration out to the supplied path or file object.
"""
if path_or_fp is None:
if not self.filename:
raise ReadOnlySpackConfigError()
path_or_fp = self.filename
if isinstance(path_or_fp, basestring):
path_or_fp = open(path_or_fp, 'w')
self._write(path_or_fp)
def _read(self, fp, fpname):
"""This is a copy of Python 2.6's _read() method, with support for
continuation lines removed."""
cursect = None # None, or a dictionary
optname = None
comment = 0
lineno = 0
e = None # None, or an exception
while True:
line = fp.readline()
if not line:
break
lineno = lineno + 1
# comment or blank line?
if ((line.strip() == '' or line[0] in '#;') or
(line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR")):
self._sections["comment-%d" % comment] = line
comment += 1
# a section header or option header?
else:
# is it a section header?
mo = self.SECTCRE.match(line)
if mo:
sectname = mo.group('header')
if sectname in self._sections:
cursect = self._sections[sectname]
elif sectname == cp.DEFAULTSECT:
cursect = self._defaults
else:
cursect = self._dict()
cursect['__name__'] = sectname
self._sections[sectname] = cursect
# So sections can't start with a continuation line
optname = None
# no section header in the file?
elif cursect is None:
raise cp.MissingSectionHeaderError(fpname, lineno, line)
# an option line?
else:
mo = self.OPTCRE.match(line)
if mo:
optname, vi, optval = mo.group('option', 'vi', 'value')
if vi in ('=', ':') and ';' in optval:
# ';' is a comment delimiter only if it follows
# a spacing character
pos = optval.find(';')
if pos != -1 and optval[pos-1].isspace():
optval = optval[:pos]
optval = optval.strip()
# allow empty values
if optval == '""':
optval = ''
optname = self.optionxform(optname.rstrip())
cursect[optname] = optval
else:
# a non-fatal parsing error occurred. set up the
# exception but keep going. the exception will be
# raised at the end of the file and will contain a
# list of all bogus lines
if not e:
e = cp.ParsingError(fpname)
e.append(lineno, repr(line))
# if any parsing errors occurred, raise an exception
if e:
raise e
def _write(self, fp):
"""Write an .ini-format representation of the configuration state.
This is taken from the default Python 2.6 source. It writes 4
spaces at the beginning of lines instead of no leading space.
"""
if self._defaults:
fp.write("[%s]\n" % cp.DEFAULTSECT)
for (key, value) in self._defaults.items():
fp.write(" %s = %s\n" % (key, str(value).replace('\n', '\n\t')))
fp.write("\n")
for section in self._sections:
# Handles comments and blank lines.
if isinstance(self._sections[section], basestring):
fp.write(self._sections[section])
continue
else:
# Allow leading whitespace
fp.write("[%s]\n" % section)
for (key, value) in self._sections[section].items():
if key != "__name__":
fp.write(" %s = %s\n" %
(key, str(value).replace('\n', '\n\t')))
class SpackConfigurationError(spack.error.SpackError):
def __init__(self, *args):
super(SpackConfigurationError, self).__init__(*args)
class InvalidConfigurationScopeError(SpackConfigurationError):
def __init__(self, scope):
super(InvalidConfigurationScopeError, self).__init__(
"Invalid configuration scope: '%s'" % scope,
"Options are: %s" % ", ".join(*_scopes.values()))
class InvalidSectionNameError(SpackConfigurationError):
"""Raised when the name for a section is invalid."""
def __init__(self, name):
super(InvalidSectionNameError, self).__init__(
"Invalid section specifier: '%s'" % name)
class ReadOnlySpackConfigError(SpackConfigurationError):
"""Raised when user attempts to write to a config read from multiple files."""
def __init__(self):
super(ReadOnlySpackConfigError, self).__init__(
"Can only write to a single-file SpackConfigParser")
class ConfigParserError(SpackConfigurationError):
"""Wrapper for the Python ConfigParser's errors"""
def __init__(self, error):
super(ConfigParserError, self).__init__(str(error))
self.error = error
class NoOptionError(ConfigParserError):
"""Wrapper for ConfigParser NoOptionError"""
def __init__(self, error):
super(NoOptionError, self).__init__(error)
class NoSectionError(ConfigParserError):
"""Wrapper for ConfigParser NoOptionError"""
def __init__(self, error):
super(NoSectionError, self).__init__(error)
"""Print a configuration to stdout"""
def print_category(category_name):
if not category_name in _config_sections:
tty.die("Unknown config category %s. Valid options are: %s" %
(category_name, ", ".join([s for s in _config_sections])))
yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)

View file

@@ -0,0 +1,277 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""This package contains directives that can be used within a package.
Directives are functions that can be called inside a package
definition to modify the package, for example:
class OpenMpi(Package):
depends_on("hwloc")
provides("mpi")
...
``provides`` and ``depends_on`` are spack directives.
The available directives are:
* ``version``
* ``depends_on``
* ``provides``
* ``extends``
* ``patch``
* ``variant``
"""
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version',
'variant' ]
import re
import inspect
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
import spack.url
from spack.version import Version
from spack.patch import Patch
from spack.variant import Variant
from spack.spec import Spec, parse_anonymous_spec
#
# This is a list of all directives, built up as they are defined in
# this file.
#
directives = {}
def ensure_dicts(pkg):
"""Ensure that a package has all the dicts required by directives."""
for name, d in directives.items():
d.ensure_dicts(pkg)
class directive(object):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in Python.
Here's an example directive:
@directive(dicts='versions')
def version(pkg, ...):
...
This directive allows you to write:
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
def __init__(self, dicts=None):
if isinstance(dicts, basestring):
dicts = (dicts,)
elif type(dicts) not in (list, tuple):
raise TypeError(
"dicts arg must be list, tuple, or string. Found %s."
% type(dicts))
self.dicts = dicts
def ensure_dicts(self, pkg):
"""Ensure that a package has the dicts required by this directive."""
for d in self.dicts:
if not hasattr(pkg, d):
setattr(pkg, d, {})
attr = getattr(pkg, d)
if not isinstance(attr, dict):
raise spack.error.SpackError(
"Package %s has non-dict %s attribute!" % (pkg, d))
def __call__(self, directive_function):
directives[directive_function.__name__] = self
def wrapped(*args, **kwargs):
pkg = DictWrapper(caller_locals())
self.ensure_dicts(pkg)
pkg.name = get_calling_module_name()
return directive_function(pkg, *args, **kwargs)
return wrapped
@directive('versions')
def version(pkg, ver, checksum=None, **kwargs):
"""Adds a version and metadata describing how to fetch it.
Metadata is just stored as a dict in the package's versions
dictionary. Package must turn it into a valid fetch strategy
later.
"""
# TODO: checksum vs md5 distinction is confusing -- fix this.
# special case checksum for backward compatibility
if checksum:
kwargs['md5'] = checksum
# Store kwargs for the package to use later with a fetch strategy.
pkg.versions[Version(ver)] = kwargs
def _depends_on(pkg, spec, when=None):
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
else:
conditions[when_spec] = dep_spec
@directive('dependencies')
def depends_on(pkg, spec, when=None):
"""Creates a dict of deps with specs defining when they apply."""
_depends_on(pkg, spec, when=when)
@directive(('extendees', 'dependencies'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
needs to be installed into the prefix of the Python installation.
Spack handles this by installing modules into their own prefix,
but allowing ONE module version to be symlinked into a parent
Python install at a time.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
mechanism.
"""
if pkg.extendees:
raise DirectiveError("Packages can extend at most one other package.")
when = kwargs.pop('when', pkg.name)
_depends_on(pkg, spec, when=when)
pkg.extendees[spec] = (Spec(spec), kwargs)
@directive('provided')
def provides(pkg, *specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
spec_string = kwargs.get('when', pkg.name)
provider_spec = parse_anonymous_spec(spec_string, pkg.name)
for string in specs:
for provided_spec in spack.spec.parse(string):
if pkg.name == provided_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
pkg.provided[provided_spec] = provider_spec
@directive('patches')
def patch(pkg, url_or_filename, level=1, when=None):
"""Packages can declare patches to apply to source. You can
optionally provide a when spec to indicate that a particular
patch should only be applied when the package's spec meets
certain conditions (e.g. a particular version).
"""
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
if when_spec not in pkg.patches:
pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)]
else:
# if this spec is identical to some other, then append this
# patch to the existing list.
pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level))
@directive('variants')
def variant(pkg, name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
default = bool(default)
description = str(description).strip()
if not re.match(spack.spec.identifier_re, name):
raise DirectiveError("Invalid variant name in %s: '%s'" % (pkg.name, name))
pkg.variants[name] = Variant(default, description)
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
def __init__(self, directive, message):
super(DirectiveError, self).__init__(message)
self.directive = directive
class CircularReferenceError(DirectiveError):
"""This is raised when something depends on itself."""
def __init__(self, directive, package):
super(CircularReferenceError, self).__init__(
directive,
"Package '%s' cannot pass itself to %s." % (package, directive))
self.package = package
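Taken together, a minimal (hypothetical) package using these directives might read:

    class Mylib(Package):                    # example package, not in Spack
        homepage = "http://example.com/mylib"
        url      = "http://example.com/mylib-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')  # md5 checksum
        variant('shared', default=True, description="Build shared libraries")
        depends_on('hwloc')
        depends_on('mpi', when='+shared')    # conditional dependency
        patch('mylib-fix.patch', when='@1.0')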

View file

@@ -27,14 +27,14 @@
import exceptions
import hashlib
import shutil
import glob
import tempfile
from contextlib import closing
from external import yaml
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path, mkdirp
import spack
from spack.spec import Spec
from spack.error import SpackError
@@ -81,7 +81,7 @@ def relative_path_for_spec(self, spec):
raise NotImplementedError()
def make_path_for_spec(self, spec):
def create_install_directory(self, spec):
"""Creates the installation directory for a spec."""
raise NotImplementedError()
@@ -131,7 +131,7 @@ def path_for_spec(self, spec):
return os.path.join(self.root, path)
def remove_path_for_spec(self, spec):
def remove_install_directory(self, spec):
"""Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong.
"""
@@ -153,94 +153,71 @@ def remove_path_for_spec(self, spec):
path = os.path.dirname(path)
def traverse_dirs_at_depth(root, depth, path_tuple=(), curdepth=0):
"""For each directory at <depth> within <root>, return a tuple representing
the ancestors of that directory.
"""
if curdepth == depth and curdepth != 0:
yield path_tuple
elif depth > curdepth:
for filename in os.listdir(root):
child = os.path.join(root, filename)
if os.path.isdir(child):
child_tuple = path_tuple + (filename,)
for tup in traverse_dirs_at_depth(
child, depth, child_tuple, curdepth+1):
yield tup
class SpecHashDirectoryLayout(DirectoryLayout):
class YamlDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this::
<install_root>/
<install root>/
<architecture>/
<compiler>/
name@version+variant-<dependency_hash>
<compiler>-<compiler version>/
<name>-<version>-<variants>-<hash>
Where dependency_hash is a SHA-1 hash prefix for the full package spec.
This accounts for dependencies.
The hash here is a SHA-1 hash for the full DAG plus the build
spec. TODO: implement the build spec.
If there is ever a hash collision, you won't be able to install a new
package unless you use a larger prefix. However, the full spec is stored
in a file called .spec in each directory, so you can migrate an entire
install directory to a new hash size pretty easily.
TODO: make a tool to migrate install directories to different hash sizes.
To avoid special characters (like ~) in the directory name,
only enabled variants are included in the install path.
Disabled variants are omitted.
"""
def __init__(self, root, **kwargs):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
spec_file_name = kwargs.get('spec_file_name', '.spec')
extension_file_name = kwargs.get('extension_file_name', '.extensions')
super(SpecHashDirectoryLayout, self).__init__(root)
self.spec_file_name = spec_file_name
self.extension_file_name = extension_file_name
super(YamlDirectoryLayout, self).__init__(root)
self.metadata_dir = kwargs.get('metadata_dir', '.spack')
self.hash_len = kwargs.get('hash_len', None)
self.spec_file_name = 'spec.yaml'
self.extension_file_name = 'extensions.yaml'
self.build_log_name = 'build.out' # TODO: use config file.
# Cache of already written/read extension maps.
self._extension_maps = {}
@property
def hidden_file_paths(self):
return ('.spec', '.extensions')
return (self.metadata_dir,)
def relative_path_for_spec(self, spec):
_check_concrete(spec)
dir_name = spec.format('$_$@$+$#')
return join_path(spec.architecture, spec.compiler, dir_name)
enabled_variants = (
'-' + v.name for v in spec.variants.values()
if v.enabled)
dir_name = "%s-%s%s-%s" % (
spec.name,
spec.version,
''.join(enabled_variants),
spec.dag_hash(self.hash_len))
path = join_path(
spec.architecture,
"%s-%s" % (spec.compiler.name, spec.compiler.version),
dir_name)
return path
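For instance, a (hypothetical) concrete spec would be laid out as:

    # <root>/chaos_5_x86_64_ib/gcc-4.4.7/mpileaks-1.0-debug-abc1234
    #        architecture      compiler  name-version-variants-hash
    # a disabled variant ('~debug') is simply omitted from the name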
def write_spec(self, spec, path):
"""Write a spec out to a file."""
with closing(open(path, 'w')) as spec_file:
spec_file.write(spec.tree(ids=False, cover='nodes'))
_check_concrete(spec)
with open(path, 'w') as f:
spec.to_yaml(f)
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
with closing(open(path)) as spec_file:
# Specs from files are assumed normal and concrete
spec = Spec(spec_file.read().replace('\n', ''))
with open(path) as f:
spec = Spec.from_yaml(f)
if all(spack.db.exists(s.name) for s in spec.traverse()):
copy = spec.copy()
# TODO: It takes a lot of time to normalize every spec on read.
# TODO: Storing graph info with spec files would fix this.
copy.normalize()
if copy.concrete:
return copy # These are specs spack still understands.
# If we get here, either the spec is no longer in spack, or
# something about its dependencies has changed. So we need to
# just assume the read spec is correct. We'll lose graph
# information if we do this, but this is just for best effort
# for commands like uninstall and find. Currently Spack
# doesn't do anything that needs the graph info after install.
# TODO: store specs with full connectivity information, so
# that we don't have to normalize or reconstruct based on
# changing dependencies in the Spack tree.
# Specs read from actual installations are always concrete
spec._normal = True
spec._concrete = True
return spec
@@ -249,10 +226,19 @@ def read_spec(self, path):
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
return join_path(self.path_for_spec(spec), self.spec_file_name)
return join_path(self.metadata_path(spec), self.spec_file_name)
def make_path_for_spec(self, spec):
def metadata_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir)
def build_log_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_log_name)
def create_install_directory(self, spec):
_check_concrete(spec)
path = self.path_for_spec(spec)
@@ -267,16 +253,13 @@ def make_path_for_spec(self, spec):
if installed_spec == self.spec:
raise InstallDirectoryAlreadyExistsError(path)
spec_hash = self.hash_spec(spec)
installed_hash = self.hash_spec(installed_spec)
if installed_spec == spec_hash:
if spec.dag_hash() == installed_spec.dag_hash():
raise SpecHashCollisionError(installed_spec, spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match SHA-1 hash!'
% spec_file_path)
'Spec file in %s does not match hash!' % spec_file_path)
mkdirp(path)
mkdirp(self.metadata_path(spec))
self.write_spec(spec, spec_file_path)
@@ -285,25 +268,50 @@ def all_specs(self):
if not os.path.isdir(self.root):
return []
specs = []
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_file_path = join_path(
self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
specs.append(spec)
return specs
pattern = join_path(
self.root, '*', '*', '*', self.metadata_dir, self.spec_file_name)
spec_files = glob.glob(pattern)
return [self.read_spec(s) for s in spec_files]
@memoized
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.path_for_spec(spec), self.extension_file_name)
return join_path(self.metadata_path(spec), self.extension_file_name)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# write tmp file
with tmp:
yaml.dump({
'extensions' : [
{ ext.name : {
'hash' : ext.dag_hash(),
'path' : str(ext.prefix)
}} for ext in sorted(extensions.values())]
}, tmp, default_flow_style=False)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
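The resulting extensions.yaml would look something like this (names, hashes, and paths hypothetical):

    extensions:
    - py-numpy:
        hash: 6cxpuiyxvxpdzbgmbpcirifjsjnfu66r
        path: /opt/spack/chaos_5_x86_64_ib/gcc-4.4.7/py-numpy-1.9.2-6cxpuiyx
    - py-setuptools:
        hash: 7ba6r4yxkzkin2cgk6sog4at33begvyq
        path: /opt/spack/chaos_5_x86_64_ib/gcc-4.4.7/py-setuptools-18.1-7ba6r4yx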
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currnetly
"""Get a dict<name -> spec> for all extensions currently
installed for this package."""
_check_concrete(spec)
@@ -313,16 +321,26 @@ def _extension_map(self, spec):
self._extension_maps[spec] = {}
else:
by_hash = self.specs_by_hash()
exts = {}
with closing(open(path)) as ext_file:
for line in ext_file:
try:
spec = Spec(line.strip())
exts[spec.name] = spec
except spack.error.SpackError, e:
# TODO: do something better here -- should be
# resilient to corrupt files.
raise InvalidExtensionSpecError(str(e))
with open(path) as ext_file:
yaml_file = yaml.load(ext_file)
for entry in yaml_file['extensions']:
name = next(iter(entry))
dag_hash = entry[name]['hash']
prefix = entry[name]['path']
if not dag_hash in by_hash:
raise InvalidExtensionSpecError(
"Spec %s not found in %s." % (dag_hash, prefix))
ext_spec = by_hash[dag_hash]
if not prefix == ext_spec.prefix:
raise InvalidExtensionSpecError(
"Prefix %s does not match spec with hash %s: %s"
% (prefix, dag_hash, ext_spec))
exts[ext_spec.name] = ext_spec
self._extension_maps[spec] = exts
return self._extension_maps[spec]
@@ -330,6 +348,7 @@ def _extension_map(self, spec):
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
_check_concrete(spec)
return self._extension_map(spec).copy()
@@ -349,23 +368,6 @@ def check_activated(self, spec, ext_spec):
raise NoSuchExtensionError(spec, ext_spec)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# Write temp file.
with closing(tmp):
for extension in sorted(extensions.values()):
tmp.write("%s\n" % extension)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@ -399,9 +401,9 @@ def __init__(self, message):
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an SpecHashDirectoryLayout."""
"""Raised when there is a hash collision in an install layout."""
def __init__(self, installed_spec, new_spec):
super(SpecHashDirectoryLayout, self).__init__(
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
% (installed_spec, new_spec))
@ -422,7 +424,7 @@ def __init__(self, message):
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when make_path_for_sec is called unnecessarily."""
"""Raised when create_install_directory is called unnecessarily."""
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
@ -455,5 +457,3 @@ def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."% (
ext_spec.short_spec, spec.short_spec))

View file

@ -30,7 +30,12 @@ class SpackError(Exception):
def __init__(self, message, long_message=None):
super(SpackError, self).__init__()
self.message = message
self.long_message = long_message
self._long_message = long_message
@property
def long_message(self):
return self._long_message
def __str__(self):

View file

@ -220,13 +220,22 @@ def expand(self):
os.chdir(tarball_container)
decompress(self.archive_file)
# If the tarball *didn't* explode, move
# the expanded directory up & remove the protector directory.
# Check for an exploding tarball, i.e. one that doesn't expand
# to a single directory. If the tarball *didn't* explode,
# move contents up & remove the container directory.
#
# NOTE: The tar program on Mac OS X will encode HFS metadata
# in hidden files, which can end up *alongside* a single
# top-level directory. We ignore hidden files to accommodate
# these "semi-exploding" tarballs.
files = os.listdir(tarball_container)
if len(files) == 1:
expanded_dir = os.path.join(tarball_container, files[0])
non_hidden = filter(lambda f: not f.startswith('.'), files)
if len(non_hidden) == 1:
expanded_dir = os.path.join(tarball_container, non_hidden[0])
if os.path.isdir(expanded_dir):
shutil.move(expanded_dir, self.stage.path)
for f in files:
shutil.move(os.path.join(tarball_container, f),
os.path.join(self.stage.path, f))
os.rmdir(tarball_container)
# Set the wd back to the stage when done.
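A compact restatement of the check above as a standalone helper, useful for exercising the hidden-file rule in isolation:
# Standalone restatement of the semi-exploding tarball rule: ignoring
# hidden files (e.g. OS X "._" HFS metadata), a well-formed tarball
# expands to exactly one top-level directory.
import os

def single_top_level_dir(container):
    files = os.listdir(container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        candidate = os.path.join(container, non_hidden[0])
        if os.path.isdir(candidate):
            return candidate
    return None  # exploded: caller must move the contents up itself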
@ -363,10 +372,6 @@ def __init__(self, **kwargs):
'git', 'tag', 'branch', 'commit', **kwargs)
self._git = None
# For git fetch branches and tags the same way.
if not self.branch:
self.branch = self.tag
@property
def git_version(self):
@ -422,6 +427,12 @@ def fetch(self):
self.git(*args)
self.stage.chdir_to_source()
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if self.tag:
self.git('checkout', self.tag)
def archive(self, destination):
super(GitFetchStrategy, self).archive(destination, exclude='.git')
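A rough command-line equivalent of the conservative tag handling above, hedged as a sketch (newer git could do this in one step with clone --branch; the URL and tag are placeholders):
# Clone first, check the tag out afterwards, since older git versions
# cannot 'clone --branch <tag>'.
import subprocess

def fetch_tag(url, tag, dest):
    subprocess.check_call(['git', 'clone', url, dest])
    subprocess.check_call(['git', 'checkout', tag], cwd=dest)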

View file

@ -50,7 +50,6 @@
import textwrap
import shutil
from glob import glob
from contextlib import closing
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
@ -152,7 +151,7 @@ def write(self):
if not self.paths:
return
with closing(open(self.file_name, 'w')) as f:
with open(self.file_name, 'w') as f:
self._write(f)

View file

@ -195,7 +195,7 @@ def install(self, prefix):
"""
class when(object):
def __init__(self, spec):
pkg = get_calling_package_name()
pkg = get_calling_module_name()
self.spec = parse_anonymous_spec(spec, pkg)
def __call__(self, method):

View file

@ -36,7 +36,7 @@
import os
import re
import time
import inspect
import itertools
import subprocess
import platform as py_platform
import multiprocessing
@ -45,22 +45,23 @@
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.log import log_output
from llnl.util.link_tree import LinkTree
from llnl.util.filesystem import *
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
import spack.compilers
import spack.mirror
import spack.hooks
import spack.build_environment as build_env
import spack.url as url
import spack.directives
import spack.build_environment
import spack.url
import spack.util.web
import spack.fetch_strategy as fs
from spack.version import *
from spack.stage import Stage
from spack.util.web import get_pages
from spack.util.compression import allowed_archive, extension
from spack.util.executable import ProcessError
@ -301,38 +302,15 @@ class SomePackage(Package):
clean() (some of them do this), and others to provide custom behavior.
"""
#
# These variables are defaults for the various "relations".
#
"""Map of information about Versions of this package.
Map goes: Version -> dict of attributes"""
versions = {}
"""Specs of dependency packages, keyed by name."""
dependencies = {}
"""Specs of virtual packages provided by this package, keyed by name."""
provided = {}
"""Specs of conflicting packages, keyed by name. """
conflicted = {}
"""Patches to apply to newly expanded source, if any."""
patches = {}
"""Specs of package this one extends, or None.
Currently, packages can extend at most one other package.
"""
extendees = {}
#
# These are default values for instance variables.
#
"""By default we build in parallel. Subclasses can override this."""
parallel = True
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
make_jobs = None
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
@ -347,20 +325,8 @@ def __init__(self, spec):
if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:]
# Sanity check some required variables that could be
# overridden by package authors.
def ensure_has_dict(attr_name):
if not hasattr(self, attr_name):
raise PackageError("Package %s must define %s" % attr_name)
attr = getattr(self, attr_name)
if not isinstance(attr, dict):
raise PackageError("Package %s has non-dict %s attribute!"
% (self.name, attr_name))
ensure_has_dict('versions')
ensure_has_dict('dependencies')
ensure_has_dict('conflicted')
ensure_has_dict('patches')
# Sanity check attributes required by Spack directives.
spack.directives.ensure_dicts(type(self))
# Check versions in the versions dict.
for v in self.versions:
@ -462,8 +428,8 @@ def url_for_version(self, version):
return version_urls[version]
# If we have no idea, try to substitute the version.
return url.substitute_version(self.nearest_url(version),
self.url_version(version))
return spack.url.substitute_version(self.nearest_url(version),
self.url_version(version))
@property
@ -478,6 +444,12 @@ def stage(self):
return self._stage
@stage.setter
def stage(self, stage):
"""Allow a stage object to be set to override the default."""
self._stage = stage
@property
def fetcher(self):
if not self.spec.versions.concrete:
@ -568,41 +540,6 @@ def preorder_traversal(self, visited=None, **kwargs):
yield pkg
def validate_dependencies(self):
"""Ensure that this package and its dependencies all have consistent
constraints on them.
NOTE that this will NOT find sanity problems through a virtual
dependency. Virtual deps complicate the problem because we
don't know in advance which ones conflict with others in the
dependency DAG. If there's more than one virtual dependency,
it's a full-on SAT problem, so hold off on this for now.
The vdeps are actually skipped in preorder_traversal, so see
that for details.
TODO: investigate validating virtual dependencies.
"""
# This algorithm just attempts to merge all the constraints on the same
# package together, loses information about the source of the conflict.
# What we'd really like to know is exactly which two constraints
# conflict, but that algorithm is more expensive, so we'll do it
# the simple, less informative way for now.
merged = spack.spec.DependencyMap()
try:
for pkg in self.preorder_traversal():
for name, spec in pkg.dependencies.iteritems():
if name not in merged:
merged[name] = spec.copy()
else:
merged[name].constrain(spec)
except spack.spec.UnsatisfiableSpecError, e:
raise InvalidPackageDependencyError(
"Package %s has inconsistent dependency constraints: %s"
% (self.name, e.message))
def provides(self, vpkg_name):
"""True if this package provides a virtual package with the specified name."""
return vpkg_name in self.provided
@ -655,7 +592,7 @@ def url_version(self, version):
def remove_prefix(self):
"""Removes the prefix for a package along with any empty parent directories."""
spack.install_layout.remove_path_for_spec(self.spec)
spack.install_layout.remove_install_directory(self.spec)
def do_fetch(self):
@ -775,16 +712,28 @@ def do_fake_install(self):
mkdirp(self.prefix.man1)
def do_install(self, **kwargs):
"""This class should call this version of the install method.
Package implementations should override install().
"""
# whether to keep the prefix on failure. Default is to destroy it.
keep_prefix = kwargs.get('keep_prefix', False)
keep_stage = kwargs.get('keep_stage', False)
ignore_deps = kwargs.get('ignore_deps', False)
fake_install = kwargs.get('fake', False)
def _build_logger(self, log_path):
"""Create a context manager to log build output."""
def do_install(self,
keep_prefix=False, keep_stage=False, ignore_deps=False,
skip_patch=False, verbose=False, make_jobs=None, fake=False):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
Args:
keep_prefix -- Keep install prefix on failure. By default, destroys it.
keep_stage -- Keep stage on successful build. By default, destroys it.
ignore_deps -- Do not install dependencies before installing this package.
fake -- Don't really build -- install fake stub files instead.
skip_patch -- Skip patch stage of build if True.
verbose -- Display verbose build output (by default, suppresses it)
make_jobs -- Number of make jobs to use for install. Default is ncpus.
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages.")
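With the explicit keyword signature, call sites become self-documenting. A hedged sketch of one, assuming a Spack checkout on the path, with 'libelf' standing in for any real package:
# Hedged call-site sketch for the keyword signature above.
import spack
from spack.spec import Spec

spec = Spec('libelf')
spec.concretize()                 # do_install() requires a concrete spec
pkg = spack.db.get(spec)
pkg.do_install(keep_stage=True,   # keep the build dir for debugging
               verbose=True,      # stream build output to the terminal
               make_jobs=4)       # cap parallel make at 4 jobs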
@ -795,16 +744,22 @@ def do_install(self, **kwargs):
tty.msg("Installing %s" % self.name)
if not ignore_deps:
self.do_install_dependencies(**kwargs)
self.do_install_dependencies(
keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps,
fake=fake, skip_patch=skip_patch, verbose=verbose,
make_jobs=make_jobs)
start_time = time.time()
if not fake_install:
self.do_patch()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
# create the install directory. The install layout
# handles this so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
spack.install_layout.create_install_directory(self.spec)
def cleanup():
if not keep_prefix:
@ -825,16 +780,26 @@ def real_work():
spack.hooks.pre_install(self)
# Set up process's build environment before running install.
self.stage.chdir_to_source()
if fake_install:
if fake:
self.do_fake_install()
else:
# Subclasses implement install() to do the real work.
self.install(self.spec, self.prefix)
# Do the real install in the source directory.
self.stage.chdir_to_source()
# This redirects I/O to a build log (and optionally to the terminal)
log_path = join_path(os.getcwd(), 'spack-build.out')
log_file = open(log_path, 'w')
with log_output(log_file, verbose, sys.stdout.isatty(), True):
self.install(self.spec, self.prefix)
# Ensure that something was actually installed.
self._sanity_check_install()
# Move build log into install directory on success
if not fake:
log_install_path = spack.install_layout.build_log_path(self.spec)
install(log_path, log_install_path)
# On successful install, remove the stage.
if not keep_stage:
self.stage.destroy()
@ -849,6 +814,9 @@ def real_work():
print_pkg(self.prefix)
except ProcessError, e:
# Annotate with location of build log.
e.build_log = log_path
# One of the processes returned an error code.
# Suppress detailed stack trace here unless in debug mode
if spack.debug:
@ -858,13 +826,14 @@ def real_work():
# Still need to clean up b/c there was an error.
cleanup()
os._exit(1)
except:
# other exceptions just clean up and raise.
cleanup()
raise
build_env.fork(self, real_work)
spack.build_environment.fork(self, real_work)
# Once everything else is done, run post install hooks
spack.hooks.post_install(self)
@ -924,9 +893,7 @@ def install(self, spec, prefix):
raise InstallError("Package %s provides no install method!" % self.name)
def do_uninstall(self, **kwargs):
force = kwargs.get('force', False)
def do_uninstall(self, force=False):
if not self.installed:
raise InstallError(str(self.spec) + " is not installed.")
@ -969,27 +936,23 @@ def _sanity_check_extension(self):
raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
def do_activate(self, **kwargs):
def do_activate(self, force=False):
"""Called on an etension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
"""
self._sanity_check_extension()
force = kwargs.get('force', False)
# TODO: get rid of this normalize - DAG handling.
self.spec.normalize()
spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
spack.install_layout.check_extension_conflict(
self.extendee_spec, self.spec)
# Activate any package dependencies that are also extensions.
if not force:
for spec in self.spec.traverse(root=False):
if spec.package.extends(self.extendee_spec):
# TODO: fix this normalize() requirement -- revisit DAG handling.
spec.package.spec.normalize()
if not spec.package.activated:
spec.package.do_activate(**kwargs)
spec.package.do_activate(force=force)
self.extendee_spec.package.activate(self, **self.extendee_args)
@ -1015,6 +978,7 @@ def ignore(filename):
conflict = tree.find_conflict(self.prefix, ignore=ignore)
if conflict:
raise ExtensionConflictError(conflict)
tree.merge(self.prefix, ignore=ignore)
@ -1142,12 +1106,13 @@ def find_versions_of_archive(*archive_urls, **kwargs):
if list_url:
list_urls.add(list_url)
for aurl in archive_urls:
list_urls.add(url.find_list_url(aurl))
list_urls.add(spack.url.find_list_url(aurl))
# Grab some web pages to scrape.
page_map = {}
for lurl in list_urls:
page_map.update(get_pages(lurl, depth=list_depth))
pages = spack.util.web.get_pages(lurl, depth=list_depth)
page_map.update(pages)
# Scrape them for archive URLs
regexes = []
@ -1156,7 +1121,7 @@ def find_versions_of_archive(*archive_urls, **kwargs):
# the version part of the URL. The capture group is converted
# to a generic wildcard, so we can use this to extract things
# on a page that look like archive URLs.
url_regex = url.wildcard_version(aurl)
url_regex = spack.url.wildcard_version(aurl)
# We'll be a bit more liberal and just look for the archive
# part, not the full path.
@ -1221,13 +1186,6 @@ def __init__(self, message, long_msg=None):
super(PackageError, self).__init__(message, long_msg)
class InvalidPackageDependencyError(PackageError):
"""Raised when package specification is inconsistent with requirements of
its dependencies."""
def __init__(self, message):
super(InvalidPackageDependencyError, self).__init__(message)
class PackageVersionError(PackageError):
"""Raised when a version URL cannot automatically be determined."""
def __init__(self, version):

View file

@ -1,215 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This package contains relationships that can be defined among packages.
Relations are functions that can be called inside a package definition,
for example:
class OpenMPI(Package):
depends_on("hwloc")
provides("mpi")
...
The available relations are:
depends_on
Above, the OpenMPI package declares that it "depends on" hwloc. This means
that the hwloc package needs to be installed before OpenMPI can be
installed. When a user runs 'spack install openmpi', spack will fetch
hwloc and install it first.
provides
This is useful when more than one package can satisfy a dependence. Above,
OpenMPI declares that it "provides" mpi. Other implementations of the MPI
interface, like mvapich and mpich, also provide mpi, e.g.:
class Mvapich(Package):
provides("mpi")
...
class Mpich(Package):
provides("mpi")
...
Instead of depending on openmpi, mvapich, or mpich, another package can
declare that it depends on "mpi":
class Mpileaks(Package):
depends_on("mpi")
...
Now the user can pick which MPI they would like to build with when they
install mpileaks. For example, the user could install 3 instances of
mpileaks, one for each MPI version, by issuing these three commands:
spack install mpileaks ^openmpi
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
import re
import inspect
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
import spack.url
from spack.version import Version
from spack.patch import Patch
from spack.spec import Spec, parse_anonymous_spec
def version(ver, checksum=None, **kwargs):
"""Adds a version and metadata describing how to fetch it.
Metadata is just stored as a dict in the package's versions
dictionary. Package must turn it into a valid fetch strategy
later.
"""
pkg = caller_locals()
versions = pkg.setdefault('versions', {})
# special case checksum for backward compatibility
if checksum:
kwargs['md5'] = checksum
# Store the kwargs for the package to use later when constructing
# a fetch strategy.
versions[Version(ver)] = kwargs
def depends_on(*specs):
"""Adds a dependencies local variable in the locals of
the calling class, based on args. """
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
for string in specs:
for spec in spack.spec.parse(string):
if pkg == spec.name:
raise CircularReferenceError('depends_on', pkg)
dependencies[spec.name] = spec
def extends(spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
needs to be installed into the prefix of the Python installation.
Spack handles this by installing modules into their own prefix,
but allowing ONE module version to be symlinked into a parent
Python install at a time.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
mechanism.
"""
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
extendees = clocals.setdefault('extendees', {})
if extendees:
raise RelationError("Packages can extend at most one other package.")
spec = Spec(spec)
if pkg == spec.name:
raise CircularReferenceError('extends', pkg)
dependencies[spec.name] = spec
extendees[spec.name] = (spec, kwargs)
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
pkg = get_calling_package_name()
spec_string = kwargs.get('when', pkg)
provider_spec = parse_anonymous_spec(spec_string, pkg)
provided = caller_locals().setdefault("provided", {})
for string in specs:
for provided_spec in spack.spec.parse(string):
if pkg == provided_spec.name:
raise CircularReferenceError('depends_on', pkg)
provided[provided_spec] = provider_spec
def patch(url_or_filename, **kwargs):
"""Packages can declare patches to apply to source. You can
optionally provide a when spec to indicate that a particular
patch should only be applied when the package's spec meets
certain conditions (e.g. a particular version).
"""
pkg = get_calling_package_name()
level = kwargs.get('level', 1)
when_spec = parse_anonymous_spec(kwargs.get('when', pkg), pkg)
patches = caller_locals().setdefault('patches', {})
if when_spec not in patches:
patches[when_spec] = [Patch(pkg, url_or_filename, level)]
else:
# if this spec is identical to some other, then append this
# patch to the existing list.
patches[when_spec].append(Patch(pkg, url_or_filename, level))
def conflicts(*specs):
"""Packages can declare conflicts with other packages.
This can be as specific as you like: use regular spec syntax.
NOT YET IMPLEMENTED.
"""
# TODO: implement conflicts
pass
class RelationError(spack.error.SpackError):
"""This is raised when something is wrong with a package relation."""
def __init__(self, relation, message):
super(RelationError, self).__init__(message)
self.relation = relation
class ScopeError(RelationError):
"""This is raised when a relation is called from outside a spack package."""
def __init__(self, relation):
super(ScopeError, self).__init__(
relation,
"Must invoke '%s' from inside a class definition!" % relation)
class CircularReferenceError(RelationError):
"""This is raised when something depends on itself."""
def __init__(self, relation, package):
super(CircularReferenceError, self).__init__(
relation,
"Package '%s' cannot pass itself to %s." % (package, relation))
self.package = package

View file

@ -1,5 +1,5 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
@ -93,8 +93,11 @@
import sys
import itertools
import hashlib
import base64
from StringIO import StringIO
from operator import attrgetter
from external import yaml
from external.yaml.error import MarkedYAMLError
import llnl.util.tty as tty
from llnl.util.lang import *
@ -110,6 +113,9 @@
from spack.util.prefix import Prefix
from spack.virtual import ProviderIndex
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
# Convenient names for color formats so that other things can use them
compiler_color = '@g'
version_color = '@c'
@ -117,6 +123,7 @@
enabled_variant_color = '@B'
disabled_variant_color = '@r'
dependency_color = '@.'
hash_color = '@K'
"""This map determines the coloring of specs when using color output.
We make the fields different colors to enhance readability.
@ -126,7 +133,8 @@
'=' : architecture_color,
'+' : enabled_variant_color,
'~' : disabled_variant_color,
'^' : dependency_color }
'^' : dependency_color,
'#' : hash_color }
"""Regex used for splitting by spec field separators."""
_separators = '[%s]' % ''.join(color_formats.keys())
@ -214,20 +222,24 @@ def _autospec(self, compiler_spec_like):
return CompilerSpec(compiler_spec_like)
def satisfies(self, other):
def satisfies(self, other, strict=False):
other = self._autospec(other)
return (self.name == other.name and
self.versions.satisfies(other.versions))
self.versions.satisfies(other.versions, strict=strict))
def constrain(self, other):
"""Intersect self's versions with other.
Return whether the CompilerSpec changed.
"""
other = self._autospec(other)
# ensure that other will actually constrain this spec.
if not other.satisfies(self):
raise UnsatisfiableCompilerSpecError(other, self)
self.versions.intersect(other.versions)
return self.versions.intersect(other.versions)
@property
@ -255,6 +267,18 @@ def _cmp_key(self):
return (self.name, self.versions)
def to_dict(self):
d = {'name' : self.name}
d.update(self.versions.to_dict())
return { 'compiler' : d }
@staticmethod
def from_dict(d):
d = d['compiler']
return CompilerSpec(d['name'], VersionList.from_dict(d))
def __str__(self):
out = self.name
if self.versions and self.versions != _any_version:
@ -267,7 +291,7 @@ def __repr__(self):
@key_ordering
class Variant(object):
class VariantSpec(object):
"""Variants are named, build-time options for a package. Names depend
on the particular package being built, and each named variant can
be enabled or disabled.
@ -282,7 +306,7 @@ def _cmp_key(self):
def copy(self):
return Variant(self.name, self.enabled)
return VariantSpec(self.name, self.enabled)
def __str__(self):
@ -291,9 +315,52 @@ def __str__(self):
class VariantMap(HashableMap):
def satisfies(self, other):
return all(self[key].enabled == other[key].enabled
for key in other if key in self)
def __init__(self, spec):
super(VariantMap, self).__init__()
self.spec = spec
def satisfies(self, other, strict=False):
if strict or self.spec._concrete:
return all(k in self and self[k].enabled == other[k].enabled
for k in other)
else:
return all(self[k].enabled == other[k].enabled
for k in other if k in self)
def constrain(self, other):
"""Add all variants in other that aren't in self to self.
Raises an error if any common variants don't match.
Return whether the spec changed.
"""
if other.spec._concrete:
for k in self:
if k not in other:
raise UnsatisfiableVariantSpecError(self[k], '<absent>')
changed = False
for k in other:
if k in self:
if self[k].enabled != other[k].enabled:
raise UnsatisfiableVariantSpecError(self[k], other[k])
else:
self[k] = other[k].copy()
changed = True
return changed
@property
def concrete(self):
return self.spec._concrete or all(
v in self for v in self.spec.package.variants)
def copy(self):
clone = VariantMap(None)
for name, variant in self.items():
clone[name] = variant.copy()
return clone
def __str__(self):
@ -340,10 +407,11 @@ def __init__(self, spec_like, *dep_like, **kwargs):
self.name = other.name
self.dependents = other.dependents
self.versions = other.versions
self.variants = other.variants
self.architecture = other.architecture
self.compiler = other.compiler
self.dependencies = other.dependencies
self.variants = other.variants
self.variants.spec = self
# Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal.
@ -372,7 +440,7 @@ def _add_variant(self, name, enabled):
"""Called by the parser to add a variant."""
if name in self.variants: raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name)
self.variants[name] = Variant(name, enabled)
self.variants[name] = VariantSpec(name, enabled)
def _set_compiler(self, compiler):
@ -436,14 +504,15 @@ def virtual(self):
@property
def concrete(self):
"""A spec is concrete if it can describe only ONE build of a package.
If any of the name, version, architecture, compiler, or depdenencies
are ambiguous,then it is not concrete.
If any of the name, version, architecture, compiler,
variants, or dependencies are ambiguous, then it is not concrete.
"""
if self._concrete:
return True
self._concrete = bool(not self.virtual
and self.versions.concrete
and self.variants.concrete
and self.architecture
and self.compiler and self.compiler.concrete
and self.dependencies.concrete)
@ -564,18 +633,91 @@ def prefix(self):
return Prefix(spack.install_layout.path_for_spec(self))
def dep_hash(self, length=None):
"""Return a hash representing all dependencies of this spec
(direct and indirect).
def dag_hash(self, length=None):
"""Return a hash of the entire spec DAG, including connectivity."""
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
return base64.b32encode(sha.digest()).lower()[:length]
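The hash recipe itself is plain stdlib: YAML-serialize, SHA-1, base32-encode, truncate. A self-contained sketch with a made-up node dict (the real one comes from to_node_dict() below):
# Stdlib sketch of the dag_hash() recipe; the node dict is invented.
import base64
import hashlib
import yaml

node = {'libelf': {'arch': 'linux-x86_64', 'version': '0.8.13'}}
text = yaml.dump(node, default_flow_style=True)
digest = hashlib.sha1(text.encode('utf-8')).digest()
print(base64.b32encode(digest).lower()[:7])  # 7-char prefix, as in $#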
def to_node_dict(self):
d = {
'variants' : dict(
(name,v.enabled) for name, v in self.variants.items()),
'arch' : self.architecture,
'dependencies' : dict((d, self.dependencies[d].dag_hash())
for d in sorted(self.dependencies))
}
if self.compiler:
d.update(self.compiler.to_dict())
else:
d['compiler'] = None
d.update(self.versions.to_dict())
return { self.name : d }
def to_yaml(self, stream=None):
node_list = []
for s in self.traverse(order='pre'):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
return yaml.dump({ 'spec' : node_list },
stream=stream, default_flow_style=False)
@staticmethod
def from_node_dict(node):
name = next(iter(node))
node = node[name]
spec = Spec(name)
spec.versions = VersionList.from_dict(node)
spec.architecture = node['arch']
if node['compiler'] is None:
spec.compiler = None
else:
spec.compiler = CompilerSpec.from_dict(node)
for name, enabled in node['variants'].items():
spec.variants[name] = VariantSpec(name, enabled)
return spec
@staticmethod
def from_yaml(stream):
"""Construct a spec from YAML.
Parameters:
stream -- string or file object to read from.
TODO: currently discards hashes. Include hashes when they
represent more than the DAG does.
If you want this hash to be consistent, you should
concretize the spec first so that it is not ambiguous.
"""
sha = hashlib.sha1()
sha.update(self.dep_string())
full_hash = sha.hexdigest()
deps = {}
spec = None
return full_hash[:length]
try:
yfile = yaml.load(stream)
except MarkedYAMLError, e:
raise SpackYAMLError("error parsing YMAL spec:", str(e))
for node in yfile['spec']:
name = next(iter(node))
dep = Spec.from_node_dict(node)
if not spec:
spec = dep
deps[dep.name] = dep
for node in yfile['spec']:
name = next(iter(node))
for dep_name in node[name]['dependencies']:
deps[name].dependencies[dep_name] = deps[dep_name]
return spec
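Together, to_yaml() and from_yaml() give a hash-stable round trip. A hedged usage sketch, assuming a Spack checkout and a package named libelf:
# Hedged round-trip sketch for the serialization code above.
from spack.spec import Spec

spec = Spec('libelf')
spec.concretize()              # hashes are only stable for concrete specs
text = spec.to_yaml()
clone = Spec.from_yaml(text)
assert clone.dag_hash() == spec.dag_hash()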
def _concretize_helper(self, presets=None, visited=None):
@ -604,6 +746,7 @@ def _concretize_helper(self, presets=None, visited=None):
spack.concretizer.concretize_architecture(self)
spack.concretizer.concretize_compiler(self)
spack.concretizer.concretize_version(self)
spack.concretizer.concretize_variants(self)
presets[self.name] = self
visited.add(self.name)
@ -736,80 +879,156 @@ def flatten(self):
self._add_dependency(dep)
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
If the package depends on <name> in this configuration, return
the dependency. If no conditions are True (and we don't
depend on it), return None.
"""
pkg = spack.db.get(self.name)
conditions = pkg.dependencies[name]
# evaluate when specs to figure out constraints on the dependency.
dep = None
for when_spec, dep_spec in conditions.items():
sat = self.satisfies(when_spec, strict=True)
if sat:
if dep is None:
dep = Spec(name)
try:
dep.constrain(dep_spec)
except UnsatisfiableSpecError, e:
e.message = ("Conflicting conditional dependencies on package "
"%s for spec %s" % (self.name, self))
raise e
return dep
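The conditions table this walks is filled in by package files. A hypothetical package showing how a 'when' spec attaches a condition to a dependency; the class, URL, and md5 checksum are placeholders:
# Hypothetical package file: each depends_on() with a 'when' spec adds
# an entry to the conditions table that
# _evaluate_dependency_conditions() evaluates.
from spack import *

class Mpileaks(Package):
    homepage = "http://example.com/mpileaks"
    url      = "http://example.com/mpileaks-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    depends_on('mpi')                    # unconditional
    depends_on('dyninst', when='@1.0:')  # only for versions 1.0 and up

    def install(self, spec, prefix):
        pass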
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
Raise an exception if there is a conflicting virtual
dependency already in this spec.
"""
assert(vdep.virtual)
providers = provider_index.providers_for(vdep)
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
raise MultipleProviderError(vdep, providers)
return providers[0]
else:
# The user might have required something insufficient for
# pkg_dep -- so we'll get a conflict. e.g., user asked for
# mpi@:1.1 but some package required mpi@2.1:.
required = provider_index.providers_for(vdep.name)
if len(required) > 1:
raise MultipleProviderError(vdep, required)
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
def _merge_dependency(self, dep, visited, spec_deps, provider_index):
"""Merge the dependency into this spec.
This is the core of the normalize() method. There are a few basic steps:
* If dep is virtual, evaluate whether it corresponds to an
existing concrete dependency, and merge if so.
* If it's a real package providing some virtual dep, see if it
satisfies what an existing virtual dependency wants, and merge if so.
* Finally, if none of the above, merge dependency and its
constraints into this spec.
This method returns True if the spec was changed, False otherwise.
"""
changed = False
# If it's a virtual dependency, try to find a provider and
# merge that.
if dep.virtual:
visited.add(dep.name)
provider = self._find_provider(dep, provider_index)
if provider:
dep = provider
else:
# if it's a real dependency, check whether it provides
# something already required in the spec.
index = ProviderIndex([dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
changed = True
else:
required = index.providers_for(vspec.name)
if required:
raise UnsatisfiableProviderSpecError(required[0], dep)
provider_index.update(dep)
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
if dep.name not in self.dependencies:
self._add_dependency(dependency)
changed = True
changed |= dependency._normalize_helper(visited, spec_deps, provider_index)
return changed
def _normalize_helper(self, visited, spec_deps, provider_index):
"""Recursive helper function for _normalize."""
if self.name in visited:
return
return False
visited.add(self.name)
# if we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later.
if self.virtual:
return
return False
# Combine constraints from package deps with constraints from
# the spec, until nothing changes.
any_change = False
changed = True
# Combine constraints from package dependencies with
# constraints on the spec's dependencies.
pkg = spack.db.get(self.name)
for name, pkg_dep in self.package.dependencies.items():
# If it's a virtual dependency, try to find a provider
if pkg_dep.virtual:
providers = provider_index.providers_for(pkg_dep)
while changed:
changed = False
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
raise MultipleProviderError(pkg_dep, providers)
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
pkg_dep, visited, spec_deps, provider_index)
pkg_dep = providers[0]
name = pkg_dep.name
any_change |= changed
else:
# The user might have required something insufficient for
# pkg_dep -- so we'll get a conflict. e.g., user asked for
# mpi@:1.1 but some package required mpi@2.1:.
required = provider_index.providers_for(name)
if len(required) > 1:
raise MultipleProviderError(pkg_dep, required)
elif required:
raise UnsatisfiableProviderSpecError(
required[0], pkg_dep)
else:
# if it's a real dependency, check whether it provides something
# already required in the spec.
index = ProviderIndex([pkg_dep], restrict=True)
for vspec in (v for v in spec_deps.values() if v.virtual):
if index.providers_for(vspec):
vspec._replace_with(pkg_dep)
del spec_deps[vspec.name]
else:
required = index.providers_for(vspec.name)
if required:
raise UnsatisfiableProviderSpecError(
required[0], pkg_dep)
provider_index.update(pkg_dep)
if name not in spec_deps:
# If the spec doesn't reference a dependency that this package
# needs, then clone it from the package description.
spec_deps[name] = pkg_dep.copy()
try:
# Constrain package information with spec info
spec_deps[name].constrain(pkg_dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
e.message %= (spec_deps[name], name, e.constraint_type,
e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[name]
self._add_dependency(dependency)
dependency._normalize_helper(visited, spec_deps, provider_index)
return any_change
def normalize(self, **kwargs):
@ -836,19 +1055,14 @@ def normalize(self, **kwargs):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
# Ensure that the package & dep descriptions are consistent & sane
if not self.virtual:
self.package.validate_dependencies()
# Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies(copy=False)
# Figure out which of the user-provided deps provide virtual deps.
# Remove virtual deps that are already provided by something in the spec
spec_packages = [d.package for d in spec_deps.values() if not d.virtual]
# Initialize index of virtual dependency providers
index = ProviderIndex(spec_deps.values(), restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set()
self._normalize_helper(visited, spec_deps, index)
@ -856,12 +1070,6 @@ def normalize(self, **kwargs):
# actually deps of this package. Raise an error.
extra = set(spec_deps.keys()).difference(visited)
# Also subtract out all the packags that provide a needed vpkg
vdeps = [v for v in self.package.virtual_dependencies()]
vpkg_providers = index.providers_for(*vdeps)
extra.difference_update(p.name for p in vpkg_providers)
# Anything left over is not a valid part of the spec.
if extra:
raise InvalidDependencyException(
@ -893,10 +1101,18 @@ def validate_names(self):
if not compilers.supported(spec.compiler):
raise UnsupportedCompilerError(spec.compiler.name)
# Ensure that variants all exist.
for vname, variant in spec.variants.items():
if vname not in spec.package.variants:
raise UnknownVariantError(spec.name, vname)
def constrain(self, other, **kwargs):
def constrain(self, other, deps=True):
"""Merge the constraints of other with self.
Returns True if the spec changed as a result, False if not.
"""
other = self._autospec(other)
constrain_deps = kwargs.get('deps', True)
if not self.name == other.name:
raise UnsatisfiableSpecNameError(self.name, other.name)
@ -915,23 +1131,32 @@ def constrain(self, other, **kwargs):
raise UnsatisfiableArchitectureSpecError(self.architecture,
other.architecture)
changed = False
if self.compiler is not None and other.compiler is not None:
self.compiler.constrain(other.compiler)
changed |= self.compiler.constrain(other.compiler)
elif self.compiler is None:
changed |= (self.compiler != other.compiler)
self.compiler = other.compiler
self.versions.intersect(other.versions)
self.variants.update(other.variants)
self.architecture = self.architecture or other.architecture
changed |= self.versions.intersect(other.versions)
changed |= self.variants.constrain(other.variants)
if constrain_deps:
self._constrain_dependencies(other)
old = self.architecture
self.architecture = self.architecture or other.architecture
changed |= (self.architecture != old)
if deps:
changed |= self._constrain_dependencies(other)
return changed
def _constrain_dependencies(self, other):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
if not self.dependencies or not other.dependencies:
return
return False
# TODO: might want more detail than this, e.g. specific deps
# in violation. if this becomes a priority get rid of this
@ -940,12 +1165,17 @@ def _constrain_dependencies(self, other):
raise UnsatisfiableDependencySpecError(other, self)
# Handle common first-order constraints directly
changed = False
for name in self.common_dependencies(other):
self[name].constrain(other[name], deps=False)
changed |= self[name].constrain(other[name], deps=False)
# Update with additional constraints from other spec
for name in other.dep_difference(self):
self._add_dependency(other[name].copy())
changed = True
return changed
def common_dependencies(self, other):
@ -979,44 +1209,74 @@ def _autospec(self, spec_like):
return parse_anonymous_spec(spec_like, self.name)
def satisfies(self, other, **kwargs):
def satisfies(self, other, deps=True, strict=False):
"""Determine if this spec satisfies all constraints of another.
There are two senses for satisfies:
* `loose` (default): the absence of a constraint in self
implies that it *could* be satisfied by other, so we only
check that there are no conflicts with other for
constraints that this spec actually has.
* `strict`: strict means that we *must* meet all the
constraints specified on other.
"""
other = self._autospec(other)
satisfy_deps = kwargs.get('deps', True)
# First thing we care about is whether the name matches
if self.name != other.name:
return False
# All these attrs have satisfies criteria of their own,
# but can be None to indicate no constraints.
for s, o in ((self.versions, other.versions),
(self.variants, other.variants),
(self.compiler, other.compiler)):
if s and o and not s.satisfies(o):
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
return False
elif strict and (self.versions or other.versions):
return False
# None indicates no constraints when not strict.
if self.compiler and other.compiler:
if not self.compiler.satisfies(other.compiler, strict=strict):
return False
elif strict and (other.compiler and not self.compiler):
return False
if not self.variants.satisfies(other.variants, strict=strict):
return False
# Architecture satisfaction is currently just string equality.
# Can be None for unconstrained, though.
if (self.architecture and other.architecture and
self.architecture != other.architecture):
# If not strict, None means unconstrained.
if self.architecture and other.architecture:
if self.architecture != other.architecture:
return False
elif strict and (other.architecture and not self.architecture):
return False
# If we need to descend into dependencies, do it, otherwise we're done.
if satisfy_deps:
return self.satisfies_dependencies(other)
if deps:
return self.satisfies_dependencies(other, strict=strict)
else:
return True
def satisfies_dependencies(self, other):
def satisfies_dependencies(self, other, strict=False):
"""This checks constraints on common dependencies against each other."""
# if either spec doesn't restrict dependencies then both are compatible.
if not self.dependencies or not other.dependencies:
other = self._autospec(other)
if strict:
if other.dependencies and not self.dependencies:
return False
if not all(dep in self.dependencies for dep in other.dependencies):
return False
elif not self.dependencies or not other.dependencies:
# if either spec doesn't restrict dependencies then both are compatible.
return True
# Handle first-order constraints directly
for name in self.common_dependencies(other):
if not self[name].satisfies(other[name]):
if not self[name].satisfies(other[name], deps=False):
return False
# For virtual dependencies, we need to dig a little deeper.
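A hedged illustration of the two senses, assuming a Spack checkout with an mpich package (exact results depend on the package tree):
# Loose vs. strict satisfies().
from spack.spec import Spec

loose = Spec('mpich')          # unconstrained version
tight = Spec('mpich@3.0.4')

print(loose.satisfies(tight))               # loose sense: no conflict
print(loose.satisfies(tight, strict=True))  # strict: @3.0.4 is not met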
@ -1061,11 +1321,12 @@ def _dup(self, other, **kwargs):
# Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
self.variants = other.variants.copy()
self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None
self.dependents = DependencyMap()
self.dependencies = DependencyMap()
self.variants = other.variants.copy()
self.variants.spec = self
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
@ -1117,7 +1378,7 @@ def __contains__(self, spec):
"""
spec = self._autospec(spec)
for s in self.traverse():
if s.satisfies(spec):
if s.satisfies(spec, strict=True):
return True
return False
@ -1208,7 +1469,7 @@ def format(self, format_string='$_$@$%@$+$=', **kwargs):
$%@ Compiler & compiler version
$+ Options
$= Architecture
$# Dependencies' 8-char sha1 prefix
$# 7-char prefix of DAG hash
$$ $
Optionally you can provide a width, e.g. $20_ for a 20-wide name.
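A hedged example of these format codes, including the new $# hash prefix; assumes a Spack checkout and a libelf package:
# Spec.format() with the codes documented above.
from spack.spec import Spec

spec = Spec('libelf@0.8.13')
spec.concretize()
print(spec.format('$_$@$%@$+$=$#'))  # name, version, compiler, ..., hash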
@ -1264,8 +1525,7 @@ def write(s, c):
if self.architecture:
write(fmt % (c + str(self.architecture)), c)
elif c == '#':
if self.dependencies:
out.write(fmt % ('-' + self.dep_hash(8)))
out.write('-' + fmt % (self.dag_hash(7)))
elif c == '$':
if fmt != '':
raise ValueError("Can't use format width with $$.")
@ -1274,7 +1534,8 @@ def write(s, c):
elif compiler:
if c == '@':
if self.compiler and self.compiler.versions:
if (self.compiler and self.compiler.versions and
self.compiler.versions != _any_version):
write(c + str(self.compiler.versions), '%')
elif c == '$':
escape = True
@ -1311,12 +1572,15 @@ def tree(self, **kwargs):
cover = kwargs.pop('cover', 'nodes')
indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@$+$=')
prefix = kwargs.pop('prefix', None)
check_kwargs(kwargs, self.tree)
out = ""
cur_id = 0
ids = {}
for d, node in self.traverse(order='pre', cover=cover, depth=True):
if prefix is not None:
out += prefix(node)
out += " " * indent
if depth:
out += "%-4d" % d
@ -1354,6 +1618,8 @@ def __init__(self):
(r'\~', lambda scanner, val: self.token(OFF, val)),
(r'\%', lambda scanner, val: self.token(PCT, val)),
(r'\=', lambda scanner, val: self.token(EQ, val)),
# This is more liberal than identifier_re (see above).
# Checked by check_identifier() for better error messages.
(r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
(r'\s+', lambda scanner, val: None)])
@ -1399,7 +1665,7 @@ def spec(self):
spec = Spec.__new__(Spec)
spec.name = self.token.value
spec.versions = VersionList()
spec.variants = VariantMap()
spec.variants = VariantMap(spec)
spec.architecture = None
spec.compiler = None
spec.dependents = DependencyMap()
@ -1580,6 +1846,13 @@ def __init__(self, compiler_name):
"The '%s' compiler is not yet supported." % compiler_name)
class UnknownVariantError(SpecError):
"""Raised when the same variant occurs in a spec twice."""
def __init__(self, pkg, variant):
super(UnknownVariantError, self).__init__(
"Package %s has no variant %s!" % (pkg, variant))
class DuplicateArchitectureError(SpecError):
"""Raised when the same architecture occurs in a spec twice."""
def __init__(self, message):
@ -1683,3 +1956,7 @@ class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
def __init__(self, provided, required):
super(UnsatisfiableDependencySpecError, self).__init__(
provided, required, "dependency")
class SpackYAMLError(spack.error.SpackError):
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))

View file

@ -309,15 +309,44 @@ def destroy(self):
os.chdir(os.path.dirname(self.path))
class DIYStage(object):
"""Simple class that allows any directory to be a spack stage."""
def __init__(self, path):
self.archive_file = None
self.path = path
self.source_path = path
def chdir(self):
if os.path.isdir(self.path):
os.chdir(self.path)
else:
tty.die("Setup failed: no such directory: " + self.path)
def chdir_to_source(self):
self.chdir()
def fetch(self):
tty.msg("No need to fetch for DIY.")
def check(self):
tty.msg("No checksum needed for DIY.")
def expand_archive(self):
tty.msg("Using source directory: %s" % self.source_path)
def restage(self):
tty.die("Cannot restage DIY stage.")
def destroy(self):
# No need to destroy DIY stage.
pass
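A hedged usage sketch: pointing a build at an existing source tree instead of a fetched archive (the path is a placeholder):
# Wrap an existing source directory so a package builds from it.
from spack.stage import DIYStage

stage = DIYStage('/path/to/existing/source')
stage.fetch()             # no-op: just prints a message
stage.chdir_to_source()   # cd into the directory (dies if missing)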
def _get_mirrors():
"""Get mirrors from spack configuration."""
config = spack.config.get_config()
config = spack.config.get_mirror_config()
return [val for name, val in config.iteritems()]
mirrors = []
sec_names = config.get_section_names('mirror')
for name in sec_names:
mirrors.append(config.get_value('mirror', name, 'url'))
return mirrors
def ensure_access(file=spack.stage_path):

View file

@ -52,7 +52,10 @@
'mirror',
'url_extrapolate',
'cc',
'link_tree']
'link_tree',
'spec_yaml',
'optional_deps',
'make_executable']
def list_tests():

View file

@ -35,7 +35,13 @@ def check_spec(self, abstract, concrete):
self.assertEqual(abstract.versions, concrete.versions)
if abstract.variants:
self.assertEqual(abstract.versions, concrete.versions)
for name in abstract.variants:
avariant = abstract.variants[name]
cvariant = concrete.variants[name]
self.assertEqual(avariant.enabled, cvariant.enabled)
for name in abstract.package.variants:
self.assertTrue(name in concrete.variants)
if abstract.compiler and abstract.compiler.concrete:
self.assertEqual(abstract.compiler, concrete.compiler)
@ -66,6 +72,12 @@ def test_concretize_dag(self):
self.check_concretize('libelf')
def test_concretize_variant(self):
self.check_concretize('mpich+debug')
self.check_concretize('mpich~debug')
self.check_concretize('mpich')
def test_concretize_with_virtual(self):
self.check_concretize('mpileaks ^mpi')
self.check_concretize('mpileaks ^mpi@:1.1')

View file

@ -26,44 +26,49 @@
import shutil
import os
from tempfile import mkdtemp
import spack
from spack.packages import PackageDB
from spack.test.mock_packages_test import *
from spack.config import *
class ConfigTest(MockPackagesTest):
def setUp(self):
self.initmock()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
class ConfigTest(unittest.TestCase):
def tearDown(self):
self.cleanmock()
shutil.rmtree(self.tmp_dir, True)
@classmethod
def setUp(cls):
cls.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
@classmethod
def tearDown(cls):
shutil.rmtree(cls.tmp_dir, True)
def get_path(self):
return os.path.join(ConfigTest.tmp_dir, "spackconfig")
def check_config(self, comps):
config = spack.config.get_compilers_config()
compiler_list = ['cc', 'cxx', 'f77', 'f90']
for key in comps:
for c in compiler_list:
if comps[key][c] == '/bad':
continue
self.assertEqual(comps[key][c], config[key][c])
def test_write_key(self):
config = SpackConfigParser(self.get_path())
config.set_value('compiler.cc', 'a')
config.set_value('compiler.cxx', 'b')
config.set_value('compiler', 'gcc@4.7.3', 'cc', 'c')
config.set_value('compiler', 'gcc@4.7.3', 'cxx', 'd')
config.write()
a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
"gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
"clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
config = SpackConfigParser(self.get_path())
b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
"icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
"clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
self.assertEqual(config.get_value('compiler.cc'), 'a')
self.assertEqual(config.get_value('compiler.cxx'), 'b')
self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cc'), 'c')
self.assertEqual(config.get_value('compiler', 'gcc@4.7.3', 'cxx'), 'd')
spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
self.assertEqual(config.get_value('compiler', None, 'cc'), 'a')
self.assertEqual(config.get_value('compiler', None, 'cxx'), 'b')
self.assertEqual(config.get_value('compiler.gcc@4.7.3.cc'), 'c')
self.assertEqual(config.get_value('compiler.gcc@4.7.3.cxx'), 'd')
self.check_config(a_comps)
self.check_config(b_comps)
spack.config.clear_config_caches()
self.check_config(a_comps)
self.check_config(b_comps)
self.assertRaises(NoOptionError, config.get_value, 'compiler', None, 'fc')

View file

@ -29,14 +29,17 @@
import tempfile
import shutil
import os
from contextlib import closing
from llnl.util.filesystem import *
import spack
from spack.spec import Spec
from spack.packages import PackageDB
from spack.directory_layout import SpecHashDirectoryLayout
from spack.directory_layout import YamlDirectoryLayout
# number of packages to test (to reduce test time)
max_packages = 10
class DirectoryLayoutTest(unittest.TestCase):
"""Tests that a directory layout works correctly and produces a
@ -44,7 +47,7 @@ class DirectoryLayoutTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.layout = SpecHashDirectoryLayout(self.tmpdir)
self.layout = YamlDirectoryLayout(self.tmpdir)
def tearDown(self):
@ -59,7 +62,9 @@ def test_read_and_write_spec(self):
finally that the directory can be removed by the directory
layout.
"""
for pkg in spack.db.all_packages():
packages = list(spack.db.all_packages())[:max_packages]
for pkg in packages:
spec = pkg.spec
# If a spec fails to concretize, just skip it. If it is a
@ -69,7 +74,7 @@ def test_read_and_write_spec(self):
except:
continue
self.layout.make_path_for_spec(spec)
self.layout.create_install_directory(spec)
install_dir = self.layout.path_for_spec(spec)
spec_path = self.layout.spec_file_path(spec)
@ -89,8 +94,8 @@ def test_read_and_write_spec(self):
self.assertTrue(spec_from_file.concrete)
# Ensure that specs that come out "normal" are really normal.
with closing(open(spec_path)) as spec_file:
read_separately = Spec(spec_file.read())
with open(spec_path) as spec_file:
read_separately = Spec.from_yaml(spec_file.read())
read_separately.normalize()
self.assertEqual(read_separately, spec_from_file)
@ -98,11 +103,11 @@ def test_read_and_write_spec(self):
read_separately.concretize()
self.assertEqual(read_separately, spec_from_file)
# Make sure the dep hash of the read-in spec is the same
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
# Make sure the hash of the read-in spec is the same
self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
# Ensure directories are properly removed
self.layout.remove_path_for_spec(spec)
self.layout.remove_install_directory(spec)
self.assertFalse(os.path.isdir(install_dir))
self.assertFalse(os.path.exists(install_dir))
@ -120,12 +125,14 @@ def test_handle_unknown_package(self):
"""
mock_db = PackageDB(spack.mock_packages_path)
not_in_mock = set(spack.db.all_package_names()).difference(
not_in_mock = set.difference(
set(spack.db.all_package_names()),
set(mock_db.all_package_names()))
packages = list(not_in_mock)[:max_packages]
# Create all the packages that are not in mock.
installed_specs = {}
for pkg_name in not_in_mock:
for pkg_name in packages:
spec = spack.db.get(pkg_name).spec
# If a spec fails to concretize, just skip it. If it is a
@ -135,7 +142,7 @@ def test_handle_unknown_package(self):
except:
continue
self.layout.make_path_for_spec(spec)
self.layout.create_install_directory(spec)
installed_specs[spec] = self.layout.path_for_spec(spec)
tmp = spack.db
@ -144,12 +151,29 @@ def test_handle_unknown_package(self):
# Now check that even without the package files, we know
# enough to read a spec from the spec file.
for spec, path in installed_specs.items():
spec_from_file = self.layout.read_spec(join_path(path, '.spec'))
spec_from_file = self.layout.read_spec(
join_path(path, '.spack', 'spec.yaml'))
# To satisfy these conditions, directory layouts need to
# read in concrete specs from their install dirs somehow.
self.assertEqual(path, self.layout.path_for_spec(spec_from_file))
self.assertEqual(spec, spec_from_file)
self.assertEqual(spec.dep_hash(), spec_from_file.dep_hash())
self.assertTrue(spec.eq_dag(spec_from_file))
self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
spack.db = tmp
def test_find(self):
"""Test that finding specs within an install layout works."""
packages = list(spack.db.all_packages())[:max_packages]
installed_specs = {}
for pkg in packages:
spec = pkg.spec.concretized()
installed_specs[spec.name] = spec
self.layout.create_install_directory(spec)
found_specs = dict((s.name, s) for s in self.layout.all_specs())
for name, spec in installed_specs.items():
self.assertTrue(name in found_specs)
self.assertTrue(found_specs[name].eq_dag(spec))

View file

@ -26,7 +26,6 @@
import unittest
import shutil
import tempfile
from contextlib import closing
from llnl.util.filesystem import *
@ -109,7 +108,7 @@ def test_fetch_master(self):
})
def ztest_fetch_branch(self):
def test_fetch_branch(self):
"""Test fetching a branch."""
self.try_fetch(self.repo.branch, self.repo.branch_file, {
'git' : self.repo.path,
@ -117,7 +116,7 @@ def ztest_fetch_branch(self):
})
def ztest_fetch_tag(self):
def test_fetch_tag(self):
"""Test fetching a tag."""
self.try_fetch(self.repo.tag, self.repo.tag_file, {
'git' : self.repo.path,
@ -125,7 +124,7 @@ def ztest_fetch_tag(self):
})
def ztest_fetch_commit(self):
def test_fetch_commit(self):
"""Test fetching a particular commit."""
self.try_fetch(self.repo.r1, self.repo.r1_file, {
'git' : self.repo.path,

View file

@ -24,9 +24,6 @@
##############################################################################
import os
import unittest
import shutil
import tempfile
from contextlib import closing
from llnl.util.filesystem import *

View file

@ -26,14 +26,13 @@
import unittest
import shutil
import tempfile
from contextlib import closing
from llnl.util.filesystem import *
import spack
from spack.stage import Stage
from spack.fetch_strategy import URLFetchStrategy
from spack.directory_layout import SpecHashDirectoryLayout
from spack.directory_layout import YamlDirectoryLayout
from spack.util.executable import which
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockArchive
@ -55,7 +54,7 @@ def setUp(self):
# installed pkgs and mock packages.
self.tmpdir = tempfile.mkdtemp()
self.orig_layout = spack.install_layout
spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)
spack.install_layout = YamlDirectoryLayout(self.tmpdir)
def tearDown(self):
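Editor's note: swapping SpecHashDirectoryLayout for YamlDirectoryLayout in the fixture mirrors the production change. A sketch of the fixture pattern, assuming the root path is the only required constructor argument:

    import shutil
    import tempfile
    import unittest

    from spack.directory_layout import YamlDirectoryLayout

    class LayoutFixture(unittest.TestCase):
        def setUp(self):
            self.tmpdir = tempfile.mkdtemp()
            self.layout = YamlDirectoryLayout(self.tmpdir)

        def tearDown(self):
            shutil.rmtree(self.tmpdir, ignore_errors=True)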

View file

@ -26,7 +26,6 @@
import unittest
import shutil
import tempfile
from contextlib import closing
from llnl.util.filesystem import *
from llnl.util.link_tree import LinkTree

View file

@ -0,0 +1,125 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
Tests for Spack's built-in parallel make support.
This just tests whether the right args are getting passed to make.
"""
import os
import unittest
import tempfile
import shutil
from llnl.util.filesystem import *
from spack.util.environment import path_put_first
from spack.build_environment import MakeExecutable
class MakeExecutableTest(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
make_exe = join_path(self.tmpdir, 'make')
with open(make_exe, 'w') as f:
f.write('#!/bin/sh\n')
f.write('echo "$@"')
os.chmod(make_exe, 0700)
path_put_first('PATH', [self.tmpdir])
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_make_normal(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(return_output=True).strip(), '-j8')
self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
def test_make_explicit(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
def test_make_one_job(self):
make = MakeExecutable('make', 1)
self.assertEqual(make(return_output=True).strip(), '')
self.assertEqual(make('install', return_output=True).strip(), 'install')
def test_make_parallel_false(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=False, return_output=True).strip(), '')
self.assertEqual(make('install', parallel=False, return_output=True).strip(), 'install')
def test_make_parallel_disabled(self):
make = MakeExecutable('make', 8)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
self.assertEqual(make(return_output=True).strip(), '')
self.assertEqual(make('install', return_output=True).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
self.assertEqual(make(return_output=True).strip(), '')
self.assertEqual(make('install', return_output=True).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
self.assertEqual(make(return_output=True).strip(), '-j8')
self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
self.assertEqual(make(return_output=True).strip(), '-j8')
self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
def test_make_parallel_precedence(self):
make = MakeExecutable('make', 8)
# These should work
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
self.assertEqual(make(parallel=True, return_output=True).strip(), '')
self.assertEqual(make('install', parallel=True, return_output=True).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
self.assertEqual(make(parallel=True, return_output=True).strip(), '')
self.assertEqual(make('install', parallel=True, return_output=True).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
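Editor's note: taken together, these assertions pin down the jobs logic: -jN when N > 1, nothing when N == 1 or parallel=False, and SPACK_NO_PARALLEL_MAKE disables parallelism only for the literal values 'true' and '1'. A behavioral sketch consistent with the tests, not the shipped implementation:

    import os

    def jobs_flags(jobs, parallel=True):
        # Only the literal strings 'true' and '1' disable parallel make;
        # 'false' and arbitrary values are ignored, per the tests above.
        env_disabled = os.environ.get('SPACK_NO_PARALLEL_MAKE') in ('true', '1')
        if jobs > 1 and parallel and not env_disabled:
            return ['-j%d' % jobs]
        return []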

View file

@ -31,29 +31,40 @@
def set_pkg_dep(pkg, spec):
"""Alters dependence information for a pacakge.
"""Alters dependence information for a package.
Use this to mock up constraints.
"""
spec = Spec(spec)
spack.db.get(pkg).dependencies[spec.name] = spec
spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec }
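Editor's note: the new value shape, a dict mapping the dependent spec to the dependency constraint, matches the conditional-dependency tables introduced by this merge. Typical use in a test might look like this (package names taken from the mock database; the final check is illustrative):

    # Pin a mock package's dependency, then normalize a spec against it.
    set_pkg_dep('mpileaks', 'callpath@0.8')
    spec = Spec('mpileaks')
    spec.normalize()
    # After normalization, callpath@0.8 should appear in spec's DAG.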
class MockPackagesTest(unittest.TestCase):
def setUp(self):
def initmock(self):
# Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with
# real ones.
self.real_db = spack.db
spack.db = PackageDB(spack.mock_packages_path)
self.real_scopes = spack.config._scopes
spack.config._scopes = {
'site' : spack.mock_site_config,
'user' : spack.mock_user_config }
spack.config.clear_config_caches()
self.real_scopes = spack.config.config_scopes
spack.config.config_scopes = [
('site', spack.mock_site_config),
('user', spack.mock_user_config)]
def cleanmock(self):
"""Restore the real packages path after any test."""
spack.db = self.real_db
spack.config.config_scopes = self.real_scopes
spack.config.clear_config_caches()
def setUp(self):
self.initmock()
def tearDown(self):
"""Restore the real packages path after any test."""
spack.db = self.real_db
spack.config._scopes = self.real_scopes
self.cleanmock()
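Editor's note: splitting the fixture into initmock/cleanmock lets subclasses add their own setup without losing the mock database and config scopes. A hypothetical subclass:

    class ExtraFixtureTest(MockPackagesTest):
        def setUp(self):
            self.initmock()   # mock package db + mock config scopes
            self.extra = []   # test-specific state

        def tearDown(self):
            self.cleanmock()  # restore the real db and scopes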

View file

@ -24,7 +24,6 @@
##############################################################################
import os
import shutil
from contextlib import closing
from llnl.util.filesystem import *
@ -67,7 +66,7 @@ def __init__(self):
with working_dir(self.path):
configure = join_path(self.path, 'configure')
with closing(open(configure, 'w')) as cfg_file:
with open(configure, 'w') as cfg_file:
cfg_file.write(
"#!/bin/sh\n"
"prefix=$(echo $1 | sed 's/--prefix=//')\n"

View file

@ -0,0 +1,94 @@
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
import spack
from spack.spec import Spec, CompilerSpec
from spack.test.mock_packages_test import *
class ConcretizeTest(MockPackagesTest):
def check_normalize(self, spec_string, expected):
spec = Spec(spec_string)
spec.normalize()
self.assertEqual(spec, expected)
self.assertTrue(spec.eq_dag(expected))
def test_normalize_simple_conditionals(self):
self.check_normalize('optional-dep-test', Spec('optional-dep-test'))
self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a'))
self.check_normalize('optional-dep-test+a',
Spec('optional-dep-test+a', Spec('a')))
self.check_normalize('optional-dep-test@1.1',
Spec('optional-dep-test@1.1', Spec('b')))
self.check_normalize('optional-dep-test%intel',
Spec('optional-dep-test%intel', Spec('c')))
self.check_normalize('optional-dep-test%intel@64.1',
Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test%intel@64.1.2',
Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test%clang@35',
Spec('optional-dep-test%clang@35', Spec('e')))
def test_multiple_conditionals(self):
self.check_normalize('optional-dep-test+a@1.1',
Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
self.check_normalize('optional-dep-test+a%intel',
Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
self.check_normalize('optional-dep-test@1.1%intel',
Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a',
Spec('optional-dep-test@1.1%intel@64.1.2+a',
Spec('b'), Spec('a'), Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test@1.1%clang@36.5+a',
Spec('optional-dep-test@1.1%clang@36.5+a',
Spec('b'), Spec('a'), Spec('e')))
def test_chained_mpi(self):
self.check_normalize('optional-dep-test-2+mpi',
Spec('optional-dep-test-2+mpi',
Spec('optional-dep-test+mpi',
Spec('mpi'))))
def test_transitive_chain(self):
# Each of these dependencies comes from a conditional
# dependency on another. This requires iterating to evaluate
# the whole chain.
self.check_normalize('optional-dep-test+f',
Spec('optional-dep-test+f', Spec('f'), Spec('g'), Spec('mpi')))
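Editor's note: each expected dependency in these tests is switched on by a constraint on the parent spec (version, variant, or compiler). The mock package itself is not shown in this diff; a hypothetical declaration consistent with a few of the expectations might look like:

    from spack import *

    class OptionalDepTest(Package):
        """Hypothetical mock package with constraint-activated dependencies."""
        homepage = 'http://www.example.com'
        url = 'http://www.example.com/odt-1.0.tar.gz'

        version('1.0', '0123456789abcdef0123456789abcdef')  # placeholder checksum
        version('1.1', 'fedcba9876543210fedcba9876543210')  # placeholder checksum

        variant('a', default=False, description='Pull in dependency a')

        depends_on('a', when='+a')
        depends_on('b', when='@1.1')
        depends_on('c', when='%intel')

        def install(self, spec, prefix):
            pass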

Some files were not shown because too many files have changed in this diff.