External: add macholib and altgraph needed to relocate Mach-o binaries on Linux (#12909)
This commit is contained in:
parent
90236bc9f5
commit
321e956fa9
26 changed files with 5433 additions and 0 deletions
|
@ -83,3 +83,11 @@ PackageLicenseDeclared: MIT
|
||||||
PackageName: six
|
PackageName: six
|
||||||
PackageHomePage: https://pypi.python.org/pypi/six
|
PackageHomePage: https://pypi.python.org/pypi/six
|
||||||
PackageLicenseDeclared: MIT
|
PackageLicenseDeclared: MIT
|
||||||
|
|
||||||
|
PackageName: macholib
|
||||||
|
PackageHomePage: https://macholib.readthedocs.io/en/latest/index.html
|
||||||
|
PackageLicenseDeclared: MIT
|
||||||
|
|
||||||
|
PackageName: altgraph
|
||||||
|
PackageHomePage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||||
|
PackageLicenseDeclared: MIT
|
||||||
|
|
14
lib/spack/external/__init__.py
vendored
14
lib/spack/external/__init__.py
vendored
|
@ -119,4 +119,18 @@
|
||||||
* Homepage: https://pypi.python.org/pypi/six
|
* Homepage: https://pypi.python.org/pypi/six
|
||||||
* Usage: Python 2 and 3 compatibility utilities.
|
* Usage: Python 2 and 3 compatibility utilities.
|
||||||
* Version: 1.11.0
|
* Version: 1.11.0
|
||||||
|
|
||||||
|
macholib
|
||||||
|
--------
|
||||||
|
|
||||||
|
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
|
||||||
|
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
|
||||||
|
* Version: 1.12
|
||||||
|
|
||||||
|
altgraph
|
||||||
|
--------
|
||||||
|
|
||||||
|
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
|
||||||
|
* Usage: dependency of macholib
|
||||||
|
* Version: 0.16.1
|
||||||
"""
|
"""
|
||||||
|
|
309
lib/spack/external/altgraph/Dot.py
vendored
Normal file
309
lib/spack/external/altgraph/Dot.py
vendored
Normal file
|
@ -0,0 +1,309 @@
|
||||||
|
'''
|
||||||
|
altgraph.Dot - Interface to the dot language
|
||||||
|
============================================
|
||||||
|
|
||||||
|
The :py:mod:`~altgraph.Dot` module provides a simple interface to the
|
||||||
|
file format used in the
|
||||||
|
`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||||
|
program. The module is intended to offload the most tedious part of the process
|
||||||
|
(the **dot** file generation) while transparently exposing most of its
|
||||||
|
features.
|
||||||
|
|
||||||
|
To display the graphs or to generate image files the
|
||||||
|
`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||||
|
package needs to be installed on the system, moreover the :command:`dot` and
|
||||||
|
:command:`dotty` programs must be accesible in the program path so that they
|
||||||
|
can be ran from processes spawned within the module.
|
||||||
|
|
||||||
|
Example usage
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Here is a typical usage::
|
||||||
|
|
||||||
|
from altgraph import Graph, Dot
|
||||||
|
|
||||||
|
# create a graph
|
||||||
|
edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
|
||||||
|
graph = Graph.Graph(edges)
|
||||||
|
|
||||||
|
# create a dot representation of the graph
|
||||||
|
dot = Dot.Dot(graph)
|
||||||
|
|
||||||
|
# display the graph
|
||||||
|
dot.display()
|
||||||
|
|
||||||
|
# save the dot representation into the mydot.dot file
|
||||||
|
dot.save_dot(file_name='mydot.dot')
|
||||||
|
|
||||||
|
# save dot file as gif image into the graph.gif file
|
||||||
|
dot.save_img(file_name='graph', file_type='gif')
|
||||||
|
|
||||||
|
Directed graph and non-directed graph
|
||||||
|
-------------------------------------
|
||||||
|
|
||||||
|
Dot class can use for both directed graph and non-directed graph
|
||||||
|
by passing ``graphtype`` parameter.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
# create directed graph(default)
|
||||||
|
dot = Dot.Dot(graph, graphtype="digraph")
|
||||||
|
|
||||||
|
# create non-directed graph
|
||||||
|
dot = Dot.Dot(graph, graphtype="graph")
|
||||||
|
|
||||||
|
Customizing the output
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
The graph drawing process may be customized by passing
|
||||||
|
valid :command:`dot` parameters for the nodes and edges. For a list of all
|
||||||
|
parameters see the `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||||
|
documentation.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
# customizing the way the overall graph is drawn
|
||||||
|
dot.style(size='10,10', rankdir='RL', page='5, 5' , ranksep=0.75)
|
||||||
|
|
||||||
|
# customizing node drawing
|
||||||
|
dot.node_style(1, label='BASE_NODE',shape='box', color='blue' )
|
||||||
|
dot.node_style(2, style='filled', fillcolor='red')
|
||||||
|
|
||||||
|
# customizing edge drawing
|
||||||
|
dot.edge_style(1, 2, style='dotted')
|
||||||
|
dot.edge_style(3, 5, arrowhead='dot', label='binds', labelangle='90')
|
||||||
|
dot.edge_style(4, 5, arrowsize=2, style='bold')
|
||||||
|
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
|
||||||
|
display all graphics styles. To verify the output save it to an image file
|
||||||
|
and look at it that way.
|
||||||
|
|
||||||
|
Valid attributes
|
||||||
|
----------------
|
||||||
|
|
||||||
|
- dot styles, passed via the :py:meth:`Dot.style` method::
|
||||||
|
|
||||||
|
rankdir = 'LR' (draws the graph horizontally, left to right)
|
||||||
|
ranksep = number (rank separation in inches)
|
||||||
|
|
||||||
|
- node attributes, passed via the :py:meth:`Dot.node_style` method::
|
||||||
|
|
||||||
|
style = 'filled' | 'invisible' | 'diagonals' | 'rounded'
|
||||||
|
shape = 'box' | 'ellipse' | 'circle' | 'point' | 'triangle'
|
||||||
|
|
||||||
|
- edge attributes, passed via the :py:meth:`Dot.edge_style` method::
|
||||||
|
|
||||||
|
style = 'dashed' | 'dotted' | 'solid' | 'invis' | 'bold'
|
||||||
|
arrowhead = 'box' | 'crow' | 'diamond' | 'dot' | 'inv' | 'none'
|
||||||
|
| 'tee' | 'vee'
|
||||||
|
weight = number (the larger the number the closer the nodes will be)
|
||||||
|
|
||||||
|
- valid `graphviz colors
|
||||||
|
<http://www.research.att.com/~erg/graphviz/info/colors.html>`_
|
||||||
|
|
||||||
|
- for more details on how to control the graph drawing process see the
|
||||||
|
`graphviz reference
|
||||||
|
<http://www.research.att.com/sw/tools/graphviz/refs.html>`_.
|
||||||
|
'''
|
||||||
|
import os
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from altgraph import GraphError
|
||||||
|
|
||||||
|
|
||||||
|
class Dot(object):
|
||||||
|
'''
|
||||||
|
A class providing a **graphviz** (dot language) representation
|
||||||
|
allowing a fine grained control over how the graph is being
|
||||||
|
displayed.
|
||||||
|
|
||||||
|
If the :command:`dot` and :command:`dotty` programs are not in the current
|
||||||
|
system path their location needs to be specified in the contructor.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, graph=None, nodes=None, edgefn=None, nodevisitor=None,
|
||||||
|
edgevisitor=None, name="G", dot='dot', dotty='dotty',
|
||||||
|
neato='neato', graphtype="digraph"):
|
||||||
|
'''
|
||||||
|
Initialization.
|
||||||
|
'''
|
||||||
|
self.name, self.attr = name, {}
|
||||||
|
|
||||||
|
assert graphtype in ['graph', 'digraph']
|
||||||
|
self.type = graphtype
|
||||||
|
|
||||||
|
self.temp_dot = "tmp_dot.dot"
|
||||||
|
self.temp_neo = "tmp_neo.dot"
|
||||||
|
|
||||||
|
self.dot, self.dotty, self.neato = dot, dotty, neato
|
||||||
|
|
||||||
|
# self.nodes: node styles
|
||||||
|
# self.edges: edge styles
|
||||||
|
self.nodes, self.edges = {}, {}
|
||||||
|
|
||||||
|
if graph is not None and nodes is None:
|
||||||
|
nodes = graph
|
||||||
|
if graph is not None and edgefn is None:
|
||||||
|
def edgefn(node, graph=graph):
|
||||||
|
return graph.out_nbrs(node)
|
||||||
|
if nodes is None:
|
||||||
|
nodes = ()
|
||||||
|
|
||||||
|
seen = set()
|
||||||
|
for node in nodes:
|
||||||
|
if nodevisitor is None:
|
||||||
|
style = {}
|
||||||
|
else:
|
||||||
|
style = nodevisitor(node)
|
||||||
|
if style is not None:
|
||||||
|
self.nodes[node] = {}
|
||||||
|
self.node_style(node, **style)
|
||||||
|
seen.add(node)
|
||||||
|
if edgefn is not None:
|
||||||
|
for head in seen:
|
||||||
|
for tail in (n for n in edgefn(head) if n in seen):
|
||||||
|
if edgevisitor is None:
|
||||||
|
edgestyle = {}
|
||||||
|
else:
|
||||||
|
edgestyle = edgevisitor(head, tail)
|
||||||
|
if edgestyle is not None:
|
||||||
|
if head not in self.edges:
|
||||||
|
self.edges[head] = {}
|
||||||
|
self.edges[head][tail] = {}
|
||||||
|
self.edge_style(head, tail, **edgestyle)
|
||||||
|
|
||||||
|
def style(self, **attr):
|
||||||
|
'''
|
||||||
|
Changes the overall style
|
||||||
|
'''
|
||||||
|
self.attr = attr
|
||||||
|
|
||||||
|
def display(self, mode='dot'):
|
||||||
|
'''
|
||||||
|
Displays the current graph via dotty
|
||||||
|
'''
|
||||||
|
|
||||||
|
if mode == 'neato':
|
||||||
|
self.save_dot(self.temp_neo)
|
||||||
|
neato_cmd = "%s -o %s %s" % (
|
||||||
|
self.neato, self.temp_dot, self.temp_neo)
|
||||||
|
os.system(neato_cmd)
|
||||||
|
else:
|
||||||
|
self.save_dot(self.temp_dot)
|
||||||
|
|
||||||
|
plot_cmd = "%s %s" % (self.dotty, self.temp_dot)
|
||||||
|
os.system(plot_cmd)
|
||||||
|
|
||||||
|
def node_style(self, node, **kwargs):
|
||||||
|
'''
|
||||||
|
Modifies a node style to the dot representation.
|
||||||
|
'''
|
||||||
|
if node not in self.edges:
|
||||||
|
self.edges[node] = {}
|
||||||
|
self.nodes[node] = kwargs
|
||||||
|
|
||||||
|
def all_node_style(self, **kwargs):
|
||||||
|
'''
|
||||||
|
Modifies all node styles
|
||||||
|
'''
|
||||||
|
for node in self.nodes:
|
||||||
|
self.node_style(node, **kwargs)
|
||||||
|
|
||||||
|
def edge_style(self, head, tail, **kwargs):
|
||||||
|
'''
|
||||||
|
Modifies an edge style to the dot representation.
|
||||||
|
'''
|
||||||
|
if tail not in self.nodes:
|
||||||
|
raise GraphError("invalid node %s" % (tail,))
|
||||||
|
|
||||||
|
try:
|
||||||
|
if tail not in self.edges[head]:
|
||||||
|
self.edges[head][tail] = {}
|
||||||
|
self.edges[head][tail] = kwargs
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError("invalid edge %s -> %s " % (head, tail))
|
||||||
|
|
||||||
|
def iterdot(self):
|
||||||
|
# write graph title
|
||||||
|
if self.type == 'digraph':
|
||||||
|
yield 'digraph %s {\n' % (self.name,)
|
||||||
|
elif self.type == 'graph':
|
||||||
|
yield 'graph %s {\n' % (self.name,)
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise GraphError("unsupported graphtype %s" % (self.type,))
|
||||||
|
|
||||||
|
# write overall graph attributes
|
||||||
|
for attr_name, attr_value in sorted(self.attr.items()):
|
||||||
|
yield '%s="%s";' % (attr_name, attr_value)
|
||||||
|
yield '\n'
|
||||||
|
|
||||||
|
# some reusable patterns
|
||||||
|
cpatt = '%s="%s",' # to separate attributes
|
||||||
|
epatt = '];\n' # to end attributes
|
||||||
|
|
||||||
|
# write node attributes
|
||||||
|
for node_name, node_attr in sorted(self.nodes.items()):
|
||||||
|
yield '\t"%s" [' % (node_name,)
|
||||||
|
for attr_name, attr_value in sorted(node_attr.items()):
|
||||||
|
yield cpatt % (attr_name, attr_value)
|
||||||
|
yield epatt
|
||||||
|
|
||||||
|
# write edge attributes
|
||||||
|
for head in sorted(self.edges):
|
||||||
|
for tail in sorted(self.edges[head]):
|
||||||
|
if self.type == 'digraph':
|
||||||
|
yield '\t"%s" -> "%s" [' % (head, tail)
|
||||||
|
else:
|
||||||
|
yield '\t"%s" -- "%s" [' % (head, tail)
|
||||||
|
for attr_name, attr_value in \
|
||||||
|
sorted(self.edges[head][tail].items()):
|
||||||
|
yield cpatt % (attr_name, attr_value)
|
||||||
|
yield epatt
|
||||||
|
|
||||||
|
# finish file
|
||||||
|
yield '}\n'
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return self.iterdot()
|
||||||
|
|
||||||
|
def save_dot(self, file_name=None):
|
||||||
|
'''
|
||||||
|
Saves the current graph representation into a file
|
||||||
|
'''
|
||||||
|
|
||||||
|
if not file_name:
|
||||||
|
warnings.warn(DeprecationWarning, "always pass a file_name")
|
||||||
|
file_name = self.temp_dot
|
||||||
|
|
||||||
|
with open(file_name, "w") as fp:
|
||||||
|
for chunk in self.iterdot():
|
||||||
|
fp.write(chunk)
|
||||||
|
|
||||||
|
def save_img(self, file_name=None, file_type="gif", mode='dot'):
|
||||||
|
'''
|
||||||
|
Saves the dot file as an image file
|
||||||
|
'''
|
||||||
|
|
||||||
|
if not file_name:
|
||||||
|
warnings.warn(DeprecationWarning, "always pass a file_name")
|
||||||
|
file_name = "out"
|
||||||
|
|
||||||
|
if mode == 'neato':
|
||||||
|
self.save_dot(self.temp_neo)
|
||||||
|
neato_cmd = "%s -o %s %s" % (
|
||||||
|
self.neato, self.temp_dot, self.temp_neo)
|
||||||
|
os.system(neato_cmd)
|
||||||
|
plot_cmd = self.dot
|
||||||
|
else:
|
||||||
|
self.save_dot(self.temp_dot)
|
||||||
|
plot_cmd = self.dot
|
||||||
|
|
||||||
|
file_name = "%s.%s" % (file_name, file_type)
|
||||||
|
create_cmd = "%s -T%s %s -o %s" % (
|
||||||
|
plot_cmd, file_type, self.temp_dot, file_name)
|
||||||
|
os.system(create_cmd)
|
680
lib/spack/external/altgraph/Graph.py
vendored
Normal file
680
lib/spack/external/altgraph/Graph.py
vendored
Normal file
|
@ -0,0 +1,680 @@
|
||||||
|
"""
|
||||||
|
altgraph.Graph - Base Graph class
|
||||||
|
=================================
|
||||||
|
|
||||||
|
..
|
||||||
|
#--Version 2.1
|
||||||
|
#--Bob Ippolito October, 2004
|
||||||
|
|
||||||
|
#--Version 2.0
|
||||||
|
#--Istvan Albert June, 2004
|
||||||
|
|
||||||
|
#--Version 1.0
|
||||||
|
#--Nathan Denny, May 27, 1999
|
||||||
|
"""
|
||||||
|
|
||||||
|
from altgraph import GraphError
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
|
||||||
|
class Graph(object):
|
||||||
|
"""
|
||||||
|
The Graph class represents a directed graph with *N* nodes and *E* edges.
|
||||||
|
|
||||||
|
Naming conventions:
|
||||||
|
|
||||||
|
- the prefixes such as *out*, *inc* and *all* will refer to methods
|
||||||
|
that operate on the outgoing, incoming or all edges of that node.
|
||||||
|
|
||||||
|
For example: :py:meth:`inc_degree` will refer to the degree of the node
|
||||||
|
computed over the incoming edges (the number of neighbours linking to
|
||||||
|
the node).
|
||||||
|
|
||||||
|
- the prefixes such as *forw* and *back* will refer to the
|
||||||
|
orientation of the edges used in the method with respect to the node.
|
||||||
|
|
||||||
|
For example: :py:meth:`forw_bfs` will start at the node then use the
|
||||||
|
outgoing edges to traverse the graph (goes forward).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, edges=None):
|
||||||
|
"""
|
||||||
|
Initialization
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.next_edge = 0
|
||||||
|
self.nodes, self.edges = {}, {}
|
||||||
|
self.hidden_edges, self.hidden_nodes = {}, {}
|
||||||
|
|
||||||
|
if edges is not None:
|
||||||
|
for item in edges:
|
||||||
|
if len(item) == 2:
|
||||||
|
head, tail = item
|
||||||
|
self.add_edge(head, tail)
|
||||||
|
elif len(item) == 3:
|
||||||
|
head, tail, data = item
|
||||||
|
self.add_edge(head, tail, data)
|
||||||
|
else:
|
||||||
|
raise GraphError("Cannot create edge from %s" % (item,))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '<Graph: %d nodes, %d edges>' % (
|
||||||
|
self.number_of_nodes(), self.number_of_edges())
|
||||||
|
|
||||||
|
def add_node(self, node, node_data=None):
|
||||||
|
"""
|
||||||
|
Adds a new node to the graph. Arbitrary data can be attached to the
|
||||||
|
node via the node_data parameter. Adding the same node twice will be
|
||||||
|
silently ignored.
|
||||||
|
|
||||||
|
The node must be a hashable value.
|
||||||
|
"""
|
||||||
|
#
|
||||||
|
# the nodes will contain tuples that will store incoming edges,
|
||||||
|
# outgoing edges and data
|
||||||
|
#
|
||||||
|
# index 0 -> incoming edges
|
||||||
|
# index 1 -> outgoing edges
|
||||||
|
|
||||||
|
if node in self.hidden_nodes:
|
||||||
|
# Node is present, but hidden
|
||||||
|
return
|
||||||
|
|
||||||
|
if node not in self.nodes:
|
||||||
|
self.nodes[node] = ([], [], node_data)
|
||||||
|
|
||||||
|
def add_edge(self, head_id, tail_id, edge_data=1, create_nodes=True):
|
||||||
|
"""
|
||||||
|
Adds a directed edge going from head_id to tail_id.
|
||||||
|
Arbitrary data can be attached to the edge via edge_data.
|
||||||
|
It may create the nodes if adding edges between nonexisting ones.
|
||||||
|
|
||||||
|
:param head_id: head node
|
||||||
|
:param tail_id: tail node
|
||||||
|
:param edge_data: (optional) data attached to the edge
|
||||||
|
:param create_nodes: (optional) creates the head_id or tail_id
|
||||||
|
node in case they did not exist
|
||||||
|
"""
|
||||||
|
# shorcut
|
||||||
|
edge = self.next_edge
|
||||||
|
|
||||||
|
# add nodes if on automatic node creation
|
||||||
|
if create_nodes:
|
||||||
|
self.add_node(head_id)
|
||||||
|
self.add_node(tail_id)
|
||||||
|
|
||||||
|
# update the corresponding incoming and outgoing lists in the nodes
|
||||||
|
# index 0 -> incoming edges
|
||||||
|
# index 1 -> outgoing edges
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.nodes[tail_id][0].append(edge)
|
||||||
|
self.nodes[head_id][1].append(edge)
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid nodes %s -> %s' % (head_id, tail_id))
|
||||||
|
|
||||||
|
# store edge information
|
||||||
|
self.edges[edge] = (head_id, tail_id, edge_data)
|
||||||
|
|
||||||
|
self.next_edge += 1
|
||||||
|
|
||||||
|
def hide_edge(self, edge):
|
||||||
|
"""
|
||||||
|
Hides an edge from the graph. The edge may be unhidden at some later
|
||||||
|
time.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
head_id, tail_id, edge_data = \
|
||||||
|
self.hidden_edges[edge] = self.edges[edge]
|
||||||
|
self.nodes[tail_id][0].remove(edge)
|
||||||
|
self.nodes[head_id][1].remove(edge)
|
||||||
|
del self.edges[edge]
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid edge %s' % edge)
|
||||||
|
|
||||||
|
def hide_node(self, node):
|
||||||
|
"""
|
||||||
|
Hides a node from the graph. The incoming and outgoing edges of the
|
||||||
|
node will also be hidden. The node may be unhidden at some later time.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
all_edges = self.all_edges(node)
|
||||||
|
self.hidden_nodes[node] = (self.nodes[node], all_edges)
|
||||||
|
for edge in all_edges:
|
||||||
|
self.hide_edge(edge)
|
||||||
|
del self.nodes[node]
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid node %s' % node)
|
||||||
|
|
||||||
|
def restore_node(self, node):
|
||||||
|
"""
|
||||||
|
Restores a previously hidden node back into the graph and restores
|
||||||
|
all of its incoming and outgoing edges.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
self.nodes[node], all_edges = self.hidden_nodes[node]
|
||||||
|
for edge in all_edges:
|
||||||
|
self.restore_edge(edge)
|
||||||
|
del self.hidden_nodes[node]
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid node %s' % node)
|
||||||
|
|
||||||
|
def restore_edge(self, edge):
|
||||||
|
"""
|
||||||
|
Restores a previously hidden edge back into the graph.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
head_id, tail_id, data = self.hidden_edges[edge]
|
||||||
|
self.nodes[tail_id][0].append(edge)
|
||||||
|
self.nodes[head_id][1].append(edge)
|
||||||
|
self.edges[edge] = head_id, tail_id, data
|
||||||
|
del self.hidden_edges[edge]
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid edge %s' % edge)
|
||||||
|
|
||||||
|
def restore_all_edges(self):
|
||||||
|
"""
|
||||||
|
Restores all hidden edges.
|
||||||
|
"""
|
||||||
|
for edge in list(self.hidden_edges.keys()):
|
||||||
|
try:
|
||||||
|
self.restore_edge(edge)
|
||||||
|
except GraphError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def restore_all_nodes(self):
|
||||||
|
"""
|
||||||
|
Restores all hidden nodes.
|
||||||
|
"""
|
||||||
|
for node in list(self.hidden_nodes.keys()):
|
||||||
|
self.restore_node(node)
|
||||||
|
|
||||||
|
def __contains__(self, node):
|
||||||
|
"""
|
||||||
|
Test whether a node is in the graph
|
||||||
|
"""
|
||||||
|
return node in self.nodes
|
||||||
|
|
||||||
|
def edge_by_id(self, edge):
|
||||||
|
"""
|
||||||
|
Returns the edge that connects the head_id and tail_id nodes
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
head, tail, data = self.edges[edge]
|
||||||
|
except KeyError:
|
||||||
|
head, tail = None, None
|
||||||
|
raise GraphError('Invalid edge %s' % edge)
|
||||||
|
|
||||||
|
return (head, tail)
|
||||||
|
|
||||||
|
def edge_by_node(self, head, tail):
|
||||||
|
"""
|
||||||
|
Returns the edge that connects the head_id and tail_id nodes
|
||||||
|
"""
|
||||||
|
for edge in self.out_edges(head):
|
||||||
|
if self.tail(edge) == tail:
|
||||||
|
return edge
|
||||||
|
return None
|
||||||
|
|
||||||
|
def number_of_nodes(self):
|
||||||
|
"""
|
||||||
|
Returns the number of nodes
|
||||||
|
"""
|
||||||
|
return len(self.nodes)
|
||||||
|
|
||||||
|
def number_of_edges(self):
|
||||||
|
"""
|
||||||
|
Returns the number of edges
|
||||||
|
"""
|
||||||
|
return len(self.edges)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""
|
||||||
|
Iterates over all nodes in the graph
|
||||||
|
"""
|
||||||
|
return iter(self.nodes)
|
||||||
|
|
||||||
|
def node_list(self):
|
||||||
|
"""
|
||||||
|
Return a list of the node ids for all visible nodes in the graph.
|
||||||
|
"""
|
||||||
|
return list(self.nodes.keys())
|
||||||
|
|
||||||
|
def edge_list(self):
|
||||||
|
"""
|
||||||
|
Returns an iterator for all visible nodes in the graph.
|
||||||
|
"""
|
||||||
|
return list(self.edges.keys())
|
||||||
|
|
||||||
|
def number_of_hidden_edges(self):
|
||||||
|
"""
|
||||||
|
Returns the number of hidden edges
|
||||||
|
"""
|
||||||
|
return len(self.hidden_edges)
|
||||||
|
|
||||||
|
def number_of_hidden_nodes(self):
|
||||||
|
"""
|
||||||
|
Returns the number of hidden nodes
|
||||||
|
"""
|
||||||
|
return len(self.hidden_nodes)
|
||||||
|
|
||||||
|
def hidden_node_list(self):
|
||||||
|
"""
|
||||||
|
Returns the list with the hidden nodes
|
||||||
|
"""
|
||||||
|
return list(self.hidden_nodes.keys())
|
||||||
|
|
||||||
|
def hidden_edge_list(self):
|
||||||
|
"""
|
||||||
|
Returns a list with the hidden edges
|
||||||
|
"""
|
||||||
|
return list(self.hidden_edges.keys())
|
||||||
|
|
||||||
|
def describe_node(self, node):
|
||||||
|
"""
|
||||||
|
return node, node data, outgoing edges, incoming edges for node
|
||||||
|
"""
|
||||||
|
incoming, outgoing, data = self.nodes[node]
|
||||||
|
return node, data, outgoing, incoming
|
||||||
|
|
||||||
|
def describe_edge(self, edge):
|
||||||
|
"""
|
||||||
|
return edge, edge data, head, tail for edge
|
||||||
|
"""
|
||||||
|
head, tail, data = self.edges[edge]
|
||||||
|
return edge, data, head, tail
|
||||||
|
|
||||||
|
def node_data(self, node):
|
||||||
|
"""
|
||||||
|
Returns the data associated with a node
|
||||||
|
"""
|
||||||
|
return self.nodes[node][2]
|
||||||
|
|
||||||
|
def edge_data(self, edge):
|
||||||
|
"""
|
||||||
|
Returns the data associated with an edge
|
||||||
|
"""
|
||||||
|
return self.edges[edge][2]
|
||||||
|
|
||||||
|
def update_edge_data(self, edge, edge_data):
|
||||||
|
"""
|
||||||
|
Replace the edge data for a specific edge
|
||||||
|
"""
|
||||||
|
self.edges[edge] = self.edges[edge][0:2] + (edge_data,)
|
||||||
|
|
||||||
|
def head(self, edge):
|
||||||
|
"""
|
||||||
|
Returns the node of the head of the edge.
|
||||||
|
"""
|
||||||
|
return self.edges[edge][0]
|
||||||
|
|
||||||
|
def tail(self, edge):
|
||||||
|
"""
|
||||||
|
Returns node of the tail of the edge.
|
||||||
|
"""
|
||||||
|
return self.edges[edge][1]
|
||||||
|
|
||||||
|
def out_nbrs(self, node):
|
||||||
|
"""
|
||||||
|
List of nodes connected by outgoing edges
|
||||||
|
"""
|
||||||
|
return [self.tail(n) for n in self.out_edges(node)]
|
||||||
|
|
||||||
|
def inc_nbrs(self, node):
|
||||||
|
"""
|
||||||
|
List of nodes connected by incoming edges
|
||||||
|
"""
|
||||||
|
return [self.head(n) for n in self.inc_edges(node)]
|
||||||
|
|
||||||
|
def all_nbrs(self, node):
|
||||||
|
"""
|
||||||
|
List of nodes connected by incoming and outgoing edges
|
||||||
|
"""
|
||||||
|
return list(dict.fromkeys(self.inc_nbrs(node) + self.out_nbrs(node)))
|
||||||
|
|
||||||
|
def out_edges(self, node):
|
||||||
|
"""
|
||||||
|
Returns a list of the outgoing edges
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return list(self.nodes[node][1])
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid node %s' % node)
|
||||||
|
|
||||||
|
def inc_edges(self, node):
|
||||||
|
"""
|
||||||
|
Returns a list of the incoming edges
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return list(self.nodes[node][0])
|
||||||
|
except KeyError:
|
||||||
|
raise GraphError('Invalid node %s' % node)
|
||||||
|
|
||||||
|
def all_edges(self, node):
|
||||||
|
"""
|
||||||
|
Returns a list of incoming and outging edges.
|
||||||
|
"""
|
||||||
|
return set(self.inc_edges(node) + self.out_edges(node))
|
||||||
|
|
||||||
|
def out_degree(self, node):
|
||||||
|
"""
|
||||||
|
Returns the number of outgoing edges
|
||||||
|
"""
|
||||||
|
return len(self.out_edges(node))
|
||||||
|
|
||||||
|
def inc_degree(self, node):
|
||||||
|
"""
|
||||||
|
Returns the number of incoming edges
|
||||||
|
"""
|
||||||
|
return len(self.inc_edges(node))
|
||||||
|
|
||||||
|
def all_degree(self, node):
|
||||||
|
"""
|
||||||
|
The total degree of a node
|
||||||
|
"""
|
||||||
|
return self.inc_degree(node) + self.out_degree(node)
|
||||||
|
|
||||||
|
def _topo_sort(self, forward=True):
|
||||||
|
"""
|
||||||
|
Topological sort.
|
||||||
|
|
||||||
|
Returns a list of nodes where the successors (based on outgoing and
|
||||||
|
incoming edges selected by the forward parameter) of any given node
|
||||||
|
appear in the sequence after that node.
|
||||||
|
"""
|
||||||
|
topo_list = []
|
||||||
|
queue = deque()
|
||||||
|
indeg = {}
|
||||||
|
|
||||||
|
# select the operation that will be performed
|
||||||
|
if forward:
|
||||||
|
get_edges = self.out_edges
|
||||||
|
get_degree = self.inc_degree
|
||||||
|
get_next = self.tail
|
||||||
|
else:
|
||||||
|
get_edges = self.inc_edges
|
||||||
|
get_degree = self.out_degree
|
||||||
|
get_next = self.head
|
||||||
|
|
||||||
|
for node in self.node_list():
|
||||||
|
degree = get_degree(node)
|
||||||
|
if degree:
|
||||||
|
indeg[node] = degree
|
||||||
|
else:
|
||||||
|
queue.append(node)
|
||||||
|
|
||||||
|
while queue:
|
||||||
|
curr_node = queue.popleft()
|
||||||
|
topo_list.append(curr_node)
|
||||||
|
for edge in get_edges(curr_node):
|
||||||
|
tail_id = get_next(edge)
|
||||||
|
if tail_id in indeg:
|
||||||
|
indeg[tail_id] -= 1
|
||||||
|
if indeg[tail_id] == 0:
|
||||||
|
queue.append(tail_id)
|
||||||
|
|
||||||
|
if len(topo_list) == len(self.node_list()):
|
||||||
|
valid = True
|
||||||
|
else:
|
||||||
|
# the graph has cycles, invalid topological sort
|
||||||
|
valid = False
|
||||||
|
|
||||||
|
return (valid, topo_list)
|
||||||
|
|
||||||
|
def forw_topo_sort(self):
|
||||||
|
"""
|
||||||
|
Topological sort.
|
||||||
|
|
||||||
|
Returns a list of nodes where the successors (based on outgoing edges)
|
||||||
|
of any given node appear in the sequence after that node.
|
||||||
|
"""
|
||||||
|
return self._topo_sort(forward=True)
|
||||||
|
|
||||||
|
def back_topo_sort(self):
|
||||||
|
"""
|
||||||
|
Reverse topological sort.
|
||||||
|
|
||||||
|
Returns a list of nodes where the successors (based on incoming edges)
|
||||||
|
of any given node appear in the sequence after that node.
|
||||||
|
"""
|
||||||
|
return self._topo_sort(forward=False)
|
||||||
|
|
||||||
|
def _bfs_subgraph(self, start_id, forward=True):
|
||||||
|
"""
|
||||||
|
Private method creates a subgraph in a bfs order.
|
||||||
|
|
||||||
|
The forward parameter specifies whether it is a forward or backward
|
||||||
|
traversal.
|
||||||
|
"""
|
||||||
|
if forward:
|
||||||
|
get_bfs = self.forw_bfs
|
||||||
|
get_nbrs = self.out_nbrs
|
||||||
|
else:
|
||||||
|
get_bfs = self.back_bfs
|
||||||
|
get_nbrs = self.inc_nbrs
|
||||||
|
|
||||||
|
g = Graph()
|
||||||
|
bfs_list = get_bfs(start_id)
|
||||||
|
for node in bfs_list:
|
||||||
|
g.add_node(node)
|
||||||
|
|
||||||
|
for node in bfs_list:
|
||||||
|
for nbr_id in get_nbrs(node):
|
||||||
|
if forward:
|
||||||
|
g.add_edge(node, nbr_id)
|
||||||
|
else:
|
||||||
|
g.add_edge(nbr_id, node)
|
||||||
|
|
||||||
|
return g
|
||||||
|
|
||||||
|
def forw_bfs_subgraph(self, start_id):
|
||||||
|
"""
|
||||||
|
Creates and returns a subgraph consisting of the breadth first
|
||||||
|
reachable nodes based on their outgoing edges.
|
||||||
|
"""
|
||||||
|
return self._bfs_subgraph(start_id, forward=True)
|
||||||
|
|
||||||
|
def back_bfs_subgraph(self, start_id):
|
||||||
|
"""
|
||||||
|
Creates and returns a subgraph consisting of the breadth first
|
||||||
|
reachable nodes based on the incoming edges.
|
||||||
|
"""
|
||||||
|
return self._bfs_subgraph(start_id, forward=False)
|
||||||
|
|
||||||
|
def iterdfs(self, start, end=None, forward=True):
|
||||||
|
"""
|
||||||
|
Collecting nodes in some depth first traversal.
|
||||||
|
|
||||||
|
The forward parameter specifies whether it is a forward or backward
|
||||||
|
traversal.
|
||||||
|
"""
|
||||||
|
visited, stack = set([start]), deque([start])
|
||||||
|
|
||||||
|
if forward:
|
||||||
|
get_edges = self.out_edges
|
||||||
|
get_next = self.tail
|
||||||
|
else:
|
||||||
|
get_edges = self.inc_edges
|
||||||
|
get_next = self.head
|
||||||
|
|
||||||
|
while stack:
|
||||||
|
curr_node = stack.pop()
|
||||||
|
yield curr_node
|
||||||
|
if curr_node == end:
|
||||||
|
break
|
||||||
|
for edge in sorted(get_edges(curr_node)):
|
||||||
|
tail = get_next(edge)
|
||||||
|
if tail not in visited:
|
||||||
|
visited.add(tail)
|
||||||
|
stack.append(tail)
|
||||||
|
|
||||||
|
def iterdata(self, start, end=None, forward=True, condition=None):
|
||||||
|
"""
|
||||||
|
Perform a depth-first walk of the graph (as ``iterdfs``)
|
||||||
|
and yield the item data of every node where condition matches. The
|
||||||
|
condition callback is only called when node_data is not None.
|
||||||
|
"""
|
||||||
|
|
||||||
|
visited, stack = set([start]), deque([start])
|
||||||
|
|
||||||
|
if forward:
|
||||||
|
get_edges = self.out_edges
|
||||||
|
get_next = self.tail
|
||||||
|
else:
|
||||||
|
get_edges = self.inc_edges
|
||||||
|
get_next = self.head
|
||||||
|
|
||||||
|
get_data = self.node_data
|
||||||
|
|
||||||
|
while stack:
|
||||||
|
curr_node = stack.pop()
|
||||||
|
curr_data = get_data(curr_node)
|
||||||
|
if curr_data is not None:
|
||||||
|
if condition is not None and not condition(curr_data):
|
||||||
|
continue
|
||||||
|
yield curr_data
|
||||||
|
if curr_node == end:
|
||||||
|
break
|
||||||
|
for edge in get_edges(curr_node):
|
||||||
|
tail = get_next(edge)
|
||||||
|
if tail not in visited:
|
||||||
|
visited.add(tail)
|
||||||
|
stack.append(tail)
|
||||||
|
|
||||||
|
def _iterbfs(self, start, end=None, forward=True):
|
||||||
|
"""
|
||||||
|
The forward parameter specifies whether it is a forward or backward
|
||||||
|
traversal. Returns a list of tuples where the first value is the hop
|
||||||
|
value the second value is the node id.
|
||||||
|
"""
|
||||||
|
queue, visited = deque([(start, 0)]), set([start])
|
||||||
|
|
||||||
|
# the direction of the bfs depends on the edges that are sampled
|
||||||
|
if forward:
|
||||||
|
get_edges = self.out_edges
|
||||||
|
get_next = self.tail
|
||||||
|
else:
|
||||||
|
get_edges = self.inc_edges
|
||||||
|
get_next = self.head
|
||||||
|
|
||||||
|
while queue:
|
||||||
|
curr_node, curr_step = queue.popleft()
|
||||||
|
yield (curr_node, curr_step)
|
||||||
|
if curr_node == end:
|
||||||
|
break
|
||||||
|
for edge in get_edges(curr_node):
|
||||||
|
tail = get_next(edge)
|
||||||
|
if tail not in visited:
|
||||||
|
visited.add(tail)
|
||||||
|
queue.append((tail, curr_step + 1))
|
||||||
|
|
||||||
|
def forw_bfs(self, start, end=None):
    """
    Return a list of nodes in some forward BFS order.

    Starting from the start node the breadth first search proceeds along
    outgoing edges.
    """
    ordering = []
    for node, _hop in self._iterbfs(start, end, forward=True):
        ordering.append(node)
    return ordering
||||||
|
|
||||||
|
def back_bfs(self, start, end=None):
    """
    Return a list of nodes in some backward BFS order.

    Starting from the start node the breadth first search proceeds along
    incoming edges.
    """
    return list(node for node, _hop in self._iterbfs(start, end, forward=False))
||||||
|
|
||||||
|
def forw_dfs(self, start, end=None):
    """
    Return a list of nodes in some forward DFS order.

    Starting with the start node the depth first search proceeds along
    outgoing edges.
    """
    return [node for node in self.iterdfs(start, end, forward=True)]
||||||
|
|
||||||
|
def back_dfs(self, start, end=None):
    """
    Return a list of nodes in some backward DFS order.

    Starting from the start node the depth first search proceeds along
    incoming edges.
    """
    return [node for node in self.iterdfs(start, end, forward=False)]
||||||
|
|
||||||
|
def connected(self):
    """
    Return True if every node of the graph can be reached from every
    other node (i.e. a forward BFS from any node visits all nodes).
    """
    nodes = self.node_list()
    total = len(nodes)
    # Strong connectivity: each node must reach all nodes via a forward BFS.
    return all(len(self.forw_bfs(candidate)) == total for candidate in nodes)
||||||
|
|
||||||
|
def clust_coef(self, node):
    """
    Compute and return the local clustering coefficient of *node*.

    The local cluster coefficient is proportion of the actual number of
    edges between neighbours of node and the maximum number of edges
    between those neighbours.

    See "Local Clustering Coefficient" on
    <http://en.wikipedia.org/wiki/Clustering_coefficient>
    for a formal definition.
    """
    neighbours = set(self.out_nbrs(node))
    neighbours.discard(node)  # loop defense

    # Count edges running between two distinct neighbours of *node*.
    link_count = 0
    for first in neighbours:
        second_level = set(self.out_nbrs(first))
        second_level.discard(first)  # loop defense
        link_count += len(neighbours & second_level)

    degree = len(neighbours)
    if not degree:
        return 0.0
    return float(link_count) / (degree * (degree - 1))
||||||
|
|
||||||
|
def get_hops(self, start, end=None, forward=True):
    """
    Compute the hop distance to all nodes centered around a node.

    First order neighbours are at hop 1, their neighbours are at hop 2 etc.
    Uses :py:meth:`forw_bfs` or :py:meth:`back_bfs` depending on the value
    of the forward parameter. If the distance between all neighbouring
    nodes is 1 the hop number corresponds to the shortest distance between
    the nodes.

    :param start: the starting node
    :param end: ending node (optional). When not specified will search the
        whole graph.
    :param forward: directionality parameter (optional).
        If C{True} (default) it uses L{forw_bfs} otherwise L{back_bfs}.
    :return: returns a list of tuples where each tuple contains the
        node and the hop.

    Typical usage::

        >>> print (graph.get_hops(1, 8))
        >>> [(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
        # node 1 is at 0 hops
        # node 2 is at 1 hop
        # ...
        # node 8 is at 5 hops
    """
    # Both branches of the original differed only in the forward flag, so
    # normalize the truthy value and make a single traversal call.
    direction = True if forward else False
    return list(self._iterbfs(start=start, end=end, forward=direction))
|
166
lib/spack/external/altgraph/GraphAlgo.py
vendored
Normal file
166
lib/spack/external/altgraph/GraphAlgo.py
vendored
Normal file
|
@ -0,0 +1,166 @@
|
||||||
|
'''
|
||||||
|
altgraph.GraphAlgo - Graph algorithms
|
||||||
|
=====================================
|
||||||
|
'''
|
||||||
|
from altgraph import GraphError
|
||||||
|
|
||||||
|
|
||||||
|
def dijkstra(graph, start, end=None):
    """
    Dijkstra's algorithm for shortest paths

    `David Eppstein, UC Irvine, 4 April 2002
    <http://www.ics.uci.edu/~eppstein/161/python/>`_

    `Python Cookbook Recipe
    <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466>`_

    Find shortest paths from the start node to all nodes nearer than or
    equal to the end node.

    Dijkstra's algorithm is only guaranteed to work correctly when all edge
    lengths are positive. This code does not verify this property for all
    edges (only the edges examined until the end vertex is reached), but will
    correctly compute shortest paths even for some graphs with negative edges,
    and will raise an exception if it discovers that a negative edge has
    caused it to make a mistake.

    Adapted to altgraph by Istvan Albert, Pennsylvania State University -
    June, 9 2004
    """
    D = {}  # dictionary of final distances
    P = {}  # dictionary of predecessors
    Q = _priorityDictionary()  # estimated distances of non-final vertices
    Q[start] = 0

    # Iterating Q yields (and removes) vertices in order of increasing
    # tentative distance, finalizing each one as it is popped.
    for v in Q:
        D[v] = Q[v]
        if v == end:
            break

        for w in graph.out_nbrs(v):
            edge_id = graph.edge_by_node(v, w)
            # NOTE(review): edge_data is used directly as the edge length,
            # so it must be numeric.
            vwLength = D[v] + graph.edge_data(edge_id)
            if w in D:
                if vwLength < D[w]:
                    # A shorter path to an already-finalized vertex implies
                    # a negative edge length somewhere upstream.
                    raise GraphError(
                        "Dijkstra: found better path to already-final vertex")
            elif w not in Q or vwLength < Q[w]:
                Q[w] = vwLength
                P[w] = v

    return (D, P)
||||||
|
|
||||||
|
|
||||||
|
def shortest_path(graph, start, end):
    """
    Find a single shortest path from the *start* node to the *end* node.
    The input has the same conventions as dijkstra(). The output is a list of
    the nodes in order along the shortest path.

    **Note that the distances must be stored in the edge data as numeric data**
    """
    _, predecessors = dijkstra(graph, start, end)

    # Walk the predecessor chain backwards from *end* to *start*, then flip
    # the collected route into forward order.
    route = [end]
    node = end
    while node != start:
        node = predecessors[node]
        route.append(node)
    route.reverse()
    return route
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# Utility classes and functions
|
||||||
|
#
|
||||||
|
class _priorityDictionary(dict):
    '''
    Priority dictionary using binary heaps (internal use only)

    David Eppstein, UC Irvine, 8 Mar 2002

    Implements a data structure that acts almost like a dictionary, with
    two modifications:

        1. D.smallest() returns the value x minimizing D[x].  For this to
           work correctly, all values D[x] stored in the dictionary must be
           comparable.

        2. iterating "for x in D" finds and removes the items from D in sorted
           order. Each item is not removed until the next item is requested,
           so D[x] will still return a useful value until the next iteration
           of the for-loop.  Each operation takes logarithmic amortized time.
    '''

    def __init__(self):
        '''
        Initialize priorityDictionary by creating binary heap of pairs
        (value,key).  Note that changing or removing a dict entry will not
        remove the old pair from the heap until it is found by smallest()
        or until the heap is rebuilt.
        '''
        # Heap of (value, key) pairs; may contain stale entries for keys
        # later overwritten or deleted from the dict.
        self.__heap = []
        dict.__init__(self)

    def smallest(self):
        '''
        Find smallest item after removing deleted items from front of heap.
        '''
        if len(self) == 0:
            raise IndexError("smallest of empty priorityDictionary")
        heap = self.__heap
        # While the heap root is stale (key removed, or value changed in
        # the dict), pop the last heap element and sift it down into the
        # vacated root position.
        while heap[0][1] not in self or self[heap[0][1]] != heap[0][0]:
            lastItem = heap.pop()
            insertionPoint = 0
            while 1:
                smallChild = 2*insertionPoint+1
                # Pick the smaller of the two children.
                if smallChild+1 < len(heap) and \
                        heap[smallChild] > heap[smallChild+1]:
                    smallChild += 1
                if smallChild >= len(heap) or lastItem <= heap[smallChild]:
                    heap[insertionPoint] = lastItem
                    break
                heap[insertionPoint] = heap[smallChild]
                insertionPoint = smallChild
        return heap[0][1]

    def __iter__(self):
        '''
        Create destructive sorted iterator of priorityDictionary.
        '''
        def iterfn():
            # Lazily remove the previous smallest only when the next item
            # is requested, as documented in the class docstring.
            while len(self) > 0:
                x = self.smallest()
                yield x
                del self[x]
        return iterfn()

    def __setitem__(self, key, val):
        '''
        Change value stored in dictionary and add corresponding pair to heap.
        Rebuilds the heap if the number of deleted items gets large, to avoid
        memory leakage.
        '''
        dict.__setitem__(self, key, val)
        heap = self.__heap
        if len(heap) > 2 * len(self):
            # Too many stale entries: rebuild the heap from live items only.
            self.__heap = [(v, k) for k, v in self.items()]
            self.__heap.sort()
        else:
            # Sift the new pair up from the bottom of the heap.
            newPair = (val, key)
            insertionPoint = len(heap)
            heap.append(None)
            while insertionPoint > 0 and newPair < heap[(insertionPoint-1)//2]:
                heap[insertionPoint] = heap[(insertionPoint-1)//2]
                insertionPoint = (insertionPoint-1)//2
            heap[insertionPoint] = newPair

    def setdefault(self, key, val):
        '''
        Reimplement setdefault to pass through our customized __setitem__.
        '''
        if key not in self:
            self[key] = val
        return self[key]
73
lib/spack/external/altgraph/GraphStat.py
vendored
Normal file
73
lib/spack/external/altgraph/GraphStat.py
vendored
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
'''
|
||||||
|
altgraph.GraphStat - Functions providing various graph statistics
|
||||||
|
=================================================================
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
def degree_dist(graph, limits=(0, 0), bin_num=10, mode='out'):
    '''
    Computes the degree distribution for a graph.

    Returns a list of tuples where the first element of the tuple is the
    center of the bin representing a range of degrees and the second element
    of the tuple are the number of nodes with the degree falling in the range.

    Example::

        ....
    '''
    # Sample either the incoming or the outgoing degree of every node.
    degree_of = graph.inc_degree if mode == 'inc' else graph.out_degree
    degrees = [degree_of(node) for node in graph]

    if not degrees:
        return []

    return _binning(values=degrees, limits=limits, bin_num=bin_num)
||||||
|
|
||||||
|
|
||||||
|
# Tiny offset used by _binning to widen auto-detected limits so the minimum
# and maximum sample values fall strictly inside the outer bins.
_EPS = 1.0/(2.0**32)
|
||||||
|
|
||||||
|
|
||||||
|
def _binning(values, limits=(0, 0), bin_num=10):
|
||||||
|
'''
|
||||||
|
Bins data that falls between certain limits, if the limits are (0, 0) the
|
||||||
|
minimum and maximum values are used.
|
||||||
|
|
||||||
|
Returns a list of tuples where the first element of the tuple is the
|
||||||
|
center of the bin and the second element of the tuple are the counts.
|
||||||
|
'''
|
||||||
|
if limits == (0, 0):
|
||||||
|
min_val, max_val = min(values) - _EPS, max(values) + _EPS
|
||||||
|
else:
|
||||||
|
min_val, max_val = limits
|
||||||
|
|
||||||
|
# get bin size
|
||||||
|
bin_size = (max_val - min_val)/float(bin_num)
|
||||||
|
bins = [0] * (bin_num)
|
||||||
|
|
||||||
|
# will ignore these outliers for now
|
||||||
|
for value in values:
|
||||||
|
try:
|
||||||
|
if (value - min_val) >= 0:
|
||||||
|
index = int((value - min_val)/float(bin_size))
|
||||||
|
bins[index] += 1
|
||||||
|
except IndexError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# make it ready for an x,y plot
|
||||||
|
result = []
|
||||||
|
center = (bin_size/2) + min_val
|
||||||
|
for i, y in enumerate(bins):
|
||||||
|
x = center + bin_size * i
|
||||||
|
result.append((x, y))
|
||||||
|
|
||||||
|
return result
|
144
lib/spack/external/altgraph/GraphUtil.py
vendored
Normal file
144
lib/spack/external/altgraph/GraphUtil.py
vendored
Normal file
|
@ -0,0 +1,144 @@
|
||||||
|
'''
|
||||||
|
altgraph.GraphUtil - Utility classes and functions
|
||||||
|
==================================================
|
||||||
|
'''
|
||||||
|
|
||||||
|
import random
|
||||||
|
from collections import deque
|
||||||
|
from altgraph import Graph
|
||||||
|
from altgraph import GraphError
|
||||||
|
|
||||||
|
|
||||||
|
def generate_random_graph(
        node_num, edge_num, self_loops=False, multi_edges=False):
    '''
    Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
    *node_num* nodes randomly connected by *edge_num* edges.
    '''
    g = Graph.Graph()

    if not multi_edges:
        # Without parallel edges there is a hard upper bound on the edge
        # count; reject impossible requests up front to avoid looping
        # forever below.
        if self_loops:
            max_edges = node_num * node_num
        else:
            max_edges = node_num * (node_num-1)

        if edge_num > max_edges:
            raise GraphError(
                "inconsistent arguments to 'generate_random_graph'")

    nodes = range(node_num)

    for node in nodes:
        g.add_node(node)

    # Keep sampling random (head, tail) pairs until enough edges exist.
    while 1:
        head = random.choice(nodes)
        tail = random.choice(nodes)

        # loop defense
        if head == tail and not self_loops:
            continue

        # multiple edge defense
        if g.edge_by_node(head, tail) is not None and not multi_edges:
            continue

        # add the edge
        g.add_edge(head, tail)
        if g.number_of_edges() >= edge_num:
            break

    return g
||||||
|
|
||||||
|
|
||||||
|
def generate_scale_free_graph(
        steps, growth_num, self_loops=False, multi_edges=False):
    '''
    Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
    will have *steps* \* *growth_num* nodes and a scale free (powerlaw)
    connectivity. Starting with a fully connected graph with *growth_num*
    nodes at every step *growth_num* nodes are added to the graph and are
    connected to existing nodes with a probability proportional to the degree
    of these existing nodes.
    '''
    # FIXME: The code doesn't seem to do what the documentation claims.
    graph = Graph.Graph()

    # initialize the graph: build a clique on the first growth_num nodes.
    # 'store' keeps one entry per edge endpoint, so sampling uniformly from
    # it picks nodes with probability proportional to their degree
    # (preferential attachment).
    store = []
    for i in range(growth_num):
        for j in range(i + 1, growth_num):
            store.append(i)
            store.append(j)
            graph.add_edge(i, j)

    # generate
    for node in range(growth_num, steps * growth_num):
        graph.add_node(node)
        while graph.out_degree(node) < growth_num:
            nbr = random.choice(store)

            # loop defense
            if node == nbr and not self_loops:
                continue

            # multi edge defense
            if graph.edge_by_node(node, nbr) and not multi_edges:
                continue

            graph.add_edge(node, nbr)

        # Record the new endpoints so future samples reflect updated degrees.
        for nbr in graph.out_nbrs(node):
            store.append(node)
            store.append(nbr)

    return graph
||||||
|
|
||||||
|
|
||||||
|
def filter_stack(graph, head, filters):
    """
    Perform a walk in a depth-first order starting
    at *head*.

    Returns (visited, removes, orphans).

    * visited: the set of visited nodes
    * removes: the list of nodes where the node
      data does not match all *filters*
    * orphans: tuples of (last_good, node),
      where node is not in removes, is directly
      reachable from a node in *removes* and
      *last_good* is the closest upstream node that is not
      in *removes*.
    """
    visited, removes, orphans = set([head]), set(), set()
    # Each stack entry is (closest surviving ancestor, node to visit).
    stack = deque([(head, head)])
    get_data = graph.node_data
    get_edges = graph.out_edges
    get_tail = graph.tail

    while stack:
        last_good, node = stack.pop()
        data = get_data(node)
        if data is not None:
            # A node is marked for removal as soon as any filter rejects
            # its data; otherwise (for-else) it becomes the new anchor for
            # descendants.
            for filtfunc in filters:
                if not filtfunc(data):
                    removes.add(node)
                    break
            else:
                last_good = node
        for edge in get_edges(node):
            tail = get_tail(edge)
            # Identity check: last_good is node exactly when this node
            # survived filtering, so the branch fires only under a removed
            # node and records (anchor, child).
            if last_good is not node:
                orphans.add((last_good, tail))
            if tail not in visited:
                visited.add(tail)
                stack.append((last_good, tail))

    # Keep only orphan records whose target was not itself removed.
    orphans = [
        (lg, tl)
        for (lg, tl) in orphans if tl not in removes]

    return visited, removes, orphans
212
lib/spack/external/altgraph/ObjectGraph.py
vendored
Normal file
212
lib/spack/external/altgraph/ObjectGraph.py
vendored
Normal file
|
@ -0,0 +1,212 @@
|
||||||
|
"""
|
||||||
|
altgraph.ObjectGraph - Graph of objects with an identifier
|
||||||
|
==========================================================
|
||||||
|
|
||||||
|
A graph of objects that have a "graphident" attribute.
|
||||||
|
graphident is the key for the object in the graph
|
||||||
|
"""
|
||||||
|
|
||||||
|
from altgraph import GraphError
|
||||||
|
from altgraph.Graph import Graph
|
||||||
|
from altgraph.GraphUtil import filter_stack
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectGraph(object):
    """
    A graph of objects that have a "graphident" attribute.
    graphident is the key for the object in the graph
    """

    def __init__(self, graph=None, debug=0):
        # The ObjectGraph instance itself acts as the root node of the
        # underlying Graph.
        if graph is None:
            graph = Graph()
        self.graphident = self
        self.graph = graph
        self.debug = debug   # verbosity threshold for msg()/msgin()/msgout()
        self.indent = 0      # current indentation level for debug messages
        graph.add_node(self, None)

    def __repr__(self):
        return '<%s>' % (type(self).__name__,)

    def flatten(self, condition=None, start=None):
        """
        Iterate over the subgraph that is entirely reachable by condition
        starting from the given start node or the ObjectGraph root
        """
        if start is None:
            start = self
        start = self.getRawIdent(start)
        return self.graph.iterdata(start=start, condition=condition)

    def nodes(self):
        """
        Yield the data object of every node that carries one.
        """
        for ident in self.graph:
            node = self.graph.node_data(ident)
            if node is not None:
                yield self.graph.node_data(ident)

    def get_edges(self, node):
        """
        Return a pair of iterators (outgoing, incoming) over the nodes
        connected to *node*, de-duplicated by identifier.
        """
        if node is None:
            node = self
        start = self.getRawIdent(node)
        _, _, outraw, incraw = self.graph.describe_node(start)

        def iter_edges(lst, n):
            # n selects which element of the edge description to follow
            # (3 for the tail of outgoing edges, 2 for the head of
            # incoming edges).
            seen = set()
            for tpl in (self.graph.describe_edge(e) for e in lst):
                ident = tpl[n]
                if ident not in seen:
                    yield self.findNode(ident)
                    seen.add(ident)
        return iter_edges(outraw, 3), iter_edges(incraw, 2)

    def edgeData(self, fromNode, toNode):
        """
        Return the edge data attached to the edge fromNode -> toNode.
        """
        if fromNode is None:
            fromNode = self
        start = self.getRawIdent(fromNode)
        stop = self.getRawIdent(toNode)
        edge = self.graph.edge_by_node(start, stop)
        return self.graph.edge_data(edge)

    def updateEdgeData(self, fromNode, toNode, edgeData):
        """
        Replace the edge data attached to the edge fromNode -> toNode.
        """
        if fromNode is None:
            fromNode = self
        start = self.getRawIdent(fromNode)
        stop = self.getRawIdent(toNode)
        edge = self.graph.edge_by_node(start, stop)
        self.graph.update_edge_data(edge, edgeData)

    def filterStack(self, filters):
        """
        Filter the ObjectGraph in-place by removing all edges to nodes that
        do not match every filter in the given filter list

        Returns a tuple containing the number of:
        (nodes_visited, nodes_removed, nodes_orphaned)
        """
        visited, removes, orphans = filter_stack(self.graph, self, filters)

        # Reconnect orphaned nodes to their closest surviving ancestor so
        # they remain reachable after the removed nodes are hidden.
        for last_good, tail in orphans:
            self.graph.add_edge(last_good, tail, edge_data='orphan')

        for node in removes:
            self.graph.hide_node(node)

        # -1 because the root (self) is counted among the visited nodes.
        return len(visited)-1, len(removes), len(orphans)

    def removeNode(self, node):
        """
        Remove the given node from the graph if it exists
        """
        ident = self.getIdent(node)
        if ident is not None:
            self.graph.hide_node(ident)

    def removeReference(self, fromnode, tonode):
        """
        Remove all edges from fromnode to tonode
        """
        if fromnode is None:
            fromnode = self
        fromident = self.getIdent(fromnode)
        toident = self.getIdent(tonode)
        if fromident is not None and toident is not None:
            # There may be parallel edges; hide them one at a time until
            # no edge between the pair remains.
            while True:
                edge = self.graph.edge_by_node(fromident, toident)
                if edge is None:
                    break
                self.graph.hide_edge(edge)

    def getIdent(self, node):
        """
        Get the graph identifier for a node
        """
        ident = self.getRawIdent(node)
        if ident is not None:
            return ident
        node = self.findNode(node)
        if node is None:
            return None
        return node.graphident

    def getRawIdent(self, node):
        """
        Get the identifier for a node object
        """
        # The root node is keyed by the ObjectGraph instance itself.
        if node is self:
            return node
        ident = getattr(node, 'graphident', None)
        return ident

    def __contains__(self, node):
        return self.findNode(node) is not None

    def findNode(self, node):
        """
        Find the node on the graph
        """
        ident = self.getRawIdent(node)
        if ident is None:
            # Assume the caller passed a raw identifier rather than a
            # node object.
            ident = node
        try:
            return self.graph.node_data(ident)
        except KeyError:
            return None

    def addNode(self, node):
        """
        Add a node to the graph referenced by the root
        """
        self.msg(4, "addNode", node)

        try:
            # Un-hide the node if it was previously removed ...
            self.graph.restore_node(node.graphident)
        except GraphError:
            # ... otherwise insert it as a fresh node.
            self.graph.add_node(node.graphident, node)

    def createReference(self, fromnode, tonode, edge_data=None):
        """
        Create a reference from fromnode to tonode
        """
        if fromnode is None:
            fromnode = self
        fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
        if fromident is None or toident is None:
            return
        self.msg(4, "createReference", fromnode, tonode, edge_data)
        self.graph.add_edge(fromident, toident, edge_data=edge_data)

    def createNode(self, cls, name, *args, **kw):
        """
        Add a node of type cls to the graph if it does not already exist
        by the given name
        """
        m = self.findNode(name)
        if m is None:
            m = cls(name, *args, **kw)
            self.addNode(m)
        return m

    def msg(self, level, s, *args):
        """
        Print a debug message with the given level
        """
        if s and level <= self.debug:
            print("%s%s %s" % (
                "    " * self.indent, s, ' '.join(map(repr, args))))

    def msgin(self, level, s, *args):
        """
        Print a debug message and indent
        """
        if level <= self.debug:
            self.msg(level, s, *args)
            self.indent = self.indent + 1

    def msgout(self, level, s, *args):
        """
        Dedent and print a debug message
        """
        if level <= self.debug:
            self.indent = self.indent - 1
            self.msg(level, s, *args)
147
lib/spack/external/altgraph/__init__.py
vendored
Normal file
147
lib/spack/external/altgraph/__init__.py
vendored
Normal file
|
@ -0,0 +1,147 @@
|
||||||
|
'''
|
||||||
|
altgraph - a python graph library
|
||||||
|
=================================
|
||||||
|
|
||||||
|
altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
|
||||||
|
to use newer Python 2.3+ features, including additional support used by the
|
||||||
|
py2app suite (modulegraph and macholib, specifically).
|
||||||
|
|
||||||
|
altgraph is a python based graph (network) representation and manipulation
|
||||||
|
package. It has started out as an extension to the
|
||||||
|
`graph_lib module
|
||||||
|
<http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
|
||||||
|
written by Nathan Denny it has been significantly optimized and expanded.
|
||||||
|
|
||||||
|
The :class:`altgraph.Graph.Graph` class is loosely modeled after the
|
||||||
|
`LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
|
||||||
|
(Library of Efficient Datatypes) representation. The library
|
||||||
|
includes methods for constructing graphs, BFS and DFS traversals,
|
||||||
|
topological sort, finding connected components, shortest paths as well as a
|
||||||
|
number of graph statistics functions. The library can also visualize graphs
|
||||||
|
via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_.
|
||||||
|
|
||||||
|
The package contains the following modules:
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.Graph` module contains the
|
||||||
|
:class:`~altgraph.Graph.Graph` class that stores the graph data
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms
|
||||||
|
operating on graphs (:py:class:`~altgraph.Graph.Graph`} instances)
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.GraphStat` module contains functions for
|
||||||
|
computing statistical measures on graphs
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.GraphUtil` module contains functions for
|
||||||
|
generating, reading and saving graphs
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.Dot` module contains functions for displaying
|
||||||
|
graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||||
|
|
||||||
|
- the :py:mod:`altgraph.ObjectGraph` module implements a graph of
|
||||||
|
objects with a unique identifier
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
Download and unpack the archive then type::
|
||||||
|
|
||||||
|
python setup.py install
|
||||||
|
|
||||||
|
This will install the library in the default location. For instructions on
|
||||||
|
how to customize the install procedure read the output of::
|
||||||
|
|
||||||
|
python setup.py --help install
|
||||||
|
|
||||||
|
To verify that the code works run the test suite::
|
||||||
|
|
||||||
|
python setup.py test
|
||||||
|
|
||||||
|
Example usage
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Lets assume that we want to analyze the graph below (links to the full picture)
|
||||||
|
GRAPH_IMG. Our script then might look the following way::
|
||||||
|
|
||||||
|
from altgraph import Graph, GraphAlgo, Dot
|
||||||
|
|
||||||
|
# these are the edges
|
||||||
|
edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
|
||||||
|
(6,14), (14,15), (6, 15), (5,7), (7, 8), (7,13), (12,8),
|
||||||
|
(8,13), (11,12), (11,9), (13,11), (9,13), (13,10) ]
|
||||||
|
|
||||||
|
# creates the graph
|
||||||
|
graph = Graph.Graph()
|
||||||
|
for head, tail in edges:
|
||||||
|
graph.add_edge(head, tail)
|
||||||
|
|
||||||
|
# do a forward bfs from 1 at most to 20
|
||||||
|
print(graph.forw_bfs(1))
|
||||||
|
|
||||||
|
This will print the nodes in some breadth first order::
|
||||||
|
|
||||||
|
[1, 2, 3, 4, 5, 7, 8, 13, 11, 10, 12, 9]
|
||||||
|
|
||||||
|
If we wanted to get the hop-distance from node 1 to node 8
|
||||||
|
we could write::
|
||||||
|
|
||||||
|
print(graph.get_hops(1, 8))
|
||||||
|
|
||||||
|
This will print the following::
|
||||||
|
|
||||||
|
[(1, 0), (2, 1), (3, 1), (4, 2), (5, 3), (7, 4), (8, 5)]
|
||||||
|
|
||||||
|
Node 1 is at 0 hops since it is the starting node, nodes 2,3 are 1 hop away ...
|
||||||
|
node 8 is 5 hops away. To find the shortest distance between two nodes you
|
||||||
|
can use::
|
||||||
|
|
||||||
|
print(GraphAlgo.shortest_path(graph, 1, 12))
|
||||||
|
|
||||||
|
It will print the nodes on one (if there are more) the shortest paths::
|
||||||
|
|
||||||
|
[1, 2, 4, 5, 7, 13, 11, 12]
|
||||||
|
|
||||||
|
To display the graph we can use the GraphViz backend::
|
||||||
|
|
||||||
|
dot = Dot.Dot(graph)
|
||||||
|
|
||||||
|
# display the graph on the monitor
|
||||||
|
dot.display()
|
||||||
|
|
||||||
|
# save it in an image file
|
||||||
|
dot.save_img(file_name='graph', file_type='gif')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
..
|
||||||
|
@author: U{Istvan Albert<http://www.personal.psu.edu/staff/i/u/iua1/>}
|
||||||
|
|
||||||
|
@license: MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2004 Istvan Albert unless otherwise noted.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to
|
||||||
|
deal in the Software without restriction, including without limitation the
|
||||||
|
rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||||
|
and/or sell copies of the Software, and to permit persons to whom the
|
||||||
|
Software is furnished to do so.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||||
|
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||||
|
IN THE SOFTWARE.
|
||||||
|
@requires: Python 2.3 or higher
|
||||||
|
|
||||||
|
@newfield contributor: Contributors:
|
||||||
|
@contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
|
||||||
|
|
||||||
|
'''
|
||||||
|
# The upstream release asks the installed distribution metadata for its
# version, but this vendored copy is not pip-installed, so
# pkg_resources.require('altgraph') can raise DistributionNotFound (or
# pkg_resources itself may be unavailable).  Fall back to the bundled
# release number in that case so importing the package never fails.
try:
    import pkg_resources
    __version__ = pkg_resources.require('altgraph')[0].version
except Exception:
    __version__ = '0.16.1'  # version of the vendored sources
|
||||||
|
|
||||||
|
|
||||||
|
class GraphError(ValueError):
    """Exception raised for invalid graph operations (e.g. referencing
    a node or edge that does not exist).  Subclasses ValueError so
    existing callers catching ValueError keep working."""
    pass
|
435
lib/spack/external/macholib/MachO.py
vendored
Normal file
435
lib/spack/external/macholib/MachO.py
vendored
Normal file
|
@ -0,0 +1,435 @@
|
||||||
|
"""
|
||||||
|
Utilities for reading and writing Mach-O headers
|
||||||
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import struct
|
||||||
|
import os
|
||||||
|
|
||||||
|
from .mach_o import MH_FILETYPE_SHORTNAMES, LC_DYSYMTAB, LC_SYMTAB
|
||||||
|
from .mach_o import load_command, S_ZEROFILL, section_64, section
|
||||||
|
from .mach_o import LC_REGISTRY, LC_ID_DYLIB, LC_SEGMENT, fat_header
|
||||||
|
from .mach_o import LC_SEGMENT_64, MH_CIGAM_64, MH_MAGIC_64, FAT_MAGIC
|
||||||
|
from .mach_o import mach_header, fat_arch64, FAT_MAGIC_64, fat_arch
|
||||||
|
from .mach_o import LC_REEXPORT_DYLIB, LC_PREBOUND_DYLIB, LC_LOAD_WEAK_DYLIB
|
||||||
|
from .mach_o import LC_LOAD_UPWARD_DYLIB, LC_LOAD_DYLIB, mach_header_64
|
||||||
|
from .mach_o import MH_CIGAM, MH_MAGIC
|
||||||
|
from .ptypes import sizeof
|
||||||
|
|
||||||
|
from macholib.util import fileview
|
||||||
|
try:
|
||||||
|
from macholib.compat import bytes
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
unicode
|
||||||
|
except NameError:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2:
|
||||||
|
range = xrange # noqa: F821
|
||||||
|
|
||||||
|
__all__ = ['MachO']
|
||||||
|
|
||||||
|
_RELOCATABLE = set((
    # relocatable commands that should be used for dependency walking
    LC_LOAD_DYLIB,
    LC_LOAD_UPWARD_DYLIB,
    LC_LOAD_WEAK_DYLIB,
    LC_PREBOUND_DYLIB,
    LC_REEXPORT_DYLIB,
))

# Human-readable name for each relocatable load-command id; used by
# MachOHeader.walkRelocatables to label the commands it yields.
_RELOCATABLE_NAMES = {
    LC_LOAD_DYLIB: 'load_dylib',
    LC_LOAD_UPWARD_DYLIB: 'load_upward_dylib',
    LC_LOAD_WEAK_DYLIB: 'load_weak_dylib',
    LC_PREBOUND_DYLIB: 'prebound_dylib',
    LC_REEXPORT_DYLIB: 'reexport_dylib',
}
|
||||||
|
|
||||||
|
|
||||||
|
def _shouldRelocateCommand(cmd):
    """Return True when load-command id *cmd* should be investigated
    for relocation (i.e. it is one of the dylib-referencing commands
    listed in ``_RELOCATABLE``)."""
    is_relocatable = cmd in _RELOCATABLE
    return is_relocatable
|
||||||
|
|
||||||
|
|
||||||
|
def lc_str_value(offset, cmd_info):
    """Fetch the actual value of a field of type "lc_str".

    *offset* is relative to the start of the load command; *cmd_info*
    is a ``(load_command, specific_command, payload)`` triple as stored
    in ``MachOHeader.commands``.  Returns the payload bytes from the
    adjusted offset with trailing/leading NUL bytes stripped.
    """
    load_struct, command_struct, payload = cmd_info
    # lc_str offsets are measured from the start of the load command,
    # but the payload excludes both fixed-size structs.
    start = offset - (sizeof(load_struct) + sizeof(command_struct))
    return payload[start:].strip(b'\x00')
|
||||||
|
|
||||||
|
|
||||||
|
class MachO(object):
    """
    Provides reading/writing the Mach-O header of a specific existing file
    """
    # filename - the original filename of this mach-o
    # sizediff - the current deviation from the initial mach-o size
    # header - the mach-o header
    # commands - a list of (load_command, somecommand, data)
    #  data is either a str, or a list of segment structures
    # total_size - the current mach-o header size (including header)
    # low_offset - essentially, the maximum mach-o header size
    # id_cmd - the index of my id command, or None

    def __init__(self, filename):
        """Open *filename* and parse its (possibly fat) Mach-O headers."""

        # supports the ObjectGraph protocol
        self.graphident = filename
        self.filename = filename
        self.loader_path = os.path.dirname(filename)

        # initialized by load
        self.fat = None
        self.headers = []
        with open(filename, 'rb') as fp:
            self.load(fp)

    def __repr__(self):
        return "<MachO filename=%r>" % (self.filename,)

    def load(self, fh):
        """Dispatch to fat or thin loading based on the magic number."""
        assert fh.tell() == 0
        # Magic is read big-endian; FAT magics are defined big-endian.
        header = struct.unpack('>I', fh.read(4))[0]
        fh.seek(0)
        if header in (FAT_MAGIC, FAT_MAGIC_64):
            self.load_fat(fh)
        else:
            # thin binary: the single header spans the whole file
            fh.seek(0, 2)
            size = fh.tell()
            fh.seek(0)
            self.load_header(fh, 0, size)

    def load_fat(self, fh):
        """Parse a fat (universal) binary: read the arch table, then
        load one MachOHeader per architecture slice."""
        self.fat = fat_header.from_fileobj(fh)
        if self.fat.magic == FAT_MAGIC:
            archs = [fat_arch.from_fileobj(fh)
                     for i in range(self.fat.nfat_arch)]
        elif self.fat.magic == FAT_MAGIC_64:
            archs = [fat_arch64.from_fileobj(fh)
                     for i in range(self.fat.nfat_arch)]
        else:
            raise ValueError("Unknown fat header magic: %r" % (self.fat.magic))

        for arch in archs:
            self.load_header(fh, arch.offset, arch.size)

    def rewriteLoadCommands(self, *args, **kw):
        """Rewrite load commands in every header; return True if any
        header changed."""
        changed = False
        for header in self.headers:
            if header.rewriteLoadCommands(*args, **kw):
                changed = True
        return changed

    def load_header(self, fh, offset, size):
        """Parse one Mach-O header found at *offset* (*size* bytes),
        choosing 32/64-bit struct and endianness from the magic."""
        fh.seek(offset)
        header = struct.unpack('>I', fh.read(4))[0]
        fh.seek(offset)
        if header == MH_MAGIC:
            magic, hdr, endian = MH_MAGIC, mach_header, '>'
        elif header == MH_CIGAM:
            # byte-swapped magic: little-endian file
            magic, hdr, endian = MH_CIGAM, mach_header, '<'
        elif header == MH_MAGIC_64:
            magic, hdr, endian = MH_MAGIC_64, mach_header_64, '>'
        elif header == MH_CIGAM_64:
            magic, hdr, endian = MH_CIGAM_64, mach_header_64, '<'
        else:
            raise ValueError("Unknown Mach-O header: 0x%08x in %r" % (
                header, fh))
        hdr = MachOHeader(self, fh, offset, size, magic, hdr, endian)
        self.headers.append(hdr)

    def write(self, f):
        """Serialize every parsed header back into file object *f*."""
        for header in self.headers:
            header.write(f)
|
||||||
|
|
||||||
|
|
||||||
|
class MachOHeader(object):
    """
    Provides reading/writing the Mach-O header of a specific existing file
    """
    # filename - the original filename of this mach-o
    # sizediff - the current deviation from the initial mach-o size
    # header - the mach-o header
    # commands - a list of (load_command, somecommand, data)
    #  data is either a str, or a list of segment structures
    # total_size - the current mach-o header size (including header)
    # low_offset - essentially, the maximum mach-o header size
    # id_cmd - the index of my id command, or None

    def __init__(self, parent, fh, offset, size, magic, hdr, endian):
        """Parse one architecture slice.

        parent - owning MachO instance
        fh     - file object positioned anywhere (load() uses a fileview)
        offset - byte offset of this slice within the file
        size   - size of this slice in bytes
        magic  - one of MH_MAGIC / MH_CIGAM / MH_MAGIC_64 / MH_CIGAM_64
        hdr    - mach_header or mach_header_64 struct class
        endian - '>' or '<' struct endianness marker
        """
        self.MH_MAGIC = magic
        self.mach_header = hdr

        # These are all initialized by self.load()
        self.parent = parent
        self.offset = offset
        self.size = size

        self.endian = endian
        self.header = None
        self.commands = None
        self.id_cmd = None
        self.sizediff = None
        self.total_size = None
        self.low_offset = None
        self.filetype = None
        self.headers = []

        self.load(fh)

    def __repr__(self):
        return "<%s filename=%r offset=%d size=%d endian=%r>" % (
            type(self).__name__, self.parent.filename, self.offset, self.size,
            self.endian)

    def load(self, fh):
        """Read the mach_header and all load commands for this slice.

        Populates self.header, self.commands, self.id_cmd,
        self.total_size and self.low_offset (the smallest file offset
        used by section data, i.e. the ceiling for header growth).
        """
        # restrict all reads/seeks to this slice
        fh = fileview(fh, self.offset, self.size)
        fh.seek(0)

        self.sizediff = 0
        kw = {'_endian_': self.endian}
        header = self.mach_header.from_fileobj(fh, **kw)
        self.header = header
        # if header.magic != self.MH_MAGIC:
        #    raise ValueError("header has magic %08x, expecting %08x" % (
        #        header.magic, self.MH_MAGIC))

        cmd = self.commands = []

        self.filetype = self.get_filetype_shortname(header.filetype)

        read_bytes = 0
        low_offset = sys.maxsize
        for i in range(header.ncmds):
            # read the load command
            cmd_load = load_command.from_fileobj(fh, **kw)

            # read the specific command
            klass = LC_REGISTRY.get(cmd_load.cmd, None)
            if klass is None:
                raise ValueError("Unknown load command: %d" % (cmd_load.cmd,))
            cmd_cmd = klass.from_fileobj(fh, **kw)

            if cmd_load.cmd == LC_ID_DYLIB:
                # remember where this command was
                if self.id_cmd is not None:
                    raise ValueError("This dylib already has an id")
                self.id_cmd = i

            if cmd_load.cmd in (LC_SEGMENT, LC_SEGMENT_64):
                # for segment commands, read the list of segments
                segs = []
                # assert that the size makes sense
                if cmd_load.cmd == LC_SEGMENT:
                    section_cls = section
                else:  # LC_SEGMENT_64
                    section_cls = section_64

                expected_size = (
                    sizeof(klass) + sizeof(load_command) +
                    (sizeof(section_cls) * cmd_cmd.nsects)
                )
                if cmd_load.cmdsize != expected_size:
                    raise ValueError("Segment size mismatch")
                # this is a zero block or something
                # so the beginning is wherever the fileoff of this command is
                if cmd_cmd.nsects == 0:
                    if cmd_cmd.filesize != 0:
                        low_offset = min(low_offset, cmd_cmd.fileoff)
                else:
                    # this one has multiple segments
                    for j in range(cmd_cmd.nsects):
                        # read the segment
                        seg = section_cls.from_fileobj(fh, **kw)
                        # if the segment has a size and is not zero filled
                        # then its beginning is the offset of this segment
                        not_zerofill = ((seg.flags & S_ZEROFILL) != S_ZEROFILL)
                        if seg.offset > 0 and seg.size > 0 and not_zerofill:
                            low_offset = min(low_offset, seg.offset)
                        if not_zerofill:
                            # slurp the section data, restoring the read
                            # position afterwards
                            c = fh.tell()
                            fh.seek(seg.offset)
                            sd = fh.read(seg.size)
                            seg.add_section_data(sd)
                            fh.seek(c)
                        segs.append(seg)
                # data is a list of segments
                cmd_data = segs

            # XXX: Disabled for now because writing back doesn't work
            # elif cmd_load.cmd == LC_CODE_SIGNATURE:
            #    c = fh.tell()
            #    fh.seek(cmd_cmd.dataoff)
            #    cmd_data = fh.read(cmd_cmd.datasize)
            #    fh.seek(c)
            # elif cmd_load.cmd == LC_SYMTAB:
            #    c = fh.tell()
            #    fh.seek(cmd_cmd.stroff)
            #    cmd_data = fh.read(cmd_cmd.strsize)
            #    fh.seek(c)

            else:
                # data is a raw str
                data_size = (
                    cmd_load.cmdsize - sizeof(klass) - sizeof(load_command)
                )
                cmd_data = fh.read(data_size)
            cmd.append((cmd_load, cmd_cmd, cmd_data))
            read_bytes += cmd_load.cmdsize

        # make sure the header made sense
        if read_bytes != header.sizeofcmds:
            raise ValueError("Read %d bytes, header reports %d bytes" % (
                read_bytes, header.sizeofcmds))
        self.total_size = sizeof(self.mach_header) + read_bytes
        self.low_offset = low_offset

    def walkRelocatables(self, shouldRelocateCommand=_shouldRelocateCommand):
        """
        for all relocatable commands
        yield (command_index, command_name, filename)
        """
        for (idx, (lc, cmd, data)) in enumerate(self.commands):
            if shouldRelocateCommand(lc.cmd):
                name = _RELOCATABLE_NAMES[lc.cmd]
                # cmd.name is an lc_str offset relative to the start of
                # the load command; adjust into the raw data blob
                ofs = cmd.name - sizeof(lc.__class__) - sizeof(cmd.__class__)
                yield idx, name, data[ofs:data.find(b'\x00', ofs)].decode(
                    sys.getfilesystemencoding())

    def rewriteInstallNameCommand(self, loadcmd):
        """Rewrite the load command of this dylib"""
        if self.id_cmd is not None:
            self.rewriteDataForCommand(self.id_cmd, loadcmd)
            return True
        return False

    def changedHeaderSizeBy(self, bytes):
        # track cumulative growth; warn (but do not fail yet) when the
        # header would overflow into section data -- synchronize_size()
        # raises on the same condition at write time
        self.sizediff += bytes
        if (self.total_size + self.sizediff) > self.low_offset:
            print(
                "WARNING: Mach-O header in %r may be too large to relocate" % (
                    self.parent.filename,))

    def rewriteLoadCommands(self, changefunc):
        """
        Rewrite the load commands based upon a change dictionary
        """
        # changefunc maps an install name to its replacement, or None
        # to leave it unchanged
        data = changefunc(self.parent.filename)
        changed = False
        if data is not None:
            if self.rewriteInstallNameCommand(
                    data.encode(sys.getfilesystemencoding())):
                changed = True
        for idx, name, filename in self.walkRelocatables():
            data = changefunc(filename)
            if data is not None:
                if self.rewriteDataForCommand(idx, data.encode(
                        sys.getfilesystemencoding())):
                    changed = True
        return changed

    def rewriteDataForCommand(self, idx, data):
        """Replace the payload of command *idx* with *data* (bytes),
        padding to 8-byte alignment and updating cmdsize/name."""
        lc, cmd, old_data = self.commands[idx]
        hdrsize = sizeof(lc.__class__) + sizeof(cmd.__class__)
        align = struct.calcsize('Q')
        # NOTE(review): when len(data) is already aligned this appends a
        # full extra block of NULs; presumably intentional so the string
        # is always NUL-terminated -- confirm against upstream macholib.
        data = data + (b'\x00' * (align - (len(data) % align)))
        newsize = hdrsize + len(data)
        self.commands[idx] = (lc, cmd, data)
        self.changedHeaderSizeBy(newsize - lc.cmdsize)
        # lc_str fields point just past the two fixed structs
        lc.cmdsize, cmd.name = newsize, hdrsize
        return True

    def synchronize_size(self):
        """Fold accumulated sizediff into the header, raising if the
        rewritten header no longer fits before the first section data."""
        if (self.total_size + self.sizediff) > self.low_offset:
            raise ValueError(
                ("New Mach-O header is too large to relocate in %r "
                 "(new size=%r, max size=%r, delta=%r)") % (
                    self.parent.filename, self.total_size + self.sizediff,
                    self.low_offset, self.sizediff))
        self.header.sizeofcmds += self.sizediff
        self.total_size = sizeof(self.mach_header) + self.header.sizeofcmds
        self.sizediff = 0

    def write(self, fileobj):
        """Serialize this header slice back to *fileobj* in place."""
        fileobj = fileview(fileobj, self.offset, self.size)
        fileobj.seek(0)

        # serialize all the mach-o commands
        self.synchronize_size()

        self.header.to_fileobj(fileobj)
        for lc, cmd, data in self.commands:
            lc.to_fileobj(fileobj)
            cmd.to_fileobj(fileobj)

            if sys.version_info[0] == 2:
                if isinstance(data, unicode):
                    fileobj.write(data.encode(sys.getfilesystemencoding()))

                elif isinstance(data, (bytes, str)):
                    fileobj.write(data)
                else:
                    # segments..
                    for obj in data:
                        obj.to_fileobj(fileobj)
            else:
                if isinstance(data, str):
                    fileobj.write(data.encode(sys.getfilesystemencoding()))

                elif isinstance(data, bytes):
                    fileobj.write(data)

                else:
                    # segments..
                    for obj in data:
                        obj.to_fileobj(fileobj)

        # zero out the unused space, doubt this is strictly necessary
        # and is generally probably already the case
        fileobj.write(b'\x00' * (self.low_offset - fileobj.tell()))

    def getSymbolTableCommand(self):
        """Return the LC_SYMTAB command struct, or None if absent."""
        for lc, cmd, data in self.commands:
            if lc.cmd == LC_SYMTAB:
                return cmd
        return None

    def getDynamicSymbolTableCommand(self):
        """Return the LC_DYSYMTAB command struct, or None if absent."""
        for lc, cmd, data in self.commands:
            if lc.cmd == LC_DYSYMTAB:
                return cmd
        return None

    def get_filetype_shortname(self, filetype):
        """Map a numeric MH_* filetype to its short name ('unknown' if
        not in MH_FILETYPE_SHORTNAMES)."""
        if filetype in MH_FILETYPE_SHORTNAMES:
            return MH_FILETYPE_SHORTNAMES[filetype]
        else:
            return 'unknown'
|
||||||
|
|
||||||
|
|
||||||
|
def main(fn):
    """Print each distinct relocatable load-command target of *fn*,
    one per line, indented with a tab as '<command>: <install name>'."""
    macho = MachO(fn)
    seen = set()
    for hdr in macho.headers:
        for _idx, cmd_name, target in hdr.walkRelocatables():
            if target in seen:
                continue
            seen.add(target)
            print('\t' + cmd_name + ": " + target)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # CLI: dump relocatable load commands for each argument,
    # defaulting to /bin/ls when no files are given.
    import sys
    files = sys.argv[1:] or ['/bin/ls']
    for fn in files:
        print(fn)
        main(fn)
|
138
lib/spack/external/macholib/MachOGraph.py
vendored
Normal file
138
lib/spack/external/macholib/MachOGraph.py
vendored
Normal file
|
@ -0,0 +1,138 @@
|
||||||
|
"""
|
||||||
|
Utilities for reading and writing Mach-O headers
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from altgraph.ObjectGraph import ObjectGraph
|
||||||
|
|
||||||
|
from macholib.dyld import dyld_find
|
||||||
|
from macholib.MachO import MachO
|
||||||
|
from macholib.itergraphreport import itergraphreport
|
||||||
|
|
||||||
|
__all__ = ['MachOGraph']
|
||||||
|
|
||||||
|
try:
|
||||||
|
unicode
|
||||||
|
except NameError:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
|
||||||
|
class MissingMachO(object):
    """Placeholder graph node for a Mach-O file that could not be
    located on disk.  Exposes the same minimal interface as MachO
    (graphident, headers) so graph walks treat it uniformly; its empty
    headers tuple means there is nothing to scan."""

    def __init__(self, filename):
        # ObjectGraph protocol: graphident identifies this node
        self.headers = ()
        self.graphident = filename

    def __repr__(self):
        cls_name = type(self).__name__
        return '<%s graphident=%r>' % (cls_name, self.graphident)
|
||||||
|
|
||||||
|
|
||||||
|
class MachOGraph(ObjectGraph):
    """
    Graph data structure of Mach-O dependencies
    """
    def __init__(self, debug=0, graph=None, env=None, executable_path=None):
        """Create an empty dependency graph.

        env             - environment dict passed through to dyld_find
        executable_path - value substituted for @executable_path
        """
        super(MachOGraph, self).__init__(debug=debug, graph=graph)
        self.env = env
        # cache of resolved names: filename -> path, or
        # (loader_filename, filename) -> path for @loader_path lookups
        self.trans_table = {}
        self.executable_path = executable_path

    def locate(self, filename, loader=None):
        """Resolve *filename* to an on-disk path via dyld_find, caching
        results in trans_table; return None when resolution fails."""
        if not isinstance(filename, (str, unicode)):
            raise TypeError("%r is not a string" % (filename,))
        if filename.startswith('@loader_path/') and loader is not None:
            # @loader_path names are resolved relative to the loading
            # binary, so the cache key must include the loader
            fn = self.trans_table.get((loader.filename, filename))
            if fn is None:
                loader_path = loader.loader_path

                try:
                    fn = dyld_find(
                        filename, env=self.env,
                        executable_path=self.executable_path,
                        loader_path=loader_path)
                    self.trans_table[(loader.filename, filename)] = fn
                except ValueError:
                    return None

        else:
            fn = self.trans_table.get(filename)
            if fn is None:
                try:
                    fn = dyld_find(
                        filename, env=self.env,
                        executable_path=self.executable_path)
                    self.trans_table[filename] = fn
                except ValueError:
                    return None
        return fn

    def findNode(self, name, loader=None):
        """Return the existing node for *name*, trying both the raw
        name and its resolved path; None when no node exists."""
        assert isinstance(name, (str, unicode))
        data = super(MachOGraph, self).findNode(name)
        if data is not None:
            return data
        newname = self.locate(name, loader=loader)
        if newname is not None and newname != name:
            return self.findNode(newname)
        return None

    def run_file(self, pathname, caller=None):
        """Add an executable at *pathname* to the graph and scan its
        dependencies; raises ValueError if the file does not exist."""
        assert isinstance(pathname, (str, unicode))
        self.msgin(2, "run_file", pathname)
        m = self.findNode(pathname, loader=caller)
        if m is None:
            if not os.path.exists(pathname):
                raise ValueError('%r does not exist' % (pathname,))
            m = self.createNode(MachO, pathname)
            self.createReference(caller, m, edge_data='run_file')
            self.scan_node(m)
        self.msgout(2, '')
        return m

    def load_file(self, name, caller=None):
        """Add a library *name* to the graph (resolving the name
        first); missing files become MissingMachO nodes rather than
        errors."""
        assert isinstance(name, (str, unicode))
        self.msgin(2, "load_file", name, caller)
        m = self.findNode(name, loader=caller)
        if m is None:
            newname = self.locate(name, loader=caller)
            if newname is not None and newname != name:
                return self.load_file(newname, caller=caller)
            if os.path.exists(name):
                m = self.createNode(MachO, name)
                self.scan_node(m)
            else:
                m = self.createNode(MissingMachO, name)
        self.msgout(2, '')
        return m

    def scan_node(self, node):
        """Walk *node*'s relocatable load commands and add an edge for
        each referenced dylib, loading it recursively."""
        self.msgin(2, 'scan_node', node)
        for header in node.headers:
            for idx, name, filename in header.walkRelocatables():
                assert isinstance(name, (str, unicode))
                assert isinstance(filename, (str, unicode))
                m = self.load_file(filename, caller=node)
                self.createReference(node, m, edge_data=name)
        self.msgout(2, '', node)

    def itergraphreport(self, name='G'):
        """Yield the lines of a GraphViz dot report for this graph."""
        nodes = map(self.graph.describe_node, self.graph.iterdfs(self))
        describe_edge = self.graph.describe_edge
        return itergraphreport(nodes, describe_edge, name=name)

    def graphreport(self, fileobj=None):
        """Write the dot report to *fileobj* (default: stdout)."""
        if fileobj is None:
            fileobj = sys.stdout
        fileobj.writelines(self.itergraphreport())
|
||||||
|
|
||||||
|
|
||||||
|
def main(args):
    """Build a MachOGraph over every path in *args* and write a
    GraphViz report to stdout."""
    graph = MachOGraph()
    for path in args:
        graph.run_file(path)
    graph.graphreport()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # CLI: graph the given binaries, defaulting to /bin/ls
    main(sys.argv[1:] or ['/bin/ls'])
|
169
lib/spack/external/macholib/MachOStandalone.py
vendored
Normal file
169
lib/spack/external/macholib/MachOStandalone.py
vendored
Normal file
|
@ -0,0 +1,169 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
from macholib.MachOGraph import MachOGraph, MissingMachO
|
||||||
|
from macholib.util import iter_platform_files, in_system_path, mergecopy, \
|
||||||
|
mergetree, flipwritable, has_filename_filter
|
||||||
|
from macholib.dyld import framework_info
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
|
||||||
|
class ExcludedMachO(MissingMachO):
    # Node class for binaries deliberately left out of the standalone
    # copy (system libraries and user-configured exclude prefixes);
    # behaves like MissingMachO: no headers, nothing to scan.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class FilteredMachOGraph(MachOGraph):
    """MachOGraph variant that defers node-class selection and path
    resolution to a delegate (the owning MachOStandalone), so system
    and excluded libraries become ExcludedMachO nodes and located
    files are copied into the standalone bundle."""

    def __init__(self, delegate, *args, **kwargs):
        super(FilteredMachOGraph, self).__init__(*args, **kwargs)
        self.delegate = delegate

    def createNode(self, cls, name):
        # let the delegate swap the node class (e.g. ExcludedMachO for
        # system paths) and post-process the created node
        cls = self.delegate.getClass(name, cls)
        res = super(FilteredMachOGraph, self).createNode(cls, name)
        return self.delegate.update_node(res)

    def locate(self, filename, loader=None):
        # BUGFIX: removed a leftover debug print() here that wrote
        # "locate <filename> ..." to stdout on every lookup.
        newname = super(FilteredMachOGraph, self).locate(filename, loader)
        if newname is None:
            return None
        # delegate may copy the file into the bundle and return the
        # new location
        return self.delegate.locate(newname, loader=loader)
|
||||||
|
|
||||||
|
|
||||||
|
class MachOStandalone(object):
    """Copies all non-system dylib/framework dependencies of the
    binaries under *base* into *dest* and rewrites their load commands
    to @executable_path-relative paths."""

    def __init__(
            self, base, dest=None, graph=None, env=None,
            executable_path=None):
        # trailing separator so startswith() checks match whole path
        # components
        self.base = os.path.join(os.path.abspath(base), '')
        if dest is None:
            dest = os.path.join(self.base, 'Contents', 'Frameworks')
        self.dest = dest
        self.mm = FilteredMachOGraph(
            self, graph=graph, env=env, executable_path=executable_path)
        # original path -> copied path inside the bundle
        self.changemap = {}
        # path prefixes to leave untouched (user-configurable)
        self.excludes = []
        # (framework binary path, iterator of its platform files) queue
        self.pending = deque()

    def update_node(self, m):
        """Hook for subclasses to post-process newly created nodes."""
        return m

    def getClass(self, name, cls):
        """Return ExcludedMachO for system/excluded paths, else *cls*."""
        if in_system_path(name):
            return ExcludedMachO
        for base in self.excludes:
            if name.startswith(base):
                return ExcludedMachO
        return cls

    def locate(self, filename, loader=None):
        """Return the in-bundle path for *filename*, copying the dylib
        or framework into the bundle on first sight.  System, excluded
        and already-in-bundle paths are returned unchanged."""
        if in_system_path(filename):
            return filename
        if filename.startswith(self.base):
            return filename
        for base in self.excludes:
            if filename.startswith(base):
                return filename
        if filename in self.changemap:
            return self.changemap[filename]
        info = framework_info(filename)
        if info is None:
            res = self.copy_dylib(filename)
            self.changemap[filename] = res
            return res
        else:
            res = self.copy_framework(info)
            self.changemap[filename] = res
            return res

    def copy_dylib(self, filename):
        """Copy a single dylib into self.dest and return its new path."""
        # When the filename is a symlink use the basename of the target of
        # the link as the name in standalone bundle. This avoids problems
        # when two libraries link to the same dylib but using different
        # symlinks.
        if os.path.islink(filename):
            dest = os.path.join(
                self.dest, os.path.basename(os.path.realpath(filename)))
        else:
            dest = os.path.join(self.dest, os.path.basename(filename))

        if not os.path.exists(dest):
            self.mergecopy(filename, dest)
        return dest

    def mergecopy(self, src, dest):
        # overridable single-file copy hook
        return mergecopy(src, dest)

    def mergetree(self, src, dest):
        # overridable directory-tree copy hook
        return mergetree(src, dest)

    def copy_framework(self, info):
        """Copy a whole .framework bundle into self.dest; queue its
        platform files for later scanning.  Returns the path of the
        framework's main binary inside the bundle."""
        dest = os.path.join(self.dest, info['shortname'] + '.framework')
        destfn = os.path.join(self.dest, info['name'])
        src = os.path.join(info['location'], info['shortname'] + '.framework')
        if not os.path.exists(dest):
            self.mergetree(src, dest)
            self.pending.append((destfn, iter_platform_files(dest)))
        return destfn

    def run(self, platfiles=None, contents=None):
        """Scan, copy and rewrite everything.  Returns the set of
        resolved in-bundle file paths that were processed."""
        mm = self.mm
        if contents is None:
            contents = '@executable_path/..'
        if platfiles is None:
            platfiles = iter_platform_files(self.base)

        for fn in platfiles:
            mm.run_file(fn)

        # copied frameworks enqueue their own binaries; drain the queue
        while self.pending:
            fmwk, files = self.pending.popleft()
            ref = mm.findNode(fmwk)
            for fn in files:
                mm.run_file(fn, caller=ref)

        changemap = {}
        skipcontents = os.path.join(os.path.dirname(self.dest), '')
        machfiles = []

        for node in mm.flatten(has_filename_filter):
            machfiles.append(node)
            dest = os.path.join(
                contents, os.path.normpath(node.filename[len(skipcontents):]))
            changemap[node.filename] = dest

        def changefunc(path):
            # NOTE(review): this closure reads the loop variable `node`
            # late-bound from the `for node in machfiles` loop below, so
            # @loader_path fallback lookups use whichever node is
            # currently being rewritten -- confirm against upstream.
            if path.startswith('@loader_path/'):
                # XXX: This is a quick hack for py2app: In that
                # usecase paths like this are found in the load
                # commands of relocatable wheels. Those don't
                # need rewriting.
                return path

            res = mm.locate(path)
            rv = changemap.get(res)
            if rv is None and path.startswith('@loader_path/'):
                rv = changemap.get(mm.locate(mm.trans_table.get(
                    (node.filename, path))))
            return rv

        for node in machfiles:
            fn = mm.locate(node.filename)
            if fn is None:
                continue
            rewroteAny = False
            # NOTE(review): rewriteLoadCommands already iterates all of
            # node's headers internally, so this per-header loop repeats
            # the full rewrite once per header -- idempotent but
            # redundant; present in upstream macholib as well.
            for header in node.headers:
                if node.rewriteLoadCommands(changefunc):
                    rewroteAny = True
            if rewroteAny:
                # temporarily make the file writable while patching
                old_mode = flipwritable(fn)
                try:
                    with open(fn, 'rb+') as f:
                        for header in node.headers:
                            f.seek(0)
                            node.write(f)
                        f.seek(0, 2)
                        f.flush()
                finally:
                    flipwritable(fn, old_mode)

        allfiles = [mm.locate(node.filename) for node in machfiles]
        return set(filter(None, allfiles))
|
86
lib/spack/external/macholib/SymbolTable.py
vendored
Normal file
86
lib/spack/external/macholib/SymbolTable.py
vendored
Normal file
|
@ -0,0 +1,86 @@
|
||||||
|
"""
|
||||||
|
Class to read the symbol table from a Mach-O header
|
||||||
|
"""
|
||||||
|
from __future__ import with_statement
|
||||||
|
|
||||||
|
from macholib.mach_o import relocation_info, dylib_reference, dylib_module
|
||||||
|
from macholib.mach_o import dylib_table_of_contents, nlist, nlist_64
|
||||||
|
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
|
||||||
|
import sys
|
||||||
|
|
||||||
|
__all__ = ['SymbolTable']
|
||||||
|
|
||||||
|
if sys.version_info[0] == 2:
|
||||||
|
range = xrange # noqa: F821
|
||||||
|
|
||||||
|
|
||||||
|
class SymbolTable(object):
    """Reads the symbol table (LC_SYMTAB) and dynamic symbol table
    (LC_DYSYMTAB) out of a parsed MachO object."""

    def __init__(self, macho, header=None, openfile=None):
        """macho    - a MachO instance
        header   - which architecture slice to read (default: first)
        openfile - open() replacement, mainly for testing
        """
        if openfile is None:
            openfile = open
        if header is None:
            header = macho.headers[0]
        self.macho_header = header
        with openfile(macho.filename, 'rb') as fh:
            self.symtab = header.getSymbolTableCommand()
            self.dysymtab = header.getDynamicSymbolTableCommand()

            if self.symtab is not None:
                self.nlists = self.readSymbolTable(fh)

            if self.dysymtab is not None:
                self.readDynamicSymbolTable(fh)

    def readSymbolTable(self, fh):
        """Return a list of (nlist, name) pairs for every symbol.

        Names are slices of the string table (bytes); symbols with a
        zero string-table index get '' instead.
        """
        cmd = self.symtab
        fh.seek(self.macho_header.offset + cmd.stroff)
        strtab = fh.read(cmd.strsize)
        fh.seek(self.macho_header.offset + cmd.symoff)
        nlists = []

        # 64-bit slices use the wider nlist_64 entries
        if self.macho_header.MH_MAGIC in [MH_MAGIC_64, MH_CIGAM_64]:
            cls = nlist_64
        else:
            cls = nlist

        # NOTE: `cmd` is deliberately rebound inside the loop; the
        # range(cmd.nsyms) bound was evaluated once above, so this is
        # safe, if confusing.
        for i in range(cmd.nsyms):
            cmd = cls.from_fileobj(fh, _endian_=self.macho_header.endian)
            if cmd.n_un == 0:
                nlists.append((cmd, ''))
            else:
                nlists.append(
                    (cmd, strtab[cmd.n_un:strtab.find(b'\x00', cmd.n_un)]))
        return nlists

    def readDynamicSymbolTable(self, fh):
        """Slice self.nlists into local/external/undefined groups and
        read the table of contents, per the LC_DYSYMTAB command."""
        cmd = self.dysymtab
        nlists = self.nlists

        self.localsyms = nlists[cmd.ilocalsym:cmd.ilocalsym+cmd.nlocalsym]
        self.extdefsyms = nlists[cmd.iextdefsym:cmd.iextdefsym+cmd.nextdefsym]
        self.undefsyms = nlists[cmd.iundefsym:cmd.iundefsym+cmd.nundefsym]
        if cmd.tocoff == 0:
            self.toc = None
        else:
            self.toc = self.readtoc(fh, cmd.tocoff, cmd.ntoc)

    def readtoc(self, fh, off, n):
        """Read *n* table-of-contents entries at slice offset *off*."""
        fh.seek(self.macho_header.offset + off)
        return [dylib_table_of_contents.from_fileobj(fh) for i in range(n)]

    def readmodtab(self, fh, off, n):
        """Read *n* module-table entries at slice offset *off*."""
        fh.seek(self.macho_header.offset + off)
        return [dylib_module.from_fileobj(fh) for i in range(n)]

    def readsym(self, fh, off, n):
        """Read *n* dylib_reference entries, returning
        ((nlist, name), flags) pairs; isym/flags are packed into one
        isym_flags field (flags in the low byte)."""
        fh.seek(self.macho_header.offset + off)
        refs = []
        for i in range(n):
            ref = dylib_reference.from_fileobj(fh)
            isym, flags = divmod(ref.isym_flags, 256)
            refs.append((self.nlists[isym], flags))
        return refs

    def readrel(self, fh, off, n):
        """Read *n* relocation_info entries at slice offset *off*."""
        fh.seek(self.macho_header.offset + off)
        return [relocation_info.from_fileobj(fh) for i in range(n)]
|
8
lib/spack/external/macholib/__init__.py
vendored
Normal file
8
lib/spack/external/macholib/__init__.py
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
"""
|
||||||
|
Enough Mach-O to make your head spin.
|
||||||
|
|
||||||
|
See the relevant header files in /usr/include/mach-o
|
||||||
|
|
||||||
|
And also Apple's documentation.
|
||||||
|
"""
|
||||||
|
__version__ = '1.10'
|
83
lib/spack/external/macholib/__main__.py
vendored
Normal file
83
lib/spack/external/macholib/__main__.py
vendored
Normal file
|
@ -0,0 +1,83 @@
|
||||||
|
from __future__ import print_function, absolute_import
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from macholib.util import is_platform_file
|
||||||
|
from macholib import macho_dump
|
||||||
|
from macholib import macho_standalone
|
||||||
|
|
||||||
|
gCommand = None  # sub-command name set by main(); used in error messages
|
||||||
|
|
||||||
|
|
||||||
|
def check_file(fp, path, callback):
    """Invoke *callback* on *path* if it is a Mach-O platform file.

    Returns 1 (with a message on stderr) when the path is missing or
    unreadable, 0 otherwise; non-platform files are silently skipped.
    """
    if not os.path.exists(path):
        message = '%s: %s: No such file or directory' % (gCommand, path)
        print(message, file=sys.stderr)
        return 1

    try:
        platform_file = is_platform_file(path)
    except IOError as exc:
        print('%s: %s: %s' % (gCommand, path, exc), file=sys.stderr)
        return 1

    if platform_file:
        callback(fp, path)
    return 0
|
||||||
|
|
||||||
|
|
||||||
|
def walk_tree(callback, paths):
    """Apply check_file to every file reachable from the given *paths*.

    Directories are walked recursively; the return value is the OR of all
    per-file statuses (non-zero when any file failed).
    """
    status = 0

    for entry in paths:
        if not os.path.isdir(entry):
            status |= check_file(sys.stdout, entry, callback)
            continue
        for dirpath, _dirnames, filenames in os.walk(entry):
            for filename in filenames:
                status |= check_file(
                    sys.stdout, os.path.join(dirpath, filename), callback)

    return status
|
||||||
|
|
||||||
|
|
||||||
|
def print_usage(fp):
    """Write the ``python -mmacholib`` usage summary to the stream *fp*."""
    for line in (
        "Usage:",
        " python -mmacholib [help|--help]",
        " python -mmacholib dump FILE ...",
        " python -mmacholib find DIR ...",
        " python -mmacholib standalone DIR ...",
    ):
        print(line, file=fp)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Dispatch ``python -mmacholib <command> <path>...``.

    Exits with status 1 on bad usage or unknown command; 'help'/'--help'
    print usage and exit 0.
    """
    global gCommand
    if len(sys.argv) < 3:
        print_usage(sys.stderr)
        sys.exit(1)

    gCommand = sys.argv[1]
    targets = sys.argv[2:]

    if gCommand == 'dump':
        walk_tree(macho_dump.print_file, targets)
    elif gCommand == 'find':
        walk_tree(lambda fp, path: print(path, file=fp), targets)
    elif gCommand == 'standalone':
        for directory in targets:
            macho_standalone.standaloneApp(directory)
    elif gCommand in ('help', '--help'):
        print_usage(sys.stdout)
        sys.exit(0)
    else:
        print_usage(sys.stderr)
        sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
48
lib/spack/external/macholib/_cmdline.py
vendored
Normal file
48
lib/spack/external/macholib/_cmdline.py
vendored
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
"""
|
||||||
|
Internal helpers for basic commandline tools
|
||||||
|
"""
|
||||||
|
from __future__ import print_function, absolute_import
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from macholib.util import is_platform_file
|
||||||
|
|
||||||
|
|
||||||
|
def check_file(fp, path, callback):
    """Invoke *callback* on *path* if it is a Mach-O platform file.

    Returns 1 (with a message on stderr) when the path is missing or
    unreadable, 0 otherwise.
    """
    if not os.path.exists(path):
        print('%s: %s: No such file or directory' % (
            sys.argv[0], path), file=sys.stderr)
        return 1

    try:
        platform_file = is_platform_file(path)
    except IOError as exc:
        print('%s: %s: %s' % (sys.argv[0], path, exc), file=sys.stderr)
        return 1

    if platform_file:
        callback(fp, path)
    return 0
|
||||||
|
|
||||||
|
|
||||||
|
def main(callback):
    """Run *callback* over every file named on the command line.

    Directory arguments are walked recursively.  Returns the OR of all
    per-file statuses; prints usage and returns 1 when no arguments.
    """
    arguments = sys.argv[1:]
    progname = os.path.basename(sys.argv[0])

    if not arguments:
        print("Usage: %s filename..." % (progname,), file=sys.stderr)
        return 1

    status = 0
    for entry in arguments:
        if os.path.isdir(entry):
            for dirpath, _dirnames, filenames in os.walk(entry):
                for filename in filenames:
                    status |= check_file(
                        sys.stdout, os.path.join(dirpath, filename), callback)
        else:
            status |= check_file(sys.stdout, entry, callback)

    return status
|
190
lib/spack/external/macholib/dyld.py
vendored
Normal file
190
lib/spack/external/macholib/dyld.py
vendored
Normal file
|
@ -0,0 +1,190 @@
|
||||||
|
"""
|
||||||
|
dyld emulation
|
||||||
|
"""
|
||||||
|
|
||||||
|
from itertools import chain
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from macholib.framework import framework_info
|
||||||
|
from macholib.dylib import dylib_info
|
||||||
|
|
||||||
|
__all__ = [
    'dyld_find', 'framework_find',
    'framework_info', 'dylib_info',
]

# These are the defaults as per man dyld(1)
#
# Framework directories searched when no environment override matched
# (consumed by dyld_default_search below).
_DEFAULT_FRAMEWORK_FALLBACK = [
    os.path.expanduser("~/Library/Frameworks"),
    "/Library/Frameworks",
    "/Network/Library/Frameworks",
    "/System/Library/Frameworks",
]

# Plain-dylib fallback directories (same role as above).
_DEFAULT_LIBRARY_FALLBACK = [
    os.path.expanduser("~/lib"),
    "/usr/local/lib",
    "/lib",
    "/usr/lib",
]
|
||||||
|
|
||||||
|
# XXX: Is this function still needed?
|
||||||
|
if sys.version_info[0] == 2:
|
||||||
|
def _ensure_utf8(s):
|
||||||
|
if isinstance(s, unicode): # noqa: F821
|
||||||
|
return s.encode('utf8')
|
||||||
|
return s
|
||||||
|
else:
|
||||||
|
def _ensure_utf8(s):
|
||||||
|
if s is not None and not isinstance(s, str):
|
||||||
|
raise ValueError(s)
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def _dyld_env(env, var):
    """Return the colon-split value of *var* from *env* (default os.environ).

    Unset or empty variables yield an empty list.
    """
    if env is None:
        env = os.environ
    value = env.get(var)
    return value.split(':') if value else []


def dyld_image_suffix(env=None):
    """Return DYLD_IMAGE_SUFFIX from *env*, or None when unset."""
    return (os.environ if env is None else env).get('DYLD_IMAGE_SUFFIX')


def dyld_framework_path(env=None):
    """Colon-split DYLD_FRAMEWORK_PATH."""
    return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')


def dyld_library_path(env=None):
    """Colon-split DYLD_LIBRARY_PATH."""
    return _dyld_env(env, 'DYLD_LIBRARY_PATH')


def dyld_fallback_framework_path(env=None):
    """Colon-split DYLD_FALLBACK_FRAMEWORK_PATH."""
    return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')


def dyld_fallback_library_path(env=None):
    """Colon-split DYLD_FALLBACK_LIBRARY_PATH."""
    return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')


def dyld_image_suffix_search(iterator, env=None):
    """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
    suffix = dyld_image_suffix(env)
    if suffix is None:
        return iterator

    def _with_suffix():
        # For each candidate, try the suffixed variant first, then the
        # original path (".dylib" keeps its extension after the suffix).
        for candidate in iterator:
            if candidate.endswith('.dylib'):
                yield candidate[:-len('.dylib')] + suffix + '.dylib'
            else:
                yield candidate + suffix
            yield candidate

    return _with_suffix()
|
||||||
|
|
||||||
|
|
||||||
|
def dyld_override_search(name, env=None):
    """Yield candidates from DYLD_FRAMEWORK_PATH / DYLD_LIBRARY_PATH.

    If *name* parses as a framework, every DYLD_FRAMEWORK_PATH entry is
    tried first; then each DYLD_LIBRARY_PATH entry is tried with the
    basename of *name*.
    """
    framework = framework_info(name)

    if framework is not None:
        for directory in dyld_framework_path(env):
            yield os.path.join(directory, framework['name'])

    # If DYLD_LIBRARY_PATH is set then use the first file that exists
    # in the path. If none use the original name.
    basename = os.path.basename(name)
    for directory in dyld_library_path(env):
        yield os.path.join(directory, basename)
|
||||||
|
|
||||||
|
|
||||||
|
def dyld_executable_path_search(name, executable_path=None):
    """Expand an '@executable_path/' prefix against *executable_path*.

    Yields nothing when the prefix is absent or no path was supplied.
    """
    prefix = '@executable_path/'
    if executable_path is not None and name.startswith(prefix):
        yield os.path.join(executable_path, name[len(prefix):])
|
||||||
|
|
||||||
|
|
||||||
|
def dyld_loader_search(name, loader_path=None):
    """Expand a '@loader_path/' prefix against *loader_path*.

    Yields nothing when the prefix is absent or no path was supplied.
    """
    prefix = '@loader_path/'
    if loader_path is not None and name.startswith(prefix):
        yield os.path.join(loader_path, name[len(prefix):])
|
||||||
|
|
||||||
|
|
||||||
|
def dyld_default_search(name, env=None):
    """Yield *name* itself, then the dyld fallback locations.

    Framework names are tried against DYLD_FALLBACK_FRAMEWORK_PATH (or
    the built-in framework fallbacks when unset); the basename is tried
    against DYLD_FALLBACK_LIBRARY_PATH (or the built-in library
    fallbacks).
    """
    yield name

    framework = framework_info(name)
    if framework is not None:
        directories = (dyld_fallback_framework_path(env)
                       or _DEFAULT_FRAMEWORK_FALLBACK)
        for directory in directories:
            yield os.path.join(directory, framework['name'])

    basename = os.path.basename(name)
    directories = (dyld_fallback_library_path(env)
                   or _DEFAULT_LIBRARY_FALLBACK)
    for directory in directories:
        yield os.path.join(directory, basename)
|
||||||
|
|
||||||
|
|
||||||
|
def dyld_find(name, executable_path=None, env=None, loader_path=None):
    """
    Find a library or framework using dyld semantics
    """
    name = _ensure_utf8(name)
    executable_path = _ensure_utf8(executable_path)
    # Search order mirrors dyld(1): overrides, @executable_path,
    # @loader_path, then the default/fallback locations, each expanded
    # for DYLD_IMAGE_SUFFIX.
    candidates = chain(
        dyld_override_search(name, env),
        dyld_executable_path_search(name, executable_path),
        dyld_loader_search(name, loader_path),
        dyld_default_search(name, env),
    )
    for candidate in dyld_image_suffix_search(candidates, env):
        if os.path.isfile(candidate):
            return candidate
    raise ValueError("dylib %s could not be found" % (name,))
|
||||||
|
|
||||||
|
|
||||||
|
def framework_find(fn, executable_path=None, env=None):
    """
    Find a framework using dyld semantics in a very loose manner.

    Will take input such as:
        Python
        Python.framework
        Python.framework/Versions/Current
    """
    try:
        return dyld_find(fn, executable_path=executable_path, env=env)
    except ValueError:
        # Retry below with a synthesized framework-relative path.
        pass
    stem_end = fn.rfind('.framework')
    if stem_end == -1:
        stem_end = len(fn)
        fn += '.framework'
    fn = os.path.join(fn, os.path.basename(fn[:stem_end]))
    return dyld_find(fn, executable_path=executable_path, env=env)
|
43
lib/spack/external/macholib/dylib.py
vendored
Normal file
43
lib/spack/external/macholib/dylib.py
vendored
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
"""
|
||||||
|
Generic dylib path manipulation
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
__all__ = ['dylib_info']

# Location/Name(.Version)?(_Suffix)?.dylib -- decomposed by dylib_info().
_DYLIB_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>\w+?)
    (?:\.(?P<version>[^._]+))?
    (?:_(?P<suffix>[^._]+))?
    \.dylib$
)
""")


def dylib_info(filename):
    """
    A dylib name can take one of the following four forms:
        Location/Name.SomeVersion_Suffix.dylib
        Location/Name.SomeVersion.dylib
        Location/Name_Suffix.dylib
        Location/Name.dylib

    returns None if not found or a mapping equivalent to:
        dict(
            location='Location',
            name='Name.SomeVersion_Suffix.dylib',
            shortname='Name',
            version='SomeVersion',
            suffix='Suffix',
        )

    Note that SomeVersion and Suffix are optional and may be None
    if not present.
    """
    match = _DYLIB_RE.match(filename)
    return match.groupdict() if match else None
|
43
lib/spack/external/macholib/framework.py
vendored
Normal file
43
lib/spack/external/macholib/framework.py
vendored
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
"""
|
||||||
|
Generic framework path manipulation
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
__all__ = ['framework_info']

# Location/Name.framework(/Versions/Version)?/Name(_Suffix)? --
# decomposed by framework_info().
_STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>[-_A-Za-z0-9]+).framework/
    (?:Versions/(?P<version>[^/]+)/)?
    (?P=shortname)
    (?:_(?P<suffix>[^_]+))?
)$
""")


def framework_info(filename):
    """
    A framework name can take one of the following four forms:
        Location/Name.framework/Versions/SomeVersion/Name_Suffix
        Location/Name.framework/Versions/SomeVersion/Name
        Location/Name.framework/Name_Suffix
        Location/Name.framework/Name

    returns None if not found, or a mapping equivalent to:
        dict(
            location='Location',
            name='Name.framework/Versions/SomeVersion/Name_Suffix',
            shortname='Name',
            version='SomeVersion',
            suffix='Suffix',
        )

    Note that SomeVersion and Suffix are optional and may be None
    if not present
    """
    match = _STRICT_FRAMEWORK_RE.match(filename)
    return match.groupdict() if match else None
|
73
lib/spack/external/macholib/itergraphreport.py
vendored
Normal file
73
lib/spack/external/macholib/itergraphreport.py
vendored
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
"""
|
||||||
|
Utilities for creating dot output from a MachOGraph
|
||||||
|
|
||||||
|
XXX: need to rewrite this based on altgraph.Dot
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections import deque

try:
    from itertools import imap
except ImportError:
    # Python 3: the built-in map is already lazy.
    imap = map

__all__ = ['itergraphreport']


def itergraphreport(nodes, describe_edge, name='G'):
    """Yield the lines of a Graphviz ``digraph`` describing a MachOGraph.

    *nodes* is an iterable of ``(node, data, outgoing, incoming)`` tuples;
    it is iterated twice, so it must not be a one-shot iterator.
    *describe_edge* maps an outgoing reference to an
    ``(edge, data, head, tail)`` tuple.  Only edges whose data is
    'run_file' or 'load_dylib' are emitted.
    """
    edges = deque()
    nodetoident = {}

    def nodevisitor(node, data, outgoing, incoming):
        return {'label': str(node)}

    def edgevisitor(edge, data, head, tail):
        return {}

    yield 'digraph %s {\n' % (name,)
    attr = dict(rankdir='LR', concentrate='true')
    cpatt = '%s="%s"'
    # BUG FIX: the original used dict.iteritems(), which does not exist on
    # Python 3 (despite the imap fallback above); items() works on both.
    for item in attr.items():
        yield '\t%s;\n' % (cpatt % item,)

    # find all packages (subgraphs)
    for (node, data, outgoing, incoming) in nodes:
        nodetoident[node] = getattr(data, 'identifier', node)

    # create sets for subgraph, write out descriptions
    for (node, data, outgoing, incoming) in nodes:
        # update edges
        for edge in imap(describe_edge, outgoing):
            edges.append(edge)

        # describe node
        yield '\t"%s" [%s];\n' % (
            node,
            ','.join([
                (cpatt % item) for item in
                nodevisitor(node, data, outgoing, incoming).items()
            ]),
        )

    graph = []

    while edges:
        edge, data, head, tail = edges.popleft()
        if data in ('run_file', 'load_dylib'):
            graph.append((edge, data, head, tail))

    def do_graph(edges, tabs):
        edgestr = tabs + '"%s" -> "%s" [%s];\n'
        # describe edge
        for (edge, data, head, tail) in edges:
            attribs = edgevisitor(edge, data, head, tail)
            yield edgestr % (
                head,
                tail,
                ','.join([(cpatt % item) for item in attribs.items()]),
            )

    for s in do_graph(graph, '\t'):
        yield s

    yield '}\n'
|
1665
lib/spack/external/macholib/mach_o.py
vendored
Normal file
1665
lib/spack/external/macholib/mach_o.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
58
lib/spack/external/macholib/macho_dump.py
vendored
Normal file
58
lib/spack/external/macholib/macho_dump.py
vendored
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from macholib._cmdline import main as _main
|
||||||
|
from macholib.MachO import MachO
|
||||||
|
from macholib.mach_o import get_cpu_subtype, CPU_TYPE_NAMES
|
||||||
|
from macholib.mach_o import MH_CIGAM_64, MH_MAGIC_64
|
||||||
|
|
||||||
|
# Map (struct endianness marker, word size) to an architecture name.
# NOTE(review): not referenced by the code visible in this file --
# presumably kept for external consumers; confirm before removing.
ARCH_MAP = {
    ('<', '64-bit'): 'x86_64',
    ('<', '32-bit'): 'i386',
    ('>', '64-bit'): 'ppc64',
    ('>', '32-bit'): 'ppc',
}
|
||||||
|
|
||||||
|
|
||||||
|
def print_file(fp, path):
    """Dump the headers and linked names of the Mach-O file at *path*.

    Writes one summary line per header plus each distinct relocatable
    name, followed by a blank line, to the stream *fp*.
    """
    print(path, file=fp)
    macho = MachO(path)
    for header in macho.headers:
        seen = set()

        is_64bit = header.MH_MAGIC in (MH_MAGIC_64, MH_CIGAM_64)
        sz = '64-bit' if is_64bit else '32-bit'

        arch = CPU_TYPE_NAMES.get(
            header.header.cputype, header.header.cputype)
        subarch = get_cpu_subtype(
            header.header.cputype, header.header.cpusubtype)

        print(' [%s endian=%r size=%r arch=%r subarch=%r]' % (
            header.__class__.__name__, header.endian, sz, arch, subarch),
            file=fp)
        for idx, name, other in header.walkRelocatables():
            if other not in seen:
                seen.add(other)
                print('\t' + other, file=fp)
    print('', file=fp)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Deprecated CLI entry point; delegates to macholib._cmdline."""
    print(
        "WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' "
        "instead")
    _main(print_file)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    try:
        # Propagate main()'s return value as the exit status.
        sys.exit(main())
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C.
        pass
|
21
lib/spack/external/macholib/macho_find.py
vendored
Normal file
21
lib/spack/external/macholib/macho_find.py
vendored
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
from __future__ import print_function
|
||||||
|
from macholib._cmdline import main as _main
|
||||||
|
|
||||||
|
|
||||||
|
def print_file(fp, path):
    """Write *path*, newline-terminated, to the stream *fp*."""
    print(path, file=fp)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Deprecated CLI entry point; delegates to macholib._cmdline."""
    print(
        "WARNING: 'macho_find' is deprecated, "
        "use 'python -mmacholib dump' instead")
    _main(print_file)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C.
        pass
|
31
lib/spack/external/macholib/macho_standalone.py
vendored
Normal file
31
lib/spack/external/macholib/macho_standalone.py
vendored
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from macholib.MachOStandalone import MachOStandalone
|
||||||
|
from macholib.util import strip_files
|
||||||
|
|
||||||
|
|
||||||
|
def standaloneApp(path):
    """Make the .app bundle at *path* standalone, then strip its files.

    Exits the process with status 1 when *path* is not a directory that
    contains a ``Contents`` subdirectory.
    """
    looks_like_bundle = (
        os.path.isdir(path)
        and os.path.exists(os.path.join(path, 'Contents')))
    if not looks_like_bundle:
        print(
            '%s: %s does not look like an app bundle' % (sys.argv[0], path))
        sys.exit(1)
    strip_files(MachOStandalone(path).run())
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Deprecated CLI entry point; runs standaloneApp on each argument."""
    print(
        "WARNING: 'macho_standalone' is deprecated, use "
        "'python -mmacholib standalone' instead")
    bundles = sys.argv[1:]
    if not bundles:
        raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
    for bundle in bundles:
        standaloneApp(bundle)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Direct script invocation (deprecated in favour of -mmacholib).
    main()
|
331
lib/spack/external/macholib/ptypes.py
vendored
Normal file
331
lib/spack/external/macholib/ptypes.py
vendored
Normal file
|
@ -0,0 +1,331 @@
|
||||||
|
"""
|
||||||
|
This module defines packable types, that is types than can be easily
|
||||||
|
converted to a binary format as used in MachO headers.
|
||||||
|
"""
|
||||||
|
import struct
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
    # Python 2: lazy iterator variants.
    from itertools import izip, imap
except ImportError:
    # Python 3: zip/map are already lazy.
    izip, imap = zip, map
from itertools import chain, starmap
|
||||||
|
|
||||||
|
__all__ = """
|
||||||
|
sizeof
|
||||||
|
BasePackable
|
||||||
|
Structure
|
||||||
|
pypackable
|
||||||
|
p_char
|
||||||
|
p_byte
|
||||||
|
p_ubyte
|
||||||
|
p_short
|
||||||
|
p_ushort
|
||||||
|
p_int
|
||||||
|
p_uint
|
||||||
|
p_long
|
||||||
|
p_ulong
|
||||||
|
p_longlong
|
||||||
|
p_ulonglong
|
||||||
|
p_int8
|
||||||
|
p_uint8
|
||||||
|
p_int16
|
||||||
|
p_uint16
|
||||||
|
p_int32
|
||||||
|
p_uint32
|
||||||
|
p_int64
|
||||||
|
p_uint64
|
||||||
|
p_float
|
||||||
|
p_double
|
||||||
|
""".split()
|
||||||
|
|
||||||
|
|
||||||
|
def sizeof(s):
    """
    Return the size of an object when packed
    """
    # A declared _size_ wins (even a falsy one), then raw byte length.
    _missing = object()
    declared = getattr(s, '_size_', _missing)
    if declared is not _missing:
        return declared
    if isinstance(s, bytes):
        return len(s)
    raise ValueError(s)
|
||||||
|
|
||||||
|
|
||||||
|
class MetaPackable(type):
    """
    Fixed size struct.unpack-able types use from_tuple as their designated
    initializer

    These methods live on the metaclass, so they behave like classmethods
    of every class built with it.
    """
    def from_mmap(cls, mm, ptr, **kw):
        # Slice exactly _size_ bytes from the mmap and parse them.
        return cls.from_str(mm[ptr:ptr+cls._size_], **kw)

    def from_fileobj(cls, f, **kw):
        # Read exactly _size_ bytes from the stream and parse them.
        return cls.from_str(f.read(cls._size_), **kw)

    def from_str(cls, s, **kw):
        # A per-call '_endian_' keyword overrides the class default.
        endian = kw.get('_endian_', cls._endian_)
        return cls.from_tuple(struct.unpack(endian + cls._format_, s), **kw)

    def from_tuple(cls, tpl, **kw):
        # Scalar packables unpack to a 1-tuple; forward the single value.
        return cls(tpl[0], **kw)
|
||||||
|
|
||||||
|
|
||||||
|
class BasePackable(object):
    """Common serialization interface for packable values.

    Subclasses must implement ``to_str``; the file and mmap writers are
    derived from it.  The default byte order is big-endian ('>').
    """
    _endian_ = '>'

    def to_str(self):
        """Return the packed byte representation (subclass hook)."""
        raise NotImplementedError

    def to_fileobj(self, f):
        """Write the packed representation to the file object *f*."""
        f.write(self.to_str())

    def to_mmap(self, mm, ptr):
        """Write the packed representation into *mm* at offset *ptr*."""
        mm[ptr:ptr + self._size_] = self.to_str()
|
||||||
|
|
||||||
|
|
||||||
|
# This defines a class with a custom metaclass, we'd normally
# use "class Packable(BasePackable, metaclass=MetaPackage)",
# but that syntax is not valid in Python 2 (and likewise the
# python 2 syntax is not valid in Python 3)
def _make():
    # to_str packs the value itself with the subclass' struct format;
    # an instance-level _endian_ (set by pypackable.__new__) wins over
    # the class default.
    def to_str(self):
        cls = type(self)
        endian = getattr(self, '_endian_', cls._endian_)
        return struct.pack(endian + cls._format_, self)
    return MetaPackable("Packable", (BasePackable,), {'to_str': to_str})


Packable = _make()
del _make
|
||||||
|
|
||||||
|
|
||||||
|
def pypackable(name, pytype, format):
    """
    Create a "mix-in" class with a python type and a
    Packable with the given struct format

    The returned class subclasses *pytype* (e.g. int, float, bytes) and
    Packable, so instances behave as the native Python value while still
    supporting the to_str/from_str protocol.
    """
    size, items = _formatinfo(format)

    def __new__(cls, *args, **kwds):
        # Pop a caller-supplied byte order so it does not reach
        # pytype.__new__; fall back to the class default.
        if '_endian_' in kwds:
            _endian_ = kwds.pop('_endian_')
        else:
            _endian_ = cls._endian_

        result = pytype.__new__(cls, *args, **kwds)
        result._endian_ = _endian_
        return result

    # type(Packable) is MetaPackable, so the new class gets the
    # from_str/from_fileobj/... constructors automatically.
    return type(Packable)(name, (pytype, Packable), {
        '_format_': format,
        '_size_': size,
        '_items_': items,
        '__new__': __new__,
    })
|
||||||
|
|
||||||
|
|
||||||
|
def _formatinfo(format):
|
||||||
|
"""
|
||||||
|
Calculate the size and number of items in a struct format.
|
||||||
|
"""
|
||||||
|
size = struct.calcsize(format)
|
||||||
|
return size, len(struct.unpack(format, b'\x00' * size))
|
||||||
|
|
||||||
|
|
||||||
|
class MetaStructure(MetaPackable):
    """
    The metaclass of Structure objects that does all the magic.

    Since we can assume that all Structures have a fixed size,
    we can do a bunch of calculations up front and pack or
    unpack the whole thing in one struct call.
    """
    def __new__(cls, clsname, bases, dct):
        # Derive the aggregate struct format, size, and field accessors
        # from the class' _fields_ declaration.
        fields = dct['_fields_']
        names = []
        types = []
        structmarks = []
        format = ''
        items = 0
        size = 0

        def struct_property(name, typ):

            def _get(self):
                return self._objects_[name]

            def _set(self, obj):
                # Coerce assigned values to the declared field type.
                if type(obj) is not typ:
                    obj = typ(obj)
                self._objects_[name] = obj

            # NOTE(review): the third positional argument of property()
            # is fdel, so typ.__name__ (a str) is installed as the
            # deleter here -- presumably 'doc' was intended; confirm
            # against upstream before changing.
            return property(_get, _set, typ.__name__)

        for name, typ in fields:
            dct[name] = struct_property(name, typ)
            names.append(name)
            types.append(typ)
            format += typ._format_
            size += typ._size_
            # Multi-item fields (nested structures) are recorded so
            # from_tuple can re-group their flattened values.
            if (typ._items_ > 1):
                structmarks.append((items, typ._items_, typ))
            items += typ._items_

        dct['_structmarks_'] = structmarks
        dct['_names_'] = names
        dct['_types_'] = types
        dct['_size_'] = size
        dct['_items_'] = items
        dct['_format_'] = format
        return super(MetaStructure, cls).__new__(cls, clsname, bases, dct)

    def from_tuple(cls, tpl, **kw):
        # Re-group the flat unpacked tuple: scalar runs are copied
        # through, spans recorded in _structmarks_ are rebuilt via the
        # nested type's own from_tuple.
        values = []
        current = 0
        for begin, length, typ in cls._structmarks_:
            if begin > current:
                values.extend(tpl[current:begin])
            current = begin + length
            values.append(typ.from_tuple(tpl[begin:current], **kw))
        values.extend(tpl[current:])
        return cls(*values, **kw)
|
||||||
|
|
||||||
|
|
||||||
|
# See metaclass discussion earlier in this file
def _make():
    # Build the Structure base class under MetaStructure without using
    # version-specific metaclass syntax; methods are collected into
    # class_dict via the as_method decorator.
    class_dict = {}
    class_dict['_fields_'] = ()

    def as_method(function):
        class_dict[function.__name__] = function

    @as_method
    def __init__(self, *args, **kwargs):
        # Copy-constructor form: Structure(other) clones other's fields.
        if len(args) == 1 and not kwargs and type(args[0]) is type(self):
            kwargs = args[0]._objects_
            args = ()
        self._objects_ = {}
        iargs = chain(izip(self._names_, args), kwargs.items())
        for key, value in iargs:
            # Only declared field names (plus '_endian_') are accepted.
            if key not in self._names_ and key != "_endian_":
                raise TypeError
            setattr(self, key, value)
        # Default-construct any fields not supplied by the caller.
        for key, typ in izip(self._names_, self._types_):
            if key not in self._objects_:
                self._objects_[key] = typ()

    @as_method
    def _get_packables(self):
        # Flatten nested structures into a stream of scalar packables,
        # in declaration order.
        for obj in imap(self._objects_.__getitem__, self._names_):
            if hasattr(obj, '_get_packables'):
                for obj in obj._get_packables():
                    yield obj

            else:
                yield obj

    @as_method
    def to_str(self):
        return struct.pack(
            self._endian_ + self._format_, *self._get_packables())

    @as_method
    def __cmp__(self, other):
        # Field-wise three-way comparison; only same-type comparisons
        # are allowed.
        if type(other) is not type(self):
            raise TypeError(
                'Cannot compare objects of type %r to objects of type %r' % (
                    type(other), type(self)))
        if sys.version_info[0] == 2:
            _cmp = cmp  # noqa: F821
        else:
            # Python 3 has no cmp(); emulate it for orderable values.
            def _cmp(a, b):
                if a < b:
                    return -1
                elif a > b:
                    return 1
                elif a == b:
                    return 0
                else:
                    raise TypeError()

        for cmpval in starmap(
                _cmp, izip(self._get_packables(), other._get_packables())):
            if cmpval != 0:
                return cmpval
        return 0

    # The rich comparisons below all defer to __cmp__ so Python 3
    # instances order the same way Python 2 ones did.
    @as_method
    def __eq__(self, other):
        r = self.__cmp__(other)
        return r == 0

    @as_method
    def __ne__(self, other):
        r = self.__cmp__(other)
        return r != 0

    @as_method
    def __lt__(self, other):
        r = self.__cmp__(other)
        return r < 0

    @as_method
    def __le__(self, other):
        r = self.__cmp__(other)
        return r <= 0

    @as_method
    def __gt__(self, other):
        r = self.__cmp__(other)
        return r > 0

    @as_method
    def __ge__(self, other):
        r = self.__cmp__(other)
        return r >= 0

    @as_method
    def __repr__(self):
        result = []
        result.append('<')
        result.append(type(self).__name__)
        for nm in self._names_:
            result.append(' %s=%r' % (nm, getattr(self, nm)))
        result.append('>')
        return ''.join(result)

    return MetaStructure("Structure", (BasePackable,), class_dict)


Structure = _make()
del _make
|
||||||
|
|
||||||
|
try:
    # 'long' exists on Python 2 only.
    long
except NameError:
    # Python 3: int covers arbitrary precision.
    long = int
|
||||||
|
|
||||||
|
# export common packables with predictable names
# (one per fixed-size C scalar; format characters per the struct module)
p_char = pypackable('p_char', bytes, 'c')
p_int8 = pypackable('p_int8', int, 'b')
p_uint8 = pypackable('p_uint8', int, 'B')
p_int16 = pypackable('p_int16', int, 'h')
p_uint16 = pypackable('p_uint16', int, 'H')
p_int32 = pypackable('p_int32', int, 'i')
p_uint32 = pypackable('p_uint32', long, 'I')
p_int64 = pypackable('p_int64', long, 'q')
p_uint64 = pypackable('p_uint64', long, 'Q')
p_float = pypackable('p_float', float, 'f')
p_double = pypackable('p_double', float, 'd')
|
||||||
|
|
||||||
|
# Deprecated names, need trick to emit deprecation warning.
# (plain aliases for backwards compatibility with older macholib callers)
p_byte = p_int8
p_ubyte = p_uint8
p_short = p_int16
p_ushort = p_uint16
p_int = p_long = p_int32
p_uint = p_ulong = p_uint32
p_longlong = p_int64
p_ulonglong = p_uint64
|
258
lib/spack/external/macholib/util.py
vendored
Normal file
258
lib/spack/external/macholib/util.py
vendored
Normal file
|
@ -0,0 +1,258 @@
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import stat
|
||||||
|
import struct
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
from macholib import mach_o
|
||||||
|
|
||||||
|
# The four thin Mach-O magic numbers (both endiannesses, 32- and 64-bit),
# packed big-endian for byte-prefix comparison.
MAGIC = [
    struct.pack('!L', getattr(mach_o, 'MH_' + _))
    for _ in ['MAGIC', 'CIGAM', 'MAGIC_64', 'CIGAM_64']
]
# Magic prefix of a fat (multi-architecture) binary.
FAT_MAGIC_BYTES = struct.pack('!L', mach_o.FAT_MAGIC)
MAGIC_LEN = 4  # every magic prefix above is 4 bytes
# strip(1) invocation template; presumably consumed by strip_files()
# elsewhere in this module -- confirm usage before changing.
STRIPCMD = ['/usr/bin/strip', '-x', '-S', '-']
|
||||||
|
|
||||||
|
try:
    unicode  # noqa -- exists on Python 2 only
except NameError:
    # Python 3: str plays the role of unicode.
    unicode = str


def fsencoding(s, encoding=sys.getfilesystemencoding()):
    """
    Ensure the given argument is in filesystem encoding (not unicode)

    Note: the default encoding is captured once, at import time.
    """
    return s.encode(encoding) if isinstance(s, unicode) else s
|
||||||
|
|
||||||
|
|
||||||
|
def move(src, dst):
    """
    move that ensures filesystem encoding of paths
    """
    # Normalize both paths before delegating to shutil.
    encoded_src = fsencoding(src)
    encoded_dst = fsencoding(dst)
    shutil.move(encoded_src, encoded_dst)
|
||||||
|
|
||||||
|
|
||||||
|
def copy2(src, dst):
    """
    copy2 (file data plus metadata) that ensures filesystem encoding
    of paths
    """
    encoded_src = fsencoding(src)
    encoded_dst = fsencoding(dst)
    shutil.copy2(encoded_src, encoded_dst)
|
||||||
|
|
||||||
|
|
||||||
|
def flipwritable(fn, mode=None):
    """
    Make *fn* writable and return its previous st_mode, or None when
    the file was already writable.

    NOTE: the *mode* argument is accepted for API compatibility but is
    not used.
    """
    if os.access(fn, os.W_OK):
        # Already writable: nothing to flip.
        return None
    previous_mode = os.stat(fn).st_mode
    os.chmod(fn, previous_mode | stat.S_IWRITE)
    return previous_mode
|
||||||
|
|
||||||
|
|
||||||
|
class fileview(object):
    """
    A proxy for file-like objects that exposes a given view of a file.

    The view is the absolute byte window [start, start + size] of the
    underlying file object; reads, writes and seeks are validated
    against (and reported relative to) that window.
    """

    def __init__(self, fileobj, start, size):
        # Underlying file object and absolute window boundaries.
        self._fileobj = fileobj
        self._start = start
        self._end = start + size

    def __repr__(self):
        return '<fileview [%d, %d] %r>' % (
            self._start, self._end, self._fileobj)

    def tell(self):
        # Current position, relative to the start of the window.
        return self._fileobj.tell() - self._start

    def _checkwindow(self, seekto, op):
        # Raise if an absolute offset falls outside the window.
        if not (self._start <= seekto <= self._end):
            raise IOError("%s to offset %d is outside window [%d, %d]" % (
                op, seekto, self._start, self._end))

    def seek(self, offset, whence=0):
        """
        Seek to *offset* using standard whence values: 0 = relative to
        the window start, 1 = relative to the current position,
        2 = relative to the window end.
        """
        seekto = offset
        if whence == 0:
            seekto += self._start
        elif whence == 1:
            seekto += self._fileobj.tell()
        elif whence == 2:
            seekto += self._end
        else:
            raise IOError("Invalid whence argument to seek: %r" % (whence,))
        self._checkwindow(seekto, 'seek')
        self._fileobj.seek(seekto)

    def write(self, bytes):
        """
        Write *bytes* at the current position; the entire write must
        fit inside the window.
        """
        here = self._fileobj.tell()
        self._checkwindow(here, 'write')
        self._checkwindow(here + len(bytes), 'write')
        self._fileobj.write(bytes)

    def read(self, size=sys.maxsize):
        """
        Read up to *size* bytes, truncated at the end of the window.

        Raises ValueError for a negative *size*.
        """
        if size < 0:
            # BUG FIX: the message was previously passed as separate
            # arguments to ValueError ("msg", size, obj) instead of
            # being %-formatted, so it was never interpolated.
            raise ValueError(
                "Invalid size %s while reading from %s" % (
                    size, self._fileobj))
        here = self._fileobj.tell()
        self._checkwindow(here, 'read')
        bytes = min(size, self._end - here)
        return self._fileobj.read(bytes)
|
||||||
|
|
||||||
|
|
||||||
|
def mergecopy(src, dest):
    """
    copy2, but only if the destination isn't up to date
    """
    if os.path.exists(dest):
        # Skip the copy when dest is at least as new as src.
        if os.stat(dest).st_mtime >= os.stat(src).st_mtime:
            return
    copy2(src, dest)
|
||||||
|
|
||||||
|
|
||||||
|
def mergetree(src, dst, condition=None, copyfn=mergecopy, srcbase=None):
    """
    Recursively merge a directory tree using mergecopy().

    condition -- optional predicate; entries for which
        condition(srcname) is false are skipped.
    copyfn    -- function used to copy a regular file
        (default: mergecopy).
    srcbase   -- root of the original tree, carried through the
        recursion (defaults to src on the first call).

    Raises IOError with a list of (srcname, dstname, exception)
    tuples when any entries failed to copy.
    """
    src = fsencoding(src)
    dst = fsencoding(dst)
    if srcbase is None:
        srcbase = src
    names = map(fsencoding, os.listdir(src))
    try:
        os.makedirs(dst)
    except OSError:
        # Destination directory may already exist.
        pass
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        if condition is not None and not condition(srcname):
            continue
        try:
            if os.path.islink(srcname):
                # XXX: This is naive at best, should check srcbase(?)
                realsrc = os.readlink(srcname)
                os.symlink(realsrc, dstname)
            elif os.path.isdir(srcname):
                mergetree(
                    srcname, dstname,
                    condition=condition, copyfn=copyfn, srcbase=srcbase)
            else:
                copyfn(srcname, dstname)
        except (IOError, os.error) as why:
            # Collect failures and keep going, shutil.copytree-style.
            errors.append((srcname, dstname, why))
    if errors:
        raise IOError(errors)
|
||||||
|
|
||||||
|
|
||||||
|
def sdk_normalize(filename):
    """
    Normalize a path to strip out the SDK portion, normally so that it
    can be decided whether it is in a system path or not.
    """
    if not filename.startswith('/Developer/SDKs/'):
        return filename
    # Drop the 'Developer/SDKs/<sdkname>' components, keep the rest.
    parts = filename.split('/')
    return '/'.join(parts[:1] + parts[4:])
|
||||||
|
|
||||||
|
|
||||||
|
# Paths that should NOT be treated as system files even though they
# live under system prefixes; in_system_path() consults this list.
# NOTE(review): empty here, so presumably populated by callers —
# confirm against client code.
NOT_SYSTEM_FILES = []
|
||||||
|
|
||||||
|
|
||||||
|
def in_system_path(filename):
    """
    Return True if the file is in a system path
    """
    fn = sdk_normalize(os.path.realpath(filename))
    if fn.startswith('/usr/local/'):
        # /usr/local is user territory, never a system path.
        return False
    if not (fn.startswith('/System/') or fn.startswith('/usr/')):
        return False
    # Known exceptions are excluded even under system prefixes.
    return fn not in NOT_SYSTEM_FILES
|
||||||
|
|
||||||
|
|
||||||
|
def has_filename_filter(module):
    """
    Return False if the module does not have a filename attribute
    (or has it set to None); True otherwise.
    """
    filename = getattr(module, 'filename', None)
    return filename is not None
|
||||||
|
|
||||||
|
|
||||||
|
def get_magic():
    """
    Get a list of valid Mach-O header signatures, not including the fat
    header.

    Returns the module-level MAGIC list (32/64-bit, both byte orders).
    """
    return MAGIC
|
||||||
|
|
||||||
|
|
||||||
|
def is_platform_file(path):
    """
    Return True if the file is Mach-O (thin or fat), False otherwise.
    """
    # Missing files and symlinks are never considered platform files.
    if not os.path.exists(path) or os.path.islink(path):
        return False
    with open(path, 'rb') as fileobj:
        magic_bytes = fileobj.read(MAGIC_LEN)
        if magic_bytes == FAT_MAGIC_BYTES:
            # Fat binary: the real Mach-O magic lives in the first
            # architecture slice, so parse the fat header to find it.
            fileobj.seek(0)
            header = mach_o.fat_header.from_fileobj(fileobj, _endian_='>')
            if header.nfat_arch < 1:
                return False
            arch = mach_o.fat_arch.from_fileobj(fileobj, _endian_='>')
            fileobj.seek(arch.offset)
            magic_bytes = fileobj.read(MAGIC_LEN)
        # Membership test replaces the original explicit loop over MAGIC.
        return magic_bytes in MAGIC
|
||||||
|
|
||||||
|
|
||||||
|
def iter_platform_files(dst):
    """
    Walk a directory and yield each full path that is a Mach-O file
    """
    for root, _dirs, files in os.walk(dst):
        for name in files:
            path = os.path.join(root, name)
            if is_platform_file(path):
                yield path
|
||||||
|
|
||||||
|
|
||||||
|
def strip_files(files, argv_max=(256 * 1024)):
    """
    Strip a list of files.

    Runs STRIPCMD (/usr/bin/strip) over *files*, batching as many paths
    as fit within *argv_max* bytes per invocation, temporarily making
    read-only files writable for the duration of the strip.
    """
    # Record each file's original mode so it can be restored afterwards
    # (flipwritable returns None when the file was already writable).
    tostrip = [(fn, flipwritable(fn)) for fn in files]
    while tostrip:
        cmd = list(STRIPCMD)
        flips = []
        # Account for each argument plus one separating/NUL byte.
        pathlen = sum([len(s) + 1 for s in cmd])
        while pathlen < argv_max:
            if not tostrip:
                break
            added, flip = tostrip.pop()
            pathlen += len(added) + 1
            cmd.append(added)
            flips.append((added, flip))
        else:
            # while/else: runs only when the loop ended because the
            # argv budget was exhausted (no break), so put the last
            # file back for the next batch.
            cmd.pop()
            tostrip.append(flips.pop())
        os.spawnv(os.P_WAIT, cmd[0], cmd)
        # Restore the original writability of this batch.
        for args in flips:
            flipwritable(*args)
|
Loading…
Reference in a new issue