rip out broken CDA

George Hotz 2015-02-22 14:17:34 -08:00
parent dda2548997
commit 950e97b213
15 changed files with 1 addition and 13176 deletions

View File

@@ -107,6 +107,7 @@ a -- make ascii at iaddr
d -- make data at iaddr
u -- make undefined at iaddr
== Installation on Windows (experimental) ==
Install git
@@ -116,16 +117,6 @@ Install pip
Run install.bat
== Using CDA ==
Pass either --cda to QIRA, or just call "cda binary"
The binary must have DWARF data, and the source files must exist on the system
If you used --cda, you have to go to localhost:3002/cda
Everything should be intuitive, except that pressing '/' opens a regex search
== Session state ==
clnum -- Selected changelist number

View File

@@ -1,134 +0,0 @@
#!/usr/bin/env python2.7
import os
import sys
import cda_config
basedir = os.path.dirname(os.path.realpath(__file__))
#sys.path.append(basedir+"/clang/llvm/tools/clang/bindings/python")
import clang.cindex as ci
ci.Config.set_library_file(cda_config.LIBCLANG_PATH)
import pickle
from clang.cindex import CursorKind
import json
from hashlib import sha1
# debug
DEBUG = 0
# cache generated
file_cache = {}
object_cache = {}
xref_cache = {}
# a single index for the runtime of the server
index = ci.Index.create()
def parse_node(node, d, filename, care):
  #print node.location.file
  if node.location.file != None and str(node.location.file) != filename:
    return
  ref = node.referenced
  if type(ref) != type(None):
    usr = ref.get_usr()
    #print " "*d, node.kind, node.spelling, node.displayname, node.location, node.extent.start.offset, node.extent.end.offset, node.get_usr(), "****", ref.spelling, ref.location, ref.get_usr()
  else:
    usr = None
  if DEBUG == 1:
    print " "*d, node.kind, node.spelling, node.displayname, node.location, node.location.offset, node.extent.start.offset, node.extent.end.offset, usr
  """
  if DEBUG == 1:
    print " "*d, node.kind, node.spelling, node.displayname, node.location, node.location.offset, node.extent.start.offset, node.extent.end.offset, usr
  """
  #print dir(node)
  """
  print ref, node.get_usr()
  print ref.location
  for i in deff:
    print i
  """
  klass = str(node.kind).split('.')[-1]
  (start, end) = (None, None)
  if node.kind in [CursorKind.STRING_LITERAL, CursorKind.INTEGER_LITERAL, CursorKind.TYPE_REF, CursorKind.TEMPLATE_REF]:
  #if node.kind in [CursorKind.STRING_LITERAL, CursorKind.TYPE_REF, CursorKind.TEMPLATE_REF]:
    start = node.extent.start.offset
    end = node.extent.end.offset
  elif node.kind in [CursorKind.FUNCTION_DECL, CursorKind.FUNCTION_TEMPLATE, CursorKind.VAR_DECL, CursorKind.CLASS_DECL, CursorKind.CXX_METHOD, CursorKind.CLASS_TEMPLATE, CursorKind.PARM_DECL]:
    start = node.location.offset
    end = node.location.offset + len(node.spelling)
  elif node.kind in [CursorKind.MEMBER_REF_EXPR]:
    #print node.location.offset, node.extent.start.offset, node.extent.end.offset
    if node.location.offset != 0:
      start = node.location.offset
    else:
      start = node.extent.start.offset
    end = node.extent.end.offset
    #end = node.location.offset + len(node.displayname)
  elif node.kind in [CursorKind.DECL_REF_EXPR]:
    start = node.location.offset
    end = node.extent.end.offset
  if end != None:
    care.append((start, end, klass, usr))
  if end != None and usr != None and node.location.line > 0:
    newval = filename+"#"+str(node.location.line)
    if node.is_definition():
      # defining the object
      if usr in object_cache:
        object_cache[usr].append(newval)
      else:
        object_cache[usr] = [newval]
    else:
      # xref
      if usr in xref_cache:
        xref_cache[usr].append(newval)
      else:
        xref_cache[usr] = [newval]
  # link here is good
  for child in node.get_children():
    parse_node(child, d+1, filename, care)

def parse_file(filename, args=[]):
  # traversal attack
  exargs = ["-I", cda_config.CLANG_INCLUDES]
  tu = index.parse(filename, args=exargs+args)
  # bad shit happened
  bad = False
  for m in tu.diagnostics:
    if m.severity >= 3:
      print m
      bad = True
  if bad == True:
    #raise Exception("parsing issue")
    print "parsing issue"
  # extract the things we care about
  care = []
  parse_node(tu.cursor, 0, filename, care)
  care = sorted(care)
  # get file data
  rdat = open(filename).read()
  return (care, rdat)

def parse_files(files, args=[]):
  # for unbuilt clang
  for fn in files:
    print "CDA: caching",fn
    try:
      file_cache[fn] = parse_file(fn, args)
    except Exception as e:
      print "CDA: error on",fn,":",e
  dat = (object_cache, file_cache, xref_cache)
  return dat
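This module is the offline half of CDA: parse_files() walks each translation unit and returns the (object_cache, file_cache, xref_cache) tuple that the web side later swaps in with set_cache(). A minimal driver sketch, assuming hypothetical source paths and cache file name (the real QIRA integration wires this up elsewhere), with the functions above in scope:

# minimal sketch: build the CDA cache for a couple of sources and pickle it
# for the server side; the file names here are hypothetical, not QIRA's.
import pickle

sources = ["/tmp/example.c", "/tmp/util.c"]
dat = parse_files(sources)              # (object_cache, file_cache, xref_cache)
with open("/tmp/cda_cache.pkl", "wb") as f:
  pickle.dump(dat, f)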

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python2.7
import os
import sys
import cgi
from flask import Flask,redirect,request,Blueprint
from html import XHTML
app = Blueprint('cda',__name__)
# escape on the real
def escape(s, crap=False):
  return s.replace("<", "&lt;").replace(">", "&gt;").replace(" ", "&nbsp;").replace("\n", "<br/>").replace("\t", "&nbsp;"*4).replace("\x00", " ")
cgi.escape = escape

@app.route("/list")
def list():
  h = XHTML().html
  for f in sorted(file_cache.keys()):
    h.body.div.a(f, href="#"+f+",0,")
  return str(h)

# only path that should be here now
@app.route("/cda")
def home():
  # generate html
  h = XHTML().html
  h.head.link(rel="stylesheet", href="/cdastatic/cda.css")
  h.head.script(src="/cdastatic/socket.io.min.js")
  h.head.script(src="/cdastatic/jquery-2.1.0.js")
  h.head.script(src="/cdastatic/jquery.scrollTo.min.js")
  h.head.script(src="/cdastatic/cda.js?"+os.urandom(16).encode("hex"))
  body = h.body
  prog = body.div(id="program")
  xrefs = body.div(id="xrefs")
  return str(h)

@app.route("/x/<b64xref>")
def display_xref(b64xref):
  xref = b64xref.decode("base64")
  h = XHTML().html
  h.head.link(rel="stylesheet", href="/cdastatic/cda.css")
  body = h.body(klass="xref")
  body.div.div(xref, klass="xrefstitle")
  if xref in xref_cache:
    for obj in xref_cache[xref]:
      linkobj = obj.replace("#",",")+","+b64xref
      body.div.a(obj, onclick="location.replace('#"+linkobj+"')", klass="filelink")
  return str(body)

@app.route("/f")
def display_file():
  path = request.query_string
  if path not in file_cache:
    return "file "+str(path)+" not found"
  # generate the HTML
  h = XHTML().html
  body = h.body
  body.div(path, id='filename')
  #body.iframe(id='bottomframe')
  # get parsed file
  (care, rdat) = file_cache[path]
  # add line numbers
  lc = len(rdat.split("\n"))
  ln = body.div(id="ln")
  for linenum in range(lc):
    ln.span("%5d \n" % (linenum+1), id="l"+str(linenum+1), onclick='go_to_line('+str(linenum+1)+')')
  # add the code
  #print object_cache
  p = body.div(id="code")
  last = 0
  for (start, end, klass, usr) in care:
    if last > start:
      # this is not the proper fix
      #print "OMG ISSUE ",last,start,klass,usr
      continue
    p.span(rdat[last:start])
    if usr != None:
      if usr in object_cache:
        #p.span(klass=klass, usr=usr).a(rdat[start:end], href="/f/"+object_cache[usr][0])
        #if usr in xref_cache:
        #  p.span(rdat[start:end], klass=klass+"\x00link", usr=usr, targets='\x00'.join(object_cache[usr]), xrefs='\x00'.join(xref_cache[usr]))
        #else:
        p.span(rdat[start:end], klass=klass+"\x00link", name=usr, targets='\x00'.join(object_cache[usr]))
      else:
        p.span(rdat[start:end], klass=klass, name=usr)
    else:
      p.span(rdat[start:end], klass=klass)
    last = end
  p.span(rdat[last:])
  return str(body)

def set_cache(cache):
  global object_cache, file_cache, xref_cache
  (object_cache, file_cache, xref_cache) = cache
  print "CDA: read",len(file_cache),"files",len(object_cache),"objects",len(xref_cache),"xrefs"

View File

@@ -1,7 +0,0 @@
#ci.Config.set_library_file(basedir+"/clang/build/Release+Asserts/lib/libclang.so")
#ci.Config.set_library_file(basedir+"/libclang.so")
LIBCLANG_PATH = '/usr/lib/x86_64-linux-gnu/libclang.so.1'
#args.append(basedir+"/clang-latest/build/Release+Asserts/lib/clang/3.4.2/include")
#args.append(basedir+"/include")
CLANG_INCLUDES = '/usr/lib/llvm-3.4/lib/clang/3.4/include'

View File

@@ -1,24 +0,0 @@
#===- __init__.py - Clang Python Bindings --------------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
r"""
Clang Library Bindings
======================
This package provides access to the Clang compiler and libraries.
The available modules are:
cindex
Bindings for the Clang indexing library.
"""
__all__ = ['cindex']
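The package docstring only names cindex; a minimal sketch of driving it directly, reusing the LIBCLANG_PATH from cda_config above (the input file is hypothetical and this mirrors the recursion parse_node does):

# walk the cursor tree of one translation unit with the cindex bindings
import clang.cindex as ci
import cda_config
ci.Config.set_library_file(cda_config.LIBCLANG_PATH)

def dump(node, depth=0):
  # print every cursor's kind and spelling, indented by depth
  print " "*depth, node.kind, node.spelling
  for child in node.get_children():
    dump(child, depth+1)

tu = ci.Index.create().parse("/tmp/hello.c")   # hypothetical test file
dump(tu.cursor)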

File diff suppressed because it is too large

View File

@@ -1,34 +0,0 @@
#===- enumerations.py - Python Enumerations ------------------*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
Clang Enumerations
==================
This module provides static definitions of enumerations that exist in libclang.
Enumerations are typically defined as a list of tuples. The exported values are
typically munged into other types or classes at module load time.
All enumerations are centrally defined in this file so they are all grouped
together and easier to audit. And, maybe even one day this file will be
automatically generated by scanning the libclang headers!
"""
# Maps to CXTokenKind. Note that libclang maintains a separate set of token
# enumerations from the C++ API.
TokenKinds = [
('PUNCTUATION', 0),
('KEYWORD', 1),
('IDENTIFIER', 2),
('LITERAL', 3),
('COMMENT', 4),
]
__all__ = ['TokenKinds']
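The docstring above says these tuple lists get "munged into other types or classes at module load time." A small sketch of that pattern, using a hypothetical TokenKind holder rather than the real cindex plumbing:

# hypothetical illustration: turn ('KEYWORD', 1) style pairs into attributes
class TokenKind(object):
  _by_value = {}
  def __init__(self, name, value):
    self.name, self.value = name, value
    TokenKind._by_value[value] = self

for name, value in TokenKinds:
  setattr(TokenKind, name, TokenKind(name, value))

assert TokenKind.KEYWORD.value == 1
assert TokenKind._by_value[4].name == 'COMMENT'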

View File

@@ -1,35 +0,0 @@
#!/bin/bash -e
#sudo pip install html
mkdir -p clang-latest
cd clang-latest
if [ ! -f .downloaded_clang ]; then
  echo "downloading"
  wget http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.gz
  wget http://llvm.org/releases/3.4/compiler-rt-3.4.src.tar.gz
  wget http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz
  touch .downloaded_clang
fi
echo "extracting"
tar xf llvm-3.4.2.src.tar.gz
tar xf cfe-3.4.2.src.tar.gz
tar xf compiler-rt-3.4.src.tar.gz
echo "making symlinks"
ln -sf llvm-3.4.2.src llvm
ln -sf ../../cfe-3.4.2.src llvm/tools/clang
ln -sf ../../compiler-rt-3.4 llvm/projects/compiler-rt
rm -f ../clang
ln -s clang-latest/llvm/tools/clang/bindings/python/clang ../clang
rm -f ../include
ln -s clang-latest/cfe-3.4.2.src/lib/Headers ../include
# don't actually build clang because it takes forever and sucks
exit 0
mkdir -p build
cd build
../llvm/configure --enable-optimized
make -j $(grep processor < /proc/cpuinfo | wc -l)
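Because the script exits before actually building clang, the symlinked Python bindings end up driving the system libclang named in cda_config. A quick sanity check, assuming it runs from the directory holding the new clang symlink and that a small test file exists (both assumptions, not part of the script):

# minimal check that the symlinked bindings, system libclang and bundled
# includes line up; uses the same -I flag parse_file() passes.
import clang.cindex as ci
import cda_config
ci.Config.set_library_file(cda_config.LIBCLANG_PATH)
tu = ci.Index.create().parse("/tmp/t.c", args=["-I", cda_config.CLANG_INCLUDES])
for d in tu.diagnostics:
  print d   # a clean parse prints nothing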

Binary file not shown.

View File

@@ -1,139 +0,0 @@
body {
  font-family: monospace;
}
a {
  color: inherit;
}
span {
  white-space: nowrap;
}
.link {
  /*background-color: #F0F0F0;*/
  text-decoration: underline;
  /*font-style: italic;*/
}
.xref {
  background-color: #EEEEEE;
  padding: 5px;
}
.xrefstitle {
  color: blue;
}
#ln {
  display: inline-block;
  float: left;
  color: gray;
}
.line_highlighted {
  background-color: #EE8888;
}
.highlighted {
  background-color: #EEEE88;
}
#code {
  overflow: hidden;
}
.dirlink {
  color: blue;
}
.filelink {
  color: black;
  text-decoration: underline;
}
/* declaring a function in the global scope */
.FUNCTION_DECL {
  color: red;
}
/* C++ method in class */
.CXX_METHOD {
  color: #AAAAAA;
}
/* a variable or function reference */
.DECL_REF_EXPR {
  color: #00AA00;
}
/* calling a C++ member function */
.MEMBER_REF_EXPR {
  color: #AAAA55;
}
/* string in quotes */
.STRING_LITERAL {
  color: blue;
}
/* integer */
.INTEGER_LITERAL {
  color: purple;
}
.VAR_DECL {
  color: orange;
}
.PARM_DECL {
  color: orange;
}
.CLASS_DECL {
  color: #00AAAA;
}
.CLASS_TEMPLATE {
  color: #00AAAA;
}
.TYPE_REF {
  color: #AAAA00;
}
.TEMPLATE_REF {
  color: #AA4400;
}
iframe {
  margin: 0;
  padding: 0;
  border: 0 none;
}
body {
  margin: 0;
  padding: 0;
}
#xrefs {
  width: 100%;
  height: 19%;
  border-top: 1px solid black;
  position: fixed;
  bottom: 0;
  left: 0;
  background-color: #EEEEEE;
  overflow-y: scroll;
}
#program {
  padding: 5px;
  height: 80%;
  overflow-y: scroll;
}
#filename {
  display: none;
}

View File

@@ -1,142 +0,0 @@
// connect to the QIRA stream
stream = io.connect("http://localhost:3002/cda");
var current_line = "";
function go_to_filename_line(filename, line) {
  //p('setline');
  var b64xref = location.hash.split(",")[1];
  if (b64xref === undefined) b64xref = "";
  else b64xref = ","+b64xref;
  //location.replace("/f?"+filename+"#"+line+b64xref);
  session[0] = filename;
  session[1] = line;
}

stream.on('setline', function(filename, line) {
  current_line = filename+"#"+line;
  go_to_filename_line(filename, line);
});

function p(s) {
  console.log(s);
}

var highlighted = $();

// ugh hacks
var sline = undefined;
var sfile = undefined;
var sb64xref = undefined;

function refresh() {
  if (location.hash == "") {
    $.ajax("/list").done(function(a) {
      $('#program')[0].innerHTML = a;
    });
    sfile = undefined;
    sline = undefined;
    return;
  }
  //p(location.hash);
  var file = location.hash.substr(1).split(",")[0];
  var ln = location.hash.split(",")[1];
  var b64xref = location.hash.split(",")[2];
  if (sfile !== file) {
    $.ajax("/f?"+file).done(function(a) {
      $('#program')[0].innerHTML = a;
      sfile = file;
      sline = undefined;
      refresh();
    });
  }
  if (sline != parseInt(ln)) {
    highlighted.removeClass("line_highlighted")
    highlighted = $("#l" + ln)
    if (highlighted.length > 0) {
      highlighted.addClass("line_highlighted");
      $('#program').scrollTo(highlighted, {offset: -150})
      var new_line = file+"#"+parseInt(ln);
      if (current_line != new_line) {
        stream.emit('navigateline', sfile, parseInt(ln))
        current_line = new_line;
      }
      sline = parseInt(ln);
    }
  }
  if (b64xref !== undefined && b64xref !== "" && b64xref != sb64xref) {
    selected.removeClass('highlighted');
    selected = $(document.getElementsByName(atob(b64xref)));
    selected.addClass('highlighted');
    $.ajax("/x/"+b64xref).done(function(a) {
      //p(a);
      $('#xrefs')[0].innerHTML = a;
      sb64xref = b64xref;
    });
  }
}

// all of the session is stored in the hash
var session = [];
var selected = $();

// 0 = filename
// 1 = linenumber
// 2 = xref
for (var i = 0; i < 3; i++) {
  session.__defineSetter__(i, function(val) {
    var tmp = location.hash.substr(1).split(",");
    if (this == 0 && val.indexOf("#") != -1) {
      tmp[0] = val.split("#")[0];
      tmp[1] = val.split("#")[1];
    } else {
      tmp[this] = val;
    }
    if (this == 2) {
      location.replace("#"+tmp.join(","));
    } else {
      // for back and forward
      location = "#"+tmp.join(",");
    }
  }.bind(i));
}

function link_click_handler(e) {
  var usr = e.target.getAttribute('name');
  session[2] = btoa(usr);
}

function link_dblclick_handler(e) {
  var targets = e.target.getAttribute('targets').split(" ");
  var usr = e.target.getAttribute('name');
  session[0] = targets[0];
}

function go_to_line(line) {
  session[1] = line;
}

window.onmousedown = function() { return false; };

// when the page loads we need to check the hash
window.onload = function() {
  $('#program').on('click', '.link', link_click_handler);
  $('#program').on('dblclick', '.link', link_dblclick_handler);
  $(window).on('hashchange', refresh);
  refresh();
};

window.onkeydown = function(e) {
  if (e.keyCode == 191) { // /
    re = prompt("enter regex")
    $.ajax("/s/"+btoa(re)).done(function(a) {
      $('#xrefs')[0].innerHTML = a;
    });
  }
};

File diff suppressed because it is too large

View File

@@ -1,7 +0,0 @@
/**
* Copyright (c) 2007-2014 Ariel Flesler - aflesler<a>gmail<d>com | http://flesler.blogspot.com
* Licensed under MIT
* @author Ariel Flesler
* @version 1.4.13
*/
;(function(k){'use strict';k(['jquery'],function($){var j=$.scrollTo=function(a,b,c){return $(window).scrollTo(a,b,c)};j.defaults={axis:'xy',duration:parseFloat($.fn.jquery)>=1.3?0:1,limit:!0};j.window=function(a){return $(window)._scrollable()};$.fn._scrollable=function(){return this.map(function(){var a=this,isWin=!a.nodeName||$.inArray(a.nodeName.toLowerCase(),['iframe','#document','html','body'])!=-1;if(!isWin)return a;var b=(a.contentWindow||a).document||a.ownerDocument||a;return/webkit/i.test(navigator.userAgent)||b.compatMode=='BackCompat'?b.body:b.documentElement})};$.fn.scrollTo=function(f,g,h){if(typeof g=='object'){h=g;g=0}if(typeof h=='function')h={onAfter:h};if(f=='max')f=9e9;h=$.extend({},j.defaults,h);g=g||h.duration;h.queue=h.queue&&h.axis.length>1;if(h.queue)g/=2;h.offset=both(h.offset);h.over=both(h.over);return this._scrollable().each(function(){if(f==null)return;var d=this,$elem=$(d),targ=f,toff,attr={},win=$elem.is('html,body');switch(typeof targ){case'number':case'string':if(/^([+-]=?)?\d+(\.\d+)?(px|%)?$/.test(targ)){targ=both(targ);break}targ=win?$(targ):$(targ,this);if(!targ.length)return;case'object':if(targ.is||targ.style)toff=(targ=$(targ)).offset()}var e=$.isFunction(h.offset)&&h.offset(d,targ)||h.offset;$.each(h.axis.split(''),function(i,a){var b=a=='x'?'Left':'Top',pos=b.toLowerCase(),key='scroll'+b,old=d[key],max=j.max(d,a);if(toff){attr[key]=toff[pos]+(win?0:old-$elem.offset()[pos]);if(h.margin){attr[key]-=parseInt(targ.css('margin'+b))||0;attr[key]-=parseInt(targ.css('border'+b+'Width'))||0}attr[key]+=e[pos]||0;if(h.over[pos])attr[key]+=targ[a=='x'?'width':'height']()*h.over[pos]}else{var c=targ[pos];attr[key]=c.slice&&c.slice(-1)=='%'?parseFloat(c)/100*max:c}if(h.limit&&/^\d+$/.test(attr[key]))attr[key]=attr[key]<=0?0:Math.min(attr[key],max);if(!i&&h.queue){if(old!=attr[key])animate(h.onAfterFirst);delete attr[key]}});animate(h.onAfter);function animate(a){$elem.animate(attr,g,h.easing,a&&function(){a.call(this,targ,h)})}}).end()};j.max=function(a,b){var c=b=='x'?'Width':'Height',scroll='scroll'+c;if(!$(a).is('html,body'))return a[scroll]-$(a)[c.toLowerCase()]();var d='client'+c,html=a.ownerDocument.documentElement,body=a.ownerDocument.body;return Math.max(html[scroll],body[scroll])-Math.min(html[d],body[d])};function both(a){return $.isFunction(a)||typeof a=='object'?a:{top:a,left:a}}return j})}(typeof define==='function'&&define.amd?define:function(a,b){if(typeof module!=='undefined'&&module.exports){module.exports=b(require('jquery'))}else{b(jQuery)}}));

File diff suppressed because one or more lines are too long

View File

@@ -1,16 +0,0 @@
#!/bin/bash -e
echo "installing cda packages"
sudo apt-get install libclang-3.4-dev
echo "installing codesearch"
pushd .
cd cda
wget -O /tmp/cs.zip https://codesearch.googlecode.com/files/codesearch-0.01-linux-amd64.zip
unzip -o /tmp/cs.zip
rm /tmp/cs.zip
ln -sf codesearch-0.01 codesearch-latest
popd
echo "making symlink"
sudo ln -sf $(pwd)/qira /usr/local/bin/cda