Skip to content

Commit

Permalink
add h2 database support
Browse files Browse the repository at this point in the history
  • Loading branch information
wh1t3p1g committed Jan 10, 2021
1 parent 7300e84 commit 9a7e7f9
Show file tree
Hide file tree
Showing 53 changed files with 712 additions and 703 deletions.
13 changes: 8 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,10 +61,10 @@ https://repo.spring.io/release/org/springframework/spring/4.1.4.RELEASE/
neo4j 第一次运行新库时 耗时长(第二次之后有缓存,会快一点,但时间长了会有很大的硬盘占用,可以删除库再新建)
```
初始化 节点限制 可以极大的加快载入速度
CREATE CONSTRAINT ON (c:Class) ASSERT c.uuid IS UNIQUE;
CREATE CONSTRAINT ON (m:Method) ASSERT m.uuid IS UNIQUE;
CREATE INDEX ON :Class(name);
CREATE INDEX ON :Method(signature,subSignature);
CREATE CONSTRAINT ON (c:Class) ASSERT c.ID IS UNIQUE;
CREATE CONSTRAINT ON (m:Method) ASSERT m.ID IS UNIQUE;
CREATE INDEX FOR (n:Class) ON (n.NAME);
CREATE INDEX FOR (n1:Method) ON (n1.SIGNATURE,n1.SUB_SIGNATURE);
:schema 查看表库
:sysinfo 查看数据库信息
```
Expand Down Expand Up @@ -96,4 +96,7 @@ https://github.com/Fraunhofer-AISEC/codyze 是否存在参考价值
1. 检索出所有调用sink函数的函数
2. 通过保存的分析状态,判断这里调用sink函数的参数是否是可控的
3. 如果是可控的,找到可控对应的位置/来源,继续往上层找
4. 直到最后到达source点
4. 直到最后到达source点


TODO: BeanComparator compare 的 alias边没有做好
42 changes: 42 additions & 0 deletions doc/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
把插件 neo 配置好后,用它自带的浏览器执行
```
CREATE CONSTRAINT ON (c:Class) ASSERT c.ID IS UNIQUE;
CREATE CONSTRAINT ON (m:Method) ASSERT m.ID IS UNIQUE;
CREATE INDEX FOR (n:Class) ON (n.NAME);
CREATE INDEX FOR (n1:Method) ON (n1.SIGNATURE,n1.SUB_SIGNATURE);
```
接下来依次导入csv文件,替换$path
导入Methods
```
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true, mapping:{ IS_STATIC: {type:'boolean'}, HAS_PARAMETERS:{type:'boolean'}, IS_SINK: { type: 'boolean'}, IS_SOURCE: { type: 'boolean'}, IS_POLLUTED: { type: 'boolean'}, IS_SERIALIZABLE: {type:'boolean'} }}) YIELD map AS row RETURN row", "MERGE(m:Method {ID:row.ID} ) ON CREATE SET m = row", {batchSize:5000, iterateList:true, parallel:true})
```
导入Classes
```
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true, mapping:{ HAS_SUPER_CLASS: {type:'boolean'}, HAS_INTERFACES: {type:'boolean'}, IS_INTERFACE: {type:'boolean'}, SERIALIZABLE:{type:'boolean'}}}) YIELD map AS row RETURN row","MERGE (c:Class {NAME:row.NAME}) ON CREATE SET c = row", {batchSize:5000, iterateList:true, parallel:true})
```
导入边信息
```
// extends
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true}) YIELD map AS row RETURN row","MATCH( c1:Class {ID:row.SOURCE} ) MATCH ( c2:Class { ID:row.TARGET } ) MERGE (c1) -[e:EXTENDS { ID:row.ID }] -> (c2)", {batchSize:1000, iterateList:true, parallel:false})
// interface
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true}) YIELD map AS row RETURN row","MATCH( c1:Class {ID:row.SOURCE} ) MATCH ( c2:Class { ID:row.TARGET } ) MERGE (c1) -[e:INTERFACE { ID:row.ID }] -> (c2)", {batchSize:1000, iterateList:true, parallel:false})
// has
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true}) YIELD map AS row RETURN row","MATCH(c:Class{ID:row.CLASS_REF}) MATCH(m:Method { ID:row.METHOD_REF }) MERGE (c) -[e:HAS { ID:row.ID }]-> (m)", {batchSize:1000, iterateList:true, parallel:false})
// call
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true, mapping:{ IS_POLLUTED: {type: 'boolean'} }}) YIELD map AS row RETURN row","MATCH ( m1:Method {ID:row.SOURCE} ) MATCH ( m2:Method {ID:row.TARGET }) MERGE (m1)-[e:CALL {ID:row.ID, LINE_NUM:row.LINE_NUM, IS_POLLUTED:row.IS_POLLUTED, POLLUTED_POSITION:row.POLLUTED_POSITION, REAL_CALL_TYPE:row.REAL_CALL_TYPE, INVOKER_TYPE: row.INVOKER_TYPE }]->(m2)", {batchSize:5000, iterateList:true, parallel:false})
// alias
CALL apoc.periodic.iterate("CALL apoc.load.csv('file://$path', {header:true}) YIELD map AS row RETURN row","MATCH ( m1:Method {ID:row.SOURCE} ) MATCH ( m2:Method {ID:row.TARGET }) MERGE (m1)-[e:ALIAS {ID:row.ID}]-(m2)", {batchSize:1000, iterateList:true, parallel:false})
```


怎么找利用链
```
match (from:Method {IS_SINK:true})
match (to:Method {NAME:"readObject"})
call apoc.algo.allSimplePaths(from, to, "<CALL|ALIAS", 12) yield path
return * limit 20
```
如果人工判断call边有问题,删除固定边
```
match ()-[c:CALL]->() where c.ID in ["cebe8399-45a0-43ee-b877-0ba04b8d1537"] delete c
```
32 changes: 16 additions & 16 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,19 @@ services:
# ports:
# - 7474:7474
# - 7687:7687
mongo:
image: mongo
ports:
- 27017:27017
environment:
- MONGO_INITDB_ROOT_USERNAME=root
- MONGO_INITDB_ROOT_PASSWORD=password
mongo-express:
image: mongo-express
ports:
- 8081:8081
depends_on:
- mongo
environment:
- ME_CONFIG_MONGODB_ADMINUSERNAME=root
- ME_CONFIG_MONGODB_ADMINPASSWORD=password
# mongo:
# image: mongo
# ports:
# - 27017:27017
# environment:
# - MONGO_INITDB_ROOT_USERNAME=root
# - MONGO_INITDB_ROOT_PASSWORD=password
# mongo-express:
# image: mongo-express
# ports:
# - 8081:8081
# depends_on:
# - mongo
# environment:
# - ME_CONFIG_MONGODB_ADMINUSERNAME=root
# - ME_CONFIG_MONGODB_ADMINPASSWORD=password
14 changes: 12 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,24 @@
<artifactId>spring-boot-starter-data-neo4j</artifactId>
</dependency>

<!-- <dependency>-->
<!-- <groupId>org.springframework.boot</groupId>-->
<!-- <artifactId>spring-boot-starter-data-mongodb</artifactId>-->
<!-- </dependency>-->

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>
<artifactId>spring-boot-starter-cache</artifactId>
</dependency>

<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>

<dependency>
Expand Down
6 changes: 1 addition & 5 deletions rules/knowledges.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
{"function": "newBufferedWriter", "type": "sink", "actions": {}, "polluted": [0]}
]},
{"name":"java.lang.Class", "rules": [
{"function": "forName", "type": "sink", "actions": {"return": "param-0"}, "polluted": [0]},
{"function": "forName", "type": "sink", "actions": {"return": "param-0"}, "polluted": [0,2]},
{"function": "getMethod", "type": "sink", "actions": {"return": "this"}, "polluted": [0,1]},
{"function": "getDeclaredMethod", "type": "sink", "actions": {"return": "this"}, "polluted": [-1,0]},
{"function": "getConstructor", "type": "know", "actions": {"return": "this"}, "polluted": []}
Expand Down Expand Up @@ -63,10 +63,6 @@
{"name":"java.io.Serializable", "rules": [
{"function": "readObject", "type": "source", "actions": {}, "polluted": []}
]},
{"name":"any.object", "rules": [
{"function": "set.*", "type": "source", "actions": {}, "polluted": []},
{"function": "get.*", "type": "source", "actions": {}, "polluted": []}
]},


{"name":"javax.swing.plaf.nimbus.NimbusDefaults", "rules": [
Expand Down
44 changes: 30 additions & 14 deletions src/main/java/tabby/TabbyApplication.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.data.neo4j.repository.config.EnableNeo4jRepositories;
import org.springframework.retry.annotation.EnableRetry;
Expand All @@ -19,12 +18,14 @@

import javax.annotation.Resource;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executor;

@Slf4j
@SpringBootApplication
@EnableAsync
@EnableCaching
@EnableRetry
@EntityScan("tabby.db.bean")
@EnableNeo4jRepositories("tabby.db.repository.neo4j")
Expand All @@ -37,6 +38,10 @@ public class TabbyApplication {

private boolean isJDKOnly = false;

private boolean isSaveOnly = false;

private boolean isJDKProcess = false;

@Resource
private ApplicationArguments arguments;

Expand All @@ -48,24 +53,34 @@ public static void main(String[] args) {
CommandLineRunner run(){
return args -> {
try{
if(arguments.containsOption("isJDKOnly")){
isJDKOnly = true;
}
if(!isJDKOnly && arguments.getNonOptionArgs().size() != 1){
throw new IllegalArgumentException("target not set!");
Map<String, String> jdkDependencies = analyser.getJdkDependencies();
Map<String, String> classpaths = new HashMap<>(jdkDependencies);
Map<String, String> targets = new HashMap<>();
if(arguments.containsOption("isJDKProcess")){
isJDKProcess = true;
}
SootConfiguration.initSootOption();
if(isJDKOnly){
analyser.runSootAnalysis(null, isJDKOnly);
}else{
if(arguments.containsOption("isSaveOnly")){
analyser.save();
}else if(arguments.containsOption("isJDKOnly")){
targets.putAll(jdkDependencies);
}else if(arguments.getNonOptionArgs().size() != 1){
target = arguments.getNonOptionArgs().get(0);
String path = String.join(File.separator, System.getProperty("user.dir"), target);
if(FileUtils.fileExists(path)){
analyser.runSootAnalysis(path, isJDKOnly);
}else{
if(!FileUtils.fileExists(path)){
throw new IllegalArgumentException("target not exists!");
}
Map<String, String> files = FileUtils.getTargetDirectoryJarFiles(path);
classpaths.putAll(files);
targets.putAll(files);
if(isJDKProcess){
targets.putAll(jdkDependencies);
}
}else{
throw new IllegalArgumentException("target not set!");
}

SootConfiguration.initSootOption();
analyser.runSootAnalysis(targets, new ArrayList<>(classpaths.values()) );
}catch (IllegalArgumentException e){
log.error(e.getMessage() +
"\nPlease use java -jar tabby target_directory [--isJDKOnly] !" +
Expand All @@ -86,4 +101,5 @@ public Executor taskExecutor() {
executor.initialize();
return executor;
}

}
16 changes: 9 additions & 7 deletions src/main/java/tabby/config/GlobalConfiguration.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,19 @@
public class GlobalConfiguration {

public static String RULES_PATH = String.join(File.separator, System.getProperty("user.dir"), "rules");
public static String DATABASE_PATH = String.join(File.separator, System.getProperty("user.dir"), "graphdb.mv.db");
public static String DATABASE_TRACE_PATH = String.join(File.separator, System.getProperty("user.dir"), "graphdb.trace.db");
public static String KNOWLEDGE_PATH = String.join(File.separator, RULES_PATH, "knowledges.json");
public static String IGNORE_PATH = String.join(File.separator, RULES_PATH, "ignore.json");
public static String CACHE_PATH = String.join(File.separator, System.getProperty("user.dir"), "docker", "cache");
public static String RUNTIME_CACHE_PATH = String.join(File.separator, CACHE_PATH, "runtime.json");
public static String CLASSES_CACHE_PATH = String.join(File.separator,CACHE_PATH, "classes.csv");
public static String METHODS_CACHE_PATH = String.join(File.separator,CACHE_PATH, "methods.csv");
public static String CALL_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "calls.csv");
public static String ALIAS_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "aliases.csv");
public static String EXTEND_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "extends.csv");
public static String HAS_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "has.csv");
public static String INTERFACE_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "interfaces.csv");
public static String CLASSES_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_CLASSES.csv");
public static String METHODS_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_METHODS.csv");
public static String CALL_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_CALL.csv");
public static String ALIAS_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_ALIAS.csv");
public static String EXTEND_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_EXTEND.csv");
public static String HAS_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_HAS.csv");
public static String INTERFACE_RELATIONSHIP_CACHE_PATH = String.join(File.separator,CACHE_PATH, "GRAPHDB_PUBLIC_INTERFACES.csv");
public static List<String[]> CSV_HEADERS = new ArrayList<>(Arrays.asList(
new String[]{"uuid", "name", "superClass", "interfaces", "isInterface", "hasSuperClass", "hasInterfaces", "fields"},// class
new String[]{"uuid", "name", "signature", "subSignature", "modifiers", "isStatic", "hasParameters", "isSink", "isSource", "isPolluted", "parameters", "actions", "pollutedPosition", "returnType"},// method
Expand Down
4 changes: 0 additions & 4 deletions src/main/java/tabby/config/SootConfiguration.java
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,7 @@ public static void initSootOption(){
Options.v().set_no_writeout_body_releasing(true);

// 设置自定义的package
// PhaseOptions.v().setPhaseOption("bb", "off");
PhaseOptions.v().setPhaseOption("cg","on");
// PhaseOptions.v().setPhaseOption("cg.spark","on");
// PhaseOptions.v().setPhaseOption("jj", "on");
// PhaseOptions.v().setPhaseOption("cg.spark", "cs-demand:true");
// enableSpark();

// PhaseOptions.v().setPhaseOption("jtp.callGraphTransformer", "off");
Expand Down
32 changes: 15 additions & 17 deletions src/main/java/tabby/core/Analyser.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import tabby.core.scanner.CallGraphScanner;
import tabby.core.scanner.ClassInfoScanner;
import tabby.util.ClassLoaderUtils;
import tabby.util.FileUtils;

import java.io.File;
import java.io.IOException;
Expand All @@ -41,33 +40,29 @@ public class Analyser {
@Autowired
private RulesContainer rulesContainer;

public void runSootAnalysis(String path, boolean isOnlyJDK){
public void runSootAnalysis(Map<String, String> targets, List<String> classpaths){
try{
Map<String, String> classpaths = new HashMap<>(getJdkDependencies());
List<String> targets = new ArrayList<>();
if(!isOnlyJDK){
classpaths.putAll(FileUtils.getTargetDirectoryJarFiles(path));
}
Scene.v().setSootClassPath(String.join(File.pathSeparator, classpaths.values()));

classpaths.forEach((filename, filepath) -> {
Scene.v().setSootClassPath(String.join(File.pathSeparator, classpaths));
List<String> stuff = new ArrayList<>();
targets.forEach((filename, filepath) -> {
if(!rulesContainer.isIgnore(filename)){
targets.add(filepath);
stuff.add(filepath);
}
});

Options.v().set_process_dir(targets);
rulesContainer.getIgnored().addAll(stuff);
Options.v().set_process_dir(stuff);
Main.v().autoSetOptions();
Scene.v().loadNecessaryClasses();
List<String> runtimeClasses = ClassLoaderUtils.getAllClasses(targets);
List<String> runtimeClasses = ClassLoaderUtils.getAllClasses(stuff);

// 类信息抽取
classInfoScanner.run(runtimeClasses);
// 函数调用分析
log.info("Run soot packs!");
PackManager.v().runPacks();
callGraphScanner.run(dataContainer.getSavedMethodRefs().values());
clean(); // clean caches
// clean(); // clean caches
rulesContainer.saveStatus();

// if (!Options.v().oaat()) {
// PackManager.v().writeOutput();
Expand All @@ -78,11 +73,14 @@ public void runSootAnalysis(String path, boolean isOnlyJDK){
} else {
return;
}
}catch (IOException e){

}
}

public void save(){
dataContainer.save2CSV();
// dataContainer.save2Neo4j();
}

public Map<String, String> getJdkDependencies(){
String javaHome = System.getProperty("java.home");
String[] jre = new String[]{"lib/resources.jar","lib/rt.jar","lib/jsse.jar","lib/jce.jar","lib/charsets.jar","lib/ext/cldrdata.jar","lib/ext/dnsns.jar","lib/ext/jaccess.jar","lib/ext/localedata.jar","lib/ext/nashorn.jar","lib/ext/sunec.jar","lib/ext/sunjce_provider.jar","lib/ext/sunpkcs11.jar","lib/ext/zipfs.jar","lib/management-agent.jar"};
Expand Down
Loading

0 comments on commit 9a7e7f9

Please sign in to comment.