
Commit

first put
gitpangbo committed Sep 2, 2016
0 parents commit a62f683
Showing 112 changed files with 2,755 additions and 0 deletions.
66 changes: 66 additions & 0 deletions pom.xml
@@ -0,0 +1,66 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>MyProject</groupId>
<artifactId>MyProject</artifactId>
<version>0.0.1-SNAPSHOT</version>

<dependencies>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.7</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>

<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.8.2</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.2.0</version>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>2.2.0</version>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>2.2.0</version>
</dependency>

</dependencies>

<build>
<sourceDirectory>src</sourceDirectory>
<resources>
<resource>
<directory>src</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
</plugins>
</build>
</project>
30 changes: 30 additions & 0 deletions src/main/java/org/kirito/TestMain.java
@@ -0,0 +1,30 @@
package org.kirito;
/**
* @author pangbo
* @version Created: May 5, 2016, 5:30:37 PM
* Demonstrates reference equality (==) versus equals() for string literals and strings created with new String().
*/
public class TestMain {

public static void main(String[] args) {
//String s = null;
String a = "kirito";
String b = "kirito";
String c = new String("kirito");

System.out.println("a equals b: " + a.equals(b));
System.out.println("a equals c: " + a.equals(c));
System.out.println("b equals c: " + b.equals(c));

if(a == b){
System.out.println("a == b: true");
}
if(a == c){
System.out.println("a == c: true");
}

/*System.out.println("a == b: " + a==b?"true":"false");
System.out.println("a == c: " + a==c?"true":"false");
System.out.println("b == c: " + b==c?"true":"false");*/
}
}
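
A side note on the example above: a.equals(c) prints true because equals() compares character content, while the a == c branch never prints because new String("kirito") creates a fresh object outside the string pool (a == b is true since both literals share the pooled instance). A small sketch, not part of the commit, showing that interning restores reference equality:

String d = new String("kirito").intern(); // intern() returns the pooled instance
System.out.println(a == d); // true: same reference as the literal "kirito"
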
43 changes: 43 additions & 0 deletions src/main/java/org/kirito/algorithms/Quick.java
@@ -0,0 +1,43 @@
package org.kirito.algorithms;

class Quick {

public static void main(String[] args) {
int[] array = { 2, 4, 9, 3, 6, 7, 1, 5 };
new Quick().sort(array, 0, array.length - 1);
}

public void sort(int arr[], int low, int high) {
int left = low;
int right = high;
// use the first element of the range as the pivot
int pivot = arr[low];

while (left < right) {
// scan from the right for an element smaller than the pivot
while (left < right && arr[right] >= pivot)
right--;
if (left < right) {
int temp = arr[right];
arr[right] = arr[left];
arr[left] = temp;
left++;
}

// scan from the left for an element larger than the pivot
while (left < right && arr[left] <= pivot)
left++;

if (left < right) {
int temp = arr[right];
arr[right] = arr[left];
arr[left] = temp;
right--;
}
}
// debug output: the array after this partition step and the pivot's final position
System.out.println(java.util.Arrays.toString(arr));
System.out.println("left=" + (left + 1) + " right=" + (right + 1) + " pivot=" + pivot);
// recurse into the partitions on either side of the pivot
if (left > low)
sort(arr, low, left - 1);
if (right < high)
sort(arr, left + 1, high);
}
}
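
For reference, each partitioning pass above moves the pivot (the first element of the range) into its final position at index left, which is why the recursive calls exclude that index. A quick sanity check, assuming the Quick class is on the classpath (expected output shown in the comment):

int[] data = { 2, 4, 9, 3, 6, 7, 1, 5 };
new Quick().sort(data, 0, data.length - 1);
System.out.println(java.util.Arrays.toString(data)); // [1, 2, 3, 4, 5, 6, 7, 9]
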
29 changes: 29 additions & 0 deletions src/main/java/org/kirito/hdp/mr/WCMapper.java
@@ -0,0 +1,29 @@
package org.kirito.hdp.mr;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WCMapper extends Mapper<LongWritable, Text, Text, LongWritable>{

@Override
protected void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
//accept
String line = value.toString();
//split
String[] words = line.split("\t");
//loop
for(String w : words){
//send
context.write(new Text(w), new LongWritable(1));
}
}





}
25 changes: 25 additions & 0 deletions src/main/java/org/kirito/hdp/mr/WCReducer.java
@@ -0,0 +1,25 @@
package org.kirito.hdp.mr;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WCReducer extends Reducer<Text, LongWritable, Text, LongWritable>{

@Override
protected void reduce(Text key, Iterable<LongWritable> values, Context context)
throws IOException, InterruptedException {
//define a counter
long counter = 0;
//loop
for(LongWritable l : values){
counter += l.get();
}
//write
context.write(key, new LongWritable(counter));
}


}
37 changes: 37 additions & 0 deletions src/main/java/org/kirito/hdp/mr/WordCount.java
@@ -0,0 +1,37 @@
package org.kirito.hdp.mr;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
// replicate the submitted job files (jar, splits, conf) more widely across the cluster
conf.setInt("mapreduce.client.submit.file.replication", 20);
Job job = Job.getInstance(conf);

// tell Hadoop which jar to ship by pointing at this class
job.setJarByClass(WordCount.class);

//set the mapper's properties
job.setMapperClass(WCMapper.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(LongWritable.class);
FileInputFormat.setInputPaths(job, new Path(args[0]));

//set the reducer's properties
job.setReducerClass(WCReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
FileOutputFormat.setOutputPath(job, new Path(args[1]));

//submit the job, block until it finishes, and exit non-zero on failure
System.exit(job.waitForCompletion(true) ? 0 : 1);
}

}
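
For context, a driver like this is typically packaged into the project jar (MyProject-0.0.1-SNAPSHOT.jar under Maven's default naming) and launched with the hadoop CLI; the input and output paths below are placeholders, and the output directory must not already exist:

hadoop jar MyProject-0.0.1-SNAPSHOT.jar org.kirito.hdp.mr.WordCount /user/kirito/input /user/kirito/output
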
89 changes: 89 additions & 0 deletions src/main/java/org/kirito/hdp/mr/dc/DataBean.java
@@ -0,0 +1,89 @@
package org.kirito.hdp.mr.dc;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class DataBean implements Writable{

private String tel;

private long upPayLoad;

private long downPayLoad;

private long totalPayLoad;


public DataBean(){}

public DataBean(String tel, long upPayLoad, long downPayLoad) {
super();
this.tel = tel;
this.upPayLoad = upPayLoad;
this.downPayLoad = downPayLoad;
this.totalPayLoad = upPayLoad + downPayLoad;
}



@Override
public String toString() {
return this.upPayLoad + "\t" + this.downPayLoad + "\t" + this.totalPayLoad;
}

// note: readFields must read the fields with the same types and in the same order as write() wrote them
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(tel);
out.writeLong(upPayLoad);
out.writeLong(downPayLoad);
out.writeLong(totalPayLoad);
}

@Override
public void readFields(DataInput in) throws IOException {
this.tel = in.readUTF();
this.upPayLoad = in.readLong();
this.downPayLoad = in.readLong();
this.totalPayLoad = in.readLong();

}

public String getTel() {
return tel;
}

public void setTel(String tel) {
this.tel = tel;
}

public long getUpPayLoad() {
return upPayLoad;
}

public void setUpPayLoad(long upPayLoad) {
this.upPayLoad = upPayLoad;
}

public long getDownPayLoad() {
return downPayLoad;
}

public void setDownPayLoad(long downPayLoad) {
this.downPayLoad = downPayLoad;
}

public long getTotalPayLoad() {
return totalPayLoad;
}

public void setTotalPayLoad(long totalPayLoad) {
this.totalPayLoad = totalPayLoad;
}



}
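
A hypothetical usage sketch (values are placeholders, not from this commit): totalPayLoad is derived in the constructor, and toString() prints the three counters tab-separated, omitting the phone number:

DataBean bean = new DataBean("13800138000", 1024, 2048); // tel, upload bytes, download bytes
System.out.println(bean.getTotalPayLoad()); // 3072
System.out.println(bean); // 1024	2048	3072
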