
Phd spring cloud #1

Open: wants to merge 11 commits into base: master
1 change: 0 additions & 1 deletion .classpath
@@ -1,6 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
-<classpathentry excluding="**/*.java" including="**/*.java" kind="src" output="target/test-classes" path="src/test/resources"/>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
2 changes: 1 addition & 1 deletion .project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
-<name>PCF-demo</name>
<name>PCF-demo_PHD</name>
<comment>NO_M2ECLIPSE_SUPPORT: Project files created with the maven-eclipse-plugin are not supported in M2Eclipse.</comment>
<projects>
</projects>
4 changes: 4 additions & 0 deletions .settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1
2 changes: 1 addition & 1 deletion .settings/org.eclipse.wst.common.component
@@ -5,7 +5,7 @@
<wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
<wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/resources"/>
-<property name="context-root" value="xd-demo-client"/>
<property name="java-output-path" value="/xd-demo-client/target/classes"/>
<property name="context-root" value="pcfdemo"/>
</wb-module>
</project-modules>
2 changes: 2 additions & 0 deletions README.md
@@ -6,3 +6,5 @@ Notice it will alert no RabbitMQ service is bound.. the link "Stream Data" will

Now, bind a RabbitMQ service. Re-push the app.
Click "Stream Data" and see the fun start. Click on a state to detail orders going throw it.

Additional fun: click "Kill App" and watch the application crash. It will show as "crashed" when you view the events (cf events <app_name>). The health manager will automatically restart the app for you, which makes for a good demo, too.
8 changes: 5 additions & 3 deletions manifest.yml
@@ -1,9 +1,11 @@
---
applications:
- name: pcfdemo
-memory: 512M
memory: 1G
instances: 1
-host: pcfdemo
host: pcfdemophd
path: ./target/pcfdemo.war
services:
-- rabbit
- phd-service
- rabbit-service

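For context: binding `phd-service` and `rabbit-service` means Cloud Foundry hands the app their credentials through the `VCAP_SERVICES` environment variable. A minimal sketch of inspecting it with the Jackson 1.x mapper this project already depends on (the class name is hypothetical):

```java
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;

public class VcapServicesPeek {
    public static void main(String[] args) throws Exception {
        // Cloud Foundry injects credentials for bound services as JSON.
        String json = System.getenv("VCAP_SERVICES");
        if (json == null) {
            System.out.println("VCAP_SERVICES not set; not running on Cloud Foundry.");
            return;
        }
        // The top level maps each service label to a list of bound instances.
        @SuppressWarnings("unchecked")
        Map<String, Object> services = new ObjectMapper().readValue(json, Map.class);
        System.out.println("Bound services: " + services.keySet());
    }
}
```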
71 changes: 64 additions & 7 deletions pom.xml
@@ -7,6 +7,7 @@
<name>Springframework Maven Repository</name>
<url>http://repo.springsource.org/milestone</url>
</repository>

</repositories>

<modelVersion>4.0.0</modelVersion>
@@ -25,7 +26,15 @@
<dependencies>



<dependency>
<groupId>com.pivotal</groupId>
<artifactId>GemFireXD-client</artifactId>
<version>1.0</version>
<scope>system</scope>
<systemPath>${project.basedir}/src/main/webapp/WEB-INF/lib/gemfirexd-1.0.6.jar</systemPath>
</dependency>
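Since the GemFire XD client jar is only available here as a system-scoped dependency, it is worth smoke-testing it with plain JDBC. A minimal sketch, assuming the standard GemFire XD client driver class and a locator on localhost:1527 (both assumptions; adjust for your cluster):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class GemFireXdSmokeTest {
    public static void main(String[] args) throws Exception {
        // Driver class and locator address are assumptions for illustration.
        Class.forName("com.pivotal.gemfirexd.jdbc.ClientDriver");
        Connection conn = DriverManager.getConnection("jdbc:gemfirexd://localhost:1527/");
        try {
            ResultSet rs = conn.createStatement()
                    .executeQuery("SELECT COUNT(*) FROM SYS.SYSTABLES");
            rs.next();
            System.out.println("System tables visible: " + rs.getInt(1));
        } finally {
            conn.close();
        }
    }
}
```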


<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit</artifactId>
@@ -56,18 +65,61 @@
<version>${org.springframework-version}</version>
</dependency>

<!-- jdbc -->
<dependency>
<groupId>commons-dbcp</groupId>
<artifactId>commons-dbcp</artifactId>
<version>1.4</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${org.springframework-version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.3-1100-jdbc3</version>
</dependency>

<!-- cloud -->
<dependency>
<groupId>org.springframework.cloud</groupId>
-<artifactId>spring-service-connector</artifactId>
-<version>0.9.2</version>
<artifactId>spring-cloud-spring-service-connector</artifactId>
<version>1.1.1.BUILD-SNAPSHOT</version>
</dependency>
<!-- If you intend to deploy the app on Cloud Foundry, add the following -->
<dependency>
<groupId>org.springframework.cloud</groupId>
-<artifactId>cloudfoundry-connector</artifactId>
-<version>0.9.2</version>
-</dependency>
<artifactId>spring-cloud-cloudfoundry-connector</artifactId>
<version>1.1.1.BUILD-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-pcf-connector</artifactId>
<version>1.1.1.BUILD-SNAPSHOT</version>
</dependency>

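These connector artifacts are what let the app discover bound services at runtime. Outside a Spring context, the same machinery can be exercised directly through `CloudFactory`; a minimal sketch (the class name is hypothetical):

```java
import org.springframework.cloud.Cloud;
import org.springframework.cloud.CloudFactory;
import org.springframework.cloud.service.ServiceInfo;

public class ServiceDiscoveryPeek {
    public static void main(String[] args) {
        // Throws CloudException if no supported cloud environment is detected.
        Cloud cloud = new CloudFactory().getCloud();
        for (ServiceInfo info : cloud.getServiceInfos()) {
            System.out.println(info.getId() + " -> " + info.getClass().getSimpleName());
        }
    }
}
```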
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2.2</version>
</dependency>

<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.2.0</version>
<exclusions>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-compiler</artifactId>
</exclusion>
<exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
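The jasper exclusions keep Hadoop's bundled JSP jars from clashing with the ones the servlet container already provides. The dependency itself supplies `org.apache.hadoop.conf.Configuration`, which the bootstrap code below uses to locate the name node. A minimal sketch of that lookup (the URL is a made-up stand-in):

```java
import org.apache.hadoop.conf.Configuration;

public class NameNodePeek {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // In the deployed app this value comes from the PHD service binding;
        // this URL is for illustration only.
        conf.set("fs.defaultFS", "hdfs://namenode.example.com:8020");
        // The same property BootstrapDataPopulator reads to build its HDFSSTORE DDL.
        System.out.println("Name node: " + conf.get("fs.defaultFS"));
    }
}
```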
<!--
<dependency>
<groupId>org.springframework</groupId>
@@ -163,6 +215,11 @@
<artifactId>jackson-mapper-asl</artifactId>
<version>1.9.13</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>1.9.13</version>
</dependency>

<dependency>
<groupId>org.springframework.data</groupId>
127 changes: 116 additions & 11 deletions src/main/java/com/pivotal/example/xd/BootstrapDataPopulator.java
@@ -1,25 +1,130 @@
package com.pivotal.example.xd;

-import java.util.logging.Logger;
import java.sql.Connection;
import java.sql.ResultSet;

import javax.sql.DataSource;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class BootstrapDataPopulator implements InitializingBean {

-private final Logger LOG = Logger.getLogger(BootstrapDataPopulator.class.getName());


@Autowired @Qualifier("hawqDataSource") DataSource hawqDataSource;
@Autowired @Qualifier("gemfirexdDataSource") DataSource gemfirexdDataSource;
@Autowired org.apache.hadoop.conf.Configuration hadoopConfiguration;

static Logger logger = Logger.getLogger(BootstrapDataPopulator.class);

String gemXDURI = null;
String gemXDUser = null;
String gemXDPass = null;
String nameNode= null;
String dir = null;

//@Autowired
//private ApplicationContext applicationContext;

public static final String CREATE_DISK_STORE_DDL="" +
" CREATE HDFSSTORE streamingstore " +
" NameNode '_NAMENODE_' " +
" HomeDir '/user/gfxd/' " +
" BatchSize 10 "+
" QueuePersistent true "+
" MaxWriteOnlyFileSize 200;";

public static final String CREATE_TABLE_DDL="" +
" CREATE TABLE ORDERS " +
" (ORDER_ID INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, " +
" STATE VARCHAR(2) NOT NULL, " +
" VALUE INT NOT NULL ) " +
" PARTITION BY PRIMARY KEY " +
//" EXPIRE ENTRY WITH TIMETOLIVE 300 ACTION DESTROY "+
" HDFSSTORE (streamingstore) WRITEONLY;" ;

public static final String INSERT_ORDER="" +
" INSERT INTO ORDERS (STATE, VALUE) VALUES (?,?);" ;

public static final String SELECT_ORDER="" +
" select STATE,sum(VALUE) AS \"SUM\" from orders group by STATE" ;

public static final String CREATE_HAWQ_TABLE_DDL="" +
" CREATE TABLE customers " +
"( " +
"customer_id TEXT," +
"first_name TEXT," +
"last_name TEXT," +
"gender TEXT" +
") " +
"WITH (appendonly=true, compresstype=quicklz) DISTRIBUTED RANDOMLY;" ;

public static final String INSERT_CUSTOMER="" +
" INSERT INTO CUSTOMERS (customer_id, first_name, last_name, gender) VALUES (?,?,?,?);" ;

public static final String SELECT_CUSTOMER_LNAME="" +
"SELECT * FROM CUSTOMERS WHERE LAST_NAME=(?);";

public static final String SELECT_CUSTOMERS="" +
"SELECT * FROM CUSTOMERS;";

-@Transactional
@Override
@Transactional()
public void afterPropertiesSet() throws Exception {
LOG.info("Bootstrapping data...");

// Create DB table

LOG.info("...Bootstrapping completed");
logger.warn("Bootstrapping data...");

Connection conn = gemfirexdDataSource.getConnection();
nameNode = hadoopConfiguration.get("fs.defaultFS");
// Create HDFS Disk Store if not existing.
try{
String ddl = CREATE_DISK_STORE_DDL.replaceAll("_NAMENODE_", nameNode);
logger.warn("EXECUTING DDL: "+ddl);
conn.createStatement().executeUpdate(ddl);
logger.warn("CREATED DISK STORE");
}
catch(Exception e){
logger.fatal("Exception trying to create hdfs disk store. Maybe it already exists?",e);
}

// check if table already exists
java.sql.DatabaseMetaData metadata = conn.getMetaData();

ResultSet rs = metadata.getTables(null, null, "ORDERS", null);
if (rs.next()){
logger.warn("ORDERS TABLE ALREADY EXISTS.. SKIPPING CREATION. ");
}
else{
try{
String ddl = CREATE_TABLE_DDL;
logger.warn("EXECUTING DDL: "+ddl);
conn.createStatement().executeUpdate(ddl);
logger.warn("CREATED TABLE");
conn.commit();
}
catch(Exception e){
logger.fatal("Exception trying to create table", e);
//e.printStackTrace();
}
}
conn.close();
//Creating a HAWQ Table
conn = hawqDataSource.getConnection();
try {
String ddl = CREATE_HAWQ_TABLE_DDL;
logger.warn("EXECUTING DDL: "+ddl);
conn.createStatement().executeUpdate(ddl);
logger.warn("CREATED TABLE");


} catch (Exception e) {
logger.error("Error creating HAWQ table", e);
}

logger.warn("...Bootstrapping completed");
}


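One review note: the HAWQ connection opened above is never closed, and the GemFire XD one leaks if the metadata lookup throws before `conn.close()`. A minimal try/finally sketch of the safer shape:

```java
import java.sql.Connection;

import javax.sql.DataSource;

public class SafeDdl {
    // Close the connection whether or not the statement succeeds.
    static void executeDdl(DataSource dataSource, String ddl) throws Exception {
        Connection conn = dataSource.getConnection();
        try {
            conn.createStatement().executeUpdate(ddl);
        } finally {
            conn.close();
        }
    }
}
```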
33 changes: 33 additions & 0 deletions src/main/java/com/pivotal/example/xd/CloudConfig.java
@@ -0,0 +1,33 @@
package com.pivotal.example.xd;

import javax.sql.DataSource;

import org.apache.hadoop.conf.Configuration;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.cloud.config.java.AbstractCloudConfig;
import org.springframework.context.annotation.Bean;

@org.springframework.context.annotation.Configuration
public class CloudConfig extends AbstractCloudConfig {

@Bean
public ConnectionFactory rabbitConnectionFactory() {
return connectionFactory().rabbitConnectionFactory();
}

@Bean
public DataSource hawqDataSource() {
return connectionFactory().dataSource("phd-service/hawq");
}

@Bean
public DataSource gemfirexdDataSource() {
return connectionFactory().dataSource("phd-service/gemfirexd");
}

@Bean
public Configuration hadoopConfiguration() {
return connectionFactory().service(Configuration.class);
}

}