Follow

List of steps to install & configure Endpoint 3.0

INITIAL CONFIGURATION ON BOTH SERVERS

sudo yum install wget unzip openssh-server redhat-lsb-core

useradd -m -d /home/interset -s /bin/bash -c "Interset User" -U interset
echo "# User rules for Interset" >> /etc/sudoers.d/90-cloud-init-users
echo "%interset ALL=(ALL) ALL" >> /etc/sudoers.d/90-cloud-init-users
echo "interset ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers.d/90-cloud-init-users
passwd interset

sudo mkdir /opt/interset
sudo chown interset:interset /opt/interset

CHANGE TO INTERSET USER

ssh-keygen

cd ~/.ssh
cat id_rsa.pub >> authorized_keys
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys

ssh-copy-id interset@<reporting server>
scp ~/.ssh/id_rsa interset@<reporting server>:~/.ssh
scp ~/.ssh/id_rsa.pub interset@<reporting server>:~/.ssh

SWITCH TO ROOT USER

cd /opt
wget --no-check-certificate --no-cookies --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u45-b14/jdk-8u45-linux-x64.tar.gz
tar xvf jdk-8u45-linux-x64.tar.gz
ln -s jdk1.8.0_45 java
ln -s /opt/jdk1.8.0_45/bin/java /usr/bin/java
update-alternatives --install /usr/bin/java java /opt/jdk1.8.0_45/bin/java 1
alternatives --config java

su - interset
echo " " >> ~/.bashrc
echo "export JAVA_HOME=/opt/java" >> ~/.bashrc
echo "export PATH=\$PATH:/opt/java/bin" >> ~/.bashrc
source ~/.bashrc
exit

SWITCH TO ROOT USER

cd /tmp
wget http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
rpm -Uvh epel-release-6-8.noarch.rpm

vi /etc/yum.repos.d/nginx.repo

[nginx]
name=nginx repo
baseurl=http://nginx.org/packages/rhel/6/x86_64/
gpgcheck=0
enabled=1

vi /etc/yum.repos.d/datastax.repo

[datastax]
name=DataStax Repo for Apache Cassandra
baseurl=http://rpm.datastax.com/community
gpgcheck=0
enabled=1

cd /tmp
wget https://s3-us-west-1.amazonaws.com/theta-deployment/infrastructure/jsvc.gz
gzip -d jsvc.gz
mv jsvc /usr/bin
chmod a+x /usr/bin/jsvc

sudo su -
echo 0 > /selinux/enforce

vi /etc/selinux/config

SELINUX=disabled

visudo

#Defaults requiretty

vi /etc/security/limits.conf

* hard nofile 500000
* soft nofile 500000

vi /etc/security/limits.d/90-nproc.conf
* soft nproc 32768
* hard memlock unlimited
* soft memlock unlimited

REBOOT BOTH SERVERS

 

 

INTERSET USER ON ANALYTICS SERVER

cd /opt/interset
wget --trust-server-names http://interset.s3-website-us-east-1.amazonaws.com/interset-endpoint-analytics
tar xvfz interset-endpoint-3.1.1.216-deploy.tar.gz
ln -s interset-endpoint-3.1.1.216 interset-endpoint

cd /opt/interset
wget http://mirror.csclub.uwaterloo.ca/apache/zookeeper/zookeeper-3.4.6/zookeeper-3.4.6.tar.gz
tar xvf zookeeper-3.4.6.tar.gz
sudo ln -s zookeeper-3.4.6 zookeeper
mkdir -p /data/interset/zookeeper

cp /opt/interset/zookeeper/conf/zoo_sample.cfg /opt/interset/zookeeper/conf/zoo.cfg
vi /opt/interset/zookeeper/conf/zoo.cfg

tickTime=2000
dataDir=/data/interset/zookeeper
clientPort=2181

sudo ln -s /opt/interset/interset-endpoint/bin/zookeeper /etc/init.d/
sudo chkconfig --add zookeeper
sudo service zookeeper start

cd /opt/interset
wget http://mirror.csclub.uwaterloo.ca/apache/kafka/0.8.2.0/kafka_2.10-0.8.2.0.tgz
tar xvf kafka_2.10-0.8.2.0.tgz
sudo ln -s kafka_2.10-0.8.2.0 kafka
mkdir -p /data/interset/kafka-logs

cd /opt/interset/kafka/config
cp server.properties server.properties.interset
vi server.properties.interset

log.dirs=/data/interset/kafka-logs
log.retention.hours=720
zookeeper.connect=<hostname>:2181

sudo ln -s /opt/interset/interset-endpoint/bin/kafka-server /etc/init.d/
sudo chkconfig --add kafka-server
sudo service kafka-server start

sudo yum install cassandra21 cassandra21-tools

sudo vi /etc/cassandra/default.conf/cassandra.yaml

cluster_name=interset

sudo vi /etc/cassandra/default.conf/cassandra-env.sh

MAX_HEAP_SIZE=16G
HEAP_NEWSIZE=800M

sudo chkconfig --add cassandra
sudo service cassandra start
cassandra-cli

create keyspace Flow2;

nodetool -h localhost status

cd /opt/interset
tar xvf /opt/interset/interset-endpoint/install/flow_4.5.0.4583.tgz
ln -s flow_4.5.0.4583 flow
sudo ln -s /opt/interset/flow/bin/flow /etc/init.d/
sudo chkconfig --add flow
sudo service flow start

cd /opt/interset
tar xvfz /opt/interset/interset-endpoint/install/eventrules_4.5.0.442.tgz
ln -s eventrules_4.5.0.442 eventrules
cp /opt/interset/eventrules/flow/gridiron/flowd/plugins/rulesengineservice.py /opt/interset/flow/gridiron/flowd/plugins
cp -r /opt/interset/eventrules/flow/static_content/rules /opt/interset/flow/static_content
sudo service flow restart
sudo ln -s /opt/interset/eventrules/bin/rules /etc/init.d/
sudo chkconfig --add rules
sudo service rules start

sudo yum install nginx
sudo cp /opt/interset/flow/externalresources/nginx_flow.conf /etc/nginx/conf.d/flow.conf
sudo rm /etc/nginx/conf.d/default.conf
sudo rm /etc/nginx/conf.d/example_ssl.conf

sudo su -
cd /etc/nginx
openssl req -new -x509 -nodes -out filetrek.crt -keyout filetrek.key
chmod 600 filetrek.key
exit

sudo service nginx restart

cd /tmp
wget https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.5.0.noarch.rpm
sudo yum install elasticsearch-1.5.0.noarch.rpm
cd /opt/interset
cp interset-endpoint/install/elasticsearch-river-kafka-1.3.1-plugin.zip .
ln -s elasticsearch-river-kafka-1.3.1-plugin.zip elasticsearch-river-kafka-plugin.zip

sudo vi /etc/elasticsearch/elasticsearch.yml

cluster.name: interset
node.name: "<hostname>"
discovery.zen.ping.multicast.enabled: false
# Disable scripting. See here:
# http://www.elastic.co/guide/en/elasticsearch/reference/current/modulesscripting.html
script.groovy.sandbox.enabled: false
bootstrap.mlockall: true

sudo /sbin/chkconfig --add elasticsearch
sudo service elasticsearch restart

 

 

INTERSET USER ON REPORTING SERVER

cd /opt/interset
wget https://s3-us-west-2.amazonaws.com/interset-releases/Interset3/reporting-3.1.5.1424-deploy.tar.gz
tar xvf reporting-3.1.5.1424-deploy.tar.gz
ln -s reporting-3.1.5.1424 reporting

sudo mkdir -p /var/interset/reportGen
sudo chown interset:interset /var/interset/reportGen
sudo yum install texlive texlive-latex-extra texlive-xetex latex-xcolor
sudo yum install pgf fonts-lato ImageMagick poppler-utils
echo " " >> ~/.bashrc
echo "export PATH=\$PATH:/opt/interset/reporting/reportGen/bin" >> ~/.bashrc
source ~/.bashrc

cd /opt/interset/reporting
vi investigator.yml

remove database-analytics section
domain: <FQDN or hostname used to access system>
elasticsearchURL: http://<analytics server>:9200

mkdir /opt/interset/reporting/logs

sudo yum install nginx
sudo cp /opt/interset/reporting/nginx.conf /etc/nginx/conf.d
sudo rm /etc/nginx/conf.d/default.conf
sudo rm /etc/nginx/conf.d/example_ssl.conf
sudo service nginx restart

cd /opt/interset/reporting/search-linux-x64
vi config/kibana.yml

elasticsearch_url: "http://<analytics server>:9200"

nohup /opt/interset/reporting/search-linux-x64/bin/kibana &

cd /opt/interset/reporting
java -jar investigator-3-SNAPSHOT.jar db migrate investigator.yml
nohup java -jar /opt/interset/reporting/investigator-3-SNAPSHOT.jar server /opt/interset/reporting/investigator.yml &

vi /opt/interset/interset-endpoint/conf/index.yml

indexPath: /opt/interset/interset-endpoint/search_tmp

crontab -e

0 0 * * * /opt/interset/interset-endpoint/bin/index.sh

 


IF ADDITIONAL/NEW TENANTS ARE REQUIRED

INTERSET USER ON ANALYTICS SERVER

/opt/interset/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic interset_wdc_alerts_0 --partitions 8 --replication-factor 1
/opt/interset/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic interset_wdc_events_0 --partitions 8 --replication-factor 1
/opt/interset/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic interset_wdc_interpretations_0 --partitions 8 --replication-factor 1

cd /opt/interset/interset-endpoint
bin/kafka-river-tenant-install 0 localhost
sudo service elasticsearch restart
sudo yum install jq
cd /opt/interset/reporting
./kibana-index-pattern-creation.sh <analytics server> 0

 

Was this article helpful?
0 out of 0 found this helpful
Have more questions? Submit a request

Comments

Powered by Zendesk