
Commit

docker integrated
hkulekci committed Oct 15, 2017
1 parent 1a10c9a commit 2e06d6b
Showing 16 changed files with 255 additions and 6 deletions.
9 changes: 9 additions & 0 deletions .docker/elasticsearch/Dockerfile
@@ -0,0 +1,9 @@
FROM docker.elastic.co/elasticsearch/elasticsearch:5.6.2

RUN bin/elasticsearch-plugin install analysis-icu --batch
RUN bin/elasticsearch-plugin install ingest-attachment --batch
RUN bin/elasticsearch-plugin remove x-pack --purge

ENV ES_JAVA_OPTS "-Xms1g -Xmx1g"

EXPOSE 9200 9300
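
Once the image is built, a quick way to confirm that the two plugins were installed and that x-pack is gone is to query the `_cat/plugins` endpoint from the host; a minimal check, assuming port 9200 is published as in the compose file further down:

```
# Build and start only the Elasticsearch service, then list the installed plugins.
docker-compose up -d --build elasticsearch
curl "http://localhost:9200/_cat/plugins?v"
# The output should show analysis-icu and ingest-attachment, and no x-pack row.
```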
1 change: 1 addition & 0 deletions .docker/elasticsearch/config/elasticsearch.yml
@@ -0,0 +1 @@
network.host: 0.0.0.0
15 changes: 15 additions & 0 deletions .docker/elasticsearch/config/logging.yml
@@ -0,0 +1,15 @@
# you can override this by setting a system property, for example -Des.logger.level=DEBUG
es.logger.level: INFO
rootLogger: ${es.logger.level}, console
logger:
  # log action execution errors for easier debugging
  action: DEBUG
  # reduce the logging for aws, too much is logged under the default INFO
  com.amazonaws: WARN

appender:
  console:
    type: console
    layout:
      type: consolePattern
      conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
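
The 5.x Elasticsearch images are configured through `log4j2.properties` rather than this legacy `logging.yml` format, so this file may not actually be picked up by the 5.6.2 container; either way, the effective log output is easiest to inspect from the host:

```
# Tail the Elasticsearch container logs to see what is actually being logged.
docker-compose logs -f elasticsearch
```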
5 changes: 5 additions & 0 deletions .docker/kibana/Dockerfile
@@ -0,0 +1,5 @@
FROM docker.elastic.co/kibana/kibana:5.6.2

ENV ELASTICSEARCH_URL "http://elasticsearch:9200"

EXPOSE 5601
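
Once Kibana is up, its status API gives a quick health check of the server and its connection to Elasticsearch; a minimal probe, assuming port 5601 is published as in the compose file:

```
# Returns the overall state plus per-plugin status as JSON.
curl "http://localhost:5601/api/status"
```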
18 changes: 18 additions & 0 deletions .docker/logstash/Dockerfile
@@ -0,0 +1,18 @@
FROM docker.elastic.co/logstash/logstash:5.6.2

WORKDIR /usr/share/logstash
RUN bin/logstash-plugin install logstash-input-jdbc
RUN bin/logstash-plugin remove x-pack
RUN bin/logstash-plugin install logstash-filter-json_encode
RUN bin/logstash-plugin install logstash-filter-json
RUN bin/logstash-plugin install logstash-filter-mutate

COPY conf/logstash.conf /etc/logstash/conf.d/logstash.conf
COPY conf/template.json /etc/logstash/conf.d/template.json
COPY connector/* /usr/share/logstash/

# https://github.com/elastic/logstash-docker/issues/45
RUN sed -i '/xpack/d' /usr/share/logstash/config/logstash.yml

EXPOSE 5044
CMD ["logstash", "-f", "/etc/logstash/conf.d/logstash.conf", "--config.reload.automatic", "--debug", "--verbose"]
34 changes: 34 additions & 0 deletions .docker/logstash/conf/logstash.conf
@@ -0,0 +1,34 @@
input {
  jdbc {
    jdbc_driver_library => "mysql-connector-java-5.1.36-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://163.172.108.190:8989/node_es_example"
    jdbc_user => "root"
    jdbc_password => "root"
    schedule => "* * * * *"
    statement => "CALL fetchDataForElastic(:sql_last_value);"
  }
}

filter {
  json {
    source => "categories"
    target => "categories"
  }
  mutate { remove_field => [ "@version", "@timestamp" ] }
}

output {
  stdout {
    codec => rubydebug
  }

  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "products"
    document_type => "product"
    document_id => "%{id}"
  }
}
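
Every minute the JDBC input calls the `fetchDataForElastic` stored procedure with `:sql_last_value`, so the procedure can restrict itself to rows changed since the previous run; each returned row is indexed into `products` with its `id` column as the document id, so repeated runs update documents instead of duplicating them. After the first scheduled run, the result can be checked from the host, assuming port 9200 is published:

```
# Count the indexed products and peek at one document.
curl "http://localhost:9200/products/_count?pretty"
curl "http://localhost:9200/products/_search?size=1&pretty"
```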
49 changes: 49 additions & 0 deletions .docker/logstash/conf/template.json
@@ -0,0 +1,49 @@

{
  "template": "myindex*",
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "myindex": {
      "dynamic_templates": [
        {
          "message_field": {
            "mapping": {
              "fielddata": {
                "format": "disabled"
              },
              "omit_norms": true,
              "type": "text"
            },
            "match_mapping_type": "string",
            "match": "message"
          }
        },
        {
          "string_fields": {
            "match_mapping_type": "string",
            "match": "*",
            "mapping": {
              "fielddata": {
                "format": "disabled"
              },
              "omit_norms": true,
              "type": "text",
              "fields": {
                "raw": {
                  "ignore_above": 256,
                  "type": "keyword"
                }
              }
            }
          }
        }
      ],
      "_all": {
        "enabled": true
      }
    }
  },
  "aliases": {}
}
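
The template is copied into the Logstash image, but the `elasticsearch` output above has no `template` or `manage_template` settings, so it presumably has to be registered by hand. A hedged sketch of loading it into Elasticsearch 5.x from the host checkout:

```
# Register the template under the name "myindex"; it applies to indices matching "myindex*".
curl -XPUT "http://localhost:9200/_template/myindex" \
     -H "Content-Type: application/json" \
     -d @.docker/logstash/conf/template.json
```

Note that the `template` pattern is `myindex*`, which does not match the `products` index the pipeline writes to, so the pattern would need adjusting for the template to take effect there.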
Binary file not shown.
31 changes: 31 additions & 0 deletions .docker/mysql/mysqld.cnf
@@ -0,0 +1,31 @@
# Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

#
# The MySQL Server configuration file.
#
# For explanations see
# http://dev.mysql.com/doc/mysql/en/server-system-variables.html

[mysqld]
port = 33060
pid-file = /var/run/mysqld/mysqld.pid
socket = /var/run/mysqld/mysqld.sock
datadir = /var/lib/mysql
#log-error = /var/log/mysql/error.log
# Accept connections from any host (the stock configuration binds to localhost only)
bind-address = 0.0.0.0
# Disabling symbolic-links is recommended to prevent assorted security risks
symbolic-links=0
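
With this file mounted over the stock configuration (see `docker-compose.yml` below), `mysqld` listens on port 33060 inside the container. A quick way to confirm the effective settings, assuming the `mysql` service name and root password from the compose file:

```
# Show the port and bind address mysqld is actually using inside the container.
docker-compose exec mysql mysql -uroot -p123456 -e "SHOW VARIABLES WHERE Variable_name IN ('port', 'bind_address');"
```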
24 changes: 21 additions & 3 deletions README.md
@@ -1,13 +1,31 @@
-## Node Elasticsearch Example
+# Node Elasticsearch Example

## Starting Services

```
docker-compose up elasticsearch kibana
docker-compose up mysql
docker-compose up app
```

## Database Structure and Initialization

Please check `data/sample_data.sql` for our sample data.
Connect to the MySQL container with the following configuration:

```
Host: mysql
User : root
Password : 123456
Port : 33060
```

Then import the `data/sample_data.sql` file to load the sample data.
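
A possible one-liner for the import, assuming the dump creates or selects its own database and using the credentials from `docker-compose.yml`:

```
docker-compose exec -T mysql mysql -uroot -p123456 < data/sample_data.sql
```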


## Elastic Integration

-First of all create your `products` index and `product` type.
+Then use the Kibana Console interface to create your index. First of all,
+create your `products` index and `product` type.

```
DELETE products
5 changes: 5 additions & 0 deletions data/.gitignore
@@ -0,0 +1,5 @@
mysql-data/*
nodes/*

!mysql-data/.gitkeep
!nodes/.gitkeep
Empty file added data/mysql-data/.gitkeep
Empty file added data/nodes/.gitkeep
58 changes: 58 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,58 @@
version: '2'

services:
  elasticsearch:
    build: ./.docker/elasticsearch
    ports:
      - "9200:9200"
      - "9300:9300"
    volumes:
      - "./data:/usr/share/elasticsearch/data"

  kibana:
    build: ./.docker/kibana
    ports:
      - "5601:5601"
    links:
      - elasticsearch

  redis:
    image: redis:alpine
    ports:
      - "6379:6379"

  logstash:
    build: ./.docker/logstash
    volumes:
      - "./sample:/tmp/data/"
    ports:
      - "5044:5044"
    links:
      - elasticsearch

  mysql:
    image: "mysql:5.7.19"
    restart: always
    ports:
      - "33060:3306"
    environment:
      MYSQL_ROOT_PASSWORD: "123456"
    volumes:
      - ./data/mysql-data:/var/lib/mysql
      - './.docker/mysql/mysqld.cnf:/etc/mysql/mysql.conf.d/mysqld.cnf'

  app:
    image: "node:8"
    user: "node"
    working_dir: /home/node/app
    environment:
      - NODE_ENV=production
    volumes:
      - ./:/home/node/app
    ports:
      - "3000:3000"
    command: "npm start"
    links:
      - elasticsearch
      - mysql
      - redis
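
Because `app` links to `elasticsearch`, `mysql`, and `redis`, those hostnames resolve inside the container, which is what the values in `sample.env` below rely on. A typical bring-up and health check, as a hedged sketch:

```
# Start the backing services (the links pull redis in when app starts), then the app.
docker-compose up -d --build elasticsearch kibana mysql logstash
docker-compose up -d app
docker-compose ps
docker-compose logs -f logstash
```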
1 change: 1 addition & 0 deletions libraries/database.js
@@ -5,6 +5,7 @@ var pool = mysql.createPool({
   user: process.env.MYSQL_USER,
   password: process.env.MYSQL_PASS,
   database: process.env.MYSQL_DB,
+  port: process.env.MYSQL_PORT,
   debug: process.env.MYSQL_DEBUG == 'true' ? true : false,
   connectionLimit: 10,
   supportBigNumbers: true,
11 changes: 8 additions & 3 deletions sample.env
@@ -1,4 +1,9 @@
-MYSQL_HOST=127.0.0.1
+MYSQL_HOST=mysql
 MYSQL_USER=root
-MYSQL_PASS=
-MYSQL_DB=node_es_example
+MYSQL_PASS=123456
+MYSQL_PORT=33060
+MYSQL_DB=node_es_example
+MYSQL_DEBUG=false
+
+ELASTIC_HOST="elasticsearch:9200"
+ELASTIC_LOG=trace
