Commit 6b7f3b3

Alberto Jesus Gutierrez Juanes authored and committed

HDFS Feature
1 parent 33513b2 commit 6b7f3b3

9 files changed: +198 -11 lines changed

README.md (+5 -3)

@@ -1,5 +1,5 @@
 # fog-hadoop
-[![Gem Version](https://badge.fury.io/rb/fog-hadoop.svg)](https://badge.fury.io/rb/fog-hadoop)[![Build Status](https://travis-ci.org/fog/fog-hadoop.svg?branch=master)](https://travis-ci.org/fog/fog-hadoop) [![Code Climate](https://lima.codeclimate.com/github/fog/fog-hadoop/badges/gpa.svg)](https://lima.codeclimate.com/github/fog/fog-hadoop) [![Test Coverage](https://lima.codeclimate.com/github/fog/fog-hadoop/badges/coverage.svg)](https://lima.codeclimate.com/github/fog/fog-hadoop/coverage)
+[![Gem Version](https://badge.fury.io/rb/fog-hadoop.svg)](https://badge.fury.io/rb/fog-hadoop)[![Build Status](https://travis-ci.org/fog/fog-hadoop.svg?branch=master)](https://travis-ci.org/fog/fog-hadoop) [![Code Climate](https://lima.codeclimate.com/github/fog/fog-hadoop/badges/gpa.svg)](https://lima.codeclimate.com/github/fog/fog-hadoop) [![Coverage Status](https://coveralls.io/repos/fog/fog-hadoop/badge.svg?branch=master&service=github)](https://coveralls.io/github/fog/fog-hadoop?branch=master)
 
 ## Fog connector for Hadoop
 
@@ -33,12 +33,14 @@ Connection parameters:
 
 ```ruby
 @connection_params = {
-  hadoop_compute_api_url: "http://<resource_manager_host>:8088/"
+  hadoop_compute_api_url: "http://<resource_manager_host>:8088/",
+  hadoop_storage_api_url: "http://<namenode>:50070/"
 }
 ```
 
 * Use [Yarn](docs/yarn.md)
-
+* Use [HDFS](docs/hdfs.md)
+
 # Support
 
 Check in this [link](supported.md)
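
The combined connection parameters map onto the two services this commit wires up. A minimal sketch, assuming placeholder host names that are not values from the diff, of instantiating both:

```ruby
# Sketch only: host names are placeholders for a real ResourceManager and NameNode.
require 'fog/hadoop'

compute = Fog::Compute::Hadoop.new(
  hadoop_compute_api_url: "http://resourcemanager.example.com:8088/"
)  # YARN side, unchanged by this commit (see docs/yarn.md)

storage = Fog::Storage::Hadoop.new(
  hadoop_storage_api_url: "http://namenode.example.com:50070/"
)
storage.get_metrics  # the HDFS request added by this commit (see docs/hdfs.md below)
```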

docs/hdfs.md (+44, new file)

@@ -0,0 +1,44 @@
+#HDFS use samples
+
+```ruby
+@connection_params = {
+  hadoop_storage_api_url: "http://<namenode>:50070/"
+}
+```
+
+## Get Metrics
+
+```
+storage = Fog::Storage::Hadoop.new(@connection_params)
+storage.get_metrics
+```
+
+```
+[
+  {
+    "name" => "Hadoop:service=NameNode,name=FSNamesystemState",
+    "modelerType" => "org.apache.hadoop.hdfs.server.namenode.FSNamesystem",
+    "CapacityTotal" => 15636578304,
+    "CapacityUsed" => 49152,
+    "CapacityRemaining" => 11699875840,
+    "TotalLoad" => 2,
+    "SnapshotStats" => "{\"SnapshottableDirectories\":0,\"Snapshots\":0}",
+    "BlocksTotal" => 0,
+    "MaxObjects" => 0,
+    "FilesTotal" => 6,
+    "PendingReplicationBlocks" => 0,
+    "UnderReplicatedBlocks" => 0,
+    "ScheduledReplicationBlocks" => 0,
+    "PendingDeletionBlocks" => 0,
+    "FSState" => "Operational",
+    "NumLiveDataNodes" => 2,
+    "NumDeadDataNodes" => 0,
+    "NumDecomLiveDataNodes" => 0,
+    "NumDecomDeadDataNodes" => 0,
+    "NumDecommissioningDataNodes" => 0,
+    "NumStaleDataNodes" => 0
+  }
+]
+```
+
+
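
A short worked example of consuming that return value, assuming the bean shape shown above for the Real adapter (the arithmetic uses the sample numbers):

```ruby
# Sketch only: operates on the FSNamesystemState bean returned above (Real adapter).
fs = storage.get_metrics.first

used_pct = 100.0 * fs["CapacityUsed"] / fs["CapacityTotal"]
# With the sample values: 100.0 * 49152 / 15636578304, roughly 0.0003%

healthy = fs["FSState"] == "Operational" &&
          fs["NumDeadDataNodes"].zero? &&
          fs["UnderReplicatedBlocks"].zero?
# => true for the sample payload (2 live DataNodes, none dead, nothing under-replicated)
```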

fog-hadoop.gemspec (+1)

@@ -51,6 +51,7 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency('rbvmomi')
   spec.add_development_dependency('yard')
   spec.add_development_dependency('thor')
+  spec.add_development_dependency('coveralls')
   spec.add_development_dependency('rbovirt', '0.0.24')
   spec.add_development_dependency('shindo', '~> 0.3.4')
   spec.add_development_dependency('fission')

lib/fog/hadoop.rb (+9 -5)

@@ -7,18 +7,22 @@ module Compute
     autoload :Hadoop, File.expand_path('../hadoop/compute', __FILE__)
   end
 
+  module Storage
+    autoload :Hadoop, File.expand_path('../hadoop/storage', __FILE__)
+  end
+
   module Hadoop
     extend Fog::Provider
 
-
-    # Miscs
-    ## Startup Script
-    #autoload :Script, File.expand_path('../sakuracloud/script', __FILE__)
-
     service(:compute, 'Compute')
+    service(:storage, 'Storage')
 
     def self.yarn_endpoint
       "/ws/v1/cluster"
     end
+
+    def self.hdfs_endpoint
+      "/jmx"
+    end
   end
 end
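
The endpoint helpers are bare path fragments; a sketch (placeholder host, illustrative only) of the URL the storage request below builds from them:

```ruby
# Illustrative only; the join with the configured API URL happens inside the
# shared request helper (Fog::Hadoop::Utils::Request) used by the Real adapters.
Fog::Hadoop.yarn_endpoint  # => "/ws/v1/cluster"
Fog::Hadoop.hdfs_endpoint  # => "/jmx"

base = "http://namenode.example.com:50070"  # hypothetical hadoop_storage_api_url
"#{base}#{Fog::Hadoop.hdfs_endpoint}?qry=Hadoop:service=NameNode,name=FSNamesystemState"
# => "http://namenode.example.com:50070/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState"
```
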
lib/fog/hadoop/requests/storage/get_metrics.rb (+48, new file)

@@ -0,0 +1,48 @@
+module Fog
+  module Storage
+    class Hadoop
+      class Real
+        def get_metrics
+          request(
+            :expects => 200,
+            :method => 'GET',
+            :path => "#{Fog::Hadoop.hdfs_endpoint}?qry=Hadoop:service=NameNode,name=FSNamesystemState"
+          ).body["beans"]
+        end
+      end
+      class Mock
+        def get_metrics
+          response = Excon::Response.new
+          response.status = 200
+          response.body = [
+            {
+              "name" => "Hadoop:service=NameNode,name=FSNamesystemState",
+              "modelerType" => "org.apache.hadoop.hdfs.server.namenode.FSNamesystem",
+              "CapacityTotal" => 15636578304,
+              "CapacityUsed" => 49152,
+              "CapacityRemaining" => 11699875840,
+              "TotalLoad" => 2,
+              "SnapshotStats" => "{\"SnapshottableDirectories\":0,\"Snapshots\":0}",
+              "BlocksTotal" => 0,
+              "MaxObjects" => 0,
+              "FilesTotal" => 6,
+              "PendingReplicationBlocks" => 0,
+              "UnderReplicatedBlocks" => 0,
+              "ScheduledReplicationBlocks" => 0,
+              "PendingDeletionBlocks" => 0,
+              "FSState" => "Operational",
+              "NumLiveDataNodes" => 2,
+              "NumDeadDataNodes" => 0,
+              "NumDecomLiveDataNodes" => 0,
+              "NumDecomDeadDataNodes" => 0,
+              "NumDecommissioningDataNodes" => 0,
+              "NumStaleDataNodes" => 0
+            }
+          ]
+
+          response
+        end
+      end
+    end
+  end
+end
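
Note that the two adapters return different shapes: Real#get_metrics returns the parsed "beans" array itself, while Mock#get_metrics returns an Excon::Response wrapping that array. A hedged usage sketch against the mock:

```ruby
# Sketch only: exercises the Mock implementation above.
Fog.mock!
storage = Fog::Storage::Hadoop.new(hadoop_storage_api_url: "http://localhost:50070/")

response = storage.get_metrics
response.status                  # => 200
response.body.first["FSState"]   # => "Operational"
# Against a live NameNode, the Real adapter returns the beans Array directly
# (no #status / #body wrapper).
```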

lib/fog/hadoop/storage.rb (+30, new file)

@@ -0,0 +1,30 @@
+module Fog
+  module Storage
+    class Hadoop < Fog::Service
+      requires :hadoop_storage_api_url
+
+      recognizes :storage_api_url
+
+
+      request_path 'fog/hadoop/requests/storage'
+      request :get_metrics
+
+      class Real
+        include Fog::Hadoop::Utils::Request
+
+        def initialize(options = {})
+
+          @storage_api_url = options[:hadoop_storage_api_url] || 'https://localhost:50070/'
+          Fog.credentials[:@storage_api_url] = options[:hadoop_storage_api_url]
+          @connection = Fog::Core::Connection.new(@storage_api_url)
+        end
+      end
+
+      class Mock
+        def initialize(options = {})
+          @storage_api_url = options[:hadoop_storage_api_url] || 'https://localhost:50070/'
+        end
+      end
+    end #SakuraCloud
+  end #Compute
+end
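
Because the service declares `requires :hadoop_storage_api_url`, instantiation follows the usual Fog::Service pattern; a sketch of standard Fog behavior, not shown in the diff:

```ruby
# Sketch of standard Fog::Service validation for the declaration above.
Fog::Storage::Hadoop.new
# => raises ArgumentError: hadoop_storage_api_url is required

Fog::Storage::Hadoop.new(hadoop_storage_api_url: "http://localhost:50070/")
# => Real instance (or Mock when Fog.mock! is active)

# Fog::Storage[:hadoop] also works once hadoop_storage_api_url is present in
# Fog.credentials, which tests/helper.rb below sets up for mock runs.
```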

supported.md (+8 -1)

@@ -16,4 +16,11 @@
 | Yarn | Info |/ws/v1/cluster/info |
 | | AppStats |/ws/v1/cluster/appstatistics |
 | | Metrics |/ws/v1/cluster/metrics |
-| | Nodes |/ws/v1/cluster/nodes |
+| | Nodes |/ws/v1/cluster/nodes |
+
+## HDFS
+
+| Service | GET |Notes |
+|------------------|---------------|----------------------------------------------------------------------|
+| HDFS | Metrics |/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState |
+
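
The new HDFS row points at the NameNode's JMX servlet; a minimal sketch (assuming a NameNode reachable on a placeholder localhost:50070) of the raw query that get_metrics wraps:

```ruby
# Sketch only: hits the NameNode JMX servlet directly, without Fog.
require 'json'
require 'net/http'

uri = URI("http://localhost:50070/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState")
beans = JSON.parse(Net::HTTP.get(uri))["beans"]

puts beans.first["FSState"]           # e.g. "Operational"
puts beans.first["NumLiveDataNodes"]  # e.g. 2
```
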
(storage metrics request tests, +43, new file)

@@ -0,0 +1,43 @@
+Shindo.tests('Fog::STORAGE[:hadoop] | metrics request', ['hadoop', 'STORAGE']) do
+
+  @metrics_format = [{
+    "name" => String,
+    "modelerType" => String,
+    "CapacityTotal" => Integer,
+    "CapacityUsed" => Integer,
+    "CapacityRemaining" => Integer,
+    "TotalLoad" => Integer,
+    "SnapshotStats" => String,
+    "BlocksTotal" => Integer,
+    "MaxObjects" => Integer,
+    "FilesTotal" => Integer,
+    "PendingReplicationBlocks" => Integer,
+    "UnderReplicatedBlocks" => Integer,
+    "ScheduledReplicationBlocks" => Integer,
+    "PendingDeletionBlocks" => Integer,
+    "FSState" => String,
+    "NumLiveDataNodes" => Integer,
+    "NumDeadDataNodes" => Integer,
+    "NumDecomLiveDataNodes" => Integer,
+    "NumDecomDeadDataNodes" => Integer,
+    "NumDecommissioningDataNodes" => Integer,
+    "NumStaleDataNodes" => Integer
+  }]
+
+  tests('success') do
+    tests('#get_metrics') do
+      metrics = hadoop_storage_service.get_metrics
+      test 'returns a Hash' do
+        metrics.body.is_a? Array
+      end
+      if Fog.mock?
+        tests('Metrics').formats(@metrics_format, false) do
+          metrics.body
+        end
+      else
+        returns(200) { metrics.status }
+        returns(true) { metrics.body.is_a? Array }
+      end
+    end
+  end
+end

tests/helper.rb (+10 -2)

@@ -5,18 +5,26 @@
 require 'fog/test_helpers'
 require 'fog/hadoop'
 require "simplecov"
-SimpleCov.start
+require 'coveralls'
 
+SimpleCov.start
+Coveralls.wear!
 if ENV['FOG_MOCK'] == 'true'
   Fog.mock!
 end
 
 if Fog.mock?
   Fog.credentials = {
-    hadoop_compute_api_url: 'http://localhost:8088'
+    hadoop_compute_api_url: 'http://localhost:8088',
+    hadoop_storage_api_url: 'http://localhost:50070'
   }.merge(Fog.credentials)
 end
 
 def hadoop_compute_service
   Fog::Compute[:hadoop]
 end
+
+def hadoop_storage_service
+  Fog::Storage[:hadoop]
+end
+
