Skip to content
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ php-WebHDFS is a PHP client for [WebHDFS](http://hadoop.apache.org/docs/r2.0.3-a
## Install via composer

```bash
composer require simpleenergy/php-webhdfs
composer require dreamfactory/php-webhdfs
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Changes look good, but I think we should still keep the original package name. I'll be glad to merge your changes as a new version if you remove this.

```

## Usage
Expand Down Expand Up @@ -149,4 +149,4 @@ $hdfs->setReplication('user/hadoop-username/file.txt', '2');
```php
$hdfs = new WebHDFS('mynamenode.hadoop.com', '50070', 'hadoop-username');
$response = $hdfs->setTimes('user/hadoop-username/file.txt');
```
```
13 changes: 10 additions & 3 deletions composer.json
Original file line number Diff line number Diff line change
@@ -1,9 +1,15 @@
{
"name": "simpleenergy/php-webhdfs",
"name": "dreamfactory/php-webhdfs",
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

and of course this one too

"description": "PHP WebHDFS, forked from https://github.com/simpleenergy/php-WebHDFS",
"minimum-stability": "stable",
"repositories": [
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same here

{
"type": "vcs",
"url": "https://github.com/dreamfactorysoftware/php-WebHDFS"
}
],
"license": "MIT",
"version": "1.0.7",
"version": "1.1.0",
"authors": [
{
"name": "tranch-xiao",
Expand All @@ -28,7 +34,8 @@
],
"require": {
"php": ">=5.4.0",
"ext-curl": "*"
"ext-curl": "*",
"ext-json": "*"
},
"autoload": {
"psr-0": {
Expand Down
62 changes: 49 additions & 13 deletions src/org/apache/hadoop/WebHDFS.php
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public function create(

$options = array(
'op' => 'CREATE',
'overwrite' => $overwrite,
'overwrite' => $overwrite ? 'true' : 'false',
'blocksize' => $blocksize,
'replication' => $replication,
'permission' => $permission,
Expand All @@ -79,7 +79,7 @@ public function createWithData(
) {
$options = array(
'op' => 'CREATE',
'overwrite' => $overwrite,
'overwrite' => $overwrite ? 'true' : 'false',
'blocksize' => $blockSize,
'replication' => $replication,
'permission' => $permission,
Expand Down Expand Up @@ -200,7 +200,15 @@ private function _listStatus($path, $cleanLastRequest = false)
}
}

public function listFiles($path, $recursive = false, $includeFileMetaData = false, $maxAmountOfFiles = false)
/**
* @param string $path
* @param bool $recursive
* @param bool $includeFileMetaData
* @param int $maxDepth max depth to search recursively. When value below zero, it will search to the end of tree
* @return array
* @throws WebHDFS_Exception
*/
public function listFiles($path, $recursive = false, $includeFileMetaData = false, $maxDepth = -1)
{
$result = array();
$listStatusResult = $this->_listStatus($path);
Expand All @@ -211,21 +219,27 @@ public function listFiles($path, $recursive = false, $includeFileMetaData = fals
switch ($fileEntity->type) {
case 'DIRECTORY':
if ($recursive === true) {
$result = array_merge($result,
$this->listFiles($path.$fileEntity->pathSuffix.'/', true, $includeFileMetaData,
$maxAmountOfFiles - sizeof($result)));
$result = array_merge(
$result,
$this->listFiles(
$this->concatPath([$path, $fileEntity->pathSuffix]),
true,
$includeFileMetaData,
$maxDepth - 1
)
);
}
break;
default:
if ($includeFileMetaData === true) {
$fileEntity->path = $path.$fileEntity->pathSuffix;
$fileEntity->path = $this->concatPath([$path, $fileEntity->pathSuffix]);
$result[] = $fileEntity;
} else {
$result[] = $path.$fileEntity->pathSuffix;
$result[] = $this->concatPath([$path, $fileEntity->pathSuffix]);
}
}
// recursion will be interrupted since we subtract the amount of the current result set from the maxAmountOfFiles amount with calling the next recursion
if (sizeof($result) >= $maxAmountOfFiles) {
if ($maxDepth === 0) {
break;
}
}
Expand All @@ -247,15 +261,19 @@ public function listDirectories($path, $recursive = false, $includeFileMetaData
switch ($fileEntity->type) {
case 'DIRECTORY':
if ($includeFileMetaData === true) {
$fileEntity->path = $path.$fileEntity->pathSuffix;
$fileEntity->path = $this->concatPath([$path, $fileEntity->pathSuffix]);
$result[] = $fileEntity;
} else {
$result[] = $path.$fileEntity->pathSuffix;
$result[] = $this->concatPath([$path, $fileEntity->pathSuffix]);
}
if ($recursive === true) {
$result = array_merge($result,
$this->listDirectories($path.$fileEntity->pathSuffix.'/', $recursive,
$includeFileMetaData));
$this->listDirectories(
$this->concatPath([$path, $fileEntity->pathSuffix]),
$recursive,
$includeFileMetaData
)
);
}
break;
}
Expand Down Expand Up @@ -445,4 +463,22 @@ private function getResponseErrorException($responseData)

return new WebHDFS_Exception($exceptionMessage, $exceptionCode);
}

/**
 * Joins path segments into a single path separated by single slashes.
 *
 * @param string[] $paths path segments to join, in order
 * @return string the joined path, with any run of repeated slashes collapsed
 */
private function concatPath(array $paths) {
	$result = '';
	foreach ($paths as $path) {
		// Append directly when nothing has been accumulated yet, or when the
		// accumulator already ends with a separator; otherwise insert one.
		// Strict '' comparison avoids treating the falsy string '0' as empty,
		// which the previous truthiness check (!$result) got wrong and would
		// have glued segments together ('0' . 'a' => '0a' instead of '0/a').
		if ($result === '' || substr($result, -1) === '/') {
			$result .= $path;
		} else {
			$result .= '/' . $path;
		}
	}
	return $this->removeMultiSlashFromPath($result);
}

/**
 * Collapses every run of consecutive slashes in a path down to one slash.
 *
 * @param string $path path that may contain repeated '/' separators
 * @return string the path with duplicate separators removed
 */
private function removeMultiSlashFromPath($path) {
	$collapsed = preg_replace('/(\/)\/+/', '$1', $path);

	return $collapsed;
}

}